Compare commits
61 Commits
mai/ritchi
...
mai/knuth/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b36247dfb9 | ||
|
|
82878dffd5 | ||
|
|
ac04930667 | ||
|
|
909f14062c | ||
|
|
4b86dfa4ad | ||
|
|
60f1f4ef4a | ||
|
|
7c7ae396f4 | ||
|
|
433a0408f2 | ||
|
|
cabea83784 | ||
|
|
8863878b39 | ||
|
|
84b178edbf | ||
|
|
7094212dcf | ||
|
|
9787450d91 | ||
|
|
1e88dffd82 | ||
|
|
9ad58e1ba3 | ||
|
|
0712d9a367 | ||
|
|
cd31e76d07 | ||
|
|
f42b7ddec7 | ||
|
|
50bfa3deb4 | ||
|
|
e635efa71e | ||
|
|
12e0407025 | ||
|
|
325fbeb5de | ||
|
|
19bea8d058 | ||
|
|
661135d137 | ||
|
|
f8d97546e9 | ||
|
|
45605c803b | ||
|
|
e57b7c48ed | ||
|
|
c5c3f41e08 | ||
|
|
d0197a091c | ||
|
|
fe97fed56d | ||
|
|
b49992b9c0 | ||
|
|
f81a2492c6 | ||
|
|
8bb8d7fed8 | ||
|
|
b4f3b26cbe | ||
|
|
6e9345fcfe | ||
|
|
785df2ced4 | ||
|
|
749273fba7 | ||
|
|
0ab2e8b383 | ||
|
|
2cf01073a3 | ||
|
|
ed83d23d06 | ||
|
|
97ebeafcf7 | ||
|
|
26887248e1 | ||
|
|
1fa7d90050 | ||
|
|
3a56d4cf11 | ||
|
|
45188ff5cb | ||
|
|
65b70975eb | ||
|
|
0fac764211 | ||
|
|
78c511bd1f | ||
|
|
ca572d3289 | ||
|
|
b2b3e04d05 | ||
|
|
5758e2c37f | ||
|
|
9bd8cc9e07 | ||
|
|
bf225284d8 | ||
|
|
e53e1389f9 | ||
|
|
2c16f26448 | ||
|
|
f0ee5921cf | ||
|
|
ba29fc75c7 | ||
|
|
8350a7e7fb | ||
|
|
42a62d45bf | ||
|
|
0b6bab8512 | ||
|
|
bd15b4eb38 |
14
.claude/agents/coder.md
Normal file
14
.claude/agents/coder.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Coder Agent
|
||||
|
||||
Implementation-focused agent for writing and refactoring code.
|
||||
|
||||
## Instructions
|
||||
|
||||
- Follow existing patterns in the codebase
|
||||
- Write minimal, focused code
|
||||
- Run tests after changes
|
||||
- Commit incrementally with descriptive messages
|
||||
|
||||
## Tools
|
||||
|
||||
All tools available.
|
||||
14
.claude/agents/researcher.md
Normal file
14
.claude/agents/researcher.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Researcher Agent
|
||||
|
||||
Exploration and information gathering agent.
|
||||
|
||||
## Instructions
|
||||
|
||||
- Search broadly, then narrow down
|
||||
- Document findings in structured format
|
||||
- Cite sources and file paths
|
||||
- Summarize key insights, don't dump raw data
|
||||
|
||||
## Tools
|
||||
|
||||
Read-only tools preferred. Use Bash only for non-destructive commands.
|
||||
14
.claude/agents/reviewer.md
Normal file
14
.claude/agents/reviewer.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Reviewer Agent
|
||||
|
||||
Code review agent for checking quality and correctness.
|
||||
|
||||
## Instructions
|
||||
|
||||
- Check for bugs, security issues, and style violations
|
||||
- Verify test coverage for changes
|
||||
- Suggest improvements concisely
|
||||
- Focus on correctness over style preferences
|
||||
|
||||
## Tools
|
||||
|
||||
Read-only tools. No file modifications.
|
||||
1
.claude/skills/mai-clone
Symbolic link
1
.claude/skills/mai-clone
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-clone
|
||||
1
.claude/skills/mai-coder
Symbolic link
1
.claude/skills/mai-coder
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-coder
|
||||
1
.claude/skills/mai-commit
Symbolic link
1
.claude/skills/mai-commit
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-commit
|
||||
1
.claude/skills/mai-consultant
Symbolic link
1
.claude/skills/mai-consultant
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-consultant
|
||||
1
.claude/skills/mai-daily
Symbolic link
1
.claude/skills/mai-daily
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-daily
|
||||
1
.claude/skills/mai-debrief
Symbolic link
1
.claude/skills/mai-debrief
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-debrief
|
||||
1
.claude/skills/mai-enemy
Symbolic link
1
.claude/skills/mai-enemy
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-enemy
|
||||
1
.claude/skills/mai-excalidraw
Symbolic link
1
.claude/skills/mai-excalidraw
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-excalidraw
|
||||
1
.claude/skills/mai-fixer
Symbolic link
1
.claude/skills/mai-fixer
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-fixer
|
||||
1
.claude/skills/mai-gitster
Symbolic link
1
.claude/skills/mai-gitster
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-gitster
|
||||
1
.claude/skills/mai-head
Symbolic link
1
.claude/skills/mai-head
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-head
|
||||
1
.claude/skills/mai-init
Symbolic link
1
.claude/skills/mai-init
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-init
|
||||
1
.claude/skills/mai-inventor
Symbolic link
1
.claude/skills/mai-inventor
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-inventor
|
||||
1
.claude/skills/mai-lead
Symbolic link
1
.claude/skills/mai-lead
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-lead
|
||||
1
.claude/skills/mai-maister
Symbolic link
1
.claude/skills/mai-maister
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-maister
|
||||
1
.claude/skills/mai-member
Symbolic link
1
.claude/skills/mai-member
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-member
|
||||
1
.claude/skills/mai-researcher
Symbolic link
1
.claude/skills/mai-researcher
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-researcher
|
||||
1
.claude/skills/mai-think
Symbolic link
1
.claude/skills/mai-think
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-think
|
||||
1
.claude/skills/mai-web
Symbolic link
1
.claude/skills/mai-web
Symbolic link
@@ -0,0 +1 @@
|
||||
/home/m/.mai/skills/mai-web
|
||||
10
.env.example
10
.env.example
@@ -3,10 +3,16 @@
|
||||
|
||||
# Backend
|
||||
PORT=8080
|
||||
DATABASE_URL=postgresql://user:pass@host:5432/dbname
|
||||
|
||||
# Supabase (required for database access)
|
||||
SUPABASE_URL=
|
||||
# Supabase (required for database + auth)
|
||||
SUPABASE_URL=https://your-project.supabase.co
|
||||
SUPABASE_ANON_KEY=
|
||||
SUPABASE_SERVICE_KEY=
|
||||
SUPABASE_JWT_SECRET=
|
||||
|
||||
# Claude API (required for AI features)
|
||||
ANTHROPIC_API_KEY=
|
||||
|
||||
# CalDAV (configured per-tenant in tenant settings, not env vars)
|
||||
# See tenant.settings.caldav JSON field
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -45,3 +45,4 @@ tmp/
|
||||
|
||||
# TypeScript
|
||||
*.tsbuildinfo
|
||||
.worktrees/
|
||||
|
||||
4
.m/.gitignore
vendored
Normal file
4
.m/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
workers.json
|
||||
spawn.lock
|
||||
session.yaml
|
||||
config.reference.yaml
|
||||
168
.m/config.yaml
Normal file
168
.m/config.yaml
Normal file
@@ -0,0 +1,168 @@
|
||||
provider: claude
|
||||
providers:
|
||||
claude:
|
||||
api_key: ""
|
||||
model: claude-sonnet-4-20250514
|
||||
base_url: https://api.anthropic.com/v1
|
||||
ollama:
|
||||
host: http://localhost:11434
|
||||
model: llama3.2
|
||||
memory:
|
||||
enabled: true
|
||||
backend: ""
|
||||
path: ""
|
||||
url: postgres://mai_memory.your-tenant-id:maiMem6034supa@100.99.98.201:6543/postgres?sslmode=disable
|
||||
group_id: ""
|
||||
cache_ttl: 5m0s
|
||||
auto_load: true
|
||||
embedding_url: ""
|
||||
embedding_model: ""
|
||||
gitea:
|
||||
url: https://mgit.msbls.de
|
||||
repo: m/KanzlAI
|
||||
token: ""
|
||||
sync:
|
||||
enabled: false
|
||||
interval: 0s
|
||||
repos: []
|
||||
auto_queue: false
|
||||
api:
|
||||
api_key: ""
|
||||
basic_auth:
|
||||
username: ""
|
||||
password: ""
|
||||
public_endpoints:
|
||||
- /api/health
|
||||
ui:
|
||||
theme: default
|
||||
show_sidebar: true
|
||||
animation: true
|
||||
persona: true
|
||||
avatar_pack: ""
|
||||
worker:
|
||||
names: []
|
||||
name_scheme: role
|
||||
default_level: standard
|
||||
auto_discard: false
|
||||
max_workers: 5
|
||||
persistent: true
|
||||
head:
|
||||
name: ingeborg
|
||||
max_loops: 50
|
||||
infinity_mode: false
|
||||
capacity:
|
||||
global:
|
||||
max_workers: 5
|
||||
max_heads: 3
|
||||
per_worker:
|
||||
max_tasks_lifetime: 0
|
||||
max_concurrent: 1
|
||||
max_context_tokens: 0
|
||||
per_head:
|
||||
max_workers: 10
|
||||
resources:
|
||||
max_memory_mb: 0
|
||||
max_cpu_percent: 0
|
||||
queue:
|
||||
max_pending: 100
|
||||
stale_task_days: 30
|
||||
workforce:
|
||||
timeouts:
|
||||
task_default: 0s
|
||||
task_max: 0s
|
||||
idle_before_warn: 10m0s
|
||||
idle_before_kill: 30m0s
|
||||
quality_check: 2m0s
|
||||
context:
|
||||
max_tokens_per_worker: 0
|
||||
max_tokens_global: 0
|
||||
warn_threshold: 0.8
|
||||
truncate_strategy: oldest
|
||||
delegation:
|
||||
strategy: skill_match
|
||||
preferred_role: coder
|
||||
auto_delegate: false
|
||||
max_depth: 3
|
||||
allowed_roles:
|
||||
- coder
|
||||
- researcher
|
||||
- fixer
|
||||
peppy:
|
||||
enabled: false
|
||||
style: calm
|
||||
interval: 5m0s
|
||||
emoji: false
|
||||
nudges: true
|
||||
nudge_main: false
|
||||
custom_prompt: ""
|
||||
stall_threshold: 0s
|
||||
restart_enabled: false
|
||||
max_shifts: 0
|
||||
quality_gates:
|
||||
enabled: true
|
||||
checks: []
|
||||
preflight:
|
||||
enabled: false
|
||||
type: ""
|
||||
root: ""
|
||||
checks: []
|
||||
guardrails:
|
||||
enabled: false
|
||||
use_defaults: true
|
||||
output:
|
||||
coder_checks: []
|
||||
researcher_checks: []
|
||||
fixer_checks: []
|
||||
custom_checks: {}
|
||||
global_checks: []
|
||||
tools:
|
||||
role_rules: {}
|
||||
deny_patterns: []
|
||||
allow_patterns: []
|
||||
schemas:
|
||||
report_schemas: {}
|
||||
deliverable_schemas: {}
|
||||
modes:
|
||||
yolo: false
|
||||
self_improvement: false
|
||||
autonomous: false
|
||||
verbose: false
|
||||
improve_interval: 0s
|
||||
predict_interval: 0s
|
||||
layouts:
|
||||
head: ""
|
||||
worker: ""
|
||||
roles: {}
|
||||
dog:
|
||||
name: buddy
|
||||
supabase:
|
||||
url: ""
|
||||
role_key: ""
|
||||
anon_key: ""
|
||||
schema: mai
|
||||
storage:
|
||||
backend: ""
|
||||
postgres:
|
||||
url: ""
|
||||
max_conns: 0
|
||||
min_conns: 0
|
||||
max_conn_lifetime: 0s
|
||||
idle:
|
||||
behavior: wait
|
||||
auto_hire: false
|
||||
prompt: ""
|
||||
git:
|
||||
worktrees:
|
||||
enabled: true
|
||||
delete_branch: false
|
||||
dir: .worktrees
|
||||
phase:
|
||||
enabled: false
|
||||
current: ""
|
||||
allowed_roles: {}
|
||||
goal: ""
|
||||
skills: {}
|
||||
editor: nvim
|
||||
log_level: info
|
||||
project_detection: true
|
||||
tone: professional
|
||||
22
.mcp.json
Normal file
22
.mcp.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"mcpServers": {
|
||||
"mai": {
|
||||
"type": "http",
|
||||
"url": "http://100.99.98.201:8000/mcp",
|
||||
"headers": {
|
||||
"Authorization": "Basic ${SUPABASE_AUTH}"
|
||||
}
|
||||
},
|
||||
"mai-memory": {
|
||||
"command": "mai",
|
||||
"args": [
|
||||
"mcp",
|
||||
"memory"
|
||||
],
|
||||
"env": {
|
||||
"MAI_MEMORY_EMBEDDING_MODEL": "nomic-embed-text",
|
||||
"MAI_MEMORY_EMBEDDING_URL": "https://llm.x.msbls.de"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
482
AUDIT.md
Normal file
482
AUDIT.md
Normal file
@@ -0,0 +1,482 @@
|
||||
# KanzlAI-mGMT MVP Audit
|
||||
|
||||
**Date:** 2026-03-28
|
||||
**Auditor:** athena (consultant)
|
||||
**Scope:** Full-stack audit of KanzlAI-mGMT — Go backend, Next.js frontend, Supabase database, deployment, security, UX, competitive positioning.
|
||||
**Codebase:** ~16,500 lines across ~60 source files, built 2026-03-25 in a single session with parallel workers.
|
||||
|
||||
---
|
||||
|
||||
## Executive Summary
|
||||
|
||||
KanzlAI-mGMT is an impressive MVP built in ~2 hours. It covers the core Kanzleimanagement primitives: cases, deadlines, appointments, parties, documents, notes, dashboard, CalDAV sync, and AI-powered deadline extraction. The architecture is sound — clean separation between Go API and Next.js frontend, proper multi-tenant design with Supabase Auth, parameterized SQL throughout.
|
||||
|
||||
However, the speed of construction shows. There are **critical security gaps** that must be fixed before any external user touches this. The frontend has good bones but lacks the polish and completeness a lawyer would expect. And the feature gap vs. established competitors (RA-MICRO, ADVOWARE, AnNoText, Actaport) is enormous — particularly around beA integration, billing/RVG, and document generation, which are table-stakes for German law firms.
|
||||
|
||||
**Bottom line:** Fix the security issues, add error recovery and multi-tenant auth verification, then decide whether to pursue the Kanzleimanagement market (massive feature gap) or pivot back to the UPC niche (where you had a genuine competitive advantage).
|
||||
|
||||
---
|
||||
|
||||
## 1. Critical Issues (Fix Immediately)
|
||||
|
||||
### 1.1 Tenant Isolation Bypass in TenantResolver
|
||||
**File:** `backend/internal/auth/tenant_resolver.go:37-42`
|
||||
|
||||
When the `X-Tenant-ID` header is provided, the TenantResolver parses it and sets it in context **without verifying the user has access to that tenant**. Any authenticated user can access any tenant's data by setting this header.
|
||||
|
||||
```go
|
||||
if header := r.Header.Get("X-Tenant-ID"); header != "" {
|
||||
parsed, err := uuid.Parse(header)
|
||||
// ... sets tenantID = parsed — NO ACCESS CHECK
|
||||
}
|
||||
```
|
||||
|
||||
Compare with `helpers.go:32-44` where `resolveTenant()` correctly verifies access via `user_tenants` — but this function is unused in the middleware path. The TenantResolver middleware is what actually runs for all scoped routes.
|
||||
|
||||
**Impact:** Complete tenant data isolation breach. User A can read/modify/delete User B's cases, deadlines, appointments, documents.
|
||||
|
||||
**Fix:** Add `user_tenants` lookup in TenantResolver when X-Tenant-ID is provided, same as `resolveTenant()` does.
|
||||
|
||||
### 1.2 Duplicate Tenant Resolution Logic
|
||||
**Files:** `backend/internal/auth/tenant_resolver.go` and `backend/internal/handlers/helpers.go:25-57`
|
||||
|
||||
Two independent implementations of tenant resolution exist. The middleware (`TenantResolver`) is used for the scoped routes. The handler-level `resolveTenant()` function exists in helpers.go. The auth middleware in `middleware.go:39-47` also resolves a tenant into context. This triple-resolution creates confusion and the security bug above.
|
||||
|
||||
**Fix:** Consolidate to a single path. Remove the handler-level `resolveTenant()` and the auth middleware's tenant resolution. Let TenantResolver be the single source of truth, but make it verify access.
|
||||
|
||||
### 1.3 CalDAV Credentials Stored in Plaintext
|
||||
**File:** `backend/internal/services/caldav_service.go:29-35`
|
||||
|
||||
CalDAV username and password are stored as plain JSON in the `tenants.settings` column:
|
||||
```go
|
||||
type CalDAVConfig struct {
|
||||
URL string `json:"url"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
Combined with the tenant isolation bypass above, any authenticated user can read any tenant's CalDAV credentials.
|
||||
|
||||
**Fix:** Encrypt CalDAV credentials at rest (e.g., using `pgcrypto` or application-level encryption). At minimum, never return the password in API responses.
|
||||
|
||||
### 1.4 No CORS Configuration
|
||||
**File:** `backend/internal/router/router.go`, `backend/cmd/server/main.go`
|
||||
|
||||
There is zero CORS handling anywhere in the backend. The frontend uses Next.js rewrites to proxy `/api/` to the backend, which works in production. But:
|
||||
- If anyone accesses the backend directly (different origin), there's no CORS protection.
|
||||
- No `X-Frame-Options`, `X-Content-Type-Options`, or other security headers are set.
|
||||
|
||||
**Fix:** Add CORS middleware restricting to the frontend origin. Add standard security headers.
|
||||
|
||||
### 1.5 Internal Error Messages Leaked to Clients
|
||||
**Files:** Multiple handlers (e.g., `cases.go:44`, `cases.go:73`, `appointments.go`)
|
||||
|
||||
```go
|
||||
writeError(w, http.StatusInternalServerError, err.Error())
|
||||
```
|
||||
|
||||
Internal error messages (including SQL errors, connection errors, etc.) are sent directly to the client. This leaks implementation details.
|
||||
|
||||
**Fix:** Log the full error server-side, return a generic message to the client.
|
||||
|
||||
### 1.6 Race Condition in HolidayService Cache
|
||||
**File:** `backend/internal/services/holidays.go`
|
||||
|
||||
The `HolidayService` uses a `map[int][]Holiday` cache without any mutex protection. Concurrent requests (e.g., multiple deadline calculations) will cause a data race. The Go race detector would flag this.
|
||||
|
||||
**Fix:** Add `sync.RWMutex` to HolidayService.
|
||||
|
||||
### 1.7 Rate Limiter Trivially Bypassable
|
||||
**File:** `backend/internal/middleware/ratelimit.go:78-79`
|
||||
|
||||
```go
|
||||
ip := r.Header.Get("X-Forwarded-For")
|
||||
if ip == "" { ip = r.RemoteAddr }
|
||||
```
|
||||
|
||||
Rate limiting keys off `X-Forwarded-For`, which any client can spoof. An attacker can bypass AI endpoint rate limits by rotating this header.
|
||||
|
||||
**Fix:** Only trust `X-Forwarded-For` from configured reverse proxy IPs, or use `r.RemoteAddr` exclusively behind a trusted proxy.
|
||||
|
||||
---
|
||||
|
||||
## 2. Important Gaps (Fix Before Showing to Anyone)
|
||||
|
||||
### 2.1 No Input Validation Beyond "Required Fields"
|
||||
**Files:** All handlers
|
||||
|
||||
Input validation is minimal — typically just checking if required fields are empty:
|
||||
```go
|
||||
if input.CaseNumber == "" || input.Title == "" {
|
||||
writeError(w, http.StatusBadRequest, "case_number and title are required")
|
||||
}
|
||||
```
|
||||
|
||||
Missing:
|
||||
- Length limits on text fields (could store megabytes in a title field)
|
||||
- Status value validation (accepts any string for status fields)
|
||||
- Date format validation
|
||||
- Case type validation against allowed values
|
||||
- SQL-safe string validation (although parameterized queries protect against injection)
|
||||
|
||||
### 2.2 No Pagination Defaults on Most List Endpoints
|
||||
**File:** `backend/internal/services/case_service.go:57-63`
|
||||
|
||||
`CaseService.List` has sane defaults (limit=20, max=100). But other list endpoints (`appointments`, `deadlines`, `notes`, `parties`, `case_events`) have no pagination at all — they return all records for a tenant/case. As data grows, these become performance problems.
|
||||
|
||||
### 2.3 Dashboard Page is Entirely Client-Side
|
||||
**File:** `frontend/src/app/(app)/dashboard/page.tsx`
|
||||
|
||||
The entire dashboard is a `"use client"` component that fetches data via API. This means:
|
||||
- No SSR benefit — the page is blank until JS loads and API responds
|
||||
- SEO doesn't matter for a SaaS app, but initial load time does
|
||||
- The skeleton is nice but adds 200-400ms of perceived latency
|
||||
|
||||
For an internal tool this is acceptable, but for a commercial product it should use server components for the initial render.
|
||||
|
||||
### 2.4 Frontend Auth Uses `getSession()` Instead of `getUser()`
|
||||
**File:** `frontend/src/lib/api.ts:10-12`
|
||||
|
||||
```typescript
|
||||
const { data: { session } } = await supabase.auth.getSession();
|
||||
```
|
||||
|
||||
`getSession()` reads from local storage without server verification. If a session is expired or revoked server-side, the frontend will still try to use it until the backend rejects it. The middleware correctly uses `getUser()` (which validates server-side), but the API client does not.
|
||||
|
||||
### 2.5 Missing Error Recovery in Frontend
|
||||
Throughout the frontend, API errors are handled with basic error states, but there's no:
|
||||
- Retry logic for transient failures
|
||||
- Token refresh on 401 responses
|
||||
- Optimistic UI rollback on mutation failures
|
||||
- Offline detection
|
||||
|
||||
### 2.6 Missing `Content-Disposition` Header Sanitization
|
||||
**File:** `backend/internal/handlers/documents.go:133`
|
||||
|
||||
```go
|
||||
w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, title))
|
||||
```
|
||||
|
||||
The `title` (which comes from user input) is inserted directly into the header. A filename containing `"` or newlines could be used for response header injection.
|
||||
|
||||
**Fix:** Sanitize the filename — strip or encode special characters.
|
||||
|
||||
### 2.7 No Graceful Shutdown
|
||||
**File:** `backend/cmd/server/main.go:42`
|
||||
|
||||
```go
|
||||
http.ListenAndServe(":"+cfg.Port, handler)
|
||||
```
|
||||
|
||||
No signal handling or graceful shutdown. When the process receives SIGTERM (e.g., during deployment), in-flight requests are dropped, CalDAV sync operations may be interrupted mid-write, and database connections are not cleanly closed.
|
||||
|
||||
### 2.8 Database Connection Pool — search_path is Session-Level
|
||||
**File:** `backend/internal/db/connection.go:17`
|
||||
|
||||
```go
|
||||
db.Exec("SET search_path TO kanzlai, public")
|
||||
```
|
||||
|
||||
`SET search_path` is session-level in PostgreSQL. With connection pooling (`MaxOpenConns: 25`), this SET runs once on the initial connection. If a connection is recycled or a new one opened from the pool, it may not have the kanzlai search_path. This could cause queries to silently hit the wrong schema.
|
||||
|
||||
**Fix:** Use `SET LOCAL search_path` in a transaction, or set it at the database/role level, or qualify all table references with the schema name.
|
||||
|
||||
### 2.9 go.sum Missing from Dockerfile
|
||||
**File:** `backend/Dockerfile:4`
|
||||
|
||||
```dockerfile
|
||||
COPY go.mod ./
|
||||
RUN go mod download
|
||||
```
|
||||
|
||||
Only `go.mod` is copied, not `go.sum`. This means the build isn't reproducible and doesn't verify checksums. Should be `COPY go.mod go.sum ./`.
|
||||
|
||||
### 2.10 German Umlaut Typos Throughout Frontend
|
||||
**Files:** Multiple frontend components
|
||||
|
||||
German strings use ASCII approximations instead of proper characters:
|
||||
- `login/page.tsx`: "Zurueck" instead of "Zurück"
|
||||
- `cases/[id]/layout.tsx`: "Anhaengig" instead of "Anhängig"
|
||||
- `cases/[id]/fristen/page.tsx`: "Ueberfaellig" instead of "Überfällig"
|
||||
- `termine/page.tsx`: "Uberblick" instead of "Überblick"
|
||||
|
||||
A German lawyer would notice this immediately. It signals "this was built by a machine, not tested by a human."
|
||||
|
||||
### 2.11 Silent Error Swallowing in Event Creation
|
||||
**File:** `backend/internal/services/case_service.go:260-266`
|
||||
|
||||
```go
|
||||
func createEvent(ctx context.Context, db *sqlx.DB, ...) {
|
||||
db.ExecContext(ctx, /* ... */) // Error completely ignored
|
||||
}
|
||||
```
|
||||
|
||||
Case events (audit trail) silently fail to create. The calling functions don't check the return. This means you could have cases with no events and no way to know why.
|
||||
|
||||
### 2.12 Missing Error Boundaries in Frontend
|
||||
No React error boundaries are implemented. If any component throws, the entire page crashes with a white screen. For a law firm tool where data integrity matters, this is unacceptable.
|
||||
|
||||
### 2.13 No RLS Policies Defined at Database Level
|
||||
Multi-tenant isolation relies entirely on `WHERE tenant_id = $X` clauses in Go code. If any query forgets this clause, data leaks across tenants. There are no PostgreSQL RLS policies as a safety net.
|
||||
|
||||
**Fix:** Enable RLS on all tenant-scoped tables and create policies tied to `auth.uid()` via `user_tenants`.
|
||||
|
||||
---
|
||||
|
||||
## 3. Architecture Assessment
|
||||
|
||||
### 3.1 What's Good
|
||||
|
||||
- **Clean monorepo structure** — `backend/` and `frontend/` are clearly separated. Each has its own Dockerfile. The Makefile provides unified commands.
|
||||
- **Go backend is well-organized** — `cmd/server/`, `internal/{auth,config,db,handlers,middleware,models,router,services}` follows Go best practices.
|
||||
- **Handler/Service separation** — handlers do HTTP concerns (parse request, write response), services do business logic. This is correct.
|
||||
- **Parameterized SQL everywhere** — no string concatenation in queries. All user input goes through `$N` placeholders.
|
||||
- **Multi-tenant design** — `tenant_id` on every row with context-based tenant resolution. (Note: database-level RLS is still missing — see 2.13 — so isolation currently rests entirely on application code.)
|
||||
- **Smart use of Go 1.22+ routing** — method+path patterns like `GET /api/cases/{id}` eliminate the need for a third-party router.
|
||||
- **CalDAV sync is genuinely impressive** — bidirectional sync with conflict resolution, etag tracking, background polling per-tenant. This is a differentiator.
|
||||
- **Deadline calculator** — ported from youpc.org with holiday awareness. Legally important and hard to build.
|
||||
- **Frontend routing structure** — German URL paths (`/fristen`, `/termine`, `/einstellungen`), nested case detail routes with layout.tsx for shared chrome. Proper use of App Router patterns.
|
||||
|
||||
### 3.2 Structural Concerns
|
||||
|
||||
- **No database migrations** — the schema was apparently created via SQL scripts run manually. There's a `seed/demo_data.sql` but no migration system. For a production system, this is unsustainable.
|
||||
- **No CI/CD pipeline** — no `.github/workflows/`, `.gitea/`, or any CI configuration. Tests run locally but not automatically.
|
||||
- **No API versioning** — all routes are at `/api/`. Adding breaking changes will break clients.
|
||||
- **Services take raw `*sqlx.DB`** — no transaction support across service boundaries. Creating a case + event is not atomic (if the event insert fails, the case still exists).
|
||||
- **Models are just struct definitions** — no validation methods, no constructor functions. Validation is scattered across handlers.
|
||||
|
||||
### 3.3 Data Model
|
||||
|
||||
Based on the seed data and model files, the schema is reasonable:
|
||||
- `tenants`, `user_tenants` (multi-tenancy)
|
||||
- `cases`, `parties` (case management)
|
||||
- `deadlines`, `appointments` (time management)
|
||||
- `documents`, `case_events`, `notes` (supporting data)
|
||||
- `proceeding_types`, `deadline_rules`, `holidays` (reference data)
|
||||
|
||||
**Missing indexes likely needed:**
|
||||
- `deadlines(tenant_id, status, due_date)` — for dashboard queries
|
||||
- `appointments(tenant_id, start_at)` — for calendar queries
|
||||
- `case_events(case_id, created_at)` — for event feeds
|
||||
- `cases(tenant_id, status)` — for filtered lists
|
||||
|
||||
**Missing constraints:**
|
||||
- No CHECK constraint on status values (cases, deadlines, appointments)
|
||||
- No UNIQUE constraint on `case_number` per tenant
|
||||
- No foreign key from `notes` to the parent entity (if polymorphic)
|
||||
|
||||
---
|
||||
|
||||
## 4. Security Assessment
|
||||
|
||||
### 4.1 Authentication
|
||||
- **JWT validation is correct** — algorithm check (HMAC only), expiry check, sub claim extraction. Using `golang-jwt/v5`.
|
||||
- **Supabase Auth on frontend** — proper cookie-based session with server-side verification in middleware.
|
||||
- **No refresh token rotation** — the API client uses `getSession()` which may serve stale tokens.
|
||||
|
||||
### 4.2 Authorization
|
||||
- **Critical: Tenant isolation bypass** (see 1.1)
|
||||
- **No role-based access control** — `user_tenants` has a `role` column but it's never checked. Any member can do anything.
|
||||
- **No resource-level permissions** — any user in a tenant can delete any case, document, etc.
|
||||
|
||||
### 4.3 Input Validation
|
||||
- **SQL injection: Protected** — all queries use parameterized placeholders.
|
||||
- **XSS: Partially protected** — React auto-escapes, but the API returns raw strings that could contain HTML. The `Content-Disposition` header is vulnerable (see 2.6).
|
||||
- **File upload: Partially protected** — `MaxBytesReader` limits to 50MB, but no file type validation (could upload .exe, .html with scripts, etc.).
|
||||
- **Rate limiting: AI endpoints only** — the rest of the API has no rate limiting. Login/register go through Supabase (which has its own limits), but all CRUD endpoints are unlimited.
|
||||
|
||||
### 4.4 Secrets
|
||||
- **No hardcoded secrets** — all via environment variables. Good.
|
||||
- **CalDAV credentials in plaintext** — see 1.3.
|
||||
- **Supabase service key in backend** — necessary for storage, but this key has full DB access. Should be scoped.
|
||||
|
||||
---
|
||||
|
||||
## 5. Testing Assessment
|
||||
|
||||
### 5.1 Backend Tests (15 files)
|
||||
- **Integration test** — sets up real DB connection, creates JWT, tests full HTTP flow. Excellent pattern but requires DATABASE_URL (skips otherwise).
|
||||
- **Handler tests** — mock-based unit tests for most handlers. Test JSON parsing, error responses, basic happy paths.
|
||||
- **Service tests** — deadline calculator has solid date arithmetic tests. Holiday service tested. CalDAV service tested with mocks. AI service tested with mocked HTTP.
|
||||
- **Middleware tests** — rate limiter tested.
|
||||
- **Auth tests** — tenant resolver tested.
|
||||
|
||||
### 5.2 Frontend Tests (4 files)
|
||||
- `api.test.ts` — tests the API client
|
||||
- `DeadlineTrafficLights.test.tsx` — component test
|
||||
- `CaseOverviewGrid.test.tsx` — component test
|
||||
- `LoginPage.test.tsx` — auth page test
|
||||
|
||||
### 5.3 What's Missing
|
||||
- **No E2E tests** — no Playwright/Cypress. Critical for a law firm app where correctness matters.
|
||||
- **No contract tests** — frontend and backend are tested independently. A schema change could break the frontend without any test catching it.
|
||||
- **Deadline calculation edge cases** — needs tests for year boundaries, leap years, holidays falling on weekends, multiple consecutive holidays.
|
||||
- **Multi-tenant security tests** — no test verifying that User A can't access Tenant B's data. This is the most important test to add.
|
||||
- **Frontend test coverage is thin** — 4 tests for ~30 components. The dashboard, all forms, navigation, error states are untested.
|
||||
- **No load testing** — unknown how the system behaves under concurrent users.
|
||||
|
||||
---
|
||||
|
||||
## 6. UX Assessment
|
||||
|
||||
### 6.1 What Works
|
||||
- **Dashboard is strong** — traffic light deadline indicators, upcoming timeline, case overview, quick actions. A lawyer can see what matters at a glance.
|
||||
- **German localization** — UI is in German with proper legal terminology (Akten, Fristen, Termine, Parteien).
|
||||
- **Mobile responsive** — sidebar collapses to hamburger menu, layout uses responsive grids.
|
||||
- **Loading states** — skeleton screens on dashboard, not just spinners.
|
||||
- **Breadcrumbs** — navigation trail on all pages.
|
||||
- **Deadline calculator** — unique feature that provides real value for UPC litigation.
|
||||
|
||||
### 6.2 What a Lawyer Would Stumble On
|
||||
1. **No onboarding flow** — after registration, user has no tenant, no cases. The app shows empty states but doesn't guide the user to create a tenant or import data.
|
||||
2. **No search** — there's no global search. A lawyer with 100+ cases needs to find things fast.
|
||||
3. **No keyboard shortcuts** — power users (lawyers are keyboard-heavy) have no shortcuts.
|
||||
4. **Sidebar mixes languages** — "Akten" (German) vs "AI Analyse" (English). Should be consistent.
|
||||
5. **No notifications** — overdue deadlines don't trigger any alert beyond the dashboard color. No email alerts, no push notifications.
|
||||
6. **No print view** — lawyers need to print deadline lists, case summaries. No print stylesheet.
|
||||
7. **No bulk operations** — can't mark multiple deadlines as complete, can't bulk-assign parties.
|
||||
8. **Document upload has no preview** — uploaded PDFs can't be viewed inline.
|
||||
9. **AI features require manual trigger** — AI summary and deadline extraction are manual. Should auto-trigger on document upload.
|
||||
10. **No activity log per user** — no audit trail of who changed what. Critical for law firm compliance.
|
||||
|
||||
---
|
||||
|
||||
## 7. Deployment Assessment
|
||||
|
||||
### 7.1 Docker Setup
|
||||
- **Multi-stage builds** — both Dockerfiles use builder pattern. Good.
|
||||
- **Backend is minimal** — Alpine + static binary + ca-certificates. ~15MB image.
|
||||
- **Frontend** — Bun for deps/build, Node for runtime (standalone output). Reasonable.
|
||||
- **Missing:** go.sum not copied in backend Dockerfile (see 2.9).
|
||||
- **Missing:** No docker-compose.yml for local development.
|
||||
- **Missing:** No health check in Dockerfile (`HEALTHCHECK` instruction).
|
||||
|
||||
### 7.2 Environment Handling
|
||||
- **Config validates required vars** — `DATABASE_URL` and `SUPABASE_JWT_SECRET` are checked at startup.
|
||||
- **Supabase URL/keys not validated** — if missing, features silently fail or crash at runtime.
|
||||
- **No .env.example** — new developers don't know what env vars are needed.
|
||||
|
||||
### 7.3 Reliability
|
||||
- **No graceful shutdown** (see 2.7)
|
||||
- **No readiness/liveness probes** — `/health` exists but only checks DB connectivity. No readiness distinction.
|
||||
- **CalDAV sync runs in-process** — if the sync goroutine panics, it takes down the API server.
|
||||
- **No structured error recovery** — panics in handlers will crash the process (no recovery middleware).
|
||||
|
||||
---
|
||||
|
||||
## 8. Competitive Analysis
|
||||
|
||||
### 8.1 The Market
|
||||
|
||||
German Kanzleisoftware is a mature, crowded market:
|
||||
|
||||
| Tool | Type | Price | Key Strength |
|
||||
|------|------|-------|-------------|
|
||||
| **RA-MICRO** | Desktop + Cloud | ~100-200 EUR/user/mo | Market leader, 30+ years, full beA integration |
|
||||
| **ADVOWARE** | Desktop + Cloud | from 20 EUR/mo | Budget-friendly, strong for small firms |
|
||||
| **AnNoText** (Wolters Kluwer) | Desktop + Cloud | Custom pricing | Enterprise, AI document analysis, DictNow |
|
||||
| **Actaport** | Cloud-native | from 79.80 EUR/mo | Modern UI, Mandantenportal, integrated Office |
|
||||
| **Haufe Advolux** | Cloud | Custom | User-friendly, full-featured |
|
||||
| **Renostar Legal Cloud** | Cloud | Custom | Browser-based, no installation |
|
||||
|
||||
### 8.2 Table-Stakes Features KanzlAI is Missing
|
||||
|
||||
These are **mandatory** for any German Kanzleisoftware to be taken seriously:
|
||||
|
||||
1. **beA Integration** — since 2022, German lawyers must use the electronic court mailbox (besonderes elektronisches Anwaltspostfach). No Kanzleisoftware sells without it. This is a **massive** implementation effort (KSW-Schnittstelle from BRAK).
|
||||
|
||||
2. **RVG Billing (Gebührenrechner)** — automated fee calculation per RVG (Rechtsanwaltsvergütungsgesetz). Every competitor has this built-in. Without it, lawyers can't bill clients.
|
||||
|
||||
3. **Document Generation** — templates for Schriftsätze, Klageschriften, Mahnbescheide with auto-populated case data. Usually integrated with Word.
|
||||
|
||||
4. **Accounting (FiBu)** — client trust accounts (Fremdgeld), DATEV export, tax-relevant bookkeeping. Legal requirement.
|
||||
|
||||
5. **Conflict Check (Kollisionsprüfung)** — check if the firm has a conflict of interest before taking a case. Legally required (§ 43a BRAO).
|
||||
|
||||
6. **Dictation System** — voice-to-text for lawyers. RA-MICRO has DictaNet, AnNoText has DictNow.
|
||||
|
||||
### 8.3 Where KanzlAI Could Differentiate
|
||||
|
||||
Despite the feature gap, KanzlAI has some advantages:
|
||||
|
||||
1. **AI-native** — competitors are bolting AI onto 20-year-old software. KanzlAI has Claude API integration from day one. The deadline extraction from PDFs is genuinely useful.
|
||||
2. **UPC specialization** — the deadline calculator with UPC Rules of Procedure knowledge is unique. No competitor has deep UPC litigation support.
|
||||
3. **CalDAV sync** — bidirectional sync with external calendars is not common in German Kanzleisoftware.
|
||||
4. **Modern tech stack** — React + Go + Supabase vs. the .NET/Java/Desktop world of RA-MICRO et al.
|
||||
5. **Multi-tenant from day 1** — designed for SaaS, not converted from desktop software.
|
||||
|
||||
### 8.4 Strategic Recommendation
|
||||
|
||||
**Don't compete head-on with RA-MICRO.** The feature gap is 10+ person-years of work. Instead:
|
||||
|
||||
**Option A: UPC Niche Tool** — Pivot back to UPC patent litigation. Build the best deadline calculator, case tracker, and AI-powered brief analysis tool for UPC practitioners. There are ~1000 UPC practitioners in Europe who need specialized tooling that RA-MICRO doesn't provide. Charge 200-500 EUR/mo.
|
||||
|
||||
**Option B: AI-First Legal Assistant** — Don't call it "Kanzleimanagement." Position as an AI assistant that reads court documents, extracts deadlines, and syncs to the lawyer's existing Kanzleisoftware via CalDAV/iCal. This sidesteps the feature gap entirely.
|
||||
|
||||
**Option C: Full Kanzleisoftware** — If you pursue this, beA integration is the first priority, then RVG billing. Without these two, no German lawyer will switch.
|
||||
|
||||
---
|
||||
|
||||
## 9. Strengths (What's Good, Keep Doing It)
|
||||
|
||||
1. **Architecture is solid** — the Go + Next.js + Supabase stack is well-chosen. Clean separation of concerns.
|
||||
2. **SQL is safe** — parameterized queries throughout. No injection vectors.
|
||||
3. **Multi-tenant design** — `tenant_id` scoping is the right approach; pair it with the database-level RLS policies recommended in 2.13 for defense in depth.
|
||||
4. **CalDAV implementation** — genuinely impressive for an MVP. Bidirectional sync with conflict resolution.
|
||||
5. **Deadline calculator** — ported from youpc.org with holiday awareness. Real domain value.
|
||||
6. **AI integration** — Claude API with tool use for structured extraction. Clean implementation.
|
||||
7. **Dashboard UX** — traffic lights, timeline, quick actions. Lawyers will get this immediately.
|
||||
8. **German-first** — proper legal terminology, German date formats, localized UI.
|
||||
9. **Test foundation** — 15 backend test files with integration tests. Good starting point.
|
||||
10. **Docker builds are lean** — multi-stage, Alpine-based, standalone Next.js output.
|
||||
|
||||
---
|
||||
|
||||
## 10. Priority Roadmap
|
||||
|
||||
### P0 — This Week
|
||||
- [ ] Fix tenant isolation bypass in TenantResolver (1.1)
|
||||
- [ ] Consolidate tenant resolution logic (1.2)
|
||||
- [ ] Encrypt CalDAV credentials at rest (1.3)
|
||||
- [ ] Add CORS middleware + security headers (1.4)
|
||||
- [ ] Stop leaking internal errors to clients (1.5)
|
||||
- [ ] Add mutex to HolidayService cache (1.6)
|
||||
- [ ] Fix rate limiter X-Forwarded-For bypass (1.7)
|
||||
- [ ] Fix Dockerfile go.sum copy (2.9)
|
||||
|
||||
### P1 — Before Demo/Beta
|
||||
- [ ] Add input validation (length limits, allowed values) (2.1)
|
||||
- [ ] Add pagination to all list endpoints (2.2)
|
||||
- [ ] Fix `search_path` connection pool issue (2.8)
|
||||
- [ ] Add graceful shutdown with signal handling (2.7)
|
||||
- [ ] Sanitize Content-Disposition filename (2.6)
|
||||
- [ ] Fix German umlaut typos throughout frontend (2.10)
|
||||
- [ ] Handle createEvent errors instead of swallowing (2.11)
|
||||
- [ ] Add React error boundaries (2.12)
|
||||
- [ ] Implement RLS policies on all tenant-scoped tables (2.13)
|
||||
- [ ] Add multi-tenant security tests
|
||||
- [ ] Add database migrations system
|
||||
- [ ] Add `.env.example` file
|
||||
- [ ] Add onboarding flow for new users
|
||||
|
||||
### P2 — Next Iteration
|
||||
- [ ] Role-based access control (admin/member/readonly)
|
||||
- [ ] Global search
|
||||
- [ ] Email notifications for overdue deadlines
|
||||
- [ ] Audit trail / activity log per user
|
||||
- [ ] Auto-trigger AI extraction on document upload
|
||||
- [ ] Print-friendly views
|
||||
- [ ] E2E tests with Playwright
|
||||
- [ ] CI/CD pipeline
|
||||
|
||||
### P3 — Strategic
|
||||
- [ ] Decide market positioning (UPC niche vs. AI assistant vs. full Kanzleisoftware)
|
||||
- [ ] If Kanzleisoftware: begin beA integration research
|
||||
- [ ] If Kanzleisoftware: RVG Gebührenrechner
|
||||
- [ ] If UPC niche: integrate lex-research case law database
|
||||
|
||||
---
|
||||
|
||||
*This audit was conducted by reading every source file in the repository, running all tests, analyzing the database schema via seed data, and comparing against established German Kanzleisoftware competitors.*
|
||||
665
DESIGN-dashboard-redesign.md
Normal file
665
DESIGN-dashboard-redesign.md
Normal file
@@ -0,0 +1,665 @@
|
||||
# Design: Dashboard Redesign + Detail Pages
|
||||
|
||||
**Task:** t-kz-060
|
||||
**Author:** cronus (inventor)
|
||||
**Date:** 2026-03-25
|
||||
**Status:** Design proposal
|
||||
|
||||
---
|
||||
|
||||
## Problem Statement
|
||||
|
||||
The current dashboard is a read-only status board. Cards show counts but don't link anywhere. Timeline items are inert. Quick actions navigate to list pages rather than creation flows. There are no detail pages for individual events, deadlines, or appointments. Notes don't exist as a first-class entity. Case detail tabs use local state instead of URL segments, breaking deep linking and back navigation.
|
||||
|
||||
## Design Principles
|
||||
|
||||
1. **Everything clickable goes somewhere** — no dead-end UI
|
||||
2. **Breadcrumb navigation** — always know where you are, one click to go back
|
||||
3. **German labels throughout** — consistent with existing convention
|
||||
4. **Mobile responsive** — sidebar collapses, cards stack, touch targets >= 44px
|
||||
5. **Information density over whitespace** — law firm users want data, not decoration
|
||||
6. **URL-driven state** — tabs, filters, and views reflected in the URL for deep linking
|
||||
|
||||
---
|
||||
|
||||
## Part 1: Dashboard Redesign
|
||||
|
||||
### 1.1 Traffic Light Cards → Click-to-Filter
|
||||
|
||||
**Current:** Three cards (Ueberfaellig / Diese Woche / Im Zeitplan) show counts. `onFilter` prop exists but is never wired up in `dashboard/page.tsx`.
|
||||
|
||||
**Proposed:**
|
||||
|
||||
Clicking a traffic light card navigates to `/fristen?status={filter}`:
|
||||
|
||||
| Card | Navigation Target |
|
||||
|------|------------------|
|
||||
| Ueberfaellig (red) | `/fristen?status=overdue` |
|
||||
| Diese Woche (amber) | `/fristen?status=this_week` |
|
||||
| Im Zeitplan (green) | `/fristen?status=ok` |
|
||||
|
||||
**Implementation:**
|
||||
- Replace `onFilter` callback with `next/link` navigation using `href`
|
||||
- `DeadlineTrafficLights` becomes a pure link-based component (no callback needed)
|
||||
- `/fristen` page reads `searchParams.status` and pre-applies the filter
|
||||
- The DeadlineList component already supports status filtering — just needs to read from URL
|
||||
|
||||
**Changes:**
|
||||
- `DeadlineTrafficLights.tsx`: Replace `<button onClick>` with `<Link href="/fristen?status=...">`
|
||||
- `fristen/page.tsx`: Read `searchParams` and pass as initial filter to DeadlineList
|
||||
- `DeadlineList.tsx`: Accept `initialStatus` prop from URL params
|
||||
|
||||
### 1.2 Case Overview Grid → Click-to-Filter
|
||||
|
||||
**Current:** Three static metrics (Aktive Akten / Neu / Abgeschlossen). No links.
|
||||
|
||||
**Proposed:**
|
||||
|
||||
| Card | Navigation Target |
|
||||
|------|------------------|
|
||||
| Aktive Akten | `/cases?status=active` |
|
||||
| Neu (Monat) | `/cases?status=active&since=month` |
|
||||
| Abgeschlossen | `/cases?status=closed` |
|
||||
|
||||
**Implementation:**
|
||||
- Wrap each metric row in `<Link>` to `/cases` with appropriate query params
|
||||
- Cases list page already has filtering — needs to read URL params on mount
|
||||
- Add visual hover state (arrow icon on hover, background highlight)
|
||||
|
||||
**Changes:**
|
||||
- `CaseOverviewGrid.tsx`: Each row becomes a `<Link>` with hover arrow
|
||||
- `cases/page.tsx`: Read `searchParams` for initial filter state
|
||||
|
||||
### 1.3 Timeline Items → Click-to-Navigate
|
||||
|
||||
**Current:** Timeline entries show deadline/appointment info but are not clickable. No link to the parent case or the item itself.
|
||||
|
||||
**Proposed:**
|
||||
|
||||
Each timeline entry becomes a clickable row:
|
||||
|
||||
- **Deadline entries**: Click navigates to `/fristen/{id}` (new deadline detail page)
|
||||
- **Appointment entries**: Click navigates to `/termine/{id}` (new appointment detail page)
|
||||
- **Case reference** (Az. / case_number): Secondary click target linking to `/cases/{case_id}`
|
||||
|
||||
**Visual changes:**
|
||||
- Add `cursor-pointer` and hover state (`hover:bg-neutral-100` transition)
|
||||
- Add a small chevron-right icon on the right edge
|
||||
- Case number becomes a subtle underlined link (click stops propagation)
|
||||
|
||||
**Data changes:**
|
||||
- `UpcomingDeadline` needs `case_id` field (currently missing from the dashboard query — the backend model has it but the SQL join doesn't select it)
|
||||
- `UpcomingAppointment` already has `case_id`
|
||||
|
||||
**Backend change:**
|
||||
- `dashboard_service.go` line 112: Add `d.case_id` to the upcoming deadlines SELECT
|
||||
- `DashboardService.UpcomingDeadline` struct: Add `CaseID uuid.UUID` field
|
||||
- Frontend `UpcomingDeadline` type: Already has `case_id` (it's in types.ts but the backend doesn't send it)
|
||||
|
||||
### 1.4 Quick Actions → Proper Navigation
|
||||
|
||||
**Current:** "Frist eintragen" goes to `/fristen` (list page), not a creation flow. "CalDAV Sync" goes to `/einstellungen`.
|
||||
|
||||
**Proposed:**
|
||||
|
||||
| Action | Current Target | New Target |
|
||||
|--------|---------------|------------|
|
||||
| Neue Akte | `/cases/new` | `/cases/new` (keep) |
|
||||
| Frist eintragen | `/fristen` | `/fristen/neu` (new creation page) |
|
||||
| Neuer Termin | (missing) | `/termine/neu` (new creation page) |
|
||||
| AI Analyse | `/ai/extract` | `/ai/extract` (keep) |
|
||||
|
||||
Replace "CalDAV Sync" with "Neuer Termin" — CalDAV sync is a settings function, not a daily quick action. Creating an appointment is something a secretary does multiple times per day.
|
||||
|
||||
**Changes:**
|
||||
- `QuickActions.tsx`: Update hrefs, swap CalDAV for appointment creation
|
||||
- Create `/fristen/neu/page.tsx` — standalone deadline creation form (select case, fill fields)
|
||||
- Create `/termine/neu/page.tsx` — standalone appointment creation form
|
||||
|
||||
### 1.5 AI Summary Card → Refresh Button
|
||||
|
||||
**Current:** Rule-based summary text, no refresh mechanism. Card regenerates on page load but not on demand.
|
||||
|
||||
**Proposed:**
|
||||
- Add a small refresh icon button (RefreshCw) in the card header, next to "KI-Zusammenfassung"
|
||||
- Clicking it calls `refetch()` on the dashboard query (passed as prop)
|
||||
- Show a brief spinning animation during refetch
|
||||
- If/when real AI summarization is wired up, this button triggers `POST /api/ai/summarize-dashboard` (future endpoint)
|
||||
|
||||
**Changes:**
|
||||
- `AISummaryCard.tsx`: Accept `onRefresh` prop, add button with spinning state
|
||||
- `dashboard/page.tsx`: Pass `refetch` to AISummaryCard
|
||||
|
||||
### 1.6 Dashboard Layout: Add Recent Activity Section
|
||||
|
||||
**Current:** The backend returns `recent_activity` (last 10 case events) but the frontend ignores it entirely.
|
||||
|
||||
**Proposed:**
|
||||
- Add a "Letzte Aktivität" section below the timeline, full width
|
||||
- Shows the 10 most recent case events in a compact list
|
||||
- Each row: event icon (by type) | title | case number (linked) | relative time
|
||||
- Clicking a row navigates to the case event detail page `/cases/{case_id}/ereignisse/{event_id}`
|
||||
|
||||
**Changes:**
|
||||
- New component: `RecentActivityList.tsx` in `components/dashboard/`
|
||||
- `dashboard/page.tsx`: Add section below the main grid
|
||||
- Add `RecentActivity` type to `types.ts` (needs `case_id` and `event_id` fields from backend)
|
||||
- Backend: Add `case_id` and `id` to the recent activity query
|
||||
|
||||
---
|
||||
|
||||
## Part 2: New Pages
|
||||
|
||||
### 2.1 Deadline Detail Page — `/fristen/{id}`
|
||||
|
||||
**Route:** `src/app/(app)/fristen/[id]/page.tsx`
|
||||
|
||||
**Layout:**
|
||||
|
||||
```
|
||||
Breadcrumb: Dashboard > Fristen > {deadline.title}
|
||||
+---------------------------------------------------------+
|
||||
| [Status Badge] {deadline.title} [Erledigen] |
|
||||
| Fällig: 28. März 2026 |
|
||||
+---------------------------------------------------------+
|
||||
| Akte: Az. 2024/001 — Müller v. Schmidt [→ Zur Akte] |
|
||||
| Quelle: Berechnet (R.118 RoP) |
|
||||
| Ursprüngliches Datum: 25. März 2026 |
|
||||
| Warnungsdatum: 21. März 2026 |
|
||||
+---------------------------------------------------------+
|
||||
| Notizen [Bearbeiten]|
|
||||
| Fristverlängerung beantragt am 20.03. |
|
||||
+---------------------------------------------------------+
|
||||
| Verlauf |
|
||||
| ○ Erstellt am 15.03.2026 |
|
||||
| ○ Warnung gesendet am 21.03.2026 |
|
||||
+---------------------------------------------------------+
|
||||
```
|
||||
|
||||
**Data requirements:**
|
||||
- `GET /api/deadlines/{id}` — new endpoint returning full deadline with case info
|
||||
- Returns: Deadline + associated case (number, title, id) + notes
|
||||
|
||||
**Sections:**
|
||||
1. **Header**: Status badge (Offen/Erledigt/Überfällig), title, "Erledigen" action button
|
||||
2. **Due date**: Large, with relative time ("in 3 Tagen" / "vor 2 Tagen überfällig")
|
||||
3. **Context panel**: Parent case (linked), source (manual/calculated/caldav), rule reference, original vs adjusted date
|
||||
4. **Notes section**: Free-text notes (existing `notes` field on deadline), inline edit
|
||||
5. **Activity log**: Timeline of changes to this deadline (future: from case_events filtered by deadline)
|
||||
|
||||
**Backend additions:**
|
||||
- `GET /api/deadlines/{id}` — new handler returning single deadline with case join
|
||||
- Handler: `deadlines.go` add `Get` method
|
||||
- Service: `deadline_service.go` add `GetByID` with case join
|
||||
|
||||
### 2.2 Appointment Detail Page — `/termine/{id}`
|
||||
|
||||
**Route:** `src/app/(app)/termine/[id]/page.tsx`
|
||||
|
||||
**Layout:**
|
||||
|
||||
```
|
||||
Breadcrumb: Dashboard > Termine > {appointment.title}
|
||||
+---------------------------------------------------------+
|
||||
| {appointment.title} [Bearbeiten] [X] |
|
||||
| Typ: Verhandlung |
|
||||
+---------------------------------------------------------+
|
||||
| Datum: 28. März 2026, 10:00 – 12:00 Uhr |
|
||||
| Ort: UPC München, Saal 3 |
|
||||
+---------------------------------------------------------+
|
||||
| Akte: Az. 2024/001 — Müller v. Schmidt [→ Zur Akte] |
|
||||
+---------------------------------------------------------+
|
||||
| Beschreibung |
|
||||
| Erste mündliche Verhandlung... |
|
||||
+---------------------------------------------------------+
|
||||
| Notizen [+ Neu] |
|
||||
| ○ 25.03. — Mandant über Termin informiert |
|
||||
| ○ 24.03. — Schriftsatz vorbereitet |
|
||||
+---------------------------------------------------------+
|
||||
```
|
||||
|
||||
**Data requirements:**
|
||||
- `GET /api/appointments/{id}` — new endpoint returning single appointment with case info
|
||||
- Notes: Uses new `notes` table (see Part 3)
|
||||
|
||||
**Backend additions:**
|
||||
- `GET /api/appointments/{id}` — new handler
|
||||
- Handler: `appointments.go` add `Get` method
|
||||
- Service: `appointment_service.go` add `GetByID` with optional case join
|
||||
|
||||
### 2.3 Case Event Detail Page — `/cases/{id}/ereignisse/{eventId}`
|
||||
|
||||
**Route:** `src/app/(app)/cases/[id]/ereignisse/[eventId]/page.tsx`
|
||||
|
||||
**Layout:**
|
||||
|
||||
```
|
||||
Breadcrumb: Akten > Az. 2024/001 > Verlauf > {event.title}
|
||||
+---------------------------------------------------------+
|
||||
| [Event Type Icon] {event.title} |
|
||||
| 25. März 2026, 14:30 |
|
||||
+---------------------------------------------------------+
|
||||
| Beschreibung |
|
||||
| Statusänderung: aktiv → geschlossen |
|
||||
+---------------------------------------------------------+
|
||||
| Metadaten |
|
||||
| Erstellt von: max.mustermann@kanzlei.de |
|
||||
| Typ: status_changed |
|
||||
+---------------------------------------------------------+
|
||||
| Notizen [+ Neu] |
|
||||
| (keine Notizen) |
|
||||
+---------------------------------------------------------+
|
||||
```
|
||||
|
||||
**Data requirements:**
|
||||
- `GET /api/case-events/{id}` — new endpoint
|
||||
- Notes: Uses new `notes` table
|
||||
|
||||
**Backend additions:**
|
||||
- New handler: `case_events.go` with `Get` method
|
||||
- New service method: `CaseEventService.GetByID`
|
||||
- Or extend existing case handler to include event fetching
|
||||
|
||||
### 2.4 Standalone Deadline Creation — `/fristen/neu`
|
||||
|
||||
**Route:** `src/app/(app)/fristen/neu/page.tsx`
|
||||
|
||||
**Layout:**
|
||||
|
||||
```
|
||||
Breadcrumb: Fristen > Neue Frist
|
||||
+---------------------------------------------------------+
|
||||
| Neue Frist anlegen |
|
||||
+---------------------------------------------------------+
|
||||
| Akte*: [Dropdown: Aktenauswahl] |
|
||||
| Bezeichnung*: [________________________] |
|
||||
| Beschreibung: [________________________] |
|
||||
| Fällig am*: [Datumsauswahl] |
|
||||
| Warnung am: [Datumsauswahl] |
|
||||
| Notizen: [Textarea] |
|
||||
+---------------------------------------------------------+
|
||||
| [Abbrechen] [Frist anlegen]|
|
||||
+---------------------------------------------------------+
|
||||
```
|
||||
|
||||
Reuses existing deadline creation logic but as a standalone page rather than requiring the user to first navigate to a case. Case is selected via dropdown.
|
||||
|
||||
### 2.5 Standalone Appointment Creation — `/termine/neu`
|
||||
|
||||
**Route:** `src/app/(app)/termine/neu/page.tsx`
|
||||
|
||||
Same pattern as deadline creation. Reuses AppointmentModal fields but as a full page form. Appointment can optionally be linked to a case.
|
||||
|
||||
### 2.6 Case Detail Tabs → URL Segments
|
||||
|
||||
**Current:** Tabs use `useState<TabKey>` — no URL change, no deep linking, no browser back.
|
||||
|
||||
**Proposed route structure:**
|
||||
|
||||
```
|
||||
/cases/{id} → redirects to /cases/{id}/verlauf
|
||||
/cases/{id}/verlauf → Timeline tab
|
||||
/cases/{id}/fristen → Deadlines tab
|
||||
/cases/{id}/dokumente → Documents tab
|
||||
/cases/{id}/parteien → Parties tab
|
||||
/cases/{id}/notizen → Notes tab (new)
|
||||
```
|
||||
|
||||
**Implementation approach:**
|
||||
|
||||
Use Next.js nested layouts with a shared layout for the case header + tab bar:
|
||||
|
||||
```
|
||||
src/app/(app)/cases/[id]/
|
||||
layout.tsx # Case header + tab navigation
|
||||
page.tsx # Redirect to ./verlauf
|
||||
verlauf/page.tsx # Timeline
|
||||
fristen/page.tsx # Deadlines
|
||||
dokumente/page.tsx # Documents
|
||||
parteien/page.tsx # Parties
|
||||
notizen/page.tsx # Notes (new)
|
||||
```
|
||||
|
||||
The `layout.tsx` fetches case data and renders the header + tab bar. Each child page renders its tab content. The active tab is determined by the current pathname.
|
||||
|
||||
**Benefits:**
|
||||
- Deep linking: `/cases/abc123/fristen` opens directly to the deadlines tab
|
||||
- Browser back button works between tabs
|
||||
- Each tab can have its own loading state
|
||||
- Bookmarkable
|
||||
|
||||
---
|
||||
|
||||
## Part 3: Notes System
|
||||
|
||||
### 3.1 Data Model
|
||||
|
||||
Notes are a polymorphic entity — they can be attached to cases, deadlines, appointments, or case events.
|
||||
|
||||
**New table: `kanzlai.notes`**
|
||||
|
||||
```sql
|
||||
CREATE TABLE kanzlai.notes (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tenant_id UUID NOT NULL REFERENCES kanzlai.tenants(id),
|
||||
|
||||
-- Polymorphic parent reference (exactly one must be set)
|
||||
case_id UUID REFERENCES kanzlai.cases(id) ON DELETE CASCADE,
|
||||
deadline_id UUID REFERENCES kanzlai.deadlines(id) ON DELETE CASCADE,
|
||||
appointment_id UUID REFERENCES kanzlai.appointments(id) ON DELETE CASCADE,
|
||||
case_event_id UUID REFERENCES kanzlai.case_events(id) ON DELETE CASCADE,
|
||||
|
||||
content TEXT NOT NULL,
|
||||
created_by UUID, -- auth.users reference
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
|
||||
-- Ensure exactly one parent is set
|
||||
CONSTRAINT notes_single_parent CHECK (
|
||||
(CASE WHEN case_id IS NOT NULL THEN 1 ELSE 0 END +
|
||||
CASE WHEN deadline_id IS NOT NULL THEN 1 ELSE 0 END +
|
||||
CASE WHEN appointment_id IS NOT NULL THEN 1 ELSE 0 END +
|
||||
CASE WHEN case_event_id IS NOT NULL THEN 1 ELSE 0 END) = 1
|
||||
)
|
||||
);
|
||||
|
||||
-- Indexes for efficient lookup by parent
|
||||
CREATE INDEX idx_notes_case ON kanzlai.notes(tenant_id, case_id) WHERE case_id IS NOT NULL;
|
||||
CREATE INDEX idx_notes_deadline ON kanzlai.notes(tenant_id, deadline_id) WHERE deadline_id IS NOT NULL;
|
||||
CREATE INDEX idx_notes_appointment ON kanzlai.notes(tenant_id, appointment_id) WHERE appointment_id IS NOT NULL;
|
||||
CREATE INDEX idx_notes_case_event ON kanzlai.notes(tenant_id, case_event_id) WHERE case_event_id IS NOT NULL;
|
||||
|
||||
-- RLS
|
||||
ALTER TABLE kanzlai.notes ENABLE ROW LEVEL SECURITY;
|
||||
CREATE POLICY notes_tenant_isolation ON kanzlai.notes
|
||||
USING (tenant_id IN (
|
||||
SELECT tenant_id FROM kanzlai.user_tenants WHERE user_id = auth.uid()
|
||||
));
|
||||
```
|
||||
|
||||
### 3.2 Why Polymorphic Table vs Separate Tables
|
||||
|
||||
**Considered alternatives:**
|
||||
|
||||
1. **Separate notes per entity** (case_notes, deadline_notes, etc.) — More tables, duplicated logic, harder to search across all notes.
|
||||
2. **Generic `entity_type` + `entity_id` pattern** — Loses FK constraints, can't cascade delete, harder to query with joins.
|
||||
3. **Polymorphic with nullable FKs** (chosen) — FK constraints maintained, cascade deletes work, partial indexes keep queries fast, single service/handler. The CHECK constraint ensures data integrity.
|
||||
|
||||
### 3.3 Backend Model & API
|
||||
|
||||
**Go model:**
|
||||
|
||||
```go
|
||||
type Note struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
TenantID uuid.UUID `db:"tenant_id" json:"tenant_id"`
|
||||
CaseID *uuid.UUID `db:"case_id" json:"case_id,omitempty"`
|
||||
DeadlineID *uuid.UUID `db:"deadline_id" json:"deadline_id,omitempty"`
|
||||
AppointmentID *uuid.UUID `db:"appointment_id" json:"appointment_id,omitempty"`
|
||||
CaseEventID *uuid.UUID `db:"case_event_id" json:"case_event_id,omitempty"`
|
||||
Content string `db:"content" json:"content"`
|
||||
CreatedBy *uuid.UUID `db:"created_by" json:"created_by,omitempty"`
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
```
|
||||
|
||||
**API endpoints:**
|
||||
|
||||
```
|
||||
GET /api/notes?case_id={id} # List notes for a case
|
||||
GET /api/notes?deadline_id={id} # List notes for a deadline
|
||||
GET /api/notes?appointment_id={id} # List notes for an appointment
|
||||
GET /api/notes?case_event_id={id} # List notes for a case event
|
||||
POST /api/notes # Create note (body includes parent ID)
|
||||
PUT /api/notes/{id} # Update note content
|
||||
DELETE /api/notes/{id} # Delete note
|
||||
```
|
||||
|
||||
Single endpoint with query parameter filtering — simpler than nested routes, works uniformly across all parent types.
|
||||
|
||||
**Service methods:**
|
||||
|
||||
```go
|
||||
type NoteService struct { db *sqlx.DB }
|
||||
|
||||
func (s *NoteService) ListByParent(ctx, tenantID, parentType, parentID) ([]Note, error)
|
||||
func (s *NoteService) Create(ctx, tenantID, note) (*Note, error)
|
||||
func (s *NoteService) Update(ctx, tenantID, noteID, content) (*Note, error)
|
||||
func (s *NoteService) Delete(ctx, tenantID, noteID) error
|
||||
```
|
||||
|
||||
### 3.4 Notes UI Component
|
||||
|
||||
Reusable `<NotesList>` component used on every detail page:
|
||||
|
||||
```
|
||||
+------------------------------------------------------------+
|
||||
| Notizen [+ Neu] |
|
||||
+------------------------------------------------------------+
|
||||
| m@kanzlei.de · 25. Mär 2026, 14:30 [X][E] |
|
||||
| Fristverlängerung beim Gericht beantragt. |
|
||||
+------------------------------------------------------------+
|
||||
| m@kanzlei.de · 24. Mär 2026, 10:15 [X][E] |
|
||||
| Mandant telefonisch über Sachstand informiert. |
|
||||
+------------------------------------------------------------+
|
||||
```
|
||||
|
||||
**Props:**
|
||||
```typescript
|
||||
interface NotesListProps {
|
||||
parentType: "case" | "deadline" | "appointment" | "case_event";
|
||||
parentId: string;
|
||||
}
|
||||
```
|
||||
|
||||
**Features:**
|
||||
- Fetches notes via `GET /api/notes?{parentType}_id={parentId}`
|
||||
- "Neu" button opens inline textarea (not a modal — faster for quick notes)
|
||||
- Each note shows: author, timestamp, content, edit/delete buttons
|
||||
- Edit is inline (textarea replaces content)
|
||||
- Optimistic updates via react-query mutation + invalidation
|
||||
- Empty state: "Keine Notizen vorhanden. Klicken Sie +, um eine Notiz hinzuzufügen."
|
||||
|
||||
### 3.5 Migration from `deadlines.notes` Field
|
||||
|
||||
The existing `deadlines.notes` text field should be migrated:
|
||||
1. For each deadline with a non-null `notes` value, create a corresponding row in the `notes` table with `deadline_id` set
|
||||
2. Drop the `deadlines.notes` column after migration
|
||||
3. This can be a one-time SQL migration script
|
||||
|
||||
---
|
||||
|
||||
## Part 4: Breadcrumb Navigation
|
||||
|
||||
### 4.1 Breadcrumb Component
|
||||
|
||||
New shared component: `src/components/layout/Breadcrumb.tsx`
|
||||
|
||||
```typescript
|
||||
interface BreadcrumbItem {
|
||||
label: string;
|
||||
href?: string; // omit for current page (last item)
|
||||
}
|
||||
|
||||
function Breadcrumb({ items }: { items: BreadcrumbItem[] }) {
|
||||
// Renders: Home > Parent > Current
|
||||
// Each item with href is a Link, last item is plain text
|
||||
}
|
||||
```
|
||||
|
||||
**Placement:** At the top of every page, inside the main content area (not in the layout — different pages have different breadcrumbs).
|
||||
|
||||
### 4.2 Breadcrumb Patterns
|
||||
|
||||
| Page | Breadcrumb |
|
||||
|------|-----------|
|
||||
| Dashboard | Dashboard |
|
||||
| Fristen | Dashboard > Fristen |
|
||||
| Fristen Detail | Dashboard > Fristen > {title} |
|
||||
| Fristen Neu | Dashboard > Fristen > Neue Frist |
|
||||
| Termine | Dashboard > Termine |
|
||||
| Termine Detail | Dashboard > Termine > {title} |
|
||||
| Termine Neu | Dashboard > Termine > Neuer Termin |
|
||||
| Akten | Dashboard > Akten |
|
||||
| Akte Detail | Dashboard > Akten > {case_number} |
|
||||
| Akte > Fristen | Dashboard > Akten > {case_number} > Fristen |
|
||||
| Akte > Notizen | Dashboard > Akten > {case_number} > Notizen |
|
||||
| Ereignis Detail | Dashboard > Akten > {case_number} > Verlauf > {title} |
|
||||
| Einstellungen | Dashboard > Einstellungen |
|
||||
| AI Analyse | Dashboard > AI Analyse |
|
||||
|
||||
---
|
||||
|
||||
## Part 5: Summary of Backend Changes
|
||||
|
||||
### New Endpoints
|
||||
|
||||
| Method | Path | Handler | Purpose |
|
||||
|--------|------|---------|---------|
|
||||
| GET | `/api/deadlines/{id}` | `deadlineH.Get` | Single deadline with case context |
|
||||
| GET | `/api/appointments/{id}` | `apptH.Get` | Single appointment with case context |
|
||||
| GET | `/api/case-events/{id}` | `eventH.Get` | Single case event |
|
||||
| GET | `/api/notes` | `noteH.List` | List notes (filtered by parent) |
|
||||
| POST | `/api/notes` | `noteH.Create` | Create note |
|
||||
| PUT | `/api/notes/{id}` | `noteH.Update` | Update note |
|
||||
| DELETE | `/api/notes/{id}` | `noteH.Delete` | Delete note |
|
||||
|
||||
### Modified Endpoints
|
||||
|
||||
| Endpoint | Change |
|
||||
|----------|--------|
|
||||
| `GET /api/dashboard` | Add `case_id`, `id` to recent_activity; add `case_id` to upcoming_deadlines query |
|
||||
|
||||
### New Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `backend/internal/models/note.go` | Note model |
|
||||
| `backend/internal/services/note_service.go` | Note CRUD service |
|
||||
| `backend/internal/handlers/notes.go` | Note HTTP handlers |
|
||||
| `backend/internal/handlers/case_events.go` | Case event detail handler |
|
||||
|
||||
### Database Migration
|
||||
|
||||
1. Create `kanzlai.notes` table with polymorphic FK pattern
|
||||
2. Migrate existing `deadlines.notes` data
|
||||
3. Drop `deadlines.notes` column
|
||||
|
||||
---
|
||||
|
||||
## Part 6: Summary of Frontend Changes
|
||||
|
||||
### New Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `src/app/(app)/fristen/[id]/page.tsx` | Deadline detail page |
|
||||
| `src/app/(app)/fristen/neu/page.tsx` | Standalone deadline creation |
|
||||
| `src/app/(app)/termine/[id]/page.tsx` | Appointment detail page |
|
||||
| `src/app/(app)/termine/neu/page.tsx` | Standalone appointment creation |
|
||||
| `src/app/(app)/cases/[id]/layout.tsx` | Case detail shared layout (header + tabs) |
|
||||
| `src/app/(app)/cases/[id]/verlauf/page.tsx` | Case timeline tab |
|
||||
| `src/app/(app)/cases/[id]/fristen/page.tsx` | Case deadlines tab |
|
||||
| `src/app/(app)/cases/[id]/dokumente/page.tsx` | Case documents tab |
|
||||
| `src/app/(app)/cases/[id]/parteien/page.tsx` | Case parties tab |
|
||||
| `src/app/(app)/cases/[id]/notizen/page.tsx` | Case notes tab (new) |
|
||||
| `src/app/(app)/cases/[id]/ereignisse/[eventId]/page.tsx` | Case event detail |
|
||||
| `src/components/layout/Breadcrumb.tsx` | Reusable breadcrumb |
|
||||
| `src/components/notes/NotesList.tsx` | Reusable notes list + inline creation |
|
||||
| `src/components/dashboard/RecentActivityList.tsx` | Recent activity feed |
|
||||
|
||||
### Modified Files
|
||||
|
||||
| File | Change |
|
||||
|------|--------|
|
||||
| `src/components/dashboard/DeadlineTrafficLights.tsx` | Buttons → Links with navigation |
|
||||
| `src/components/dashboard/CaseOverviewGrid.tsx` | Static metrics → clickable links |
|
||||
| `src/components/dashboard/UpcomingTimeline.tsx` | Items → clickable with navigation |
|
||||
| `src/components/dashboard/AISummaryCard.tsx` | Add refresh button |
|
||||
| `src/components/dashboard/QuickActions.tsx` | Fix targets, swap CalDAV for Termin |
|
||||
| `src/app/(app)/dashboard/page.tsx` | Wire navigation, add RecentActivity section |
|
||||
| `src/app/(app)/fristen/page.tsx` | Read URL params for initial filter |
|
||||
| `src/app/(app)/cases/page.tsx` | Read URL params for initial filter |
|
||||
| `src/app/(app)/cases/[id]/page.tsx` | Refactor into layout + nested routes |
|
||||
| `src/lib/types.ts` | Add Note, RecentActivity types; update UpcomingDeadline |
|
||||
|
||||
### Types to Add
|
||||
|
||||
```typescript
|
||||
export interface Note {
|
||||
id: string;
|
||||
tenant_id: string;
|
||||
case_id?: string;
|
||||
deadline_id?: string;
|
||||
appointment_id?: string;
|
||||
case_event_id?: string;
|
||||
content: string;
|
||||
created_by?: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
}
|
||||
|
||||
export interface RecentActivity {
|
||||
id: string;
|
||||
event_type?: string;
|
||||
title: string;
|
||||
case_id: string;
|
||||
case_number: string;
|
||||
event_date?: string;
|
||||
created_at: string;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Part 7: Implementation Plan
|
||||
|
||||
Recommended order for a coder to implement:
|
||||
|
||||
### Phase A: Backend Foundation (can be done in parallel)
|
||||
1. Create `notes` table migration + model + service + handler
|
||||
2. Add `GET /api/deadlines/{id}` endpoint
|
||||
3. Add `GET /api/appointments/{id}` endpoint
|
||||
4. Add `GET /api/case-events/{id}` endpoint
|
||||
5. Fix dashboard query to include `case_id` in upcoming deadlines and recent activity
|
||||
|
||||
### Phase B: Frontend — Dashboard Interactivity
|
||||
1. Create `Breadcrumb` component
|
||||
2. Make traffic light cards clickable (Links)
|
||||
3. Make case overview grid clickable (Links)
|
||||
4. Make timeline items clickable (Links)
|
||||
5. Fix quick actions (swap CalDAV for Termin, update hrefs)
|
||||
6. Add refresh button to AI Summary card
|
||||
7. Add RecentActivityList component + wire to dashboard
|
||||
|
||||
### Phase C: Frontend — New Detail Pages
|
||||
1. Deadline detail page (`/fristen/[id]`)
|
||||
2. Appointment detail page (`/termine/[id]`)
|
||||
3. Case event detail page (`/cases/[id]/ereignisse/[eventId]`)
|
||||
4. Standalone deadline creation (`/fristen/neu`)
|
||||
5. Standalone appointment creation (`/termine/neu`)
|
||||
|
||||
### Phase D: Frontend — Case Detail Refactor
|
||||
1. Extract case header + tabs into layout.tsx
|
||||
2. Create sub-route pages (verlauf, fristen, dokumente, parteien)
|
||||
3. Add notes tab
|
||||
4. Wire `NotesList` component into all detail pages
|
||||
|
||||
### Phase E: Polish
|
||||
1. URL filter params on `/fristen` and `/cases` pages
|
||||
2. Breadcrumbs on all pages
|
||||
3. Mobile responsive testing
|
||||
4. Migration of existing `deadlines.notes` data
|
||||
|
||||
---
|
||||
|
||||
## Appendix: What This Design Does NOT Cover
|
||||
|
||||
- Real AI-powered summary (currently rule-based — kept as-is with refresh button)
|
||||
- Notification system (toast-based alerts for approaching deadlines)
|
||||
- Audit log / change history per entity
|
||||
- Batch operations (mark multiple deadlines complete)
|
||||
- Print views
|
||||
|
||||
These are separate features that can be designed independently.
|
||||
2
Makefile
2
Makefile
@@ -37,7 +37,7 @@ test-backend:
|
||||
cd backend && go test ./...
|
||||
|
||||
test-frontend:
|
||||
@echo "No frontend tests configured yet"
|
||||
cd frontend && bun run test
|
||||
|
||||
# Clean
|
||||
clean:
|
||||
|
||||
1321
ROADMAP.md
Normal file
1321
ROADMAP.md
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,32 +1,46 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/config"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/db"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/logging"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/router"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
func main() {
|
||||
logging.Setup()
|
||||
|
||||
cfg, err := config.Load()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load config: %v", err)
|
||||
slog.Error("failed to load config", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
database, err := db.Connect(cfg.DatabaseURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to connect to database: %v", err)
|
||||
slog.Error("failed to connect to database", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer database.Close()
|
||||
|
||||
authMW := auth.NewMiddleware(cfg.SupabaseJWTSecret, database)
|
||||
handler := router.New(database, authMW)
|
||||
|
||||
log.Printf("Starting KanzlAI API server on :%s", cfg.Port)
|
||||
// Start CalDAV sync service
|
||||
calDAVSvc := services.NewCalDAVService(database)
|
||||
calDAVSvc.Start()
|
||||
defer calDAVSvc.Stop()
|
||||
|
||||
handler := router.New(database, authMW, cfg, calDAVSvc)
|
||||
|
||||
slog.Info("starting KanzlAI API server", "port", cfg.Port)
|
||||
if err := http.ListenAndServe(":"+cfg.Port, handler); err != nil {
|
||||
log.Fatal(err)
|
||||
slog.Error("server failed", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,8 +3,20 @@ module mgit.msbls.de/m/KanzlAI-mGMT
|
||||
go 1.25.5
|
||||
|
||||
require (
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/jmoiron/sqlx v1.4.0 // indirect
|
||||
github.com/lib/pq v1.12.0 // indirect
|
||||
github.com/anthropics/anthropic-sdk-go v1.27.1
|
||||
github.com/emersion/go-ical v0.0.0-20250609112844-439c63cef608
|
||||
github.com/emersion/go-webdav v0.7.0
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/jmoiron/sqlx v1.4.0
|
||||
github.com/lib/pq v1.12.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/teambition/rrule-go v1.8.2 // indirect
|
||||
github.com/tidwall/gjson v1.18.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/tidwall/sjson v1.2.5 // indirect
|
||||
golang.org/x/sync v0.16.0 // indirect
|
||||
)
|
||||
|
||||
@@ -1,4 +1,18 @@
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
github.com/anthropics/anthropic-sdk-go v1.27.1 h1:7DgMZ2Ng3C2mPzJGHA30NXQTZolcF07mHd0tGaLwfzk=
|
||||
github.com/anthropics/anthropic-sdk-go v1.27.1/go.mod h1:qUKmaW+uuPB64iy1l+4kOSvaLqPXnHTTBKH6RVZ7q5Q=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
|
||||
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
|
||||
github.com/emersion/go-ical v0.0.0-20240127095438-fc1c9d8fb2b6/go.mod h1:BEksegNspIkjCQfmzWgsgbu6KdeJ/4LwUZs7DMBzjzw=
|
||||
github.com/emersion/go-ical v0.0.0-20250609112844-439c63cef608 h1:5XWaET4YAcppq3l1/Yh2ay5VmQjUdq6qhJuucdGbmOY=
|
||||
github.com/emersion/go-ical v0.0.0-20250609112844-439c63cef608/go.mod h1:BEksegNspIkjCQfmzWgsgbu6KdeJ/4LwUZs7DMBzjzw=
|
||||
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9/go.mod h1:HMJKR5wlh/ziNp+sHEDV2ltblO4JD2+IdDOWtGcQBTM=
|
||||
github.com/emersion/go-webdav v0.7.0 h1:cp6aBWXBf8Sjzguka9VJarr4XTkGc2IHxXI1Gq3TKpA=
|
||||
github.com/emersion/go-webdav v0.7.0/go.mod h1:mI8iBx3RAODwX7PJJ7qzsKAKs/vY429YfS2/9wKnDbQ=
|
||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||
@@ -9,4 +23,27 @@ github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/lib/pq v1.12.0 h1:mC1zeiNamwKBecjHarAr26c/+d8V5w/u4J0I/yASbJo=
|
||||
github.com/lib/pq v1.12.0/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/teambition/rrule-go v1.8.2 h1:lIjpjvWTj9fFUZCmuoVDrKVOtdiyzbzc93qTmRVe/J8=
|
||||
github.com/teambition/rrule-go v1.8.2/go.mod h1:Ieq5AbrKGciP1V//Wq8ktsTXwSwJHDD5mD/wLBGl3p4=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
|
||||
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
@@ -11,6 +11,8 @@ type contextKey string
|
||||
const (
|
||||
userIDKey contextKey = "user_id"
|
||||
tenantIDKey contextKey = "tenant_id"
|
||||
ipKey contextKey = "ip_address"
|
||||
userAgentKey contextKey = "user_agent"
|
||||
)
|
||||
|
||||
func ContextWithUserID(ctx context.Context, userID uuid.UUID) context.Context {
|
||||
@@ -30,3 +32,23 @@ func TenantFromContext(ctx context.Context) (uuid.UUID, bool) {
|
||||
id, ok := ctx.Value(tenantIDKey).(uuid.UUID)
|
||||
return id, ok
|
||||
}
|
||||
|
||||
func ContextWithRequestInfo(ctx context.Context, ip, userAgent string) context.Context {
|
||||
ctx = context.WithValue(ctx, ipKey, ip)
|
||||
ctx = context.WithValue(ctx, userAgentKey, userAgent)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func IPFromContext(ctx context.Context) *string {
|
||||
if v, ok := ctx.Value(ipKey).(string); ok && v != "" {
|
||||
return &v
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func UserAgentFromContext(ctx context.Context) *string {
|
||||
if v, ok := ctx.Value(userAgentKey).(string); ok && v != "" {
|
||||
return &v
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -46,6 +46,13 @@ func (m *Middleware) RequireAuth(next http.Handler) http.Handler {
|
||||
}
|
||||
ctx = ContextWithTenantID(ctx, tenantID)
|
||||
|
||||
// Capture IP and user-agent for audit logging
|
||||
ip := r.Header.Get("X-Forwarded-For")
|
||||
if ip == "" {
|
||||
ip = r.RemoteAddr
|
||||
}
|
||||
ctx = ContextWithRequestInfo(ctx, ip, r.UserAgent())
|
||||
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
|
||||
61
backend/internal/auth/tenant_resolver.go
Normal file
61
backend/internal/auth/tenant_resolver.go
Normal file
@@ -0,0 +1,61 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// TenantLookup resolves the default tenant for a user.
|
||||
// Defined as an interface to avoid circular dependency with services.
|
||||
type TenantLookup interface {
|
||||
FirstTenantForUser(ctx context.Context, userID uuid.UUID) (*uuid.UUID, error)
|
||||
}
|
||||
|
||||
// TenantResolver is middleware that resolves the tenant from X-Tenant-ID header
|
||||
// or defaults to the user's first tenant.
|
||||
type TenantResolver struct {
|
||||
lookup TenantLookup
|
||||
}
|
||||
|
||||
func NewTenantResolver(lookup TenantLookup) *TenantResolver {
|
||||
return &TenantResolver{lookup: lookup}
|
||||
}
|
||||
|
||||
func (tr *TenantResolver) Resolve(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
var tenantID uuid.UUID
|
||||
|
||||
if header := r.Header.Get("X-Tenant-ID"); header != "" {
|
||||
parsed, err := uuid.Parse(header)
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("invalid X-Tenant-ID: %v", err), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
tenantID = parsed
|
||||
} else {
|
||||
// Default to user's first tenant
|
||||
first, err := tr.lookup.FirstTenantForUser(r.Context(), userID)
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("resolving tenant: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if first == nil {
|
||||
http.Error(w, "no tenant found for user", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
tenantID = *first
|
||||
}
|
||||
|
||||
ctx := ContextWithTenantID(r.Context(), tenantID)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
124
backend/internal/auth/tenant_resolver_test.go
Normal file
124
backend/internal/auth/tenant_resolver_test.go
Normal file
@@ -0,0 +1,124 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type mockTenantLookup struct {
|
||||
tenantID *uuid.UUID
|
||||
err error
|
||||
}
|
||||
|
||||
func (m *mockTenantLookup) FirstTenantForUser(ctx context.Context, userID uuid.UUID) (*uuid.UUID, error) {
|
||||
return m.tenantID, m.err
|
||||
}
|
||||
|
||||
func TestTenantResolver_FromHeader(t *testing.T) {
|
||||
tenantID := uuid.New()
|
||||
tr := NewTenantResolver(&mockTenantLookup{})
|
||||
|
||||
var gotTenantID uuid.UUID
|
||||
next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
id, ok := TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
t.Fatal("tenant ID not in context")
|
||||
}
|
||||
gotTenantID = id
|
||||
w.WriteHeader(http.StatusOK)
|
||||
})
|
||||
|
||||
r := httptest.NewRequest("GET", "/api/cases", nil)
|
||||
r.Header.Set("X-Tenant-ID", tenantID.String())
|
||||
r = r.WithContext(ContextWithUserID(r.Context(), uuid.New()))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
tr.Resolve(next).ServeHTTP(w, r)
|
||||
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", w.Code)
|
||||
}
|
||||
if gotTenantID != tenantID {
|
||||
t.Errorf("expected tenant %s, got %s", tenantID, gotTenantID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTenantResolver_DefaultsToFirst(t *testing.T) {
|
||||
tenantID := uuid.New()
|
||||
tr := NewTenantResolver(&mockTenantLookup{tenantID: &tenantID})
|
||||
|
||||
var gotTenantID uuid.UUID
|
||||
next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
id, _ := TenantFromContext(r.Context())
|
||||
gotTenantID = id
|
||||
w.WriteHeader(http.StatusOK)
|
||||
})
|
||||
|
||||
r := httptest.NewRequest("GET", "/api/cases", nil)
|
||||
r = r.WithContext(ContextWithUserID(r.Context(), uuid.New()))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
tr.Resolve(next).ServeHTTP(w, r)
|
||||
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", w.Code)
|
||||
}
|
||||
if gotTenantID != tenantID {
|
||||
t.Errorf("expected tenant %s, got %s", tenantID, gotTenantID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTenantResolver_NoUser(t *testing.T) {
|
||||
tr := NewTenantResolver(&mockTenantLookup{})
|
||||
next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
t.Fatal("next should not be called")
|
||||
})
|
||||
|
||||
r := httptest.NewRequest("GET", "/api/cases", nil)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
tr.Resolve(next).ServeHTTP(w, r)
|
||||
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTenantResolver_InvalidHeader(t *testing.T) {
|
||||
tr := NewTenantResolver(&mockTenantLookup{})
|
||||
next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
t.Fatal("next should not be called")
|
||||
})
|
||||
|
||||
r := httptest.NewRequest("GET", "/api/cases", nil)
|
||||
r.Header.Set("X-Tenant-ID", "not-a-uuid")
|
||||
r = r.WithContext(ContextWithUserID(r.Context(), uuid.New()))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
tr.Resolve(next).ServeHTTP(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTenantResolver_NoTenantForUser(t *testing.T) {
|
||||
tr := NewTenantResolver(&mockTenantLookup{tenantID: nil})
|
||||
next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
t.Fatal("next should not be called")
|
||||
})
|
||||
|
||||
r := httptest.NewRequest("GET", "/api/cases", nil)
|
||||
r = r.WithContext(ContextWithUserID(r.Context(), uuid.New()))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
tr.Resolve(next).ServeHTTP(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,7 @@ type Config struct {
|
||||
DatabaseURL string
|
||||
SupabaseURL string
|
||||
SupabaseAnonKey string
|
||||
SupabaseServiceKey string
|
||||
SupabaseJWTSecret string
|
||||
AnthropicAPIKey string
|
||||
}
|
||||
@@ -20,6 +21,7 @@ func Load() (*Config, error) {
|
||||
DatabaseURL: os.Getenv("DATABASE_URL"),
|
||||
SupabaseURL: os.Getenv("SUPABASE_URL"),
|
||||
SupabaseAnonKey: os.Getenv("SUPABASE_ANON_KEY"),
|
||||
SupabaseServiceKey: os.Getenv("SUPABASE_SERVICE_KEY"),
|
||||
SupabaseJWTSecret: os.Getenv("SUPABASE_JWT_SECRET"),
|
||||
AnthropicAPIKey: os.Getenv("ANTHROPIC_API_KEY"),
|
||||
}
|
||||
|
||||
115
backend/internal/handlers/ai.go
Normal file
115
backend/internal/handlers/ai.go
Normal file
@@ -0,0 +1,115 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
type AIHandler struct {
|
||||
ai *services.AIService
|
||||
db *sqlx.DB
|
||||
}
|
||||
|
||||
func NewAIHandler(ai *services.AIService, db *sqlx.DB) *AIHandler {
|
||||
return &AIHandler{ai: ai, db: db}
|
||||
}
|
||||
|
||||
// ExtractDeadlines handles POST /api/ai/extract-deadlines
|
||||
// Accepts either multipart/form-data with a "file" PDF field, or JSON {"text": "..."}.
|
||||
func (h *AIHandler) ExtractDeadlines(w http.ResponseWriter, r *http.Request) {
|
||||
contentType := r.Header.Get("Content-Type")
|
||||
|
||||
var pdfData []byte
|
||||
var text string
|
||||
|
||||
// Check if multipart (PDF upload)
|
||||
if len(contentType) >= 9 && contentType[:9] == "multipart" {
|
||||
if err := r.ParseMultipartForm(32 << 20); err != nil { // 32MB max
|
||||
writeError(w, http.StatusBadRequest, "failed to parse multipart form")
|
||||
return
|
||||
}
|
||||
file, _, err := r.FormFile("file")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "missing 'file' field in multipart form")
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
pdfData, err = io.ReadAll(file)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "failed to read uploaded file")
|
||||
return
|
||||
}
|
||||
} else {
|
||||
// Assume JSON body
|
||||
var body struct {
|
||||
Text string `json:"text"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
text = body.Text
|
||||
}
|
||||
|
||||
if len(pdfData) == 0 && text == "" {
|
||||
writeError(w, http.StatusBadRequest, "provide either a PDF file or text")
|
||||
return
|
||||
}
|
||||
|
||||
deadlines, err := h.ai.ExtractDeadlines(r.Context(), pdfData, text)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "AI extraction failed: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"deadlines": deadlines,
|
||||
"count": len(deadlines),
|
||||
})
|
||||
}
|
||||
|
||||
// SummarizeCase handles POST /api/ai/summarize-case
|
||||
// Accepts JSON {"case_id": "uuid"}.
|
||||
func (h *AIHandler) SummarizeCase(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
var body struct {
|
||||
CaseID string `json:"case_id"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
|
||||
if body.CaseID == "" {
|
||||
writeError(w, http.StatusBadRequest, "case_id is required")
|
||||
return
|
||||
}
|
||||
|
||||
caseID, err := parseUUID(body.CaseID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid case_id")
|
||||
return
|
||||
}
|
||||
|
||||
summary, err := h.ai.SummarizeCase(r.Context(), tenantID, caseID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "AI summarization failed: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]string{
|
||||
"case_id": caseID.String(),
|
||||
"summary": summary,
|
||||
})
|
||||
}
|
||||
74
backend/internal/handlers/ai_handler_test.go
Normal file
74
backend/internal/handlers/ai_handler_test.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAIExtractDeadlines_EmptyInput(t *testing.T) {
|
||||
h := &AIHandler{}
|
||||
|
||||
body := `{"text":""}`
|
||||
r := httptest.NewRequest("POST", "/api/ai/extract-deadlines", bytes.NewBufferString(body))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.ExtractDeadlines(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
var resp map[string]string
|
||||
json.NewDecoder(w.Body).Decode(&resp)
|
||||
if resp["error"] != "provide either a PDF file or text" {
|
||||
t.Errorf("unexpected error: %s", resp["error"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestAIExtractDeadlines_InvalidJSON(t *testing.T) {
|
||||
h := &AIHandler{}
|
||||
|
||||
r := httptest.NewRequest("POST", "/api/ai/extract-deadlines", bytes.NewBufferString(`{broken`))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.ExtractDeadlines(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAISummarizeCase_MissingCaseID(t *testing.T) {
|
||||
h := &AIHandler{}
|
||||
|
||||
body := `{"case_id":""}`
|
||||
r := httptest.NewRequest("POST", "/api/ai/summarize-case", bytes.NewBufferString(body))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.SummarizeCase(w, r)
|
||||
|
||||
// Without auth context, the resolveTenant will fail first
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAISummarizeCase_InvalidJSON(t *testing.T) {
|
||||
h := &AIHandler{}
|
||||
|
||||
r := httptest.NewRequest("POST", "/api/ai/summarize-case", bytes.NewBufferString(`not-json`))
|
||||
r.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.SummarizeCase(w, r)
|
||||
|
||||
// Without auth context, the resolveTenant will fail first
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
196
backend/internal/handlers/appointment_handler_test.go
Normal file
196
backend/internal/handlers/appointment_handler_test.go
Normal file
@@ -0,0 +1,196 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
)
|
||||
|
||||
func TestAppointmentCreate_NoTenant(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("POST", "/api/appointments", bytes.NewBufferString(`{}`))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentCreate_MissingTitle(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
body := `{"start_at":"2026-04-01T10:00:00Z"}`
|
||||
r := httptest.NewRequest("POST", "/api/appointments", bytes.NewBufferString(body))
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
var resp map[string]string
|
||||
json.NewDecoder(w.Body).Decode(&resp)
|
||||
if resp["error"] != "title is required" {
|
||||
t.Errorf("unexpected error: %s", resp["error"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentCreate_MissingStartAt(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
body := `{"title":"Test Appointment"}`
|
||||
r := httptest.NewRequest("POST", "/api/appointments", bytes.NewBufferString(body))
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
var resp map[string]string
|
||||
json.NewDecoder(w.Body).Decode(&resp)
|
||||
if resp["error"] != "start_at is required" {
|
||||
t.Errorf("unexpected error: %s", resp["error"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentCreate_InvalidJSON(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("POST", "/api/appointments", bytes.NewBufferString(`{broken`))
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentList_NoTenant(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/appointments", nil)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.List(w, r)
|
||||
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentUpdate_NoTenant(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("PUT", "/api/appointments/"+uuid.New().String(), bytes.NewBufferString(`{}`))
|
||||
r.SetPathValue("id", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Update(w, r)
|
||||
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentUpdate_InvalidID(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("PUT", "/api/appointments/not-uuid", bytes.NewBufferString(`{}`))
|
||||
r.SetPathValue("id", "not-uuid")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Update(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentDelete_NoTenant(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("DELETE", "/api/appointments/"+uuid.New().String(), nil)
|
||||
r.SetPathValue("id", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Delete(w, r)
|
||||
|
||||
if w.Code != http.StatusUnauthorized {
|
||||
t.Errorf("expected 401, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentDelete_InvalidID(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("DELETE", "/api/appointments/bad", nil)
|
||||
r.SetPathValue("id", "bad")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Delete(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentList_InvalidCaseID(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/appointments?case_id=bad", nil)
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.List(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentList_InvalidStartFrom(t *testing.T) {
|
||||
h := &AppointmentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/appointments?start_from=not-a-date", nil)
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.List(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
232
backend/internal/handlers/appointments.go
Normal file
232
backend/internal/handlers/appointments.go
Normal file
@@ -0,0 +1,232 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
// AppointmentHandler exposes HTTP endpoints for appointment CRUD,
// delegating persistence and tenant scoping to the appointment service.
type AppointmentHandler struct {
	// svc performs all appointment storage operations.
	svc *services.AppointmentService
}

// NewAppointmentHandler creates a handler backed by the given service.
func NewAppointmentHandler(svc *services.AppointmentService) *AppointmentHandler {
	return &AppointmentHandler{svc: svc}
}
|
||||
|
||||
// Get handles GET /api/appointments/{id}
|
||||
func (h *AppointmentHandler) Get(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
id, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid appointment id")
|
||||
return
|
||||
}
|
||||
|
||||
appt, err := h.svc.GetByID(r.Context(), tenantID, id)
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
writeError(w, http.StatusNotFound, "appointment not found")
|
||||
return
|
||||
}
|
||||
writeError(w, http.StatusInternalServerError, "failed to fetch appointment")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, appt)
|
||||
}
|
||||
|
||||
// List handles GET /api/appointments.
//
// Optional query parameters narrow the result set:
//   - case_id:    UUID of the owning case
//   - type:       appointment type string (passed through unvalidated)
//   - start_from: RFC3339 lower bound on start time
//   - start_to:   RFC3339 upper bound on start time
//
// Parameters are validated in the order above; the first invalid one
// determines which 400 message the client sees.
func (h *AppointmentHandler) List(w http.ResponseWriter, r *http.Request) {
	tenantID, ok := auth.TenantFromContext(r.Context())
	if !ok {
		writeError(w, http.StatusUnauthorized, "missing tenant")
		return
	}

	filter := services.AppointmentFilter{}

	if v := r.URL.Query().Get("case_id"); v != "" {
		id, err := uuid.Parse(v)
		if err != nil {
			writeError(w, http.StatusBadRequest, "invalid case_id")
			return
		}
		filter.CaseID = &id
	}
	if v := r.URL.Query().Get("type"); v != "" {
		filter.Type = &v
	}
	if v := r.URL.Query().Get("start_from"); v != "" {
		t, err := time.Parse(time.RFC3339, v)
		if err != nil {
			writeError(w, http.StatusBadRequest, "invalid start_from (use RFC3339)")
			return
		}
		filter.StartFrom = &t
	}
	if v := r.URL.Query().Get("start_to"); v != "" {
		t, err := time.Parse(time.RFC3339, v)
		if err != nil {
			writeError(w, http.StatusBadRequest, "invalid start_to (use RFC3339)")
			return
		}
		filter.StartTo = &t
	}

	appointments, err := h.svc.List(r.Context(), tenantID, filter)
	if err != nil {
		writeError(w, http.StatusInternalServerError, "failed to list appointments")
		return
	}

	writeJSON(w, http.StatusOK, appointments)
}
|
||||
|
||||
// createAppointmentRequest is the JSON body accepted by Create.
// Only Title and StartAt are mandatory; pointer fields are optional and
// remain nil when absent from the payload.
type createAppointmentRequest struct {
	CaseID          *uuid.UUID `json:"case_id"`
	Title           string     `json:"title"`
	Description     *string    `json:"description"`
	StartAt         time.Time  `json:"start_at"`
	EndAt           *time.Time `json:"end_at"`
	Location        *string    `json:"location"`
	AppointmentType *string    `json:"appointment_type"`
}
|
||||
|
||||
func (h *AppointmentHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
var req createAppointmentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
if req.Title == "" {
|
||||
writeError(w, http.StatusBadRequest, "title is required")
|
||||
return
|
||||
}
|
||||
if req.StartAt.IsZero() {
|
||||
writeError(w, http.StatusBadRequest, "start_at is required")
|
||||
return
|
||||
}
|
||||
|
||||
appt := &models.Appointment{
|
||||
TenantID: tenantID,
|
||||
CaseID: req.CaseID,
|
||||
Title: req.Title,
|
||||
Description: req.Description,
|
||||
StartAt: req.StartAt,
|
||||
EndAt: req.EndAt,
|
||||
Location: req.Location,
|
||||
AppointmentType: req.AppointmentType,
|
||||
}
|
||||
|
||||
if err := h.svc.Create(r.Context(), appt); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create appointment")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusCreated, appt)
|
||||
}
|
||||
|
||||
// updateAppointmentRequest is the JSON body accepted by Update. It mirrors
// createAppointmentRequest: this is a full replace, so omitted optional
// fields are set to nil on the stored appointment rather than preserved.
type updateAppointmentRequest struct {
	CaseID          *uuid.UUID `json:"case_id"`
	Title           string     `json:"title"`
	Description     *string    `json:"description"`
	StartAt         time.Time  `json:"start_at"`
	EndAt           *time.Time `json:"end_at"`
	Location        *string    `json:"location"`
	AppointmentType *string    `json:"appointment_type"`
}
|
||||
|
||||
// Update handles PUT /api/appointments/{id}.
//
// Flow: resolve tenant -> parse path UUID -> fetch the existing row (which
// also proves tenant ownership) -> decode and validate the body -> overwrite
// every mutable field -> persist. Note this is full-replace semantics:
// optional fields missing from the body become nil.
func (h *AppointmentHandler) Update(w http.ResponseWriter, r *http.Request) {
	tenantID, ok := auth.TenantFromContext(r.Context())
	if !ok {
		writeError(w, http.StatusUnauthorized, "missing tenant")
		return
	}

	id, err := uuid.Parse(r.PathValue("id"))
	if err != nil {
		writeError(w, http.StatusBadRequest, "invalid appointment id")
		return
	}

	// Fetch existing to verify ownership
	existing, err := h.svc.GetByID(r.Context(), tenantID, id)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			writeError(w, http.StatusNotFound, "appointment not found")
			return
		}
		writeError(w, http.StatusInternalServerError, "failed to fetch appointment")
		return
	}

	var req updateAppointmentRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		writeError(w, http.StatusBadRequest, "invalid request body")
		return
	}
	if req.Title == "" {
		writeError(w, http.StatusBadRequest, "title is required")
		return
	}
	if req.StartAt.IsZero() {
		writeError(w, http.StatusBadRequest, "start_at is required")
		return
	}

	// Full replace of all mutable fields (tenant and id are untouched).
	existing.CaseID = req.CaseID
	existing.Title = req.Title
	existing.Description = req.Description
	existing.StartAt = req.StartAt
	existing.EndAt = req.EndAt
	existing.Location = req.Location
	existing.AppointmentType = req.AppointmentType

	if err := h.svc.Update(r.Context(), existing); err != nil {
		writeError(w, http.StatusInternalServerError, "failed to update appointment")
		return
	}

	writeJSON(w, http.StatusOK, existing)
}
|
||||
|
||||
func (h *AppointmentHandler) Delete(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
id, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid appointment id")
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.svc.Delete(r.Context(), tenantID, id); err != nil {
|
||||
writeError(w, http.StatusNotFound, "appointment not found")
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
63
backend/internal/handlers/audit_log.go
Normal file
63
backend/internal/handlers/audit_log.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
// AuditLogHandler exposes the read-only audit-trail endpoint, delegating
// queries to the audit service.
type AuditLogHandler struct {
	// svc executes audit-log queries scoped to a tenant.
	svc *services.AuditService
}

// NewAuditLogHandler creates a handler backed by the given audit service.
func NewAuditLogHandler(svc *services.AuditService) *AuditLogHandler {
	return &AuditLogHandler{svc: svc}
}
|
||||
|
||||
func (h *AuditLogHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
q := r.URL.Query()
|
||||
page, _ := strconv.Atoi(q.Get("page"))
|
||||
limit, _ := strconv.Atoi(q.Get("limit"))
|
||||
|
||||
filter := services.AuditFilter{
|
||||
EntityType: q.Get("entity_type"),
|
||||
From: q.Get("from"),
|
||||
To: q.Get("to"),
|
||||
Page: page,
|
||||
Limit: limit,
|
||||
}
|
||||
|
||||
if idStr := q.Get("entity_id"); idStr != "" {
|
||||
if id, err := uuid.Parse(idStr); err == nil {
|
||||
filter.EntityID = &id
|
||||
}
|
||||
}
|
||||
if idStr := q.Get("user_id"); idStr != "" {
|
||||
if id, err := uuid.Parse(idStr); err == nil {
|
||||
filter.UserID = &id
|
||||
}
|
||||
}
|
||||
|
||||
entries, total, err := h.svc.List(r.Context(), tenantID, filter)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to fetch audit log")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"entries": entries,
|
||||
"total": total,
|
||||
"page": filter.Page,
|
||||
"limit": filter.Limit,
|
||||
})
|
||||
}
|
||||
89
backend/internal/handlers/calculate.go
Normal file
89
backend/internal/handlers/calculate.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
// CalculateHandlers holds handlers for deadline calculation endpoints
type CalculateHandlers struct {
	// calculator derives concrete deadline dates from rules and a trigger date.
	calculator *services.DeadlineCalculator
	// rules resolves deadline rules by ID or by proceeding-type tree.
	rules *services.DeadlineRuleService
}

// NewCalculateHandlers creates calculate handlers
func NewCalculateHandlers(calc *services.DeadlineCalculator, rules *services.DeadlineRuleService) *CalculateHandlers {
	return &CalculateHandlers{calculator: calc, rules: rules}
}
|
||||
|
||||
// CalculateRequest is the input for POST /api/deadlines/calculate
type CalculateRequest struct {
	// ProceedingType selects the rule tree to apply; required.
	ProceedingType string `json:"proceeding_type"`
	// TriggerEventDate is the date the deadlines run from, as YYYY-MM-DD; required.
	TriggerEventDate string `json:"trigger_event_date"`
	// SelectedRuleIDs optionally restricts calculation to these rule IDs
	// instead of the full rule tree for the proceeding type.
	SelectedRuleIDs []string `json:"selected_rule_ids,omitempty"`
}
|
||||
|
||||
// Calculate handles POST /api/deadlines/calculate
//
// With selected_rule_ids present, only those rules are calculated; otherwise
// the full rule tree for proceeding_type is flattened and every rule in it
// is applied to the trigger date. The response echoes the inputs alongside
// the calculated deadlines.
//
// NOTE(review): proceeding_type is required even when selected_rule_ids is
// given, although that branch never reads it — confirm whether that is
// intentional API strictness.
func (h *CalculateHandlers) Calculate(w http.ResponseWriter, r *http.Request) {
	var req CalculateRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		writeError(w, http.StatusBadRequest, "invalid request body")
		return
	}

	if req.ProceedingType == "" || req.TriggerEventDate == "" {
		writeError(w, http.StatusBadRequest, "proceeding_type and trigger_event_date are required")
		return
	}

	// Trigger date must be an ISO calendar date (no time component).
	eventDate, err := time.Parse("2006-01-02", req.TriggerEventDate)
	if err != nil {
		writeError(w, http.StatusBadRequest, "invalid trigger_event_date format, expected YYYY-MM-DD")
		return
	}

	var results []services.CalculatedDeadline

	if len(req.SelectedRuleIDs) > 0 {
		// Explicit rule selection: calculate only the requested rules.
		ruleModels, err := h.rules.GetByIDs(req.SelectedRuleIDs)
		if err != nil {
			writeError(w, http.StatusInternalServerError, "failed to fetch selected rules")
			return
		}
		results = h.calculator.CalculateFromRules(eventDate, ruleModels)
	} else {
		tree, err := h.rules.GetRuleTree(req.ProceedingType)
		if err != nil {
			writeError(w, http.StatusBadRequest, "unknown proceeding type")
			return
		}
		// Flatten tree to get all rule models
		var flatNodes []services.RuleTreeNode
		flattenTree(tree, &flatNodes)

		ruleModels := make([]models.DeadlineRule, 0, len(flatNodes))
		for _, node := range flatNodes {
			ruleModels = append(ruleModels, node.DeadlineRule)
		}
		results = h.calculator.CalculateFromRules(eventDate, ruleModels)
	}

	writeJSON(w, http.StatusOK, map[string]any{
		"proceeding_type":    req.ProceedingType,
		"trigger_event_date": req.TriggerEventDate,
		"deadlines":          results,
	})
}
|
||||
|
||||
func flattenTree(nodes []services.RuleTreeNode, result *[]services.RuleTreeNode) {
|
||||
for _, n := range nodes {
|
||||
*result = append(*result, n)
|
||||
if len(n.Children) > 0 {
|
||||
flattenTree(n.Children, result)
|
||||
}
|
||||
}
|
||||
}
|
||||
83
backend/internal/handlers/calculate_handler_test.go
Normal file
83
backend/internal/handlers/calculate_handler_test.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCalculate_MissingFields(t *testing.T) {
|
||||
h := &CalculateHandlers{}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
body string
|
||||
want string
|
||||
}{
|
||||
{
|
||||
name: "empty body",
|
||||
body: `{}`,
|
||||
want: "proceeding_type and trigger_event_date are required",
|
||||
},
|
||||
{
|
||||
name: "missing trigger_event_date",
|
||||
body: `{"proceeding_type":"INF"}`,
|
||||
want: "proceeding_type and trigger_event_date are required",
|
||||
},
|
||||
{
|
||||
name: "missing proceeding_type",
|
||||
body: `{"trigger_event_date":"2026-06-01"}`,
|
||||
want: "proceeding_type and trigger_event_date are required",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
r := httptest.NewRequest("POST", "/api/deadlines/calculate", bytes.NewBufferString(tt.body))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Calculate(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
var resp map[string]string
|
||||
json.NewDecoder(w.Body).Decode(&resp)
|
||||
if resp["error"] != tt.want {
|
||||
t.Errorf("expected error %q, got %q", tt.want, resp["error"])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalculate_InvalidDateFormat(t *testing.T) {
|
||||
h := &CalculateHandlers{}
|
||||
body := `{"proceeding_type":"INF","trigger_event_date":"01-06-2026"}`
|
||||
r := httptest.NewRequest("POST", "/api/deadlines/calculate", bytes.NewBufferString(body))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Calculate(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
var resp map[string]string
|
||||
json.NewDecoder(w.Body).Decode(&resp)
|
||||
if resp["error"] != "invalid trigger_event_date format, expected YYYY-MM-DD" {
|
||||
t.Errorf("unexpected error: %s", resp["error"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalculate_InvalidJSON(t *testing.T) {
|
||||
h := &CalculateHandlers{}
|
||||
r := httptest.NewRequest("POST", "/api/deadlines/calculate", bytes.NewBufferString(`not-json`))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Calculate(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
68
backend/internal/handlers/caldav.go
Normal file
68
backend/internal/handlers/caldav.go
Normal file
@@ -0,0 +1,68 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
// CalDAVHandler handles CalDAV sync HTTP endpoints.
type CalDAVHandler struct {
	// svc holds per-tenant CalDAV configuration, sync execution and status.
	svc *services.CalDAVService
}

// NewCalDAVHandler creates a new CalDAV handler.
func NewCalDAVHandler(svc *services.CalDAVService) *CalDAVHandler {
	return &CalDAVHandler{svc: svc}
}
|
||||
|
||||
// TriggerSync handles POST /api/caldav/sync — triggers a full sync for the current tenant.
|
||||
func (h *CalDAVHandler) TriggerSync(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "no tenant context")
|
||||
return
|
||||
}
|
||||
|
||||
cfg, err := h.svc.LoadTenantConfig(tenantID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
status, err := h.svc.SyncTenant(r.Context(), tenantID, *cfg)
|
||||
if err != nil {
|
||||
// Still return the status — it contains partial results + error info
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"status": "completed_with_errors",
|
||||
"sync": status,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"status": "ok",
|
||||
"sync": status,
|
||||
})
|
||||
}
|
||||
|
||||
// GetStatus handles GET /api/caldav/status — returns last sync status.
|
||||
func (h *CalDAVHandler) GetStatus(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "no tenant context")
|
||||
return
|
||||
}
|
||||
|
||||
status := h.svc.GetStatus(tenantID)
|
||||
if status == nil {
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"status": "no_sync_yet",
|
||||
"last_sync_at": nil,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, status)
|
||||
}
|
||||
52
backend/internal/handlers/case_events.go
Normal file
52
backend/internal/handlers/case_events.go
Normal file
@@ -0,0 +1,52 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
// CaseEventHandler serves case-event lookups directly from the database
// (no service layer — it runs its own SQL).
type CaseEventHandler struct {
	// db is the shared sqlx connection pool.
	db *sqlx.DB
}

// NewCaseEventHandler creates a handler over the given database handle.
func NewCaseEventHandler(db *sqlx.DB) *CaseEventHandler {
	return &CaseEventHandler{db: db}
}
|
||||
|
||||
// Get handles GET /api/case-events/{id}
//
// It fetches a single case event by UUID, scoped to the tenant from the
// request context (the WHERE clause enforces tenant isolation). Responds
// 404 when no row matches, 500 on other query failures.
func (h *CaseEventHandler) Get(w http.ResponseWriter, r *http.Request) {
	tenantID, ok := auth.TenantFromContext(r.Context())
	if !ok {
		writeError(w, http.StatusUnauthorized, "missing tenant")
		return
	}

	eventID, err := uuid.Parse(r.PathValue("id"))
	if err != nil {
		writeError(w, http.StatusBadRequest, "invalid event ID")
		return
	}

	// Query by both id and tenant_id so one tenant can never read
	// another tenant's events, even with a guessed UUID.
	var event models.CaseEvent
	err = h.db.GetContext(r.Context(), &event,
		`SELECT id, tenant_id, case_id, event_type, title, description, event_date, created_by, metadata, created_at, updated_at
		 FROM case_events
		 WHERE id = $1 AND tenant_id = $2`, eventID, tenantID)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			writeError(w, http.StatusNotFound, "case event not found")
			return
		}
		writeError(w, http.StatusInternalServerError, "failed to fetch case event")
		return
	}

	writeJSON(w, http.StatusOK, event)
}
|
||||
177
backend/internal/handlers/case_handler_test.go
Normal file
177
backend/internal/handlers/case_handler_test.go
Normal file
@@ -0,0 +1,177 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
)
|
||||
|
||||
func TestCaseCreate_NoAuth(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("POST", "/api/cases", bytes.NewBufferString(`{}`))
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseCreate_MissingFields(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
body := `{"case_number":"","title":""}`
|
||||
r := httptest.NewRequest("POST", "/api/cases", bytes.NewBufferString(body))
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
var resp map[string]string
|
||||
json.NewDecoder(w.Body).Decode(&resp)
|
||||
if resp["error"] != "case_number and title are required" {
|
||||
t.Errorf("unexpected error: %s", resp["error"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseCreate_InvalidJSON(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("POST", "/api/cases", bytes.NewBufferString(`not-json`))
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Create(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseGet_InvalidID(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/cases/not-a-uuid", nil)
|
||||
r.SetPathValue("id", "not-a-uuid")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Get(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseGet_NoTenant(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/cases/"+uuid.New().String(), nil)
|
||||
r.SetPathValue("id", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Get(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseList_NoTenant(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/cases", nil)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.List(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseUpdate_InvalidID(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
body := `{"title":"Updated"}`
|
||||
r := httptest.NewRequest("PUT", "/api/cases/bad-id", bytes.NewBufferString(body))
|
||||
r.SetPathValue("id", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Update(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseUpdate_InvalidJSON(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
caseID := uuid.New().String()
|
||||
r := httptest.NewRequest("PUT", "/api/cases/"+caseID, bytes.NewBufferString(`{bad`))
|
||||
r.SetPathValue("id", caseID)
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Update(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseDelete_NoTenant(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("DELETE", "/api/cases/"+uuid.New().String(), nil)
|
||||
r.SetPathValue("id", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Delete(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseDelete_InvalidID(t *testing.T) {
|
||||
h := &CaseHandler{}
|
||||
r := httptest.NewRequest("DELETE", "/api/cases/bad-id", nil)
|
||||
r.SetPathValue("id", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Delete(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
32
backend/internal/handlers/dashboard.go
Normal file
32
backend/internal/handlers/dashboard.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
type DashboardHandler struct {
|
||||
svc *services.DashboardService
|
||||
}
|
||||
|
||||
func NewDashboardHandler(svc *services.DashboardService) *DashboardHandler {
|
||||
return &DashboardHandler{svc: svc}
|
||||
}
|
||||
|
||||
func (h *DashboardHandler) Get(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
data, err := h.svc.Get(r.Context(), tenantID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, data)
|
||||
}
|
||||
19
backend/internal/handlers/dashboard_handler_test.go
Normal file
19
backend/internal/handlers/dashboard_handler_test.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestDashboardGet_NoTenant(t *testing.T) {
|
||||
h := &DashboardHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/dashboard", nil)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Get(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
69
backend/internal/handlers/deadline_rules.go
Normal file
69
backend/internal/handlers/deadline_rules.go
Normal file
@@ -0,0 +1,69 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
// DeadlineRuleHandlers holds handlers for deadline rule endpoints
|
||||
type DeadlineRuleHandlers struct {
|
||||
rules *services.DeadlineRuleService
|
||||
}
|
||||
|
||||
// NewDeadlineRuleHandlers creates deadline rule handlers
|
||||
func NewDeadlineRuleHandlers(rs *services.DeadlineRuleService) *DeadlineRuleHandlers {
|
||||
return &DeadlineRuleHandlers{rules: rs}
|
||||
}
|
||||
|
||||
// List handles GET /api/deadline-rules
|
||||
// Query params: proceeding_type_id (optional int filter)
|
||||
func (h *DeadlineRuleHandlers) List(w http.ResponseWriter, r *http.Request) {
|
||||
var proceedingTypeID *int
|
||||
if v := r.URL.Query().Get("proceeding_type_id"); v != "" {
|
||||
id, err := strconv.Atoi(v)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid proceeding_type_id")
|
||||
return
|
||||
}
|
||||
proceedingTypeID = &id
|
||||
}
|
||||
|
||||
rules, err := h.rules.List(proceedingTypeID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to list deadline rules")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, rules)
|
||||
}
|
||||
|
||||
// ListProceedingTypes handles GET /api/proceeding-types
|
||||
func (h *DeadlineRuleHandlers) ListProceedingTypes(w http.ResponseWriter, r *http.Request) {
|
||||
types, err := h.rules.ListProceedingTypes()
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to list proceeding types")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, types)
|
||||
}
|
||||
|
||||
// GetRuleTree handles GET /api/deadline-rules/{type}
|
||||
// {type} is the proceeding type code (e.g., "INF", "REV")
|
||||
func (h *DeadlineRuleHandlers) GetRuleTree(w http.ResponseWriter, r *http.Request) {
|
||||
typeCode := r.PathValue("type")
|
||||
if typeCode == "" {
|
||||
writeError(w, http.StatusBadRequest, "proceeding type code required")
|
||||
return
|
||||
}
|
||||
|
||||
tree, err := h.rules.GetRuleTree(typeCode)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusNotFound, "proceeding type not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, tree)
|
||||
}
|
||||
206
backend/internal/handlers/deadlines.go
Normal file
206
backend/internal/handlers/deadlines.go
Normal file
@@ -0,0 +1,206 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
// DeadlineHandlers holds handlers for deadline CRUD endpoints
|
||||
type DeadlineHandlers struct {
|
||||
deadlines *services.DeadlineService
|
||||
db *sqlx.DB
|
||||
}
|
||||
|
||||
// NewDeadlineHandlers creates deadline handlers
|
||||
func NewDeadlineHandlers(ds *services.DeadlineService, db *sqlx.DB) *DeadlineHandlers {
|
||||
return &DeadlineHandlers{deadlines: ds, db: db}
|
||||
}
|
||||
|
||||
// Get handles GET /api/deadlines/{deadlineID}
|
||||
func (h *DeadlineHandlers) Get(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
deadlineID, err := parsePathUUID(r, "deadlineID")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid deadline ID")
|
||||
return
|
||||
}
|
||||
|
||||
deadline, err := h.deadlines.GetByID(tenantID, deadlineID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to fetch deadline")
|
||||
return
|
||||
}
|
||||
if deadline == nil {
|
||||
writeError(w, http.StatusNotFound, "deadline not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, deadline)
|
||||
}
|
||||
|
||||
// ListAll handles GET /api/deadlines
|
||||
func (h *DeadlineHandlers) ListAll(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
deadlines, err := h.deadlines.ListAll(tenantID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to list deadlines")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, deadlines)
|
||||
}
|
||||
|
||||
// ListForCase handles GET /api/cases/{caseID}/deadlines
|
||||
func (h *DeadlineHandlers) ListForCase(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
caseID, err := parsePathUUID(r, "caseID")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid case ID")
|
||||
return
|
||||
}
|
||||
|
||||
deadlines, err := h.deadlines.ListForCase(tenantID, caseID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to list deadlines")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, deadlines)
|
||||
}
|
||||
|
||||
// Create handles POST /api/cases/{caseID}/deadlines
|
||||
func (h *DeadlineHandlers) Create(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
caseID, err := parsePathUUID(r, "caseID")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid case ID")
|
||||
return
|
||||
}
|
||||
|
||||
var input services.CreateDeadlineInput
|
||||
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
input.CaseID = caseID
|
||||
|
||||
if input.Title == "" || input.DueDate == "" {
|
||||
writeError(w, http.StatusBadRequest, "title and due_date are required")
|
||||
return
|
||||
}
|
||||
|
||||
deadline, err := h.deadlines.Create(r.Context(), tenantID, input)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create deadline")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusCreated, deadline)
|
||||
}
|
||||
|
||||
// Update handles PUT /api/deadlines/{deadlineID}
|
||||
func (h *DeadlineHandlers) Update(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
deadlineID, err := parsePathUUID(r, "deadlineID")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid deadline ID")
|
||||
return
|
||||
}
|
||||
|
||||
var input services.UpdateDeadlineInput
|
||||
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
|
||||
deadline, err := h.deadlines.Update(r.Context(), tenantID, deadlineID, input)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to update deadline")
|
||||
return
|
||||
}
|
||||
if deadline == nil {
|
||||
writeError(w, http.StatusNotFound, "deadline not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, deadline)
|
||||
}
|
||||
|
||||
// Complete handles PATCH /api/deadlines/{deadlineID}/complete
|
||||
func (h *DeadlineHandlers) Complete(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
deadlineID, err := parsePathUUID(r, "deadlineID")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid deadline ID")
|
||||
return
|
||||
}
|
||||
|
||||
deadline, err := h.deadlines.Complete(r.Context(), tenantID, deadlineID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to complete deadline")
|
||||
return
|
||||
}
|
||||
if deadline == nil {
|
||||
writeError(w, http.StatusNotFound, "deadline not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, deadline)
|
||||
}
|
||||
|
||||
// Delete handles DELETE /api/deadlines/{deadlineID}
|
||||
func (h *DeadlineHandlers) Delete(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, err := resolveTenant(r, h.db)
|
||||
if err != nil {
|
||||
handleTenantError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
deadlineID, err := parsePathUUID(r, "deadlineID")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid deadline ID")
|
||||
return
|
||||
}
|
||||
|
||||
err = h.deadlines.Delete(r.Context(), tenantID, deadlineID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]string{"status": "deleted"})
|
||||
}
|
||||
166
backend/internal/handlers/document_handler_test.go
Normal file
166
backend/internal/handlers/document_handler_test.go
Normal file
@@ -0,0 +1,166 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
)
|
||||
|
||||
func TestDocumentListByCase_NoTenant(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/cases/"+uuid.New().String()+"/documents", nil)
|
||||
r.SetPathValue("id", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.ListByCase(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentListByCase_InvalidCaseID(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/cases/bad-id/documents", nil)
|
||||
r.SetPathValue("id", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.ListByCase(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentUpload_NoTenant(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("POST", "/api/cases/"+uuid.New().String()+"/documents", nil)
|
||||
r.SetPathValue("id", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Upload(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentUpload_InvalidCaseID(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("POST", "/api/cases/bad-id/documents", nil)
|
||||
r.SetPathValue("id", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Upload(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentDownload_NoTenant(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/documents/"+uuid.New().String(), nil)
|
||||
r.SetPathValue("docId", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Download(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentDownload_InvalidID(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/documents/bad-id", nil)
|
||||
r.SetPathValue("docId", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Download(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentGetMeta_NoTenant(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/documents/"+uuid.New().String()+"/meta", nil)
|
||||
r.SetPathValue("docId", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.GetMeta(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentGetMeta_InvalidID(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("GET", "/api/documents/bad-id/meta", nil)
|
||||
r.SetPathValue("docId", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.GetMeta(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentDelete_NoTenant(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("DELETE", "/api/documents/"+uuid.New().String(), nil)
|
||||
r.SetPathValue("docId", uuid.New().String())
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Delete(w, r)
|
||||
|
||||
if w.Code != http.StatusForbidden {
|
||||
t.Errorf("expected 403, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDocumentDelete_InvalidID(t *testing.T) {
|
||||
h := &DocumentHandler{}
|
||||
r := httptest.NewRequest("DELETE", "/api/documents/bad-id", nil)
|
||||
r.SetPathValue("docId", "bad-id")
|
||||
ctx := auth.ContextWithTenantID(
|
||||
auth.ContextWithUserID(r.Context(), uuid.New()),
|
||||
uuid.New(),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
h.Delete(w, r)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", w.Code)
|
||||
}
|
||||
}
|
||||
183
backend/internal/handlers/documents.go
Normal file
183
backend/internal/handlers/documents.go
Normal file
@@ -0,0 +1,183 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const maxUploadSize = 50 << 20 // 50 MB
|
||||
|
||||
type DocumentHandler struct {
|
||||
svc *services.DocumentService
|
||||
}
|
||||
|
||||
func NewDocumentHandler(svc *services.DocumentService) *DocumentHandler {
|
||||
return &DocumentHandler{svc: svc}
|
||||
}
|
||||
|
||||
func (h *DocumentHandler) ListByCase(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
caseID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid case ID")
|
||||
return
|
||||
}
|
||||
|
||||
docs, err := h.svc.ListByCase(r.Context(), tenantID, caseID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"documents": docs,
|
||||
"total": len(docs),
|
||||
})
|
||||
}
|
||||
|
||||
func (h *DocumentHandler) Upload(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
userID, _ := auth.UserFromContext(r.Context())
|
||||
|
||||
caseID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid case ID")
|
||||
return
|
||||
}
|
||||
|
||||
r.Body = http.MaxBytesReader(w, r.Body, maxUploadSize)
|
||||
if err := r.ParseMultipartForm(maxUploadSize); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "file too large or invalid multipart form")
|
||||
return
|
||||
}
|
||||
|
||||
file, header, err := r.FormFile("file")
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "missing file field")
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
title := r.FormValue("title")
|
||||
if title == "" {
|
||||
title = header.Filename
|
||||
}
|
||||
|
||||
contentType := header.Header.Get("Content-Type")
|
||||
if contentType == "" {
|
||||
contentType = "application/octet-stream"
|
||||
}
|
||||
|
||||
input := services.CreateDocumentInput{
|
||||
Title: title,
|
||||
DocType: r.FormValue("doc_type"),
|
||||
Filename: header.Filename,
|
||||
ContentType: contentType,
|
||||
Size: int(header.Size),
|
||||
Data: file,
|
||||
}
|
||||
|
||||
doc, err := h.svc.Create(r.Context(), tenantID, caseID, userID, input)
|
||||
if err != nil {
|
||||
if err.Error() == "case not found" {
|
||||
writeError(w, http.StatusNotFound, "case not found")
|
||||
return
|
||||
}
|
||||
writeError(w, http.StatusInternalServerError, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusCreated, doc)
|
||||
}
|
||||
|
||||
func (h *DocumentHandler) Download(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
docID, err := uuid.Parse(r.PathValue("docId"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid document ID")
|
||||
return
|
||||
}
|
||||
|
||||
body, contentType, title, err := h.svc.Download(r.Context(), tenantID, docID)
|
||||
if err != nil {
|
||||
if err.Error() == "document not found" || err.Error() == "document has no file" {
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
writeError(w, http.StatusInternalServerError, err.Error())
|
||||
return
|
||||
}
|
||||
defer body.Close()
|
||||
|
||||
w.Header().Set("Content-Type", contentType)
|
||||
w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, title))
|
||||
io.Copy(w, body)
|
||||
}
|
||||
|
||||
func (h *DocumentHandler) GetMeta(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
docID, err := uuid.Parse(r.PathValue("docId"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid document ID")
|
||||
return
|
||||
}
|
||||
|
||||
doc, err := h.svc.GetByID(r.Context(), tenantID, docID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, err.Error())
|
||||
return
|
||||
}
|
||||
if doc == nil {
|
||||
writeError(w, http.StatusNotFound, "document not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, doc)
|
||||
}
|
||||
|
||||
func (h *DocumentHandler) Delete(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusForbidden, "missing tenant")
|
||||
return
|
||||
}
|
||||
userID, _ := auth.UserFromContext(r.Context())
|
||||
|
||||
docID, err := uuid.Parse(r.PathValue("docId"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid document ID")
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.svc.Delete(r.Context(), tenantID, docID, userID); err != nil {
|
||||
writeError(w, http.StatusNotFound, "document not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]string{"status": "deleted"})
|
||||
}
|
||||
@@ -3,14 +3,88 @@ package handlers
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
)
|
||||
|
||||
func writeJSON(w http.ResponseWriter, status int, v interface{}) {
|
||||
func writeJSON(w http.ResponseWriter, status int, v any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
json.NewEncoder(w).Encode(v)
|
||||
}
|
||||
|
||||
func writeError(w http.ResponseWriter, status int, message string) {
|
||||
writeJSON(w, status, map[string]string{"error": message})
|
||||
func writeError(w http.ResponseWriter, status int, msg string) {
|
||||
writeJSON(w, status, map[string]string{"error": msg})
|
||||
}
|
||||
|
||||
// resolveTenant gets the tenant ID for the authenticated user.
|
||||
// Checks X-Tenant-ID header first, then falls back to user's first tenant.
|
||||
func resolveTenant(r *http.Request, db *sqlx.DB) (uuid.UUID, error) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
return uuid.Nil, errUnauthorized
|
||||
}
|
||||
|
||||
// Check header first
|
||||
if headerVal := r.Header.Get("X-Tenant-ID"); headerVal != "" {
|
||||
tenantID, err := uuid.Parse(headerVal)
|
||||
if err != nil {
|
||||
return uuid.Nil, errInvalidTenant
|
||||
}
|
||||
// Verify user has access to this tenant
|
||||
var count int
|
||||
err = db.Get(&count,
|
||||
`SELECT COUNT(*) FROM user_tenants WHERE user_id = $1 AND tenant_id = $2`,
|
||||
userID, tenantID)
|
||||
if err != nil || count == 0 {
|
||||
return uuid.Nil, errTenantAccess
|
||||
}
|
||||
return tenantID, nil
|
||||
}
|
||||
|
||||
// Fall back to user's first tenant
|
||||
var tenantID uuid.UUID
|
||||
err := db.Get(&tenantID,
|
||||
`SELECT tenant_id FROM user_tenants WHERE user_id = $1 ORDER BY created_at LIMIT 1`,
|
||||
userID)
|
||||
if err != nil {
|
||||
return uuid.Nil, errNoTenant
|
||||
}
|
||||
return tenantID, nil
|
||||
}
|
||||
|
||||
// apiError pairs an error message with the HTTP status it should map to,
// letting handlers translate tenant-resolution failures uniformly.
type apiError struct {
	msg    string // client-facing message
	status int    // HTTP status code to respond with
}

// Error implements the error interface.
func (e *apiError) Error() string { return e.msg }

// Sentinel errors returned by resolveTenant.
var (
	errUnauthorized  = &apiError{msg: "unauthorized", status: http.StatusUnauthorized}
	errInvalidTenant = &apiError{msg: "invalid tenant ID", status: http.StatusBadRequest}
	errTenantAccess  = &apiError{msg: "no access to tenant", status: http.StatusForbidden}
	errNoTenant      = &apiError{msg: "no tenant found for user", status: http.StatusBadRequest}
)
|
||||
|
||||
// handleTenantError writes the appropriate error response for tenant resolution errors
|
||||
func handleTenantError(w http.ResponseWriter, err error) {
|
||||
if ae, ok := err.(*apiError); ok {
|
||||
writeError(w, ae.status, ae.msg)
|
||||
return
|
||||
}
|
||||
writeError(w, http.StatusInternalServerError, "internal error")
|
||||
}
|
||||
|
||||
// parsePathUUID extracts a UUID from the URL path using PathValue
|
||||
func parsePathUUID(r *http.Request, key string) (uuid.UUID, error) {
|
||||
return uuid.Parse(r.PathValue(key))
|
||||
}
|
||||
|
||||
// parseUUID parses a UUID string
|
||||
func parseUUID(s string) (uuid.UUID, error) {
|
||||
return uuid.Parse(s)
|
||||
}
|
||||
|
||||
159
backend/internal/handlers/notes.go
Normal file
159
backend/internal/handlers/notes.go
Normal file
@@ -0,0 +1,159 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
type NoteHandler struct {
|
||||
svc *services.NoteService
|
||||
}
|
||||
|
||||
func NewNoteHandler(svc *services.NoteService) *NoteHandler {
|
||||
return &NoteHandler{svc: svc}
|
||||
}
|
||||
|
||||
// List handles GET /api/notes?{parent_type}_id={id}
|
||||
func (h *NoteHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
parentType, parentID, err := parseNoteParent(r)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
notes, err := h.svc.ListByParent(r.Context(), tenantID, parentType, parentID)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to list notes")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, notes)
|
||||
}
|
||||
|
||||
// Create handles POST /api/notes
|
||||
func (h *NoteHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
userID, _ := auth.UserFromContext(r.Context())
|
||||
|
||||
var input services.CreateNoteInput
|
||||
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
if input.Content == "" {
|
||||
writeError(w, http.StatusBadRequest, "content is required")
|
||||
return
|
||||
}
|
||||
|
||||
var createdBy *uuid.UUID
|
||||
if userID != uuid.Nil {
|
||||
createdBy = &userID
|
||||
}
|
||||
|
||||
note, err := h.svc.Create(r.Context(), tenantID, createdBy, input)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create note")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusCreated, note)
|
||||
}
|
||||
|
||||
// Update handles PUT /api/notes/{id}
|
||||
func (h *NoteHandler) Update(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
noteID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid note ID")
|
||||
return
|
||||
}
|
||||
|
||||
var req struct {
|
||||
Content string `json:"content"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
if req.Content == "" {
|
||||
writeError(w, http.StatusBadRequest, "content is required")
|
||||
return
|
||||
}
|
||||
|
||||
note, err := h.svc.Update(r.Context(), tenantID, noteID, req.Content)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to update note")
|
||||
return
|
||||
}
|
||||
if note == nil {
|
||||
writeError(w, http.StatusNotFound, "note not found")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, note)
|
||||
}
|
||||
|
||||
// Delete handles DELETE /api/notes/{id}
|
||||
func (h *NoteHandler) Delete(w http.ResponseWriter, r *http.Request) {
|
||||
tenantID, ok := auth.TenantFromContext(r.Context())
|
||||
if !ok {
|
||||
writeError(w, http.StatusUnauthorized, "missing tenant")
|
||||
return
|
||||
}
|
||||
|
||||
noteID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid note ID")
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.svc.Delete(r.Context(), tenantID, noteID); err != nil {
|
||||
writeError(w, http.StatusNotFound, "note not found")
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// parseNoteParent extracts the parent type and ID from query parameters.
|
||||
func parseNoteParent(r *http.Request) (string, uuid.UUID, error) {
|
||||
params := map[string]string{
|
||||
"case_id": "case",
|
||||
"deadline_id": "deadline",
|
||||
"appointment_id": "appointment",
|
||||
"case_event_id": "case_event",
|
||||
}
|
||||
|
||||
for param, parentType := range params {
|
||||
if v := r.URL.Query().Get(param); v != "" {
|
||||
id, err := uuid.Parse(v)
|
||||
if err != nil {
|
||||
return "", uuid.Nil, fmt.Errorf("invalid %s", param)
|
||||
}
|
||||
return parentType, id, nil
|
||||
}
|
||||
}
|
||||
|
||||
return "", uuid.Nil, fmt.Errorf("one of case_id, deadline_id, appointment_id, or case_event_id is required")
|
||||
}
|
||||
283
backend/internal/handlers/tenant_handler.go
Normal file
283
backend/internal/handlers/tenant_handler.go
Normal file
@@ -0,0 +1,283 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
type TenantHandler struct {
|
||||
svc *services.TenantService
|
||||
}
|
||||
|
||||
func NewTenantHandler(svc *services.TenantService) *TenantHandler {
|
||||
return &TenantHandler{svc: svc}
|
||||
}
|
||||
|
||||
// CreateTenant handles POST /api/tenants
|
||||
func (h *TenantHandler) CreateTenant(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
var req struct {
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
jsonError(w, "invalid request body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if req.Name == "" || req.Slug == "" {
|
||||
jsonError(w, "name and slug are required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
tenant, err := h.svc.Create(r.Context(), userID, req.Name, req.Slug)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, tenant, http.StatusCreated)
|
||||
}
|
||||
|
||||
// ListTenants handles GET /api/tenants
|
||||
func (h *TenantHandler) ListTenants(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
tenants, err := h.svc.ListForUser(r.Context(), userID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, tenants, http.StatusOK)
|
||||
}
|
||||
|
||||
// GetTenant handles GET /api/tenants/{id}
|
||||
func (h *TenantHandler) GetTenant(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
tenantID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
jsonError(w, "invalid tenant ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify user has access to this tenant
|
||||
role, err := h.svc.GetUserRole(r.Context(), userID, tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if role == "" {
|
||||
jsonError(w, "not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
tenant, err := h.svc.GetByID(r.Context(), tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if tenant == nil {
|
||||
jsonError(w, "not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, tenant, http.StatusOK)
|
||||
}
|
||||
|
||||
// InviteUser handles POST /api/tenants/{id}/invite
|
||||
func (h *TenantHandler) InviteUser(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
tenantID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
jsonError(w, "invalid tenant ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Only owners and admins can invite
|
||||
role, err := h.svc.GetUserRole(r.Context(), userID, tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if role != "owner" && role != "admin" {
|
||||
jsonError(w, "only owners and admins can invite users", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
var req struct {
|
||||
Email string `json:"email"`
|
||||
Role string `json:"role"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
jsonError(w, "invalid request body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if req.Email == "" {
|
||||
jsonError(w, "email is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if req.Role == "" {
|
||||
req.Role = "member"
|
||||
}
|
||||
if req.Role != "member" && req.Role != "admin" {
|
||||
jsonError(w, "role must be member or admin", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
ut, err := h.svc.InviteByEmail(r.Context(), tenantID, req.Email, req.Role)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, ut, http.StatusCreated)
|
||||
}
|
||||
|
||||
// RemoveMember handles DELETE /api/tenants/{id}/members/{uid}
|
||||
func (h *TenantHandler) RemoveMember(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
tenantID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
jsonError(w, "invalid tenant ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
memberID, err := uuid.Parse(r.PathValue("uid"))
|
||||
if err != nil {
|
||||
jsonError(w, "invalid member ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Only owners and admins can remove members (or user removing themselves)
|
||||
role, err := h.svc.GetUserRole(r.Context(), userID, tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if role != "owner" && role != "admin" && userID != memberID {
|
||||
jsonError(w, "insufficient permissions", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.svc.RemoveMember(r.Context(), tenantID, memberID); err != nil {
|
||||
jsonError(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, map[string]string{"status": "removed"}, http.StatusOK)
|
||||
}
|
||||
|
||||
// UpdateSettings handles PUT /api/tenants/{id}/settings
|
||||
func (h *TenantHandler) UpdateSettings(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
tenantID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
jsonError(w, "invalid tenant ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Only owners and admins can update settings
|
||||
role, err := h.svc.GetUserRole(r.Context(), userID, tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if role != "owner" && role != "admin" {
|
||||
jsonError(w, "only owners and admins can update settings", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
var settings json.RawMessage
|
||||
if err := json.NewDecoder(r.Body).Decode(&settings); err != nil {
|
||||
jsonError(w, "invalid request body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
tenant, err := h.svc.UpdateSettings(r.Context(), tenantID, settings)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, tenant, http.StatusOK)
|
||||
}
|
||||
|
||||
// ListMembers handles GET /api/tenants/{id}/members
|
||||
func (h *TenantHandler) ListMembers(w http.ResponseWriter, r *http.Request) {
|
||||
userID, ok := auth.UserFromContext(r.Context())
|
||||
if !ok {
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
tenantID, err := uuid.Parse(r.PathValue("id"))
|
||||
if err != nil {
|
||||
jsonError(w, "invalid tenant ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify user has access
|
||||
role, err := h.svc.GetUserRole(r.Context(), userID, tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if role == "" {
|
||||
jsonError(w, "not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
members, err := h.svc.ListMembers(r.Context(), tenantID)
|
||||
if err != nil {
|
||||
jsonError(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
jsonResponse(w, members, http.StatusOK)
|
||||
}
|
||||
|
||||
func jsonResponse(w http.ResponseWriter, data interface{}, status int) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
json.NewEncoder(w).Encode(data)
|
||||
}
|
||||
|
||||
func jsonError(w http.ResponseWriter, msg string, status int) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
json.NewEncoder(w).Encode(map[string]string{"error": msg})
|
||||
}
|
||||
132
backend/internal/handlers/tenant_handler_test.go
Normal file
132
backend/internal/handlers/tenant_handler_test.go
Normal file
@@ -0,0 +1,132 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
)
|
||||
|
||||
// TestCreateTenant_MissingFields verifies that empty name/slug are rejected
// with 400 and the exact validation message, without touching the service
// (h.svc stays nil — the handler must return before using it).
func TestCreateTenant_MissingFields(t *testing.T) {
	h := &TenantHandler{} // no service needed for validation

	// Build request with auth context
	body := `{"name":"","slug":""}`
	r := httptest.NewRequest("POST", "/api/tenants", bytes.NewBufferString(body))
	r = r.WithContext(auth.ContextWithUserID(r.Context(), uuid.New()))
	w := httptest.NewRecorder()

	h.CreateTenant(w, r)

	if w.Code != http.StatusBadRequest {
		t.Errorf("expected 400, got %d", w.Code)
	}

	var resp map[string]string
	json.NewDecoder(w.Body).Decode(&resp)
	if resp["error"] != "name and slug are required" {
		t.Errorf("unexpected error: %s", resp["error"])
	}
}

// TestCreateTenant_NoAuth verifies that a request without a user on the
// context is rejected with 401 before the body is read.
func TestCreateTenant_NoAuth(t *testing.T) {
	h := &TenantHandler{}
	r := httptest.NewRequest("POST", "/api/tenants", bytes.NewBufferString(`{}`))
	w := httptest.NewRecorder()

	h.CreateTenant(w, r)

	if w.Code != http.StatusUnauthorized {
		t.Errorf("expected 401, got %d", w.Code)
	}
}

// TestGetTenant_InvalidID verifies that a non-UUID {id} path value yields 400.
func TestGetTenant_InvalidID(t *testing.T) {
	h := &TenantHandler{}
	r := httptest.NewRequest("GET", "/api/tenants/not-a-uuid", nil)
	r.SetPathValue("id", "not-a-uuid")
	r = r.WithContext(auth.ContextWithUserID(r.Context(), uuid.New()))
	w := httptest.NewRecorder()

	h.GetTenant(w, r)

	if w.Code != http.StatusBadRequest {
		t.Errorf("expected 400, got %d", w.Code)
	}
}

// TestInviteUser_InvalidTenantID verifies that a non-UUID tenant ID yields 400.
func TestInviteUser_InvalidTenantID(t *testing.T) {
	h := &TenantHandler{}
	body := `{"email":"test@example.com","role":"member"}`
	r := httptest.NewRequest("POST", "/api/tenants/bad/invite", bytes.NewBufferString(body))
	r.SetPathValue("id", "bad")
	r = r.WithContext(auth.ContextWithUserID(r.Context(), uuid.New()))
	w := httptest.NewRecorder()

	h.InviteUser(w, r)

	if w.Code != http.StatusBadRequest {
		t.Errorf("expected 400, got %d", w.Code)
	}
}

// TestInviteUser_NoAuth verifies that an unauthenticated invite yields 401.
func TestInviteUser_NoAuth(t *testing.T) {
	h := &TenantHandler{}
	body := `{"email":"test@example.com"}`
	r := httptest.NewRequest("POST", "/api/tenants/"+uuid.New().String()+"/invite", bytes.NewBufferString(body))
	r.SetPathValue("id", uuid.New().String())
	w := httptest.NewRecorder()

	h.InviteUser(w, r)

	if w.Code != http.StatusUnauthorized {
		t.Errorf("expected 401, got %d", w.Code)
	}
}

// TestRemoveMember_InvalidIDs verifies that non-UUID path values yield 400.
func TestRemoveMember_InvalidIDs(t *testing.T) {
	h := &TenantHandler{}
	r := httptest.NewRequest("DELETE", "/api/tenants/bad/members/bad", nil)
	r.SetPathValue("id", "bad")
	r.SetPathValue("uid", "bad")
	r = r.WithContext(auth.ContextWithUserID(r.Context(), uuid.New()))
	w := httptest.NewRecorder()

	h.RemoveMember(w, r)

	if w.Code != http.StatusBadRequest {
		t.Errorf("expected 400, got %d", w.Code)
	}
}

// TestJsonResponse verifies the status code and JSON content type are set.
func TestJsonResponse(t *testing.T) {
	w := httptest.NewRecorder()
	jsonResponse(w, map[string]string{"key": "value"}, http.StatusOK)

	if w.Code != http.StatusOK {
		t.Errorf("expected 200, got %d", w.Code)
	}
	if ct := w.Header().Get("Content-Type"); ct != "application/json" {
		t.Errorf("expected application/json, got %s", ct)
	}
}

// TestJsonError verifies the message is wrapped under the "error" key.
func TestJsonError(t *testing.T) {
	w := httptest.NewRecorder()
	jsonError(w, "something went wrong", http.StatusBadRequest)

	if w.Code != http.StatusBadRequest {
		t.Errorf("expected 400, got %d", w.Code)
	}

	var resp map[string]string
	json.NewDecoder(w.Body).Decode(&resp)
	if resp["error"] != "something went wrong" {
		t.Errorf("unexpected error: %s", resp["error"])
	}
}
|
||||
1148
backend/internal/integration_test.go
Normal file
1148
backend/internal/integration_test.go
Normal file
File diff suppressed because it is too large
Load Diff
14
backend/internal/logging/logging.go
Normal file
14
backend/internal/logging/logging.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package logging
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Setup initializes the global slog logger with JSON output for production.
|
||||
func Setup() {
|
||||
handler := slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
|
||||
Level: slog.LevelInfo,
|
||||
})
|
||||
slog.SetDefault(slog.New(handler))
|
||||
}
|
||||
98
backend/internal/middleware/ratelimit.go
Normal file
98
backend/internal/middleware/ratelimit.go
Normal file
@@ -0,0 +1,98 @@
|
||||
package middleware
|
||||
|
||||
import (
	"log/slog"
	"net"
	"net/http"
	"strings"
	"sync"
	"time"
)
|
||||
|
||||
// TokenBucket implements a simple per-IP token bucket rate limiter.
|
||||
type TokenBucket struct {
|
||||
mu sync.Mutex
|
||||
buckets map[string]*bucket
|
||||
rate float64 // tokens per second
|
||||
burst int // max tokens
|
||||
}
|
||||
|
||||
type bucket struct {
|
||||
tokens float64
|
||||
lastTime time.Time
|
||||
}
|
||||
|
||||
// NewTokenBucket creates a rate limiter allowing rate requests per second with burst capacity.
|
||||
func NewTokenBucket(rate float64, burst int) *TokenBucket {
|
||||
tb := &TokenBucket{
|
||||
buckets: make(map[string]*bucket),
|
||||
rate: rate,
|
||||
burst: burst,
|
||||
}
|
||||
// Periodically clean up stale buckets
|
||||
go tb.cleanup()
|
||||
return tb
|
||||
}
|
||||
|
||||
func (tb *TokenBucket) allow(key string) bool {
|
||||
tb.mu.Lock()
|
||||
defer tb.mu.Unlock()
|
||||
|
||||
b, ok := tb.buckets[key]
|
||||
if !ok {
|
||||
b = &bucket{tokens: float64(tb.burst), lastTime: time.Now()}
|
||||
tb.buckets[key] = b
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
elapsed := now.Sub(b.lastTime).Seconds()
|
||||
b.tokens += elapsed * tb.rate
|
||||
if b.tokens > float64(tb.burst) {
|
||||
b.tokens = float64(tb.burst)
|
||||
}
|
||||
b.lastTime = now
|
||||
|
||||
if b.tokens < 1 {
|
||||
return false
|
||||
}
|
||||
b.tokens--
|
||||
return true
|
||||
}
|
||||
|
||||
func (tb *TokenBucket) cleanup() {
|
||||
ticker := time.NewTicker(5 * time.Minute)
|
||||
defer ticker.Stop()
|
||||
for range ticker.C {
|
||||
tb.mu.Lock()
|
||||
cutoff := time.Now().Add(-10 * time.Minute)
|
||||
for key, b := range tb.buckets {
|
||||
if b.lastTime.Before(cutoff) {
|
||||
delete(tb.buckets, key)
|
||||
}
|
||||
}
|
||||
tb.mu.Unlock()
|
||||
}
|
||||
}
|
||||
|
||||
// Limit wraps an http.Handler with rate limiting.
|
||||
func (tb *TokenBucket) Limit(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
ip := r.Header.Get("X-Forwarded-For")
|
||||
if ip == "" {
|
||||
ip = r.RemoteAddr
|
||||
}
|
||||
if !tb.allow(ip) {
|
||||
slog.Warn("rate limit exceeded", "ip", ip, "path", r.URL.Path)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("Retry-After", "10")
|
||||
w.WriteHeader(http.StatusTooManyRequests)
|
||||
w.Write([]byte(`{"error":"rate limit exceeded, try again later"}`))
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
// LimitFunc wraps an http.HandlerFunc with rate limiting.
|
||||
func (tb *TokenBucket) LimitFunc(next http.HandlerFunc) http.HandlerFunc {
|
||||
limited := tb.Limit(http.HandlerFunc(next))
|
||||
return limited.ServeHTTP
|
||||
}
|
||||
70
backend/internal/middleware/ratelimit_test.go
Normal file
70
backend/internal/middleware/ratelimit_test.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestTokenBucket_AllowsBurst verifies that a fresh client may consume the
// full burst and that the next request is rejected with 429. Refill during
// the test is negligible: at 1 token/sec a new token takes a full second.
func TestTokenBucket_AllowsBurst(t *testing.T) {
	tb := NewTokenBucket(1.0, 5) // 1/sec, burst 5

	handler := tb.LimitFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})

	// Should allow burst of 5 requests
	for i := 0; i < 5; i++ {
		req := httptest.NewRequest("GET", "/test", nil)
		w := httptest.NewRecorder()
		handler.ServeHTTP(w, req)
		if w.Code != http.StatusOK {
			t.Fatalf("request %d: expected 200, got %d", i+1, w.Code)
		}
	}

	// 6th request should be rate limited
	req := httptest.NewRequest("GET", "/test", nil)
	w := httptest.NewRecorder()
	handler.ServeHTTP(w, req)
	if w.Code != http.StatusTooManyRequests {
		t.Fatalf("request 6: expected 429, got %d", w.Code)
	}
}

// TestTokenBucket_DifferentIPs verifies that buckets are isolated per
// client: exhausting one X-Forwarded-For identity must not affect another.
func TestTokenBucket_DifferentIPs(t *testing.T) {
	tb := NewTokenBucket(1.0, 2) // 1/sec, burst 2

	handler := tb.LimitFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})

	// Exhaust IP1's bucket
	for i := 0; i < 2; i++ {
		req := httptest.NewRequest("GET", "/test", nil)
		req.Header.Set("X-Forwarded-For", "1.2.3.4")
		w := httptest.NewRecorder()
		handler.ServeHTTP(w, req)
		if w.Code != http.StatusOK {
			t.Fatalf("ip1 request %d: expected 200, got %d", i+1, w.Code)
		}
	}

	// IP1 should now be limited
	req := httptest.NewRequest("GET", "/test", nil)
	req.Header.Set("X-Forwarded-For", "1.2.3.4")
	w := httptest.NewRecorder()
	handler.ServeHTTP(w, req)
	if w.Code != http.StatusTooManyRequests {
		t.Fatalf("ip1 request 3: expected 429, got %d", w.Code)
	}

	// IP2 should still work
	req = httptest.NewRequest("GET", "/test", nil)
	req.Header.Set("X-Forwarded-For", "5.6.7.8")
	w = httptest.NewRecorder()
	handler.ServeHTTP(w, req)
	if w.Code != http.StatusOK {
		t.Fatalf("ip2 request 1: expected 200, got %d", w.Code)
	}
}
|
||||
22
backend/internal/models/audit_log.go
Normal file
22
backend/internal/models/audit_log.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// AuditLog is one row of the tenant-scoped audit trail. Pointer fields map
// to nullable columns; Old/NewValues hold raw JSON snapshots of the entity
// before and after the change.
type AuditLog struct {
	ID         int64            `db:"id" json:"id"`
	TenantID   uuid.UUID        `db:"tenant_id" json:"tenant_id"`
	UserID     *uuid.UUID       `db:"user_id" json:"user_id,omitempty"` // nil when no user is associated — presumably system actions; confirm
	Action     string           `db:"action" json:"action"`
	EntityType string           `db:"entity_type" json:"entity_type"`
	EntityID   *uuid.UUID       `db:"entity_id" json:"entity_id,omitempty"`
	OldValues  *json.RawMessage `db:"old_values" json:"old_values,omitempty"` // JSON snapshot before the change
	NewValues  *json.RawMessage `db:"new_values" json:"new_values,omitempty"` // JSON snapshot after the change
	IPAddress  *string          `db:"ip_address" json:"ip_address,omitempty"`
	UserAgent  *string          `db:"user_agent" json:"user_agent,omitempty"`
	CreatedAt  time.Time        `db:"created_at" json:"created_at"`
}
|
||||
20
backend/internal/models/note.go
Normal file
20
backend/internal/models/note.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// Note is a free-text note attached to a parent entity. The four parent ID
// fields (case, deadline, appointment, case event) are each nullable;
// NOTE(review): handlers appear to require exactly one to be set — this
// struct does not enforce that, confirm a DB constraint exists.
type Note struct {
	ID            uuid.UUID  `db:"id" json:"id"`
	TenantID      uuid.UUID  `db:"tenant_id" json:"tenant_id"`
	CaseID        *uuid.UUID `db:"case_id" json:"case_id,omitempty"`
	DeadlineID    *uuid.UUID `db:"deadline_id" json:"deadline_id,omitempty"`
	AppointmentID *uuid.UUID `db:"appointment_id" json:"appointment_id,omitempty"`
	CaseEventID   *uuid.UUID `db:"case_event_id" json:"case_event_id,omitempty"`
	Content       string     `db:"content" json:"content"`
	CreatedBy     *uuid.UUID `db:"created_by" json:"created_by,omitempty"` // nullable author reference
	CreatedAt     time.Time  `db:"created_at" json:"created_at"`
	UpdatedAt     time.Time  `db:"updated_at" json:"updated_at"`
}
|
||||
@@ -22,3 +22,9 @@ type UserTenant struct {
|
||||
Role string `db:"role" json:"role"`
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
}
|
||||
|
||||
// TenantWithRole is a Tenant joined with the user's role in that tenant.
// Tenant is embedded so its db/json fields flatten into this struct.
type TenantWithRole struct {
	Tenant        // embedded base tenant row
	Role   string `db:"role" json:"role"` // the requesting user's role within the tenant
}
|
||||
|
||||
@@ -2,25 +2,61 @@ package router
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/handlers"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/config"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/handlers"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/middleware"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/services"
|
||||
)
|
||||
|
||||
func New(db *sqlx.DB, authMW *auth.Middleware) http.Handler {
|
||||
func New(db *sqlx.DB, authMW *auth.Middleware, cfg *config.Config, calDAVSvc *services.CalDAVService) http.Handler {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
// Services
|
||||
caseSvc := services.NewCaseService(db)
|
||||
partySvc := services.NewPartyService(db)
|
||||
auditSvc := services.NewAuditService(db)
|
||||
tenantSvc := services.NewTenantService(db, auditSvc)
|
||||
caseSvc := services.NewCaseService(db, auditSvc)
|
||||
partySvc := services.NewPartyService(db, auditSvc)
|
||||
appointmentSvc := services.NewAppointmentService(db, auditSvc)
|
||||
holidaySvc := services.NewHolidayService(db)
|
||||
deadlineSvc := services.NewDeadlineService(db, auditSvc)
|
||||
deadlineRuleSvc := services.NewDeadlineRuleService(db)
|
||||
calculator := services.NewDeadlineCalculator(holidaySvc)
|
||||
storageCli := services.NewStorageClient(cfg.SupabaseURL, cfg.SupabaseServiceKey)
|
||||
documentSvc := services.NewDocumentService(db, storageCli, auditSvc)
|
||||
|
||||
// AI service (optional — only if API key is configured)
|
||||
var aiH *handlers.AIHandler
|
||||
if cfg.AnthropicAPIKey != "" {
|
||||
aiSvc := services.NewAIService(cfg.AnthropicAPIKey, db)
|
||||
aiH = handlers.NewAIHandler(aiSvc, db)
|
||||
}
|
||||
|
||||
// Middleware
|
||||
tenantResolver := auth.NewTenantResolver(tenantSvc)
|
||||
|
||||
noteSvc := services.NewNoteService(db, auditSvc)
|
||||
dashboardSvc := services.NewDashboardService(db)
|
||||
|
||||
// Handlers
|
||||
auditH := handlers.NewAuditLogHandler(auditSvc)
|
||||
tenantH := handlers.NewTenantHandler(tenantSvc)
|
||||
caseH := handlers.NewCaseHandler(caseSvc)
|
||||
partyH := handlers.NewPartyHandler(partySvc)
|
||||
apptH := handlers.NewAppointmentHandler(appointmentSvc)
|
||||
deadlineH := handlers.NewDeadlineHandlers(deadlineSvc, db)
|
||||
ruleH := handlers.NewDeadlineRuleHandlers(deadlineRuleSvc)
|
||||
calcH := handlers.NewCalculateHandlers(calculator, deadlineRuleSvc)
|
||||
dashboardH := handlers.NewDashboardHandler(dashboardSvc)
|
||||
noteH := handlers.NewNoteHandler(noteSvc)
|
||||
eventH := handlers.NewCaseEventHandler(db)
|
||||
docH := handlers.NewDocumentHandler(documentSvc)
|
||||
|
||||
// Public routes
|
||||
mux.HandleFunc("GET /health", handleHealth(db))
|
||||
@@ -28,27 +64,97 @@ func New(db *sqlx.DB, authMW *auth.Middleware) http.Handler {
|
||||
// Authenticated API routes
|
||||
api := http.NewServeMux()
|
||||
|
||||
// Tenant management (no tenant resolver — these operate across tenants)
|
||||
api.HandleFunc("POST /api/tenants", tenantH.CreateTenant)
|
||||
api.HandleFunc("GET /api/tenants", tenantH.ListTenants)
|
||||
api.HandleFunc("GET /api/tenants/{id}", tenantH.GetTenant)
|
||||
api.HandleFunc("PUT /api/tenants/{id}/settings", tenantH.UpdateSettings)
|
||||
api.HandleFunc("POST /api/tenants/{id}/invite", tenantH.InviteUser)
|
||||
api.HandleFunc("DELETE /api/tenants/{id}/members/{uid}", tenantH.RemoveMember)
|
||||
api.HandleFunc("GET /api/tenants/{id}/members", tenantH.ListMembers)
|
||||
|
||||
// Tenant-scoped routes (require tenant context)
|
||||
scoped := http.NewServeMux()
|
||||
|
||||
// Cases
|
||||
api.HandleFunc("GET /api/cases", caseH.List)
|
||||
api.HandleFunc("POST /api/cases", caseH.Create)
|
||||
api.HandleFunc("GET /api/cases/{id}", caseH.Get)
|
||||
api.HandleFunc("PUT /api/cases/{id}", caseH.Update)
|
||||
api.HandleFunc("DELETE /api/cases/{id}", caseH.Delete)
|
||||
scoped.HandleFunc("GET /api/cases", caseH.List)
|
||||
scoped.HandleFunc("POST /api/cases", caseH.Create)
|
||||
scoped.HandleFunc("GET /api/cases/{id}", caseH.Get)
|
||||
scoped.HandleFunc("PUT /api/cases/{id}", caseH.Update)
|
||||
scoped.HandleFunc("DELETE /api/cases/{id}", caseH.Delete)
|
||||
|
||||
// Parties (nested under cases for creation/listing, top-level for update/delete)
|
||||
api.HandleFunc("GET /api/cases/{id}/parties", partyH.List)
|
||||
api.HandleFunc("POST /api/cases/{id}/parties", partyH.Create)
|
||||
api.HandleFunc("PUT /api/parties/{partyId}", partyH.Update)
|
||||
api.HandleFunc("DELETE /api/parties/{partyId}", partyH.Delete)
|
||||
// Parties
|
||||
scoped.HandleFunc("GET /api/cases/{id}/parties", partyH.List)
|
||||
scoped.HandleFunc("POST /api/cases/{id}/parties", partyH.Create)
|
||||
scoped.HandleFunc("PUT /api/parties/{partyId}", partyH.Update)
|
||||
scoped.HandleFunc("DELETE /api/parties/{partyId}", partyH.Delete)
|
||||
|
||||
// Placeholder routes for future phases
|
||||
api.HandleFunc("GET /api/deadlines", placeholder("deadlines"))
|
||||
api.HandleFunc("GET /api/appointments", placeholder("appointments"))
|
||||
api.HandleFunc("GET /api/documents", placeholder("documents"))
|
||||
// Deadlines
|
||||
scoped.HandleFunc("GET /api/deadlines/{deadlineID}", deadlineH.Get)
|
||||
scoped.HandleFunc("GET /api/deadlines", deadlineH.ListAll)
|
||||
scoped.HandleFunc("GET /api/cases/{caseID}/deadlines", deadlineH.ListForCase)
|
||||
scoped.HandleFunc("POST /api/cases/{caseID}/deadlines", deadlineH.Create)
|
||||
scoped.HandleFunc("PUT /api/deadlines/{deadlineID}", deadlineH.Update)
|
||||
scoped.HandleFunc("PATCH /api/deadlines/{deadlineID}/complete", deadlineH.Complete)
|
||||
scoped.HandleFunc("DELETE /api/deadlines/{deadlineID}", deadlineH.Delete)
|
||||
|
||||
// Deadline rules (reference data)
|
||||
scoped.HandleFunc("GET /api/deadline-rules", ruleH.List)
|
||||
scoped.HandleFunc("GET /api/deadline-rules/{type}", ruleH.GetRuleTree)
|
||||
scoped.HandleFunc("GET /api/proceeding-types", ruleH.ListProceedingTypes)
|
||||
|
||||
// Deadline calculator
|
||||
scoped.HandleFunc("POST /api/deadlines/calculate", calcH.Calculate)
|
||||
|
||||
// Appointments
|
||||
scoped.HandleFunc("GET /api/appointments/{id}", apptH.Get)
|
||||
scoped.HandleFunc("GET /api/appointments", apptH.List)
|
||||
scoped.HandleFunc("POST /api/appointments", apptH.Create)
|
||||
scoped.HandleFunc("PUT /api/appointments/{id}", apptH.Update)
|
||||
scoped.HandleFunc("DELETE /api/appointments/{id}", apptH.Delete)
|
||||
|
||||
// Case events
|
||||
scoped.HandleFunc("GET /api/case-events/{id}", eventH.Get)
|
||||
|
||||
// Notes
|
||||
scoped.HandleFunc("GET /api/notes", noteH.List)
|
||||
scoped.HandleFunc("POST /api/notes", noteH.Create)
|
||||
scoped.HandleFunc("PUT /api/notes/{id}", noteH.Update)
|
||||
scoped.HandleFunc("DELETE /api/notes/{id}", noteH.Delete)
|
||||
|
||||
// Dashboard
|
||||
scoped.HandleFunc("GET /api/dashboard", dashboardH.Get)
|
||||
|
||||
// Audit log
|
||||
scoped.HandleFunc("GET /api/audit-log", auditH.List)
|
||||
|
||||
// Documents
|
||||
scoped.HandleFunc("GET /api/cases/{id}/documents", docH.ListByCase)
|
||||
scoped.HandleFunc("POST /api/cases/{id}/documents", docH.Upload)
|
||||
scoped.HandleFunc("GET /api/documents/{docId}", docH.Download)
|
||||
scoped.HandleFunc("GET /api/documents/{docId}/meta", docH.GetMeta)
|
||||
scoped.HandleFunc("DELETE /api/documents/{docId}", docH.Delete)
|
||||
|
||||
// AI endpoints (rate limited: 5 req/min burst 10 per IP)
|
||||
if aiH != nil {
|
||||
aiLimiter := middleware.NewTokenBucket(5.0/60.0, 10)
|
||||
scoped.HandleFunc("POST /api/ai/extract-deadlines", aiLimiter.LimitFunc(aiH.ExtractDeadlines))
|
||||
scoped.HandleFunc("POST /api/ai/summarize-case", aiLimiter.LimitFunc(aiH.SummarizeCase))
|
||||
}
|
||||
|
||||
// CalDAV sync endpoints
|
||||
if calDAVSvc != nil {
|
||||
calDAVH := handlers.NewCalDAVHandler(calDAVSvc)
|
||||
scoped.HandleFunc("POST /api/caldav/sync", calDAVH.TriggerSync)
|
||||
scoped.HandleFunc("GET /api/caldav/status", calDAVH.GetStatus)
|
||||
}
|
||||
|
||||
// Wire: auth -> tenant routes go directly, scoped routes get tenant resolver
|
||||
api.Handle("/api/", tenantResolver.Resolve(scoped))
|
||||
|
||||
mux.Handle("/api/", authMW.RequireAuth(api))
|
||||
|
||||
return mux
|
||||
return requestLogger(mux)
|
||||
}
|
||||
|
||||
func handleHealth(db *sqlx.DB) http.HandlerFunc {
|
||||
@@ -63,12 +169,34 @@ func handleHealth(db *sqlx.DB) http.HandlerFunc {
|
||||
}
|
||||
}
|
||||
|
||||
func placeholder(resource string) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]string{
|
||||
"status": "not_implemented",
|
||||
"resource": resource,
|
||||
type statusWriter struct {
|
||||
http.ResponseWriter
|
||||
status int
|
||||
}
|
||||
|
||||
func (w *statusWriter) WriteHeader(code int) {
|
||||
w.status = code
|
||||
w.ResponseWriter.WriteHeader(code)
|
||||
}
|
||||
|
||||
func requestLogger(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Skip health checks to reduce noise
|
||||
if r.URL.Path == "/health" {
|
||||
next.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
sw := &statusWriter{ResponseWriter: w, status: http.StatusOK}
|
||||
start := time.Now()
|
||||
next.ServeHTTP(sw, r)
|
||||
|
||||
slog.Info("request",
|
||||
"method", r.Method,
|
||||
"path", r.URL.Path,
|
||||
"status", sw.status,
|
||||
"duration_ms", time.Since(start).Milliseconds(),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
283
backend/internal/services/ai_service.go
Normal file
283
backend/internal/services/ai_service.go
Normal file
@@ -0,0 +1,283 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/anthropics/anthropic-sdk-go"
|
||||
"github.com/anthropics/anthropic-sdk-go/option"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
type AIService struct {
|
||||
client anthropic.Client
|
||||
db *sqlx.DB
|
||||
}
|
||||
|
||||
func NewAIService(apiKey string, db *sqlx.DB) *AIService {
|
||||
client := anthropic.NewClient(option.WithAPIKey(apiKey))
|
||||
return &AIService{client: client, db: db}
|
||||
}
|
||||
|
||||
// ExtractedDeadline represents a deadline extracted by AI from a document.
|
||||
type ExtractedDeadline struct {
|
||||
Title string `json:"title"`
|
||||
DueDate *string `json:"due_date"`
|
||||
DurationValue int `json:"duration_value"`
|
||||
DurationUnit string `json:"duration_unit"`
|
||||
Timing string `json:"timing"`
|
||||
TriggerEvent string `json:"trigger_event"`
|
||||
RuleReference string `json:"rule_reference"`
|
||||
Confidence float64 `json:"confidence"`
|
||||
SourceQuote string `json:"source_quote"`
|
||||
}
|
||||
|
||||
type extractDeadlinesToolInput struct {
|
||||
Deadlines []ExtractedDeadline `json:"deadlines"`
|
||||
}
|
||||
|
||||
// deadlineExtractionTool defines the Claude tool schema used to force
// structured output: the model must call "extract_deadlines" with an array of
// deadline objects matching ExtractedDeadline's JSON shape.
var deadlineExtractionTool = anthropic.ToolParam{
	Name:        "extract_deadlines",
	Description: anthropic.String("Extract all legal deadlines found in the document. Return each deadline with its details."),
	InputSchema: anthropic.ToolInputSchemaParam{
		Properties: map[string]any{
			"deadlines": map[string]any{
				"type":        "array",
				"description": "List of extracted deadlines",
				"items": map[string]any{
					"type": "object",
					"properties": map[string]any{
						"title": map[string]any{
							"type":        "string",
							"description": "Short title describing the deadline (e.g. 'Statement of Defence', 'Reply to Counterclaim')",
						},
						"due_date": map[string]any{
							// Nullable: string when the absolute date is known, null otherwise.
							"type":        []string{"string", "null"},
							"description": "Absolute due date in YYYY-MM-DD format if determinable, null otherwise",
						},
						"duration_value": map[string]any{
							"type":        "integer",
							"description": "Numeric duration value (e.g. 3 for '3 months')",
						},
						"duration_unit": map[string]any{
							"type":        "string",
							"enum":        []string{"days", "weeks", "months"},
							"description": "Unit of the duration period",
						},
						"timing": map[string]any{
							"type":        "string",
							"enum":        []string{"after", "before"},
							"description": "Whether the deadline is before or after the trigger event",
						},
						"trigger_event": map[string]any{
							"type":        "string",
							"description": "The event that triggers this deadline (e.g. 'service of the Statement of Claim')",
						},
						"rule_reference": map[string]any{
							"type":        "string",
							"description": "Legal rule reference (e.g. 'Rule 23 RoP', 'Rule 222 RoP', '§ 276 ZPO')",
						},
						"confidence": map[string]any{
							"type":        "number",
							"minimum":     0,
							"maximum":     1,
							"description": "Confidence score from 0.0 to 1.0",
						},
						"source_quote": map[string]any{
							"type":        "string",
							"description": "The exact quote from the document where this deadline was found",
						},
					},
					// due_date is deliberately not in the required list — the
					// model may return null/absent when no absolute date exists.
					"required": []string{"title", "duration_value", "duration_unit", "timing", "trigger_event", "rule_reference", "confidence", "source_quote"},
				},
			},
		},
		Required: []string{"deadlines"},
	},
}
|
||||
|
||||
// extractionSystemPrompt tells Claude WHAT to extract; the structured output
// format itself is enforced separately by deadlineExtractionTool.
const extractionSystemPrompt = `You are a legal deadline extraction assistant for German and UPC (Unified Patent Court) patent litigation.

Your task is to extract all legal deadlines, time limits, and procedural time periods from the provided document.

For each deadline found, extract:
- A clear title describing the deadline
- The absolute due date if it can be determined from the document
- The duration (value + unit: days/weeks/months)
- Whether it runs before or after a trigger event
- The trigger event that starts the deadline
- The legal rule reference (e.g. Rule 23 RoP, § 276 ZPO)
- Your confidence level (0.0-1.0) in the extraction
- The exact source quote from the document

Be thorough: extract every deadline mentioned, including conditional ones. If a deadline references another deadline (e.g. "within 2 months of the defence"), capture that relationship in the trigger_event field.

If the document contains no deadlines, return an empty list.`
|
||||
|
||||
// ExtractDeadlines sends a document (PDF or text) to Claude for deadline
// extraction and returns the deadlines it found. At least one of pdfData
// (raw PDF bytes) or text must be non-empty; pdfData takes precedence when
// both are given. The model is forced (via ToolChoice) to respond through
// the extract_deadlines tool, so the result is parsed from the tool_use
// block rather than free-form text.
func (s *AIService) ExtractDeadlines(ctx context.Context, pdfData []byte, text string) ([]ExtractedDeadline, error) {
	var contentBlocks []anthropic.ContentBlockParamUnion

	if len(pdfData) > 0 {
		// PDFs are sent inline as a base64-encoded document block.
		encoded := base64.StdEncoding.EncodeToString(pdfData)
		contentBlocks = append(contentBlocks, anthropic.ContentBlockParamUnion{
			OfDocument: &anthropic.DocumentBlockParam{
				Source: anthropic.DocumentBlockParamSourceUnion{
					OfBase64: &anthropic.Base64PDFSourceParam{
						Data: encoded,
					},
				},
			},
		})
		contentBlocks = append(contentBlocks, anthropic.NewTextBlock("Extract all legal deadlines from this document."))
	} else if text != "" {
		contentBlocks = append(contentBlocks, anthropic.NewTextBlock("Extract all legal deadlines from the following text:\n\n"+text))
	} else {
		return nil, fmt.Errorf("either pdf_data or text must be provided")
	}

	msg, err := s.client.Messages.New(ctx, anthropic.MessageNewParams{
		Model:     anthropic.ModelClaudeSonnet4_5,
		MaxTokens: 4096,
		System: []anthropic.TextBlockParam{
			{Text: extractionSystemPrompt},
		},
		Messages: []anthropic.MessageParam{
			anthropic.NewUserMessage(contentBlocks...),
		},
		Tools: []anthropic.ToolUnionParam{
			{OfTool: &deadlineExtractionTool},
		},
		// Force the model to answer via the tool so the output is structured.
		ToolChoice: anthropic.ToolChoiceParamOfTool("extract_deadlines"),
	})
	if err != nil {
		return nil, fmt.Errorf("claude API call: %w", err)
	}

	// Find the tool_use block in the response and decode its input payload.
	for _, block := range msg.Content {
		if block.Type == "tool_use" && block.Name == "extract_deadlines" {
			var input extractDeadlinesToolInput
			if err := json.Unmarshal(block.Input, &input); err != nil {
				return nil, fmt.Errorf("parsing tool output: %w", err)
			}
			return input.Deadlines, nil
		}
	}

	// Should not happen given the forced tool choice, but guard anyway.
	return nil, fmt.Errorf("no tool_use block in response")
}
|
||||
|
||||
// summarizeSystemPrompt steers the model toward a short, deadline-focused
// case summary; used by SummarizeCase.
const summarizeSystemPrompt = `You are a legal case summary assistant for German and UPC patent litigation case management.

Given a case's details, recent events, and deadlines, produce a concise 2-3 sentence summary of what matters right now. Focus on:
- The most urgent upcoming deadline
- Recent significant events
- The current procedural stage

Write in clear, professional language suitable for a lawyer reviewing their case list. Be specific about dates and deadlines.`
|
||||
|
||||
// SummarizeCase generates an AI summary for a case and caches it in the
// database (cases.ai_summary). It loads the case, its 10 most recent events,
// and up to 10 active deadlines (soonest first), builds a plain-text context,
// and asks Claude for a short status summary. Returns the summary text, or an
// error if any load, API call, or cache write fails.
func (s *AIService) SummarizeCase(ctx context.Context, tenantID, caseID uuid.UUID) (string, error) {
	// Load case
	var c models.Case
	err := s.db.GetContext(ctx, &c,
		"SELECT * FROM cases WHERE id = $1 AND tenant_id = $2", caseID, tenantID)
	if err != nil {
		return "", fmt.Errorf("loading case: %w", err)
	}

	// Load recent events
	var events []models.CaseEvent
	if err := s.db.SelectContext(ctx, &events,
		"SELECT * FROM case_events WHERE case_id = $1 AND tenant_id = $2 ORDER BY created_at DESC LIMIT 10",
		caseID, tenantID); err != nil {
		return "", fmt.Errorf("loading events: %w", err)
	}

	// Load active deadlines
	var deadlines []models.Deadline
	if err := s.db.SelectContext(ctx, &deadlines,
		"SELECT * FROM deadlines WHERE case_id = $1 AND tenant_id = $2 AND status = 'active' ORDER BY due_date ASC LIMIT 10",
		caseID, tenantID); err != nil {
		return "", fmt.Errorf("loading deadlines: %w", err)
	}

	// Build context text: case header first, then events, then deadlines.
	caseInfo := fmt.Sprintf("Case: %s — %s\nStatus: %s", c.CaseNumber, c.Title, c.Status)
	if c.Court != nil {
		caseInfo += fmt.Sprintf("\nCourt: %s", *c.Court)
	}
	if c.CourtRef != nil {
		caseInfo += fmt.Sprintf("\nCourt Reference: %s", *c.CourtRef)
	}
	if c.CaseType != nil {
		caseInfo += fmt.Sprintf("\nType: %s", *c.CaseType)
	}

	eventText := "\n\nRecent Events:"
	if len(events) == 0 {
		eventText += "\nNo events recorded."
	}
	for _, e := range events {
		eventText += fmt.Sprintf("\n- [%s] %s", e.CreatedAt.Format("2006-01-02"), e.Title)
		if e.Description != nil {
			eventText += fmt.Sprintf(": %s", *e.Description)
		}
	}

	deadlineText := "\n\nUpcoming Deadlines:"
	if len(deadlines) == 0 {
		deadlineText += "\nNo active deadlines."
	}
	for _, d := range deadlines {
		deadlineText += fmt.Sprintf("\n- %s: due %s (status: %s)", d.Title, d.DueDate, d.Status)
		if d.Description != nil {
			deadlineText += fmt.Sprintf(" — %s", *d.Description)
		}
	}

	prompt := caseInfo + eventText + deadlineText

	msg, err := s.client.Messages.New(ctx, anthropic.MessageNewParams{
		Model:     anthropic.ModelClaudeSonnet4_5,
		MaxTokens: 512,
		System: []anthropic.TextBlockParam{
			{Text: summarizeSystemPrompt},
		},
		Messages: []anthropic.MessageParam{
			anthropic.NewUserMessage(anthropic.NewTextBlock("Summarize the current state of this case:\n\n" + prompt)),
		},
	})
	if err != nil {
		return "", fmt.Errorf("claude API call: %w", err)
	}

	// Extract text from response (concatenate all text blocks).
	var summary string
	for _, block := range msg.Content {
		if block.Type == "text" {
			summary += block.Text
		}
	}

	if summary == "" {
		return "", fmt.Errorf("empty response from Claude")
	}

	// Cache summary in database so case lists can show it without a new call.
	_, err = s.db.ExecContext(ctx,
		"UPDATE cases SET ai_summary = $1, updated_at = $2 WHERE id = $3 AND tenant_id = $4",
		summary, time.Now(), caseID, tenantID)
	if err != nil {
		return "", fmt.Errorf("caching summary: %w", err)
	}

	return summary, nil
}
|
||||
109
backend/internal/services/ai_service_test.go
Normal file
109
backend/internal/services/ai_service_test.go
Normal file
@@ -0,0 +1,109 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestDeadlineExtractionToolSchema round-trips the tool definition through
// JSON and verifies the schema shape that will be sent to the Claude API:
// tool name, object schema, the deadlines array, its item properties, and
// the required-field list.
func TestDeadlineExtractionToolSchema(t *testing.T) {
	// Verify the tool schema serializes correctly
	data, err := json.Marshal(deadlineExtractionTool)
	if err != nil {
		t.Fatalf("failed to marshal tool: %v", err)
	}

	var parsed map[string]any
	if err := json.Unmarshal(data, &parsed); err != nil {
		t.Fatalf("failed to unmarshal tool JSON: %v", err)
	}

	if parsed["name"] != "extract_deadlines" {
		t.Errorf("expected name 'extract_deadlines', got %v", parsed["name"])
	}

	schema, ok := parsed["input_schema"].(map[string]any)
	if !ok {
		t.Fatal("input_schema is not a map")
	}

	// "object" comes from the SDK's ToolInputSchemaParam serialization.
	if schema["type"] != "object" {
		t.Errorf("expected schema type 'object', got %v", schema["type"])
	}

	props, ok := schema["properties"].(map[string]any)
	if !ok {
		t.Fatal("properties is not a map")
	}

	deadlines, ok := props["deadlines"].(map[string]any)
	if !ok {
		t.Fatal("deadlines property is not a map")
	}

	if deadlines["type"] != "array" {
		t.Errorf("expected deadlines type 'array', got %v", deadlines["type"])
	}

	items, ok := deadlines["items"].(map[string]any)
	if !ok {
		t.Fatal("items is not a map")
	}

	itemProps, ok := items["properties"].(map[string]any)
	if !ok {
		t.Fatal("item properties is not a map")
	}

	// Every field of ExtractedDeadline must appear in the item schema.
	expectedFields := []string{"title", "due_date", "duration_value", "duration_unit", "timing", "trigger_event", "rule_reference", "confidence", "source_quote"}
	for _, field := range expectedFields {
		if _, ok := itemProps[field]; !ok {
			t.Errorf("missing expected field %q in item properties", field)
		}
	}

	// 8 required fields: all of the above except the nullable due_date.
	required, ok := items["required"].([]any)
	if !ok {
		t.Fatal("required is not a list")
	}
	if len(required) != 8 {
		t.Errorf("expected 8 required fields, got %d", len(required))
	}
}
|
||||
|
||||
func TestExtractedDeadlineJSON(t *testing.T) {
|
||||
dueDate := "2026-04-15"
|
||||
d := ExtractedDeadline{
|
||||
Title: "Statement of Defence",
|
||||
DueDate: &dueDate,
|
||||
DurationValue: 3,
|
||||
DurationUnit: "months",
|
||||
Timing: "after",
|
||||
TriggerEvent: "service of the Statement of Claim",
|
||||
RuleReference: "Rule 23 RoP",
|
||||
Confidence: 0.95,
|
||||
SourceQuote: "The defendant shall file a defence within 3 months",
|
||||
}
|
||||
|
||||
data, err := json.Marshal(d)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to marshal: %v", err)
|
||||
}
|
||||
|
||||
var parsed ExtractedDeadline
|
||||
if err := json.Unmarshal(data, &parsed); err != nil {
|
||||
t.Fatalf("failed to unmarshal: %v", err)
|
||||
}
|
||||
|
||||
if parsed.Title != d.Title {
|
||||
t.Errorf("title mismatch: %q != %q", parsed.Title, d.Title)
|
||||
}
|
||||
if *parsed.DueDate != *d.DueDate {
|
||||
t.Errorf("due_date mismatch: %q != %q", *parsed.DueDate, *d.DueDate)
|
||||
}
|
||||
if parsed.DurationValue != d.DurationValue {
|
||||
t.Errorf("duration_value mismatch: %d != %d", parsed.DurationValue, d.DurationValue)
|
||||
}
|
||||
if parsed.Confidence != d.Confidence {
|
||||
t.Errorf("confidence mismatch: %f != %f", parsed.Confidence, d.Confidence)
|
||||
}
|
||||
}
|
||||
139
backend/internal/services/appointment_service.go
Normal file
139
backend/internal/services/appointment_service.go
Normal file
@@ -0,0 +1,139 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// AppointmentService provides CRUD for appointments, recording every mutation
// in the audit log.
type AppointmentService struct {
	db    *sqlx.DB
	audit *AuditService
}
|
||||
|
||||
func NewAppointmentService(db *sqlx.DB, audit *AuditService) *AppointmentService {
|
||||
return &AppointmentService{db: db, audit: audit}
|
||||
}
|
||||
|
||||
// AppointmentFilter holds optional query constraints for List; nil fields
// are ignored.
type AppointmentFilter struct {
	CaseID    *uuid.UUID // restrict to a single case
	Type      *string    // exact match on appointment_type
	StartFrom *time.Time // start_at >= this (inclusive)
	StartTo   *time.Time // start_at <= this (inclusive)
}
|
||||
|
||||
func (s *AppointmentService) List(ctx context.Context, tenantID uuid.UUID, filter AppointmentFilter) ([]models.Appointment, error) {
|
||||
query := "SELECT * FROM appointments WHERE tenant_id = $1"
|
||||
args := []any{tenantID}
|
||||
argN := 2
|
||||
|
||||
if filter.CaseID != nil {
|
||||
query += fmt.Sprintf(" AND case_id = $%d", argN)
|
||||
args = append(args, *filter.CaseID)
|
||||
argN++
|
||||
}
|
||||
if filter.Type != nil {
|
||||
query += fmt.Sprintf(" AND appointment_type = $%d", argN)
|
||||
args = append(args, *filter.Type)
|
||||
argN++
|
||||
}
|
||||
if filter.StartFrom != nil {
|
||||
query += fmt.Sprintf(" AND start_at >= $%d", argN)
|
||||
args = append(args, *filter.StartFrom)
|
||||
argN++
|
||||
}
|
||||
if filter.StartTo != nil {
|
||||
query += fmt.Sprintf(" AND start_at <= $%d", argN)
|
||||
args = append(args, *filter.StartTo)
|
||||
argN++
|
||||
}
|
||||
|
||||
query += " ORDER BY start_at ASC"
|
||||
|
||||
var appointments []models.Appointment
|
||||
if err := s.db.SelectContext(ctx, &appointments, query, args...); err != nil {
|
||||
return nil, fmt.Errorf("listing appointments: %w", err)
|
||||
}
|
||||
if appointments == nil {
|
||||
appointments = []models.Appointment{}
|
||||
}
|
||||
return appointments, nil
|
||||
}
|
||||
|
||||
func (s *AppointmentService) GetByID(ctx context.Context, tenantID, id uuid.UUID) (*models.Appointment, error) {
|
||||
var a models.Appointment
|
||||
err := s.db.GetContext(ctx, &a, "SELECT * FROM appointments WHERE id = $1 AND tenant_id = $2", id, tenantID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("getting appointment: %w", err)
|
||||
}
|
||||
return &a, nil
|
||||
}
|
||||
|
||||
// Create inserts a new appointment. It mutates the caller's struct, assigning
// a fresh UUID and matching UTC created/updated timestamps, then writes an
// audit entry on success.
func (s *AppointmentService) Create(ctx context.Context, a *models.Appointment) error {
	a.ID = uuid.New()
	now := time.Now().UTC()
	a.CreatedAt = now
	a.UpdatedAt = now

	_, err := s.db.NamedExecContext(ctx, `
		INSERT INTO appointments (id, tenant_id, case_id, title, description, start_at, end_at, location, appointment_type, caldav_uid, caldav_etag, created_at, updated_at)
		VALUES (:id, :tenant_id, :case_id, :title, :description, :start_at, :end_at, :location, :appointment_type, :caldav_uid, :caldav_etag, :created_at, :updated_at)
	`, a)
	if err != nil {
		return fmt.Errorf("creating appointment: %w", err)
	}
	// Audit with the full new record; Log never fails the business operation.
	s.audit.Log(ctx, "create", "appointment", &a.ID, nil, a)
	return nil
}
|
||||
|
||||
// Update rewrites all mutable columns of an existing appointment (matched by
// id + tenant_id), refreshes UpdatedAt to now (UTC), and writes an audit
// entry. Returns an "appointment not found" error when no row matched —
// i.e. the id does not exist or belongs to another tenant.
func (s *AppointmentService) Update(ctx context.Context, a *models.Appointment) error {
	a.UpdatedAt = time.Now().UTC()

	result, err := s.db.NamedExecContext(ctx, `
		UPDATE appointments SET
			case_id = :case_id,
			title = :title,
			description = :description,
			start_at = :start_at,
			end_at = :end_at,
			location = :location,
			appointment_type = :appointment_type,
			caldav_uid = :caldav_uid,
			caldav_etag = :caldav_etag,
			updated_at = :updated_at
		WHERE id = :id AND tenant_id = :tenant_id
	`, a)
	if err != nil {
		return fmt.Errorf("updating appointment: %w", err)
	}
	// RowsAffected distinguishes "no such row" from a successful update.
	rows, err := result.RowsAffected()
	if err != nil {
		return fmt.Errorf("checking rows affected: %w", err)
	}
	if rows == 0 {
		return fmt.Errorf("appointment not found")
	}
	s.audit.Log(ctx, "update", "appointment", &a.ID, nil, a)
	return nil
}
|
||||
|
||||
func (s *AppointmentService) Delete(ctx context.Context, tenantID, id uuid.UUID) error {
|
||||
result, err := s.db.ExecContext(ctx, "DELETE FROM appointments WHERE id = $1 AND tenant_id = $2", id, tenantID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("deleting appointment: %w", err)
|
||||
}
|
||||
rows, err := result.RowsAffected()
|
||||
if err != nil {
|
||||
return fmt.Errorf("checking rows affected: %w", err)
|
||||
}
|
||||
if rows == 0 {
|
||||
return fmt.Errorf("appointment not found")
|
||||
}
|
||||
s.audit.Log(ctx, "delete", "appointment", &id, nil, nil)
|
||||
return nil
|
||||
}
|
||||
141
backend/internal/services/audit_service.go
Normal file
141
backend/internal/services/audit_service.go
Normal file
@@ -0,0 +1,141 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/auth"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// AuditService writes and queries the append-only audit_log table.
type AuditService struct {
	db *sqlx.DB
}
|
||||
|
||||
func NewAuditService(db *sqlx.DB) *AuditService {
|
||||
return &AuditService{db: db}
|
||||
}
|
||||
|
||||
// Log records an audit entry. It extracts tenant, user, IP, and user-agent from context.
|
||||
// Errors are logged but not returned — audit logging must not break business operations.
|
||||
func (s *AuditService) Log(ctx context.Context, action, entityType string, entityID *uuid.UUID, oldValues, newValues any) {
|
||||
tenantID, ok := auth.TenantFromContext(ctx)
|
||||
if !ok {
|
||||
slog.Warn("audit: missing tenant_id in context", "action", action, "entity_type", entityType)
|
||||
return
|
||||
}
|
||||
|
||||
var userID *uuid.UUID
|
||||
if uid, ok := auth.UserFromContext(ctx); ok {
|
||||
userID = &uid
|
||||
}
|
||||
|
||||
var oldJSON, newJSON *json.RawMessage
|
||||
if oldValues != nil {
|
||||
if b, err := json.Marshal(oldValues); err == nil {
|
||||
raw := json.RawMessage(b)
|
||||
oldJSON = &raw
|
||||
}
|
||||
}
|
||||
if newValues != nil {
|
||||
if b, err := json.Marshal(newValues); err == nil {
|
||||
raw := json.RawMessage(b)
|
||||
newJSON = &raw
|
||||
}
|
||||
}
|
||||
|
||||
ip := auth.IPFromContext(ctx)
|
||||
ua := auth.UserAgentFromContext(ctx)
|
||||
|
||||
_, err := s.db.ExecContext(ctx,
|
||||
`INSERT INTO audit_log (tenant_id, user_id, action, entity_type, entity_id, old_values, new_values, ip_address, user_agent)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`,
|
||||
tenantID, userID, action, entityType, entityID, oldJSON, newJSON, ip, ua)
|
||||
if err != nil {
|
||||
slog.Error("audit: failed to write log entry",
|
||||
"error", err,
|
||||
"action", action,
|
||||
"entity_type", entityType,
|
||||
"entity_id", entityID,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// AuditFilter holds query parameters for listing audit log entries.
// Zero-valued fields add no constraint.
type AuditFilter struct {
	EntityType string     // exact match on entity_type; "" = any
	EntityID   *uuid.UUID // nil = any entity
	UserID     *uuid.UUID // nil = any user
	From       string     // RFC3339 date; inclusive lower bound on created_at
	To         string     // RFC3339 date; inclusive upper bound on created_at
	Page       int        // 1-based page number; <= 0 is treated as 1
	Limit      int        // page size; <= 0 defaults to 50, capped at 200
}
|
||||
|
||||
// List returns paginated audit log entries for a tenant, newest first, plus
// the total count of rows matching the filter (for pagination UIs).
func (s *AuditService) List(ctx context.Context, tenantID uuid.UUID, filter AuditFilter) ([]models.AuditLog, int, error) {
	// Clamp paging inputs: default limit 50, hard cap 200, page >= 1.
	if filter.Limit <= 0 {
		filter.Limit = 50
	}
	if filter.Limit > 200 {
		filter.Limit = 200
	}
	if filter.Page <= 0 {
		filter.Page = 1
	}
	offset := (filter.Page - 1) * filter.Limit

	// Build the WHERE clause incrementally; argIdx tracks the next positional
	// placeholder number and must stay in lock-step with args.
	where := "WHERE tenant_id = $1"
	args := []any{tenantID}
	argIdx := 2

	if filter.EntityType != "" {
		where += fmt.Sprintf(" AND entity_type = $%d", argIdx)
		args = append(args, filter.EntityType)
		argIdx++
	}
	if filter.EntityID != nil {
		where += fmt.Sprintf(" AND entity_id = $%d", argIdx)
		args = append(args, *filter.EntityID)
		argIdx++
	}
	if filter.UserID != nil {
		where += fmt.Sprintf(" AND user_id = $%d", argIdx)
		args = append(args, *filter.UserID)
		argIdx++
	}
	if filter.From != "" {
		where += fmt.Sprintf(" AND created_at >= $%d", argIdx)
		args = append(args, filter.From)
		argIdx++
	}
	if filter.To != "" {
		where += fmt.Sprintf(" AND created_at <= $%d", argIdx)
		args = append(args, filter.To)
		argIdx++
	}

	// Total count with the same filter (computed before LIMIT/OFFSET are
	// appended to args).
	var total int
	if err := s.db.GetContext(ctx, &total, "SELECT COUNT(*) FROM audit_log "+where, args...); err != nil {
		return nil, 0, fmt.Errorf("counting audit entries: %w", err)
	}

	// LIMIT/OFFSET take the next two placeholder numbers after the filters.
	query := fmt.Sprintf("SELECT * FROM audit_log %s ORDER BY created_at DESC LIMIT $%d OFFSET $%d",
		where, argIdx, argIdx+1)
	args = append(args, filter.Limit, offset)

	var entries []models.AuditLog
	if err := s.db.SelectContext(ctx, &entries, query, args...); err != nil {
		return nil, 0, fmt.Errorf("listing audit entries: %w", err)
	}
	// Normalize nil to an empty slice — presumably so API responses encode
	// as [] rather than null.
	if entries == nil {
		entries = []models.AuditLog{}
	}

	return entries, total, nil
}
|
||||
687
backend/internal/services/caldav_service.go
Normal file
687
backend/internal/services/caldav_service.go
Normal file
@@ -0,0 +1,687 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/emersion/go-ical"
|
||||
"github.com/emersion/go-webdav"
|
||||
"github.com/emersion/go-webdav/caldav"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
const (
	// calDAVDomain is the domain suffix used when composing iCalendar UIDs.
	// NOTE(review): not referenced in this chunk — presumably used by the UID
	// helpers (e.g. deadlineUID); confirm before removing.
	calDAVDomain = "kanzlai.msbls.de"
	// calDAVProdID is the PRODID written into every generated calendar.
	calDAVProdID = "-//KanzlAI//KanzlAI-mGMT//EN"
	// defaultSyncMin is the fallback sync interval in minutes when a tenant
	// configures none (or a non-positive value).
	defaultSyncMin = 15
)
|
||||
|
||||
// CalDAVConfig holds per-tenant CalDAV configuration, stored under the
// "caldav" key of tenants.settings (see parseCalDAVConfig).
type CalDAVConfig struct {
	URL                 string `json:"url"`                   // CalDAV server base URL; empty = not configured
	Username            string `json:"username"`              // HTTP basic-auth user
	Password            string `json:"password"`              // HTTP basic-auth password
	CalendarPath        string `json:"calendar_path"`         // target calendar collection path
	SyncEnabled         bool   `json:"sync_enabled"`          // master switch for background sync
	SyncIntervalMinutes int    `json:"sync_interval_minutes"` // poll interval; <= 0 falls back to defaultSyncMin
}
|
||||
|
||||
// SyncStatus holds the last sync result for a tenant, kept in memory and
// exposed via CalDAVService.GetStatus.
type SyncStatus struct {
	TenantID     uuid.UUID `json:"tenant_id"`
	LastSyncAt   time.Time `json:"last_sync_at"`     // when the sync finished (also set on client-creation failure)
	ItemsPushed  int       `json:"items_pushed"`     // local items successfully written to the server
	ItemsPulled  int       `json:"items_pulled"`     // remote items successfully imported
	Errors       []string  `json:"errors,omitempty"` // one message per failed item or phase
	SyncDuration string    `json:"sync_duration"`    // human-readable total duration
}
|
||||
|
||||
// CalDAVService handles bidirectional CalDAV synchronization: local deadlines
// are pushed as VTODO and appointments as VEVENT, and remote changes are
// pulled back into the database.
type CalDAVService struct {
	db *sqlx.DB

	mu       sync.RWMutex
	statuses map[uuid.UUID]*SyncStatus // per-tenant sync status, guarded by mu

	stopCh chan struct{}  // closed by Stop to terminate the background loop
	wg     sync.WaitGroup // tracks background goroutines for graceful shutdown
}
|
||||
|
||||
// NewCalDAVService creates a new CalDAV sync service.
|
||||
func NewCalDAVService(db *sqlx.DB) *CalDAVService {
|
||||
return &CalDAVService{
|
||||
db: db,
|
||||
statuses: make(map[uuid.UUID]*SyncStatus),
|
||||
stopCh: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
// GetStatus returns the last sync status for a tenant.
|
||||
func (s *CalDAVService) GetStatus(tenantID uuid.UUID) *SyncStatus {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
return s.statuses[tenantID]
|
||||
}
|
||||
|
||||
// setStatus stores the sync status for a tenant.
|
||||
func (s *CalDAVService) setStatus(status *SyncStatus) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.statuses[status.TenantID] = status
|
||||
}
|
||||
|
||||
// Start begins the background sync goroutine that polls per-tenant.
|
||||
func (s *CalDAVService) Start() {
|
||||
s.wg.Go(func() {
|
||||
s.backgroundLoop()
|
||||
})
|
||||
slog.Info("CalDAV sync service started")
|
||||
}
|
||||
|
||||
// Stop gracefully stops the background sync: it signals the loop to exit by
// closing stopCh, then waits for the goroutines tracked by wg.
// NOTE(review): calling Stop twice panics (double close of stopCh) — callers
// must ensure it runs at most once.
func (s *CalDAVService) Stop() {
	close(s.stopCh)
	s.wg.Wait()
	slog.Info("CalDAV sync service stopped")
}
|
||||
|
||||
// backgroundLoop wakes once a minute and triggers syncs for tenants whose
// configured interval has elapsed; it returns when stopCh is closed.
func (s *CalDAVService) backgroundLoop() {
	// Check every minute, but only sync tenants whose interval has elapsed.
	ticker := time.NewTicker(1 * time.Minute)
	defer ticker.Stop()

	for {
		select {
		case <-s.stopCh:
			return
		case <-ticker.C:
			s.syncAllTenants()
		}
	}
}
|
||||
|
||||
// syncAllTenants checks all tenants and syncs those due for a sync.
|
||||
func (s *CalDAVService) syncAllTenants() {
|
||||
configs, err := s.loadAllTenantConfigs()
|
||||
if err != nil {
|
||||
slog.Error("CalDAV: failed to load tenant configs", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
for tenantID, cfg := range configs {
|
||||
if !cfg.SyncEnabled {
|
||||
continue
|
||||
}
|
||||
|
||||
interval := cfg.SyncIntervalMinutes
|
||||
if interval <= 0 {
|
||||
interval = defaultSyncMin
|
||||
}
|
||||
|
||||
// Check if enough time has passed since last sync
|
||||
status := s.GetStatus(tenantID)
|
||||
if status != nil && time.Since(status.LastSyncAt) < time.Duration(interval)*time.Minute {
|
||||
continue
|
||||
}
|
||||
|
||||
go func(tid uuid.UUID, c CalDAVConfig) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
|
||||
defer cancel()
|
||||
if _, err := s.SyncTenant(ctx, tid, c); err != nil {
|
||||
slog.Error("CalDAV: sync failed", "tenant_id", tid, "error", err)
|
||||
}
|
||||
}(tenantID, cfg)
|
||||
}
|
||||
}
|
||||
|
||||
// loadAllTenantConfigs reads the CalDAV config of every tenant from its
// settings JSON. Tenants with unparsable settings or no CalDAV URL are
// skipped silently.
// NOTE(review): uses the non-context Select — a slow database can stall the
// background poller; confirm whether a context-bound query is wanted here.
func (s *CalDAVService) loadAllTenantConfigs() (map[uuid.UUID]CalDAVConfig, error) {
	// row mirrors the two columns needed from the tenants table.
	type row struct {
		ID       uuid.UUID       `db:"id"`
		Settings json.RawMessage `db:"settings"`
	}
	var rows []row
	if err := s.db.Select(&rows, "SELECT id, settings FROM tenants"); err != nil {
		return nil, fmt.Errorf("querying tenants: %w", err)
	}

	result := make(map[uuid.UUID]CalDAVConfig)
	for _, r := range rows {
		cfg, err := parseCalDAVConfig(r.Settings)
		if err != nil || cfg.URL == "" {
			continue // no usable CalDAV config for this tenant
		}
		result[r.ID] = cfg
	}
	return result, nil
}
|
||||
|
||||
// LoadTenantConfig reads the CalDAV config for a single tenant from its
// settings JSON. Returns an error when the tenant row cannot be read, the
// settings are malformed, or no CalDAV URL is configured.
// NOTE(review): uses the non-context Get — confirm whether a context-bound
// query is wanted here for request-scoped callers.
func (s *CalDAVService) LoadTenantConfig(tenantID uuid.UUID) (*CalDAVConfig, error) {
	var settings json.RawMessage
	if err := s.db.Get(&settings, "SELECT settings FROM tenants WHERE id = $1", tenantID); err != nil {
		return nil, fmt.Errorf("loading tenant settings: %w", err)
	}
	cfg, err := parseCalDAVConfig(settings)
	if err != nil {
		return nil, err
	}
	if cfg.URL == "" {
		return nil, fmt.Errorf("no CalDAV configuration for tenant")
	}
	return &cfg, nil
}
|
||||
|
||||
func parseCalDAVConfig(settings json.RawMessage) (CalDAVConfig, error) {
|
||||
if len(settings) == 0 {
|
||||
return CalDAVConfig{}, nil
|
||||
}
|
||||
var wrapper struct {
|
||||
CalDAV CalDAVConfig `json:"caldav"`
|
||||
}
|
||||
if err := json.Unmarshal(settings, &wrapper); err != nil {
|
||||
return CalDAVConfig{}, fmt.Errorf("parsing CalDAV settings: %w", err)
|
||||
}
|
||||
return wrapper.CalDAV, nil
|
||||
}
|
||||
|
||||
// newCalDAVClient creates a caldav.Client for cfg.URL, authenticating with
// HTTP basic auth. The nil base client passed to HTTPClientWithBasicAuth
// presumably makes go-webdav fall back to http.DefaultClient — TODO confirm
// against the go-webdav documentation.
func newCalDAVClient(cfg CalDAVConfig) (*caldav.Client, error) {
	httpClient := webdav.HTTPClientWithBasicAuth(nil, cfg.Username, cfg.Password)
	return caldav.NewClient(httpClient, cfg.URL)
}
|
||||
|
||||
// SyncTenant performs a full bidirectional sync for a tenant: local items are
// pushed to the CalDAV server first, then remote changes are pulled back.
// The resulting SyncStatus is stored (retrievable via GetStatus) and
// returned. A non-nil error is returned when the client cannot be created or
// when any per-item errors occurred; details are in status.Errors.
func (s *CalDAVService) SyncTenant(ctx context.Context, tenantID uuid.UUID, cfg CalDAVConfig) (*SyncStatus, error) {
	start := time.Now()
	status := &SyncStatus{
		TenantID: tenantID,
	}

	client, err := newCalDAVClient(cfg)
	if err != nil {
		// Record the failure so GetStatus reflects it, then bail out.
		status.Errors = append(status.Errors, fmt.Sprintf("creating client: %v", err))
		status.LastSyncAt = time.Now()
		s.setStatus(status)
		return status, err
	}

	// Push local changes to CalDAV
	pushed, pushErrs := s.pushAll(ctx, client, tenantID, cfg)
	status.ItemsPushed = pushed
	status.Errors = append(status.Errors, pushErrs...)

	// Pull remote changes from CalDAV
	pulled, pullErrs := s.pullAll(ctx, client, tenantID, cfg)
	status.ItemsPulled = pulled
	status.Errors = append(status.Errors, pullErrs...)

	status.LastSyncAt = time.Now()
	status.SyncDuration = time.Since(start).String()
	s.setStatus(status)

	if len(status.Errors) > 0 {
		return status, fmt.Errorf("sync completed with %d errors", len(status.Errors))
	}
	return status, nil
}
|
||||
|
||||
// --- Push: Local -> CalDAV ---
|
||||
|
||||
// pushAll pushes all of the tenant's deadlines (as VTODO) and appointments
// (as VEVENT) to the CalDAV server. It returns the count of successfully
// pushed items plus one message per per-item failure. A failure to load
// deadlines aborts immediately; a failure to load appointments still reports
// the deadlines already pushed.
func (s *CalDAVService) pushAll(ctx context.Context, client *caldav.Client, tenantID uuid.UUID, cfg CalDAVConfig) (int, []string) {
	var pushed int
	var errs []string

	// Push deadlines as VTODO
	deadlines, err := s.loadDeadlines(tenantID)
	if err != nil {
		return 0, []string{fmt.Sprintf("loading deadlines: %v", err)}
	}
	for _, d := range deadlines {
		if err := s.pushDeadline(ctx, client, cfg, &d); err != nil {
			errs = append(errs, fmt.Sprintf("push deadline %s: %v", d.ID, err))
		} else {
			pushed++
		}
	}

	// Push appointments as VEVENT
	appointments, err := s.loadAppointments(ctx, tenantID)
	if err != nil {
		errs = append(errs, fmt.Sprintf("loading appointments: %v", err))
		return pushed, errs
	}
	for _, a := range appointments {
		if err := s.pushAppointment(ctx, client, cfg, &a); err != nil {
			errs = append(errs, fmt.Sprintf("push appointment %s: %v", a.ID, err))
		} else {
			pushed++
		}
	}

	return pushed, errs
}
|
||||
|
||||
// PushDeadline pushes a single deadline to CalDAV (called on create/update).
|
||||
func (s *CalDAVService) PushDeadline(ctx context.Context, tenantID uuid.UUID, deadline *models.Deadline) error {
|
||||
cfg, err := s.LoadTenantConfig(tenantID)
|
||||
if err != nil || !cfg.SyncEnabled {
|
||||
return nil // CalDAV not configured or disabled — silently skip
|
||||
}
|
||||
client, err := newCalDAVClient(*cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating CalDAV client: %w", err)
|
||||
}
|
||||
return s.pushDeadline(ctx, client, *cfg, deadline)
|
||||
}
|
||||
|
||||
func (s *CalDAVService) pushDeadline(ctx context.Context, client *caldav.Client, cfg CalDAVConfig, d *models.Deadline) error {
|
||||
uid := deadlineUID(d.ID)
|
||||
|
||||
cal := ical.NewCalendar()
|
||||
cal.Props.SetText(ical.PropProductID, calDAVProdID)
|
||||
cal.Props.SetText(ical.PropVersion, "2.0")
|
||||
|
||||
todo := ical.NewComponent(ical.CompToDo)
|
||||
todo.Props.SetText(ical.PropUID, uid)
|
||||
todo.Props.SetText(ical.PropSummary, d.Title)
|
||||
todo.Props.SetDateTime(ical.PropDateTimeStamp, time.Now().UTC())
|
||||
|
||||
if d.Description != nil {
|
||||
todo.Props.SetText(ical.PropDescription, *d.Description)
|
||||
}
|
||||
if d.Notes != nil {
|
||||
desc := ""
|
||||
if d.Description != nil {
|
||||
desc = *d.Description + "\n\n"
|
||||
}
|
||||
todo.Props.SetText(ical.PropDescription, desc+*d.Notes)
|
||||
}
|
||||
|
||||
// Parse due_date (stored as string "YYYY-MM-DD")
|
||||
if due, err := time.Parse("2006-01-02", d.DueDate); err == nil {
|
||||
todo.Props.SetDate(ical.PropDue, due)
|
||||
}
|
||||
|
||||
// Map status
|
||||
switch d.Status {
|
||||
case "completed":
|
||||
todo.Props.SetText(ical.PropStatus, "COMPLETED")
|
||||
if d.CompletedAt != nil {
|
||||
todo.Props.SetDateTime(ical.PropCompleted, d.CompletedAt.UTC())
|
||||
}
|
||||
case "pending":
|
||||
todo.Props.SetText(ical.PropStatus, "NEEDS-ACTION")
|
||||
default:
|
||||
todo.Props.SetText(ical.PropStatus, "IN-PROCESS")
|
||||
}
|
||||
|
||||
cal.Children = append(cal.Children, todo)
|
||||
|
||||
path := calendarObjectPath(cfg.CalendarPath, uid)
|
||||
obj, err := client.PutCalendarObject(ctx, path, cal)
|
||||
if err != nil {
|
||||
return fmt.Errorf("putting VTODO: %w", err)
|
||||
}
|
||||
|
||||
// Update caldav_uid and etag in DB
|
||||
return s.updateDeadlineCalDAV(d.ID, uid, obj.ETag)
|
||||
}
|
||||
|
||||
// PushAppointment pushes a single appointment to CalDAV (called on create/update).
|
||||
func (s *CalDAVService) PushAppointment(ctx context.Context, tenantID uuid.UUID, appointment *models.Appointment) error {
|
||||
cfg, err := s.LoadTenantConfig(tenantID)
|
||||
if err != nil || !cfg.SyncEnabled {
|
||||
return nil
|
||||
}
|
||||
client, err := newCalDAVClient(*cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating CalDAV client: %w", err)
|
||||
}
|
||||
return s.pushAppointment(ctx, client, *cfg, appointment)
|
||||
}
|
||||
|
||||
func (s *CalDAVService) pushAppointment(ctx context.Context, client *caldav.Client, cfg CalDAVConfig, a *models.Appointment) error {
|
||||
uid := appointmentUID(a.ID)
|
||||
|
||||
cal := ical.NewCalendar()
|
||||
cal.Props.SetText(ical.PropProductID, calDAVProdID)
|
||||
cal.Props.SetText(ical.PropVersion, "2.0")
|
||||
|
||||
event := ical.NewEvent()
|
||||
event.Props.SetText(ical.PropUID, uid)
|
||||
event.Props.SetText(ical.PropSummary, a.Title)
|
||||
event.Props.SetDateTime(ical.PropDateTimeStamp, time.Now().UTC())
|
||||
event.Props.SetDateTime(ical.PropDateTimeStart, a.StartAt.UTC())
|
||||
|
||||
if a.EndAt != nil {
|
||||
event.Props.SetDateTime(ical.PropDateTimeEnd, a.EndAt.UTC())
|
||||
}
|
||||
if a.Description != nil {
|
||||
event.Props.SetText(ical.PropDescription, *a.Description)
|
||||
}
|
||||
if a.Location != nil {
|
||||
event.Props.SetText(ical.PropLocation, *a.Location)
|
||||
}
|
||||
|
||||
cal.Children = append(cal.Children, event.Component)
|
||||
|
||||
path := calendarObjectPath(cfg.CalendarPath, uid)
|
||||
obj, err := client.PutCalendarObject(ctx, path, cal)
|
||||
if err != nil {
|
||||
return fmt.Errorf("putting VEVENT: %w", err)
|
||||
}
|
||||
|
||||
return s.updateAppointmentCalDAV(a.ID, uid, obj.ETag)
|
||||
}
|
||||
|
||||
// DeleteDeadlineCalDAV removes a deadline's VTODO from CalDAV.
|
||||
func (s *CalDAVService) DeleteDeadlineCalDAV(ctx context.Context, tenantID uuid.UUID, deadline *models.Deadline) error {
|
||||
if deadline.CalDAVUID == nil || *deadline.CalDAVUID == "" {
|
||||
return nil
|
||||
}
|
||||
cfg, err := s.LoadTenantConfig(tenantID)
|
||||
if err != nil || !cfg.SyncEnabled {
|
||||
return nil
|
||||
}
|
||||
client, err := newCalDAVClient(*cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating CalDAV client: %w", err)
|
||||
}
|
||||
path := calendarObjectPath(cfg.CalendarPath, *deadline.CalDAVUID)
|
||||
return client.RemoveAll(ctx, path)
|
||||
}
|
||||
|
||||
// DeleteAppointmentCalDAV removes an appointment's VEVENT from CalDAV.
|
||||
func (s *CalDAVService) DeleteAppointmentCalDAV(ctx context.Context, tenantID uuid.UUID, appointment *models.Appointment) error {
|
||||
if appointment.CalDAVUID == nil || *appointment.CalDAVUID == "" {
|
||||
return nil
|
||||
}
|
||||
cfg, err := s.LoadTenantConfig(tenantID)
|
||||
if err != nil || !cfg.SyncEnabled {
|
||||
return nil
|
||||
}
|
||||
client, err := newCalDAVClient(*cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating CalDAV client: %w", err)
|
||||
}
|
||||
path := calendarObjectPath(cfg.CalendarPath, *appointment.CalDAVUID)
|
||||
return client.RemoveAll(ctx, path)
|
||||
}
|
||||
|
||||
// --- Pull: CalDAV -> Local ---
|
||||
|
||||
// pullAll fetches all calendar objects from CalDAV and reconciles with local DB.
|
||||
func (s *CalDAVService) pullAll(ctx context.Context, client *caldav.Client, tenantID uuid.UUID, cfg CalDAVConfig) (int, []string) {
|
||||
var pulled int
|
||||
var errs []string
|
||||
|
||||
query := &caldav.CalendarQuery{
|
||||
CompFilter: caldav.CompFilter{
|
||||
Name: ical.CompCalendar,
|
||||
},
|
||||
}
|
||||
|
||||
objects, err := client.QueryCalendar(ctx, cfg.CalendarPath, query)
|
||||
if err != nil {
|
||||
return 0, []string{fmt.Sprintf("querying calendar: %v", err)}
|
||||
}
|
||||
|
||||
for _, obj := range objects {
|
||||
if obj.Data == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, child := range obj.Data.Children {
|
||||
switch child.Name {
|
||||
case ical.CompToDo:
|
||||
uid, _ := child.Props.Text(ical.PropUID)
|
||||
if uid == "" || !isKanzlAIUID(uid, "deadline") {
|
||||
continue
|
||||
}
|
||||
if err := s.reconcileDeadline(ctx, tenantID, child, obj.ETag); err != nil {
|
||||
errs = append(errs, fmt.Sprintf("reconcile deadline %s: %v", uid, err))
|
||||
} else {
|
||||
pulled++
|
||||
}
|
||||
case ical.CompEvent:
|
||||
uid, _ := child.Props.Text(ical.PropUID)
|
||||
if uid == "" || !isKanzlAIUID(uid, "appointment") {
|
||||
continue
|
||||
}
|
||||
if err := s.reconcileAppointment(ctx, tenantID, child, obj.ETag); err != nil {
|
||||
errs = append(errs, fmt.Sprintf("reconcile appointment %s: %v", uid, err))
|
||||
} else {
|
||||
pulled++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return pulled, errs
|
||||
}
|
||||
|
||||
// reconcileDeadline handles conflict resolution for a pulled VTODO.
|
||||
// KanzlAI wins for dates/status, CalDAV wins for notes/description.
|
||||
func (s *CalDAVService) reconcileDeadline(ctx context.Context, tenantID uuid.UUID, comp *ical.Component, remoteEtag string) error {
|
||||
uid, _ := comp.Props.Text(ical.PropUID)
|
||||
deadlineID := extractIDFromUID(uid, "deadline")
|
||||
if deadlineID == uuid.Nil {
|
||||
return fmt.Errorf("invalid UID: %s", uid)
|
||||
}
|
||||
|
||||
// Load existing deadline
|
||||
var d models.Deadline
|
||||
err := s.db.Get(&d, `SELECT id, tenant_id, case_id, title, description, due_date, original_due_date,
|
||||
warning_date, source, rule_id, status, completed_at,
|
||||
caldav_uid, caldav_etag, notes, created_at, updated_at
|
||||
FROM deadlines WHERE id = $1 AND tenant_id = $2`, deadlineID, tenantID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("loading deadline: %w", err)
|
||||
}
|
||||
|
||||
// Check if remote changed (etag mismatch)
|
||||
if d.CalDAVEtag != nil && *d.CalDAVEtag == remoteEtag {
|
||||
return nil // No change
|
||||
}
|
||||
|
||||
// CalDAV wins for description/notes
|
||||
description, _ := comp.Props.Text(ical.PropDescription)
|
||||
hasConflict := false
|
||||
|
||||
if description != "" {
|
||||
existingDesc := ""
|
||||
if d.Description != nil {
|
||||
existingDesc = *d.Description
|
||||
}
|
||||
existingNotes := ""
|
||||
if d.Notes != nil {
|
||||
existingNotes = *d.Notes
|
||||
}
|
||||
// CalDAV wins for notes/description
|
||||
if description != existingDesc && description != existingNotes {
|
||||
hasConflict = true
|
||||
_, err = s.db.Exec(`UPDATE deadlines SET notes = $1, caldav_etag = $2, updated_at = NOW()
|
||||
WHERE id = $3 AND tenant_id = $4`, description, remoteEtag, deadlineID, tenantID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("updating deadline notes: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !hasConflict {
|
||||
// Just update etag
|
||||
_, err = s.db.Exec(`UPDATE deadlines SET caldav_etag = $1, updated_at = NOW()
|
||||
WHERE id = $2 AND tenant_id = $3`, remoteEtag, deadlineID, tenantID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("updating deadline etag: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Log conflict in case_events if detected
|
||||
if hasConflict {
|
||||
s.logConflictEvent(ctx, tenantID, d.CaseID, "deadline", deadlineID, "CalDAV description updated from remote")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// reconcileAppointment handles conflict resolution for a pulled VEVENT.
// CalDAV wins for description and location; all other fields stay local.
// remoteEtag is stored so unchanged objects are skipped on the next pull.
func (s *CalDAVService) reconcileAppointment(ctx context.Context, tenantID uuid.UUID, comp *ical.Component, remoteEtag string) error {
	uid, _ := comp.Props.Text(ical.PropUID)
	appointmentID := extractIDFromUID(uid, "appointment")
	if appointmentID == uuid.Nil {
		return fmt.Errorf("invalid UID: %s", uid)
	}

	// Load the existing appointment; remote-only VEVENTs are not imported.
	var a models.Appointment
	err := s.db.GetContext(ctx, &a, `SELECT * FROM appointments WHERE id = $1 AND tenant_id = $2`, appointmentID, tenantID)
	if err != nil {
		return fmt.Errorf("loading appointment: %w", err)
	}

	// Matching ETag means the server copy is unchanged since our push.
	if a.CalDAVEtag != nil && *a.CalDAVEtag == remoteEtag {
		return nil
	}

	// CalDAV wins for description
	description, _ := comp.Props.Text(ical.PropDescription)
	location, _ := comp.Props.Text(ical.PropLocation)
	hasConflict := false

	// Build the SET clause dynamically; $1 is always the new ETag and argN
	// tracks the next positional placeholder as columns are appended.
	updates := []string{"caldav_etag = $1", "updated_at = NOW()"}
	args := []any{remoteEtag}
	argN := 2

	if description != "" {
		existingDesc := ""
		if a.Description != nil {
			existingDesc = *a.Description
		}
		if description != existingDesc {
			hasConflict = true
			updates = append(updates, fmt.Sprintf("description = $%d", argN))
			args = append(args, description)
			argN++
		}
	}
	if location != "" {
		existingLoc := ""
		if a.Location != nil {
			existingLoc = *a.Location
		}
		if location != existingLoc {
			hasConflict = true
			updates = append(updates, fmt.Sprintf("location = $%d", argN))
			args = append(args, location)
			argN++
		}
	}

	// The last two placeholders are the WHERE-clause keys; the UPDATE runs
	// even without a conflict so the new ETag is always recorded.
	args = append(args, appointmentID, tenantID)
	query := fmt.Sprintf("UPDATE appointments SET %s WHERE id = $%d AND tenant_id = $%d",
		strings.Join(updates, ", "), argN, argN+1)

	if _, err := s.db.ExecContext(ctx, query, args...); err != nil {
		return fmt.Errorf("updating appointment: %w", err)
	}

	// Surface the conflict in the case timeline (skipped for case-less
	// appointments inside logConflictEvent).
	if hasConflict {
		caseID := uuid.Nil
		if a.CaseID != nil {
			caseID = *a.CaseID
		}
		s.logConflictEvent(ctx, tenantID, caseID, "appointment", appointmentID, "CalDAV description/location updated from remote")
	}

	return nil
}
|
||||
|
||||
// --- DB helpers ---
|
||||
|
||||
func (s *CalDAVService) loadDeadlines(tenantID uuid.UUID) ([]models.Deadline, error) {
|
||||
var deadlines []models.Deadline
|
||||
err := s.db.Select(&deadlines, `SELECT id, tenant_id, case_id, title, description, due_date,
|
||||
original_due_date, warning_date, source, rule_id, status, completed_at,
|
||||
caldav_uid, caldav_etag, notes, created_at, updated_at
|
||||
FROM deadlines WHERE tenant_id = $1`, tenantID)
|
||||
return deadlines, err
|
||||
}
|
||||
|
||||
func (s *CalDAVService) loadAppointments(ctx context.Context, tenantID uuid.UUID) ([]models.Appointment, error) {
|
||||
var appointments []models.Appointment
|
||||
err := s.db.SelectContext(ctx, &appointments, "SELECT * FROM appointments WHERE tenant_id = $1", tenantID)
|
||||
return appointments, err
|
||||
}
|
||||
|
||||
func (s *CalDAVService) updateDeadlineCalDAV(id uuid.UUID, calDAVUID, etag string) error {
|
||||
_, err := s.db.Exec(`UPDATE deadlines SET caldav_uid = $1, caldav_etag = $2, updated_at = NOW()
|
||||
WHERE id = $3`, calDAVUID, etag, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *CalDAVService) updateAppointmentCalDAV(id uuid.UUID, calDAVUID, etag string) error {
|
||||
_, err := s.db.Exec(`UPDATE appointments SET caldav_uid = $1, caldav_etag = $2, updated_at = NOW()
|
||||
WHERE id = $3`, calDAVUID, etag, id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *CalDAVService) logConflictEvent(ctx context.Context, tenantID, caseID uuid.UUID, objectType string, objectID uuid.UUID, msg string) {
|
||||
if caseID == uuid.Nil {
|
||||
return
|
||||
}
|
||||
metadata, _ := json.Marshal(map[string]string{
|
||||
"object_type": objectType,
|
||||
"object_id": objectID.String(),
|
||||
"source": "caldav_sync",
|
||||
})
|
||||
_, err := s.db.ExecContext(ctx, `INSERT INTO case_events (id, tenant_id, case_id, event_type, title, description, metadata, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, 'caldav_conflict', $4, $5, $6, NOW(), NOW())`,
|
||||
uuid.New(), tenantID, caseID, "CalDAV sync conflict", msg, metadata)
|
||||
if err != nil {
|
||||
slog.Error("CalDAV: failed to log conflict event", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
// --- UID helpers ---
|
||||
|
||||
func deadlineUID(id uuid.UUID) string {
|
||||
return fmt.Sprintf("kanzlai-deadline-%s@%s", id, calDAVDomain)
|
||||
}
|
||||
|
||||
func appointmentUID(id uuid.UUID) string {
|
||||
return fmt.Sprintf("kanzlai-appointment-%s@%s", id, calDAVDomain)
|
||||
}
|
||||
|
||||
func isKanzlAIUID(uid, objectType string) bool {
|
||||
return strings.HasPrefix(uid, "kanzlai-"+objectType+"-") && strings.HasSuffix(uid, "@"+calDAVDomain)
|
||||
}
|
||||
|
||||
func extractIDFromUID(uid, objectType string) uuid.UUID {
|
||||
prefix := "kanzlai-" + objectType + "-"
|
||||
suffix := "@" + calDAVDomain
|
||||
if !strings.HasPrefix(uid, prefix) || !strings.HasSuffix(uid, suffix) {
|
||||
return uuid.Nil
|
||||
}
|
||||
idStr := uid[len(prefix) : len(uid)-len(suffix)]
|
||||
id, err := uuid.Parse(idStr)
|
||||
if err != nil {
|
||||
return uuid.Nil
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// calendarObjectPath joins the calendar collection path and the object UID
// into the .ics resource path, avoiding a doubled slash.
func calendarObjectPath(calendarPath, uid string) string {
	base := strings.TrimSuffix(calendarPath, "/")
	return fmt.Sprintf("%s/%s.ics", base, uid)
}
|
||||
124
backend/internal/services/caldav_service_test.go
Normal file
124
backend/internal/services/caldav_service_test.go
Normal file
@@ -0,0 +1,124 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
func TestDeadlineUID(t *testing.T) {
|
||||
id := uuid.MustParse("550e8400-e29b-41d4-a716-446655440000")
|
||||
uid := deadlineUID(id)
|
||||
want := "kanzlai-deadline-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de"
|
||||
if uid != want {
|
||||
t.Errorf("deadlineUID = %q, want %q", uid, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppointmentUID(t *testing.T) {
|
||||
id := uuid.MustParse("550e8400-e29b-41d4-a716-446655440000")
|
||||
uid := appointmentUID(id)
|
||||
want := "kanzlai-appointment-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de"
|
||||
if uid != want {
|
||||
t.Errorf("appointmentUID = %q, want %q", uid, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsKanzlAIUID(t *testing.T) {
|
||||
tests := []struct {
|
||||
uid string
|
||||
objectType string
|
||||
want bool
|
||||
}{
|
||||
{"kanzlai-deadline-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de", "deadline", true},
|
||||
{"kanzlai-appointment-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de", "appointment", true},
|
||||
{"kanzlai-deadline-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de", "appointment", false},
|
||||
{"random-uid@other.com", "deadline", false},
|
||||
{"", "deadline", false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
got := isKanzlAIUID(tt.uid, tt.objectType)
|
||||
if got != tt.want {
|
||||
t.Errorf("isKanzlAIUID(%q, %q) = %v, want %v", tt.uid, tt.objectType, got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestExtractIDFromUID(t *testing.T) {
|
||||
id := uuid.MustParse("550e8400-e29b-41d4-a716-446655440000")
|
||||
|
||||
tests := []struct {
|
||||
uid string
|
||||
objectType string
|
||||
want uuid.UUID
|
||||
}{
|
||||
{"kanzlai-deadline-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de", "deadline", id},
|
||||
{"kanzlai-appointment-550e8400-e29b-41d4-a716-446655440000@kanzlai.msbls.de", "appointment", id},
|
||||
{"invalid-uid", "deadline", uuid.Nil},
|
||||
{"kanzlai-deadline-not-a-uuid@kanzlai.msbls.de", "deadline", uuid.Nil},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
got := extractIDFromUID(tt.uid, tt.objectType)
|
||||
if got != tt.want {
|
||||
t.Errorf("extractIDFromUID(%q, %q) = %v, want %v", tt.uid, tt.objectType, got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalendarObjectPath(t *testing.T) {
|
||||
tests := []struct {
|
||||
calendarPath string
|
||||
uid string
|
||||
want string
|
||||
}{
|
||||
{"/dav/calendars/user/cal", "kanzlai-deadline-abc@kanzlai.msbls.de", "/dav/calendars/user/cal/kanzlai-deadline-abc@kanzlai.msbls.de.ics"},
|
||||
{"/dav/calendars/user/cal/", "kanzlai-deadline-abc@kanzlai.msbls.de", "/dav/calendars/user/cal/kanzlai-deadline-abc@kanzlai.msbls.de.ics"},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
got := calendarObjectPath(tt.calendarPath, tt.uid)
|
||||
if got != tt.want {
|
||||
t.Errorf("calendarObjectPath(%q, %q) = %q, want %q", tt.calendarPath, tt.uid, got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCalDAVConfig(t *testing.T) {
|
||||
settings := []byte(`{"caldav": {"url": "https://dav.example.com", "username": "user", "password": "pass", "calendar_path": "/cal", "sync_enabled": true, "sync_interval_minutes": 30}}`)
|
||||
cfg, err := parseCalDAVConfig(settings)
|
||||
if err != nil {
|
||||
t.Fatalf("parseCalDAVConfig: %v", err)
|
||||
}
|
||||
if cfg.URL != "https://dav.example.com" {
|
||||
t.Errorf("URL = %q, want %q", cfg.URL, "https://dav.example.com")
|
||||
}
|
||||
if cfg.Username != "user" {
|
||||
t.Errorf("Username = %q, want %q", cfg.Username, "user")
|
||||
}
|
||||
if cfg.SyncIntervalMinutes != 30 {
|
||||
t.Errorf("SyncIntervalMinutes = %d, want 30", cfg.SyncIntervalMinutes)
|
||||
}
|
||||
if !cfg.SyncEnabled {
|
||||
t.Error("SyncEnabled = false, want true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCalDAVConfig_Empty(t *testing.T) {
|
||||
cfg, err := parseCalDAVConfig(nil)
|
||||
if err != nil {
|
||||
t.Fatalf("parseCalDAVConfig(nil): %v", err)
|
||||
}
|
||||
if cfg.URL != "" {
|
||||
t.Errorf("expected empty config, got URL=%q", cfg.URL)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCalDAVConfig_NoCalDAV(t *testing.T) {
|
||||
settings := []byte(`{"other_setting": true}`)
|
||||
cfg, err := parseCalDAVConfig(settings)
|
||||
if err != nil {
|
||||
t.Fatalf("parseCalDAVConfig: %v", err)
|
||||
}
|
||||
if cfg.URL != "" {
|
||||
t.Errorf("expected empty caldav config, got URL=%q", cfg.URL)
|
||||
}
|
||||
}
|
||||
@@ -14,10 +14,11 @@ import (
|
||||
|
||||
type CaseService struct {
|
||||
db *sqlx.DB
|
||||
audit *AuditService
|
||||
}
|
||||
|
||||
func NewCaseService(db *sqlx.DB) *CaseService {
|
||||
return &CaseService{db: db}
|
||||
func NewCaseService(db *sqlx.DB, audit *AuditService) *CaseService {
|
||||
return &CaseService{db: db, audit: audit}
|
||||
}
|
||||
|
||||
type CaseFilter struct {
|
||||
@@ -162,6 +163,9 @@ func (s *CaseService) Create(ctx context.Context, tenantID uuid.UUID, userID uui
|
||||
if err := s.db.GetContext(ctx, &c, "SELECT * FROM cases WHERE id = $1", id); err != nil {
|
||||
return nil, fmt.Errorf("fetching created case: %w", err)
|
||||
}
|
||||
|
||||
s.audit.Log(ctx, "create", "case", &id, nil, c)
|
||||
|
||||
return &c, nil
|
||||
}
|
||||
|
||||
@@ -239,6 +243,9 @@ func (s *CaseService) Update(ctx context.Context, tenantID, caseID uuid.UUID, us
|
||||
if err := s.db.GetContext(ctx, &updated, "SELECT * FROM cases WHERE id = $1", caseID); err != nil {
|
||||
return nil, fmt.Errorf("fetching updated case: %w", err)
|
||||
}
|
||||
|
||||
s.audit.Log(ctx, "update", "case", &caseID, current, updated)
|
||||
|
||||
return &updated, nil
|
||||
}
|
||||
|
||||
@@ -254,6 +261,7 @@ func (s *CaseService) Delete(ctx context.Context, tenantID, caseID uuid.UUID, us
|
||||
return sql.ErrNoRows
|
||||
}
|
||||
createEvent(ctx, s.db, tenantID, caseID, userID, "case_archived", "Case archived", nil)
|
||||
s.audit.Log(ctx, "delete", "case", &caseID, map[string]string{"status": "active"}, map[string]string{"status": "archived"})
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
154
backend/internal/services/dashboard_service.go
Normal file
154
backend/internal/services/dashboard_service.go
Normal file
@@ -0,0 +1,154 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
// DashboardService aggregates tenant-scoped dashboard data from the database.
type DashboardService struct {
	db *sqlx.DB
}
|
||||
|
||||
func NewDashboardService(db *sqlx.DB) *DashboardService {
|
||||
return &DashboardService{db: db}
|
||||
}
|
||||
|
||||
// DashboardData is the full payload returned by DashboardService.Get:
// aggregate counts plus the next week's deadlines/appointments and the most
// recent case events.
type DashboardData struct {
	DeadlineSummary      DeadlineSummary       `json:"deadline_summary"`
	CaseSummary          CaseSummary           `json:"case_summary"`
	UpcomingDeadlines    []UpcomingDeadline    `json:"upcoming_deadlines"`
	UpcomingAppointments []UpcomingAppointment `json:"upcoming_appointments"`
	RecentActivity       []RecentActivity      `json:"recent_activity"`
}
|
||||
|
||||
// DeadlineSummary buckets pending deadlines by urgency relative to today
// (counts are computed in the summary CTE of DashboardService.Get).
type DeadlineSummary struct {
	OverdueCount int `json:"overdue_count" db:"overdue_count"` // due before today
	DueThisWeek  int `json:"due_this_week" db:"due_this_week"`
	DueNextWeek  int `json:"due_next_week" db:"due_next_week"`
	OKCount      int `json:"ok_count" db:"ok_count"` // due after next week
}
|
||||
|
||||
// CaseSummary holds aggregate case counts for the dashboard header.
type CaseSummary struct {
	ActiveCount  int `json:"active_count" db:"active_count"`
	NewThisMonth int `json:"new_this_month" db:"new_this_month"` // created this calendar month, excluding archived
	ClosedCount  int `json:"closed_count" db:"closed_count"`     // status 'closed' or 'archived'
}
|
||||
|
||||
// UpcomingDeadline is one row of the "next 7 days" deadline list, joined
// with its case for display.
type UpcomingDeadline struct {
	ID         uuid.UUID `json:"id" db:"id"`
	Title      string    `json:"title" db:"title"`
	DueDate    string    `json:"due_date" db:"due_date"` // "YYYY-MM-DD"
	CaseID     uuid.UUID `json:"case_id" db:"case_id"`
	CaseNumber string    `json:"case_number" db:"case_number"`
	CaseTitle  string    `json:"case_title" db:"case_title"`
	Status     string    `json:"status" db:"status"`
}
|
||||
|
||||
// UpcomingAppointment is one row of the "next 7 days" appointment list.
// CaseNumber is nil for appointments without a case (LEFT JOIN).
type UpcomingAppointment struct {
	ID         uuid.UUID `json:"id" db:"id"`
	Title      string    `json:"title" db:"title"`
	StartAt    time.Time `json:"start_at" db:"start_at"`
	CaseNumber *string   `json:"case_number" db:"case_number"`
	Location   *string   `json:"location" db:"location"`
}
|
||||
|
||||
// RecentActivity is one row of the recent case-event feed.
type RecentActivity struct {
	ID         uuid.UUID  `json:"id" db:"id"`
	EventType  *string    `json:"event_type" db:"event_type"`
	Title      string     `json:"title" db:"title"`
	CaseID     uuid.UUID  `json:"case_id" db:"case_id"`
	CaseNumber string     `json:"case_number" db:"case_number"`
	EventDate  *time.Time `json:"event_date" db:"event_date"` // nil → ordering falls back to created_at
}
|
||||
|
||||
// Get assembles the tenant dashboard: deadline/case summary counts, the next
// seven days of deadlines and appointments, and the ten most recent case
// events. Every query is scoped to tenantID.
func (s *DashboardService) Get(ctx context.Context, tenantID uuid.UUID) (*DashboardData, error) {
	// Date boundaries formatted "YYYY-MM-DD" for comparison against DATE
	// columns. "This week" runs through the upcoming Sunday.
	// NOTE(review): when today IS Sunday, Weekday()==0 makes endOfWeek the
	// NEXT Sunday — confirm that is the intended week boundary.
	now := time.Now()
	today := now.Format("2006-01-02")
	endOfWeek := now.AddDate(0, 0, 7-int(now.Weekday())).Format("2006-01-02")
	endOfNextWeek := now.AddDate(0, 0, 14-int(now.Weekday())).Format("2006-01-02")
	in7Days := now.AddDate(0, 0, 7).Format("2006-01-02")
	startOfMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location()).Format("2006-01-02")

	data := &DashboardData{}

	// Single query with CTEs for deadline + case summaries
	summaryQuery := `
		WITH deadline_stats AS (
			SELECT
				COUNT(*) FILTER (WHERE due_date < $2 AND status = 'pending') AS overdue_count,
				COUNT(*) FILTER (WHERE due_date >= $2 AND due_date <= $3 AND status = 'pending') AS due_this_week,
				COUNT(*) FILTER (WHERE due_date > $3 AND due_date <= $4 AND status = 'pending') AS due_next_week,
				COUNT(*) FILTER (WHERE due_date > $4 AND status = 'pending') AS ok_count
			FROM deadlines
			WHERE tenant_id = $1
		),
		case_stats AS (
			SELECT
				COUNT(*) FILTER (WHERE status = 'active') AS active_count,
				COUNT(*) FILTER (WHERE created_at >= $5::date AND status != 'archived') AS new_this_month,
				COUNT(*) FILTER (WHERE status IN ('closed', 'archived')) AS closed_count
			FROM cases
			WHERE tenant_id = $1
		)
		SELECT
			ds.overdue_count, ds.due_this_week, ds.due_next_week, ds.ok_count,
			cs.active_count, cs.new_this_month, cs.closed_count
		FROM deadline_stats ds, case_stats cs`

	// Embedded structs let sqlx scan both summaries out of the single row.
	var summaryRow struct {
		DeadlineSummary
		CaseSummary
	}
	err := s.db.GetContext(ctx, &summaryRow, summaryQuery, tenantID, today, endOfWeek, endOfNextWeek, startOfMonth)
	if err != nil {
		return nil, fmt.Errorf("dashboard summary: %w", err)
	}
	data.DeadlineSummary = summaryRow.DeadlineSummary
	data.CaseSummary = summaryRow.CaseSummary

	// Upcoming deadlines (next 7 days)
	deadlineQuery := `
		SELECT d.id, d.title, d.due_date, d.case_id, c.case_number, c.title AS case_title, d.status
		FROM deadlines d
		JOIN cases c ON c.id = d.case_id AND c.tenant_id = d.tenant_id
		WHERE d.tenant_id = $1 AND d.status = 'pending' AND d.due_date >= $2 AND d.due_date <= $3
		ORDER BY d.due_date ASC`

	// Pre-initialize slices so empty results serialize as [] rather than null.
	data.UpcomingDeadlines = []UpcomingDeadline{}
	if err := s.db.SelectContext(ctx, &data.UpcomingDeadlines, deadlineQuery, tenantID, today, in7Days); err != nil {
		return nil, fmt.Errorf("dashboard upcoming deadlines: %w", err)
	}

	// Upcoming appointments (next 7 days); LEFT JOIN because an appointment
	// may not belong to a case.
	appointmentQuery := `
		SELECT a.id, a.title, a.start_at, c.case_number, a.location
		FROM appointments a
		LEFT JOIN cases c ON c.id = a.case_id AND c.tenant_id = a.tenant_id
		WHERE a.tenant_id = $1 AND a.start_at >= $2::timestamp AND a.start_at < ($2::date + interval '7 days')
		ORDER BY a.start_at ASC`

	data.UpcomingAppointments = []UpcomingAppointment{}
	if err := s.db.SelectContext(ctx, &data.UpcomingAppointments, appointmentQuery, tenantID, now); err != nil {
		return nil, fmt.Errorf("dashboard upcoming appointments: %w", err)
	}

	// Recent activity (last 10 case events)
	activityQuery := `
		SELECT ce.id, ce.event_type, ce.title, ce.case_id, c.case_number, ce.event_date
		FROM case_events ce
		JOIN cases c ON c.id = ce.case_id AND c.tenant_id = ce.tenant_id
		WHERE ce.tenant_id = $1
		ORDER BY COALESCE(ce.event_date, ce.created_at) DESC
		LIMIT 10`

	data.RecentActivity = []RecentActivity{}
	if err := s.db.SelectContext(ctx, &data.RecentActivity, activityQuery, tenantID); err != nil {
		return nil, fmt.Errorf("dashboard recent activity: %w", err)
	}

	return data, nil
}
|
||||
33
backend/internal/services/dashboard_service_test.go
Normal file
33
backend/internal/services/dashboard_service_test.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TestDashboardDateCalculations mirrors the date math inside Get() for a
// fixed Wednesday and pins all five derived boundaries.
func TestDashboardDateCalculations(t *testing.T) {
	now := time.Date(2026, 3, 25, 14, 0, 0, 0, time.UTC) // Wednesday

	got := map[string]string{
		"today":         now.Format("2006-01-02"),
		"endOfWeek":     now.AddDate(0, 0, 7-int(now.Weekday())).Format("2006-01-02"),
		"endOfNextWeek": now.AddDate(0, 0, 14-int(now.Weekday())).Format("2006-01-02"),
		"in7Days":       now.AddDate(0, 0, 7).Format("2006-01-02"),
		"startOfMonth":  time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location()).Format("2006-01-02"),
	}
	want := map[string]string{
		"today":         "2026-03-25",
		"endOfWeek":     "2026-03-29", // Sunday
		"endOfNextWeek": "2026-04-05",
		"in7Days":       "2026-04-01",
		"startOfMonth":  "2026-03-01",
	}

	for name, w := range want {
		if got[name] != w {
			t.Errorf("%s = %s, want %s", name, got[name], w)
		}
	}
}
|
||||
99
backend/internal/services/deadline_calculator.go
Normal file
99
backend/internal/services/deadline_calculator.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// CalculatedDeadline holds a calculated deadline with adjustment info
type CalculatedDeadline struct {
	RuleCode        string `json:"rule_code"` // rule's short code, "" when the rule has none
	RuleID          string `json:"rule_id"`
	Title           string `json:"title"`
	DueDate         string `json:"due_date"`          // "YYYY-MM-DD", after non-working-day adjustment
	OriginalDueDate string `json:"original_due_date"` // "YYYY-MM-DD", before adjustment
	WasAdjusted     bool   `json:"was_adjusted"`      // true when DueDate differs from OriginalDueDate
}
|
||||
|
||||
// DeadlineCalculator calculates deadlines from rules and event dates
type DeadlineCalculator struct {
	holidays *HolidayService // used to shift results off weekends/holidays
}
|
||||
|
||||
// NewDeadlineCalculator creates a new calculator
|
||||
func NewDeadlineCalculator(holidays *HolidayService) *DeadlineCalculator {
|
||||
return &DeadlineCalculator{holidays: holidays}
|
||||
}
|
||||
|
||||
// CalculateEndDate calculates the end date for a single deadline rule based on an event date.
|
||||
// Adapted from youpc.org CalculateDeadlineEndDate.
|
||||
func (c *DeadlineCalculator) CalculateEndDate(eventDate time.Time, rule models.DeadlineRule) (adjusted time.Time, original time.Time, wasAdjusted bool) {
|
||||
endDate := eventDate
|
||||
|
||||
timing := "after"
|
||||
if rule.Timing != nil {
|
||||
timing = *rule.Timing
|
||||
}
|
||||
|
||||
durationValue := rule.DurationValue
|
||||
durationUnit := rule.DurationUnit
|
||||
|
||||
if timing == "before" {
|
||||
switch durationUnit {
|
||||
case "days":
|
||||
endDate = endDate.AddDate(0, 0, -durationValue)
|
||||
case "weeks":
|
||||
endDate = endDate.AddDate(0, 0, -durationValue*7)
|
||||
case "months":
|
||||
endDate = endDate.AddDate(0, -durationValue, 0)
|
||||
}
|
||||
} else {
|
||||
switch durationUnit {
|
||||
case "days":
|
||||
endDate = endDate.AddDate(0, 0, durationValue)
|
||||
case "weeks":
|
||||
endDate = endDate.AddDate(0, 0, durationValue*7)
|
||||
case "months":
|
||||
endDate = endDate.AddDate(0, durationValue, 0)
|
||||
}
|
||||
}
|
||||
|
||||
original = endDate
|
||||
adjusted, _, wasAdjusted = c.holidays.AdjustForNonWorkingDays(endDate)
|
||||
return adjusted, original, wasAdjusted
|
||||
}
|
||||
|
||||
// CalculateFromRules calculates deadlines for a set of rules given an event date.
|
||||
// Returns a list of calculated deadlines with due dates.
|
||||
func (c *DeadlineCalculator) CalculateFromRules(eventDate time.Time, rules []models.DeadlineRule) []CalculatedDeadline {
|
||||
results := make([]CalculatedDeadline, 0, len(rules))
|
||||
|
||||
for _, rule := range rules {
|
||||
var adjusted, original time.Time
|
||||
var wasAdjusted bool
|
||||
|
||||
if rule.DurationValue > 0 {
|
||||
adjusted, original, wasAdjusted = c.CalculateEndDate(eventDate, rule)
|
||||
} else {
|
||||
adjusted = eventDate
|
||||
original = eventDate
|
||||
}
|
||||
|
||||
code := ""
|
||||
if rule.Code != nil {
|
||||
code = *rule.Code
|
||||
}
|
||||
|
||||
results = append(results, CalculatedDeadline{
|
||||
RuleCode: code,
|
||||
RuleID: rule.ID.String(),
|
||||
Title: rule.Name,
|
||||
DueDate: adjusted.Format("2006-01-02"),
|
||||
OriginalDueDate: original.Format("2006-01-02"),
|
||||
WasAdjusted: wasAdjusted,
|
||||
})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
141
backend/internal/services/deadline_calculator_test.go
Normal file
141
backend/internal/services/deadline_calculator_test.go
Normal file
@@ -0,0 +1,141 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
func TestCalculateEndDateAfterDays(t *testing.T) {
|
||||
holidays := NewHolidayService(nil)
|
||||
calc := NewDeadlineCalculator(holidays)
|
||||
|
||||
eventDate := time.Date(2026, 3, 25, 0, 0, 0, 0, time.UTC) // Wednesday
|
||||
timing := "after"
|
||||
rule := models.DeadlineRule{
|
||||
ID: uuid.New(),
|
||||
Name: "Test 10 days",
|
||||
DurationValue: 10,
|
||||
DurationUnit: "days",
|
||||
Timing: &timing,
|
||||
}
|
||||
|
||||
adjusted, original, wasAdjusted := calc.CalculateEndDate(eventDate, rule)
|
||||
|
||||
// 25 March + 10 days = 4 April 2026 (Saturday)
|
||||
// Apr 5 = Easter Sunday (holiday), Apr 6 = Easter Monday (holiday) -> adjusted to 7 April (Tuesday)
|
||||
expectedOriginal := time.Date(2026, 4, 4, 0, 0, 0, 0, time.UTC)
|
||||
expectedAdjusted := time.Date(2026, 4, 7, 0, 0, 0, 0, time.UTC)
|
||||
|
||||
if original != expectedOriginal {
|
||||
t.Errorf("original should be %s, got %s", expectedOriginal, original)
|
||||
}
|
||||
if adjusted != expectedAdjusted {
|
||||
t.Errorf("adjusted should be %s, got %s", expectedAdjusted, adjusted)
|
||||
}
|
||||
if !wasAdjusted {
|
||||
t.Error("should have been adjusted (Saturday)")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalculateEndDateBeforeMonths(t *testing.T) {
|
||||
holidays := NewHolidayService(nil)
|
||||
calc := NewDeadlineCalculator(holidays)
|
||||
|
||||
eventDate := time.Date(2026, 6, 15, 0, 0, 0, 0, time.UTC) // Monday
|
||||
timing := "before"
|
||||
rule := models.DeadlineRule{
|
||||
ID: uuid.New(),
|
||||
Name: "Test 2 months before",
|
||||
DurationValue: 2,
|
||||
DurationUnit: "months",
|
||||
Timing: &timing,
|
||||
}
|
||||
|
||||
adjusted, original, wasAdjusted := calc.CalculateEndDate(eventDate, rule)
|
||||
|
||||
// 15 June - 2 months = 15 April 2026 (Wednesday)
|
||||
expected := time.Date(2026, 4, 15, 0, 0, 0, 0, time.UTC)
|
||||
|
||||
if original != expected {
|
||||
t.Errorf("original should be %s, got %s", expected, original)
|
||||
}
|
||||
if adjusted != expected {
|
||||
t.Errorf("adjusted should be %s (not a holiday/weekend), got %s", expected, adjusted)
|
||||
}
|
||||
if wasAdjusted {
|
||||
t.Error("should not have been adjusted (Wednesday)")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalculateEndDateWeeks(t *testing.T) {
|
||||
holidays := NewHolidayService(nil)
|
||||
calc := NewDeadlineCalculator(holidays)
|
||||
|
||||
eventDate := time.Date(2026, 3, 25, 0, 0, 0, 0, time.UTC) // Wednesday
|
||||
timing := "after"
|
||||
rule := models.DeadlineRule{
|
||||
ID: uuid.New(),
|
||||
Name: "Test 2 weeks",
|
||||
DurationValue: 2,
|
||||
DurationUnit: "weeks",
|
||||
Timing: &timing,
|
||||
}
|
||||
|
||||
adjusted, original, _ := calc.CalculateEndDate(eventDate, rule)
|
||||
|
||||
// 25 March + 14 days = 8 April 2026 (Wednesday)
|
||||
expected := time.Date(2026, 4, 8, 0, 0, 0, 0, time.UTC)
|
||||
if original != expected {
|
||||
t.Errorf("original should be %s, got %s", expected, original)
|
||||
}
|
||||
if adjusted != expected {
|
||||
t.Errorf("adjusted should be %s, got %s", expected, adjusted)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalculateFromRules(t *testing.T) {
|
||||
holidays := NewHolidayService(nil)
|
||||
calc := NewDeadlineCalculator(holidays)
|
||||
|
||||
eventDate := time.Date(2026, 3, 25, 0, 0, 0, 0, time.UTC)
|
||||
timing := "after"
|
||||
code := "TEST-1"
|
||||
|
||||
rules := []models.DeadlineRule{
|
||||
{
|
||||
ID: uuid.New(),
|
||||
Code: &code,
|
||||
Name: "Rule A",
|
||||
DurationValue: 7,
|
||||
DurationUnit: "days",
|
||||
Timing: &timing,
|
||||
},
|
||||
{
|
||||
ID: uuid.New(),
|
||||
Name: "Rule B (zero duration)",
|
||||
DurationValue: 0,
|
||||
DurationUnit: "days",
|
||||
},
|
||||
}
|
||||
|
||||
results := calc.CalculateFromRules(eventDate, rules)
|
||||
if len(results) != 2 {
|
||||
t.Fatalf("expected 2 results, got %d", len(results))
|
||||
}
|
||||
|
||||
// Rule A: 25 March + 7 = 1 April (Wednesday)
|
||||
if results[0].DueDate != "2026-04-01" {
|
||||
t.Errorf("Rule A due date should be 2026-04-01, got %s", results[0].DueDate)
|
||||
}
|
||||
if results[0].RuleCode != "TEST-1" {
|
||||
t.Errorf("Rule A code should be TEST-1, got %s", results[0].RuleCode)
|
||||
}
|
||||
|
||||
// Rule B: zero duration -> event date
|
||||
if results[1].DueDate != "2026-03-25" {
|
||||
t.Errorf("Rule B due date should be 2026-03-25, got %s", results[1].DueDate)
|
||||
}
|
||||
}
|
||||
175
backend/internal/services/deadline_rule_service.go
Normal file
175
backend/internal/services/deadline_rule_service.go
Normal file
@@ -0,0 +1,175 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// DeadlineRuleService handles deadline rule queries against the
// deadline_rules / proceeding_types tables. It is read-only.
type DeadlineRuleService struct {
	db *sqlx.DB
}

// NewDeadlineRuleService creates a new deadline rule service using the
// given database handle.
func NewDeadlineRuleService(db *sqlx.DB) *DeadlineRuleService {
	return &DeadlineRuleService{db: db}
}
|
||||
|
||||
// List returns deadline rules, optionally filtered by proceeding type
|
||||
func (s *DeadlineRuleService) List(proceedingTypeID *int) ([]models.DeadlineRule, error) {
|
||||
var rules []models.DeadlineRule
|
||||
var err error
|
||||
|
||||
if proceedingTypeID != nil {
|
||||
err = s.db.Select(&rules,
|
||||
`SELECT id, proceeding_type_id, parent_id, code, name, description,
|
||||
primary_party, event_type, is_mandatory, duration_value, duration_unit,
|
||||
timing, rule_code, deadline_notes, sequence_order, condition_rule_id,
|
||||
alt_duration_value, alt_duration_unit, alt_rule_code, is_active,
|
||||
created_at, updated_at
|
||||
FROM deadline_rules
|
||||
WHERE proceeding_type_id = $1 AND is_active = true
|
||||
ORDER BY sequence_order`, *proceedingTypeID)
|
||||
} else {
|
||||
err = s.db.Select(&rules,
|
||||
`SELECT id, proceeding_type_id, parent_id, code, name, description,
|
||||
primary_party, event_type, is_mandatory, duration_value, duration_unit,
|
||||
timing, rule_code, deadline_notes, sequence_order, condition_rule_id,
|
||||
alt_duration_value, alt_duration_unit, alt_rule_code, is_active,
|
||||
created_at, updated_at
|
||||
FROM deadline_rules
|
||||
WHERE is_active = true
|
||||
ORDER BY proceeding_type_id, sequence_order`)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("listing deadline rules: %w", err)
|
||||
}
|
||||
return rules, nil
|
||||
}
|
||||
|
||||
// RuleTreeNode represents a deadline rule with its children, used to
// expose the parent_id hierarchy as a nested JSON tree.
type RuleTreeNode struct {
	models.DeadlineRule
	// Children are the rules whose ParentID points at this rule;
	// omitted from JSON when empty.
	Children []RuleTreeNode `json:"children,omitempty"`
}
|
||||
|
||||
// GetRuleTree returns a hierarchical tree of rules for a proceeding type.
//
// The human-readable proceeding type code is resolved to its numeric ID
// first; then all active rules for that type are loaded (ordered by
// sequence_order) and assembled into parent/child nodes via buildTree.
// Inactive types and rules are excluded. A missing/inactive code surfaces
// as the wrapped sql error from the first query.
func (s *DeadlineRuleService) GetRuleTree(proceedingTypeCode string) ([]RuleTreeNode, error) {
	// First resolve proceeding type code to ID
	var pt models.ProceedingType
	err := s.db.Get(&pt,
		`SELECT id, code, name, description, jurisdiction, default_color, sort_order, is_active
		 FROM proceeding_types
		 WHERE code = $1 AND is_active = true`, proceedingTypeCode)
	if err != nil {
		return nil, fmt.Errorf("resolving proceeding type %q: %w", proceedingTypeCode, err)
	}

	// Get all rules for this proceeding type
	var rules []models.DeadlineRule
	err = s.db.Select(&rules,
		`SELECT id, proceeding_type_id, parent_id, code, name, description,
		        primary_party, event_type, is_mandatory, duration_value, duration_unit,
		        timing, rule_code, deadline_notes, sequence_order, condition_rule_id,
		        alt_duration_value, alt_duration_unit, alt_rule_code, is_active,
		        created_at, updated_at
		 FROM deadline_rules
		 WHERE proceeding_type_id = $1 AND is_active = true
		 ORDER BY sequence_order`, pt.ID)
	if err != nil {
		return nil, fmt.Errorf("listing rules for type %q: %w", proceedingTypeCode, err)
	}

	return buildTree(rules), nil
}
|
||||
|
||||
// GetByIDs returns active deadline rules matching the given IDs, ordered
// by sequence_order. Returns (nil, nil) for an empty ID list. IDs present
// in the input but not in the table are silently omitted.
func (s *DeadlineRuleService) GetByIDs(ids []string) ([]models.DeadlineRule, error) {
	if len(ids) == 0 {
		return nil, nil
	}

	// sqlx.In expands the IN (?) placeholder to one '?' per ID; Rebind
	// then converts '?' placeholders to the driver's format ($1, $2, ...).
	query, args, err := sqlx.In(
		`SELECT id, proceeding_type_id, parent_id, code, name, description,
		        primary_party, event_type, is_mandatory, duration_value, duration_unit,
		        timing, rule_code, deadline_notes, sequence_order, condition_rule_id,
		        alt_duration_value, alt_duration_unit, alt_rule_code, is_active,
		        created_at, updated_at
		 FROM deadline_rules
		 WHERE id IN (?) AND is_active = true
		 ORDER BY sequence_order`, ids)
	if err != nil {
		return nil, fmt.Errorf("building IN query: %w", err)
	}
	query = s.db.Rebind(query)

	var rules []models.DeadlineRule
	err = s.db.Select(&rules, query, args...)
	if err != nil {
		return nil, fmt.Errorf("fetching rules by IDs: %w", err)
	}
	return rules, nil
}
|
||||
|
||||
// GetRulesForProceedingType returns all active rules for a proceeding
// type ID, ordered by sequence_order.
// NOTE(review): this duplicates the filtered branch of List; consider
// delegating to List(&proceedingTypeID) to keep the column list in one place.
func (s *DeadlineRuleService) GetRulesForProceedingType(proceedingTypeID int) ([]models.DeadlineRule, error) {
	var rules []models.DeadlineRule
	err := s.db.Select(&rules,
		`SELECT id, proceeding_type_id, parent_id, code, name, description,
		        primary_party, event_type, is_mandatory, duration_value, duration_unit,
		        timing, rule_code, deadline_notes, sequence_order, condition_rule_id,
		        alt_duration_value, alt_duration_unit, alt_rule_code, is_active,
		        created_at, updated_at
		 FROM deadline_rules
		 WHERE proceeding_type_id = $1 AND is_active = true
		 ORDER BY sequence_order`, proceedingTypeID)
	if err != nil {
		return nil, fmt.Errorf("listing rules for proceeding type %d: %w", proceedingTypeID, err)
	}
	return rules, nil
}
|
||||
|
||||
// ListProceedingTypes returns all active proceeding types
|
||||
func (s *DeadlineRuleService) ListProceedingTypes() ([]models.ProceedingType, error) {
|
||||
var types []models.ProceedingType
|
||||
err := s.db.Select(&types,
|
||||
`SELECT id, code, name, description, jurisdiction, default_color, sort_order, is_active
|
||||
FROM proceeding_types
|
||||
WHERE is_active = true
|
||||
ORDER BY sort_order`)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("listing proceeding types: %w", err)
|
||||
}
|
||||
return types, nil
|
||||
}
|
||||
|
||||
// buildTree converts a flat list of rules into a hierarchical tree
|
||||
func buildTree(rules []models.DeadlineRule) []RuleTreeNode {
|
||||
nodeMap := make(map[string]*RuleTreeNode, len(rules))
|
||||
var roots []RuleTreeNode
|
||||
|
||||
// Create nodes
|
||||
for _, r := range rules {
|
||||
node := RuleTreeNode{DeadlineRule: r}
|
||||
nodeMap[r.ID.String()] = &node
|
||||
}
|
||||
|
||||
// Build tree
|
||||
for _, r := range rules {
|
||||
node := nodeMap[r.ID.String()]
|
||||
if r.ParentID != nil {
|
||||
parentKey := r.ParentID.String()
|
||||
if parent, ok := nodeMap[parentKey]; ok {
|
||||
parent.Children = append(parent.Children, *node)
|
||||
continue
|
||||
}
|
||||
}
|
||||
roots = append(roots, *node)
|
||||
}
|
||||
|
||||
return roots
|
||||
}
|
||||
203
backend/internal/services/deadline_service.go
Normal file
203
backend/internal/services/deadline_service.go
Normal file
@@ -0,0 +1,203 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// DeadlineService handles CRUD operations for case deadlines, scoped by
// tenant. Writes are recorded through the audit service.
type DeadlineService struct {
	db    *sqlx.DB
	audit *AuditService
}

// NewDeadlineService creates a new deadline service with the given
// database handle and audit logger.
func NewDeadlineService(db *sqlx.DB, audit *AuditService) *DeadlineService {
	return &DeadlineService{db: db, audit: audit}
}
|
||||
|
||||
// ListAll returns all deadlines for a tenant, ordered by due_date
|
||||
func (s *DeadlineService) ListAll(tenantID uuid.UUID) ([]models.Deadline, error) {
|
||||
query := `SELECT id, tenant_id, case_id, title, description, due_date, original_due_date,
|
||||
warning_date, source, rule_id, status, completed_at,
|
||||
caldav_uid, caldav_etag, notes, created_at, updated_at
|
||||
FROM deadlines
|
||||
WHERE tenant_id = $1
|
||||
ORDER BY due_date ASC`
|
||||
|
||||
var deadlines []models.Deadline
|
||||
err := s.db.Select(&deadlines, query, tenantID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("listing all deadlines: %w", err)
|
||||
}
|
||||
return deadlines, nil
|
||||
}
|
||||
|
||||
// ListForCase returns all deadlines for a case, scoped to tenant
|
||||
func (s *DeadlineService) ListForCase(tenantID, caseID uuid.UUID) ([]models.Deadline, error) {
|
||||
query := `SELECT id, tenant_id, case_id, title, description, due_date, original_due_date,
|
||||
warning_date, source, rule_id, status, completed_at,
|
||||
caldav_uid, caldav_etag, notes, created_at, updated_at
|
||||
FROM deadlines
|
||||
WHERE tenant_id = $1 AND case_id = $2
|
||||
ORDER BY due_date ASC`
|
||||
|
||||
var deadlines []models.Deadline
|
||||
err := s.db.Select(&deadlines, query, tenantID, caseID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("listing deadlines for case: %w", err)
|
||||
}
|
||||
return deadlines, nil
|
||||
}
|
||||
|
||||
// GetByID returns a single deadline by ID, scoped to tenant
|
||||
func (s *DeadlineService) GetByID(tenantID, deadlineID uuid.UUID) (*models.Deadline, error) {
|
||||
query := `SELECT id, tenant_id, case_id, title, description, due_date, original_due_date,
|
||||
warning_date, source, rule_id, status, completed_at,
|
||||
caldav_uid, caldav_etag, notes, created_at, updated_at
|
||||
FROM deadlines
|
||||
WHERE tenant_id = $1 AND id = $2`
|
||||
|
||||
var d models.Deadline
|
||||
err := s.db.Get(&d, query, tenantID, deadlineID)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, fmt.Errorf("getting deadline: %w", err)
|
||||
}
|
||||
return &d, nil
|
||||
}
|
||||
|
||||
// CreateDeadlineInput holds the fields for creating a deadline.
// DueDate and WarningDate are date strings passed through to the database
// (presumably "YYYY-MM-DD" — confirm against callers).
type CreateDeadlineInput struct {
	CaseID      uuid.UUID  `json:"case_id"`
	Title       string     `json:"title"`
	Description *string    `json:"description,omitempty"`
	DueDate     string     `json:"due_date"`
	WarningDate *string    `json:"warning_date,omitempty"`
	// Source defaults to "manual" when empty (see Create).
	Source string     `json:"source"`
	RuleID *uuid.UUID `json:"rule_id,omitempty"`
	Notes  *string    `json:"notes,omitempty"`
}
|
||||
|
||||
// Create inserts a new deadline
|
||||
func (s *DeadlineService) Create(ctx context.Context, tenantID uuid.UUID, input CreateDeadlineInput) (*models.Deadline, error) {
|
||||
id := uuid.New()
|
||||
source := input.Source
|
||||
if source == "" {
|
||||
source = "manual"
|
||||
}
|
||||
|
||||
query := `INSERT INTO deadlines (id, tenant_id, case_id, title, description, due_date,
|
||||
warning_date, source, rule_id, status, notes,
|
||||
created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, 'pending', $10, NOW(), NOW())
|
||||
RETURNING id, tenant_id, case_id, title, description, due_date, original_due_date,
|
||||
warning_date, source, rule_id, status, completed_at,
|
||||
caldav_uid, caldav_etag, notes, created_at, updated_at`
|
||||
|
||||
var d models.Deadline
|
||||
err := s.db.Get(&d, query, id, tenantID, input.CaseID, input.Title, input.Description,
|
||||
input.DueDate, input.WarningDate, source, input.RuleID, input.Notes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating deadline: %w", err)
|
||||
}
|
||||
s.audit.Log(ctx, "create", "deadline", &id, nil, d)
|
||||
return &d, nil
|
||||
}
|
||||
|
||||
// UpdateDeadlineInput holds the fields for updating a deadline.
// All fields are optional; nil means "leave unchanged" (the UPDATE uses
// COALESCE, so a field can never be reset to NULL through this input).
type UpdateDeadlineInput struct {
	Title       *string    `json:"title,omitempty"`
	Description *string    `json:"description,omitempty"`
	DueDate     *string    `json:"due_date,omitempty"`
	WarningDate *string    `json:"warning_date,omitempty"`
	Notes       *string    `json:"notes,omitempty"`
	Status      *string    `json:"status,omitempty"`
	RuleID      *uuid.UUID `json:"rule_id,omitempty"`
}
|
||||
|
||||
// Update modifies an existing deadline; nil input fields are left
// unchanged via COALESCE (which also means a field cannot be cleared to
// NULL through this method). Returns (nil, nil) when the deadline does
// not exist for the tenant. The change is audit-logged with the
// pre-update row as the old value.
//
// NOTE(review): the existence check and the UPDATE are two separate
// statements, so a concurrent delete between them makes the UPDATE
// return sql.ErrNoRows, surfaced as a wrapped error rather than nil.
func (s *DeadlineService) Update(ctx context.Context, tenantID, deadlineID uuid.UUID, input UpdateDeadlineInput) (*models.Deadline, error) {
	// First check it exists and belongs to tenant
	existing, err := s.GetByID(tenantID, deadlineID)
	if err != nil {
		return nil, err
	}
	if existing == nil {
		return nil, nil
	}

	query := `UPDATE deadlines SET
	              title = COALESCE($1, title),
	              description = COALESCE($2, description),
	              due_date = COALESCE($3, due_date),
	              warning_date = COALESCE($4, warning_date),
	              notes = COALESCE($5, notes),
	              status = COALESCE($6, status),
	              rule_id = COALESCE($7, rule_id),
	              updated_at = NOW()
	          WHERE id = $8 AND tenant_id = $9
	          RETURNING id, tenant_id, case_id, title, description, due_date, original_due_date,
	                    warning_date, source, rule_id, status, completed_at,
	                    caldav_uid, caldav_etag, notes, created_at, updated_at`

	var d models.Deadline
	err = s.db.Get(&d, query, input.Title, input.Description, input.DueDate,
		input.WarningDate, input.Notes, input.Status, input.RuleID,
		deadlineID, tenantID)
	if err != nil {
		return nil, fmt.Errorf("updating deadline: %w", err)
	}
	s.audit.Log(ctx, "update", "deadline", &deadlineID, existing, d)
	return &d, nil
}
|
||||
|
||||
// Complete marks a deadline as completed, stamping completed_at with the
// current time. Returns (nil, nil) when the deadline does not exist for
// the tenant.
//
// NOTE(review): the audit entry hard-codes the old status as "pending",
// which is wrong if the deadline was in another state — consider logging
// the actual previous status.
func (s *DeadlineService) Complete(ctx context.Context, tenantID, deadlineID uuid.UUID) (*models.Deadline, error) {
	query := `UPDATE deadlines SET
	              status = 'completed',
	              completed_at = $1,
	              updated_at = NOW()
	          WHERE id = $2 AND tenant_id = $3
	          RETURNING id, tenant_id, case_id, title, description, due_date, original_due_date,
	                    warning_date, source, rule_id, status, completed_at,
	                    caldav_uid, caldav_etag, notes, created_at, updated_at`

	var d models.Deadline
	err := s.db.Get(&d, query, time.Now(), deadlineID, tenantID)
	if err != nil {
		if err == sql.ErrNoRows {
			return nil, nil
		}
		return nil, fmt.Errorf("completing deadline: %w", err)
	}
	s.audit.Log(ctx, "update", "deadline", &deadlineID, map[string]string{"status": "pending"}, map[string]string{"status": "completed"})
	return &d, nil
}
|
||||
|
||||
// Delete removes a deadline
|
||||
func (s *DeadlineService) Delete(ctx context.Context, tenantID, deadlineID uuid.UUID) error {
|
||||
query := `DELETE FROM deadlines WHERE id = $1 AND tenant_id = $2`
|
||||
result, err := s.db.Exec(query, deadlineID, tenantID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("deleting deadline: %w", err)
|
||||
}
|
||||
rows, err := result.RowsAffected()
|
||||
if err != nil {
|
||||
return fmt.Errorf("checking delete result: %w", err)
|
||||
}
|
||||
if rows == 0 {
|
||||
return fmt.Errorf("deadline not found")
|
||||
}
|
||||
s.audit.Log(ctx, "delete", "deadline", &deadlineID, nil, nil)
|
||||
return nil
|
||||
}
|
||||
166
backend/internal/services/document_service.go
Normal file
166
backend/internal/services/document_service.go
Normal file
@@ -0,0 +1,166 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
// documentBucket is the Supabase Storage bucket all case documents live in.
const documentBucket = "kanzlai-documents"

// DocumentService manages document metadata rows and their binary
// payloads in object storage; writes are audit-logged.
type DocumentService struct {
	db      *sqlx.DB
	storage *StorageClient
	audit   *AuditService
}

// NewDocumentService wires the database, storage client and audit logger
// into a document service.
func NewDocumentService(db *sqlx.DB, storage *StorageClient, audit *AuditService) *DocumentService {
	return &DocumentService{db: db, storage: storage, audit: audit}
}

// CreateDocumentInput carries the metadata and payload for an upload.
// Filename/ContentType/Size/Data have no json tags: they come from the
// multipart upload, not the JSON body.
type CreateDocumentInput struct {
	Title       string `json:"title"`
	DocType     string `json:"doc_type"`
	Filename    string
	ContentType string
	Size        int
	// Data streams the file content; read exactly once during Create.
	Data io.Reader
}
|
||||
|
||||
func (s *DocumentService) ListByCase(ctx context.Context, tenantID, caseID uuid.UUID) ([]models.Document, error) {
|
||||
var docs []models.Document
|
||||
err := s.db.SelectContext(ctx, &docs,
|
||||
"SELECT * FROM documents WHERE tenant_id = $1 AND case_id = $2 ORDER BY created_at DESC",
|
||||
tenantID, caseID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("listing documents: %w", err)
|
||||
}
|
||||
return docs, nil
|
||||
}
|
||||
|
||||
func (s *DocumentService) GetByID(ctx context.Context, tenantID, docID uuid.UUID) (*models.Document, error) {
|
||||
var doc models.Document
|
||||
err := s.db.GetContext(ctx, &doc,
|
||||
"SELECT * FROM documents WHERE id = $1 AND tenant_id = $2", docID, tenantID)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, fmt.Errorf("getting document: %w", err)
|
||||
}
|
||||
return &doc, nil
|
||||
}
|
||||
|
||||
// Create uploads a document payload to object storage and records its
// metadata row, after verifying the case belongs to the tenant.
//
// Ordering matters: upload first, then insert; on insert failure the
// uploaded object is removed best-effort so storage doesn't accumulate
// orphans. A "document_uploaded" case event is emitted (its result is
// ignored — presumably best-effort; confirm createEvent's contract), and
// the create is audit-logged.
func (s *DocumentService) Create(ctx context.Context, tenantID, caseID, userID uuid.UUID, input CreateDocumentInput) (*models.Document, error) {
	// Verify case belongs to tenant
	var caseExists int
	if err := s.db.GetContext(ctx, &caseExists,
		"SELECT COUNT(*) FROM cases WHERE id = $1 AND tenant_id = $2",
		caseID, tenantID); err != nil {
		return nil, fmt.Errorf("verifying case: %w", err)
	}
	if caseExists == 0 {
		return nil, fmt.Errorf("case not found")
	}

	id := uuid.New()
	// Path layout: tenant/case/docID_filename — keeps objects grouped and
	// unique even when filenames collide.
	storagePath := fmt.Sprintf("%s/%s/%s_%s", tenantID, caseID, id, input.Filename)

	// Upload to Supabase Storage
	if err := s.storage.Upload(ctx, documentBucket, storagePath, input.ContentType, input.Data); err != nil {
		return nil, fmt.Errorf("uploading file: %w", err)
	}

	// Insert metadata record
	now := time.Now()
	_, err := s.db.ExecContext(ctx,
		`INSERT INTO documents (id, tenant_id, case_id, title, doc_type, file_path, file_size, mime_type, uploaded_by, created_at, updated_at)
		 VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $10)`,
		id, tenantID, caseID, input.Title, nilIfEmpty(input.DocType), storagePath, input.Size, input.ContentType, userID, now)
	if err != nil {
		// Best effort: clean up uploaded file
		_ = s.storage.Delete(ctx, documentBucket, []string{storagePath})
		return nil, fmt.Errorf("inserting document record: %w", err)
	}

	// Log case event
	createEvent(ctx, s.db, tenantID, caseID, userID, "document_uploaded",
		fmt.Sprintf("Document uploaded: %s", input.Title), nil)

	// Re-read the row so DB-populated columns come back to the caller.
	var doc models.Document
	if err := s.db.GetContext(ctx, &doc, "SELECT * FROM documents WHERE id = $1", id); err != nil {
		return nil, fmt.Errorf("fetching created document: %w", err)
	}
	s.audit.Log(ctx, "create", "document", &id, nil, doc)
	return &doc, nil
}
|
||||
|
||||
// Download streams a document's payload from storage.
//
// Returns (body, contentType, filename, error); the filename returned is
// the document's Title, not the original upload filename. The stored
// mime_type wins over whatever content type the storage backend reports.
// The caller owns closing the returned body.
func (s *DocumentService) Download(ctx context.Context, tenantID, docID uuid.UUID) (io.ReadCloser, string, string, error) {
	doc, err := s.GetByID(ctx, tenantID, docID)
	if err != nil {
		return nil, "", "", err
	}
	if doc == nil {
		return nil, "", "", fmt.Errorf("document not found")
	}
	if doc.FilePath == nil {
		return nil, "", "", fmt.Errorf("document has no file")
	}

	body, contentType, err := s.storage.Download(ctx, documentBucket, *doc.FilePath)
	if err != nil {
		return nil, "", "", fmt.Errorf("downloading file: %w", err)
	}

	// Use stored mime_type if available, fall back to storage response
	if doc.MimeType != nil && *doc.MimeType != "" {
		contentType = *doc.MimeType
	}

	return body, contentType, doc.Title, nil
}
|
||||
|
||||
// Delete removes a document's stored file and its metadata row, emits a
// "document_deleted" case event, and audit-logs the deletion.
//
// Returns sql.ErrNoRows when the document does not exist for the tenant
// (NOTE(review): DeadlineService.Delete returns a plain "not found"
// error instead — callers must handle both conventions). Storage deletion
// happens before the DB delete; if storage succeeds but the DB delete
// fails, the row survives pointing at a missing object.
func (s *DocumentService) Delete(ctx context.Context, tenantID, docID, userID uuid.UUID) error {
	doc, err := s.GetByID(ctx, tenantID, docID)
	if err != nil {
		return err
	}
	if doc == nil {
		return sql.ErrNoRows
	}

	// Delete from storage
	if doc.FilePath != nil {
		if err := s.storage.Delete(ctx, documentBucket, []string{*doc.FilePath}); err != nil {
			return fmt.Errorf("deleting file from storage: %w", err)
		}
	}

	// Delete database record
	_, err = s.db.ExecContext(ctx,
		"DELETE FROM documents WHERE id = $1 AND tenant_id = $2", docID, tenantID)
	if err != nil {
		return fmt.Errorf("deleting document record: %w", err)
	}

	// Log case event
	createEvent(ctx, s.db, tenantID, doc.CaseID, userID, "document_deleted",
		fmt.Sprintf("Document deleted: %s", doc.Title), nil)
	s.audit.Log(ctx, "delete", "document", &docID, doc, nil)

	return nil
}
|
||||
|
||||
// nilIfEmpty maps "" to nil so empty strings are stored as SQL NULL;
// any other value is returned as a pointer.
func nilIfEmpty(s string) *string {
	if len(s) == 0 {
		return nil
	}
	return &s
}
|
||||
193
backend/internal/services/holidays.go
Normal file
193
backend/internal/services/holidays.go
Normal file
@@ -0,0 +1,193 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
// Holiday represents a non-working day.
type Holiday struct {
	Date       time.Time
	Name       string
	IsVacation bool // Part of court vacation period
	IsClosure  bool // Single-day closure (public holiday)
}

// HolidayService manages holiday data and non-working day checks.
// db may be nil; in that case only the hardcoded German federal holidays
// are used. NOTE(review): the cache map is not guarded by a mutex —
// confirm the service is not shared across concurrent requests, or add one.
type HolidayService struct {
	db *sqlx.DB
	// Cached holidays by year
	cache map[int][]Holiday
}

// NewHolidayService creates a holiday service with an empty per-year cache.
func NewHolidayService(db *sqlx.DB) *HolidayService {
	return &HolidayService{
		db:    db,
		cache: make(map[int][]Holiday),
	}
}

// dbHoliday matches the holidays table schema.
type dbHoliday struct {
	ID          int       `db:"id"`
	Date        time.Time `db:"date"`
	Name        string    `db:"name"`
	Country     string    `db:"country"`
	State       *string   `db:"state"`
	HolidayType string    `db:"holiday_type"`
}
|
||||
|
||||
// LoadHolidaysForYear loads holidays from DB for a given year, merges with
// German federal holidays, and caches the result.
//
// A failed DB query is deliberately swallowed: the hardcoded federal
// holidays are always merged in afterwards (deduplicated by calendar
// date), so deadline adjustment degrades rather than fails. The merged
// list is cached per year for the life of the service; the error return
// is currently always nil.
func (s *HolidayService) LoadHolidaysForYear(year int) ([]Holiday, error) {
	if cached, ok := s.cache[year]; ok {
		return cached, nil
	}

	holidays := make([]Holiday, 0, 30)

	// Load from DB if available
	if s.db != nil {
		var dbHolidays []dbHoliday
		err := s.db.Select(&dbHolidays,
			`SELECT id, date, name, country, state, holiday_type
			 FROM holidays
			 WHERE EXTRACT(YEAR FROM date) = $1
			 ORDER BY date`, year)
		if err == nil {
			for _, h := range dbHolidays {
				holidays = append(holidays, Holiday{
					Date:       h.Date,
					Name:       h.Name,
					IsClosure:  h.HolidayType == "public_holiday" || h.HolidayType == "closure",
					IsVacation: h.HolidayType == "vacation",
				})
			}
		}
		// If DB query fails, fall through to hardcoded holidays
	}

	// Always add German federal holidays (if not already present from DB)
	federal := germanFederalHolidays(year)
	existing := make(map[string]bool, len(holidays))
	for _, h := range holidays {
		existing[h.Date.Format("2006-01-02")] = true
	}
	for _, h := range federal {
		key := h.Date.Format("2006-01-02")
		if !existing[key] {
			holidays = append(holidays, h)
		}
	}

	s.cache[year] = holidays
	return holidays, nil
}
|
||||
|
||||
// IsHoliday checks if a date is a holiday
|
||||
func (s *HolidayService) IsHoliday(date time.Time) *Holiday {
|
||||
year := date.Year()
|
||||
holidays, err := s.LoadHolidaysForYear(year)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
dateStr := date.Format("2006-01-02")
|
||||
for i := range holidays {
|
||||
if holidays[i].Date.Format("2006-01-02") == dateStr {
|
||||
return &holidays[i]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// IsNonWorkingDay returns true if the date is a weekend or holiday
|
||||
func (s *HolidayService) IsNonWorkingDay(date time.Time) bool {
|
||||
wd := date.Weekday()
|
||||
if wd == time.Saturday || wd == time.Sunday {
|
||||
return true
|
||||
}
|
||||
return s.IsHoliday(date) != nil
|
||||
}
|
||||
|
||||
// AdjustForNonWorkingDays moves the date to the next working day
|
||||
// if it falls on a weekend or holiday.
|
||||
// Returns adjusted date, original date, and whether adjustment was made.
|
||||
func (s *HolidayService) AdjustForNonWorkingDays(date time.Time) (adjusted time.Time, original time.Time, wasAdjusted bool) {
|
||||
original = date
|
||||
adjusted = date
|
||||
|
||||
// Safety limit: max 30 days forward
|
||||
for i := 0; i < 30 && s.IsNonWorkingDay(adjusted); i++ {
|
||||
adjusted = adjusted.AddDate(0, 0, 1)
|
||||
wasAdjusted = true
|
||||
}
|
||||
|
||||
return adjusted, original, wasAdjusted
|
||||
}
|
||||
|
||||
// ClearCache clears the holiday cache (useful after DB updates)
|
||||
func (s *HolidayService) ClearCache() {
|
||||
s.cache = make(map[int][]Holiday)
|
||||
}
|
||||
|
||||
// germanFederalHolidays returns all German federal public holidays for a year.
|
||||
// These are holidays observed in all 16 German states.
|
||||
func germanFederalHolidays(year int) []Holiday {
|
||||
easterMonth, easterDay := CalculateEasterSunday(year)
|
||||
easter := time.Date(year, time.Month(easterMonth), easterDay, 0, 0, 0, 0, time.UTC)
|
||||
|
||||
holidays := []Holiday{
|
||||
{Date: time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC), Name: "Neujahr", IsClosure: true},
|
||||
{Date: easter.AddDate(0, 0, -2), Name: "Karfreitag", IsClosure: true},
|
||||
{Date: easter, Name: "Ostersonntag", IsClosure: true},
|
||||
{Date: easter.AddDate(0, 0, 1), Name: "Ostermontag", IsClosure: true},
|
||||
{Date: time.Date(year, time.May, 1, 0, 0, 0, 0, time.UTC), Name: "Tag der Arbeit", IsClosure: true},
|
||||
{Date: easter.AddDate(0, 0, 39), Name: "Christi Himmelfahrt", IsClosure: true},
|
||||
{Date: easter.AddDate(0, 0, 49), Name: "Pfingstsonntag", IsClosure: true},
|
||||
{Date: easter.AddDate(0, 0, 50), Name: "Pfingstmontag", IsClosure: true},
|
||||
{Date: time.Date(year, time.October, 3, 0, 0, 0, 0, time.UTC), Name: "Tag der Deutschen Einheit", IsClosure: true},
|
||||
{Date: time.Date(year, time.December, 25, 0, 0, 0, 0, time.UTC), Name: "1. Weihnachtstag", IsClosure: true},
|
||||
{Date: time.Date(year, time.December, 26, 0, 0, 0, 0, time.UTC), Name: "2. Weihnachtstag", IsClosure: true},
|
||||
}
|
||||
|
||||
return holidays
|
||||
}
|
||||
|
||||
// CalculateEasterSunday computes the date of Easter Sunday for the given year
// using the Anonymous Gregorian (Meeus/Jones/Butcher) algorithm.
// Returns month (1-12) and day of month.
func CalculateEasterSunday(year int) (int, int) {
	golden := year % 19 // position in the 19-year Metonic cycle
	century := year / 100
	remainder := year % 100

	centuryLeap := century / 4
	centuryMod := century % 4
	correction := (century + 8) / 25
	lunarShift := (century - correction + 1) / 3
	epact := (19*golden + century - centuryLeap - lunarShift + 15) % 30

	quarter := remainder / 4
	quarterMod := remainder % 4
	weekday := (32 + 2*centuryMod + 2*quarter - epact - quarterMod) % 7
	adjustment := (golden + 11*epact + 22*weekday) / 451

	// offset encodes both month and day of Easter Sunday.
	offset := epact + weekday - 7*adjustment + 114
	return offset / 31, offset%31 + 1
}
|
||||
|
||||
// GetHolidaysForYear returns all holidays for a year (for API exposure)
|
||||
func (s *HolidayService) GetHolidaysForYear(year int) ([]Holiday, error) {
|
||||
return s.LoadHolidaysForYear(year)
|
||||
}
|
||||
|
||||
// FormatHolidayList returns a simple string representation of holidays for debugging
|
||||
func FormatHolidayList(holidays []Holiday) string {
|
||||
var b strings.Builder
|
||||
for _, h := range holidays {
|
||||
fmt.Fprintf(&b, "%s: %s\n", h.Date.Format("2006-01-02"), h.Name)
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
121
backend/internal/services/holidays_test.go
Normal file
121
backend/internal/services/holidays_test.go
Normal file
@@ -0,0 +1,121 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestCalculateEasterSunday(t *testing.T) {
|
||||
tests := []struct {
|
||||
year int
|
||||
wantMonth int
|
||||
wantDay int
|
||||
}{
|
||||
{2024, 3, 31},
|
||||
{2025, 4, 20},
|
||||
{2026, 4, 5},
|
||||
{2027, 3, 28},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
m, d := CalculateEasterSunday(tt.year)
|
||||
if m != tt.wantMonth || d != tt.wantDay {
|
||||
t.Errorf("CalculateEasterSunday(%d) = %d-%02d, want %d-%02d",
|
||||
tt.year, m, d, tt.wantMonth, tt.wantDay)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGermanFederalHolidays(t *testing.T) {
|
||||
holidays := germanFederalHolidays(2026)
|
||||
|
||||
// Should have 11 federal holidays
|
||||
if len(holidays) != 11 {
|
||||
t.Fatalf("expected 11 federal holidays, got %d", len(holidays))
|
||||
}
|
||||
|
||||
// Check Neujahr
|
||||
if holidays[0].Name != "Neujahr" {
|
||||
t.Errorf("first holiday should be Neujahr, got %s", holidays[0].Name)
|
||||
}
|
||||
if holidays[0].Date != time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC) {
|
||||
t.Errorf("Neujahr should be Jan 1, got %s", holidays[0].Date)
|
||||
}
|
||||
|
||||
// Check Karfreitag 2026 (Easter = Apr 5, so Good Friday = Apr 3)
|
||||
found := false
|
||||
for _, h := range holidays {
|
||||
if h.Name == "Karfreitag" {
|
||||
found = true
|
||||
expected := time.Date(2026, 4, 3, 0, 0, 0, 0, time.UTC)
|
||||
if h.Date != expected {
|
||||
t.Errorf("Karfreitag 2026 should be %s, got %s", expected, h.Date)
|
||||
}
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("Karfreitag not found in holidays")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHolidayServiceIsNonWorkingDay(t *testing.T) {
|
||||
svc := NewHolidayService(nil) // no DB, uses hardcoded holidays
|
||||
|
||||
// Saturday
|
||||
sat := time.Date(2026, 3, 28, 0, 0, 0, 0, time.UTC)
|
||||
if !svc.IsNonWorkingDay(sat) {
|
||||
t.Error("Saturday should be non-working day")
|
||||
}
|
||||
|
||||
// Sunday
|
||||
sun := time.Date(2026, 3, 29, 0, 0, 0, 0, time.UTC)
|
||||
if !svc.IsNonWorkingDay(sun) {
|
||||
t.Error("Sunday should be non-working day")
|
||||
}
|
||||
|
||||
// Regular Monday
|
||||
mon := time.Date(2026, 3, 23, 0, 0, 0, 0, time.UTC)
|
||||
if svc.IsNonWorkingDay(mon) {
|
||||
t.Error("regular Monday should be a working day")
|
||||
}
|
||||
|
||||
// Christmas (Friday Dec 25, 2026)
|
||||
xmas := time.Date(2026, 12, 25, 0, 0, 0, 0, time.UTC)
|
||||
if !svc.IsNonWorkingDay(xmas) {
|
||||
t.Error("Christmas should be non-working day")
|
||||
}
|
||||
|
||||
// New Year
|
||||
newyear := time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
if !svc.IsNonWorkingDay(newyear) {
|
||||
t.Error("New Year should be non-working day")
|
||||
}
|
||||
}
|
||||
|
||||
func TestAdjustForNonWorkingDays(t *testing.T) {
|
||||
svc := NewHolidayService(nil)
|
||||
|
||||
// Saturday -> Monday
|
||||
sat := time.Date(2026, 3, 28, 0, 0, 0, 0, time.UTC)
|
||||
adj, orig, adjusted := svc.AdjustForNonWorkingDays(sat)
|
||||
if !adjusted {
|
||||
t.Error("Saturday should be adjusted")
|
||||
}
|
||||
if orig != sat {
|
||||
t.Error("original should be unchanged")
|
||||
}
|
||||
expected := time.Date(2026, 3, 30, 0, 0, 0, 0, time.UTC)
|
||||
if adj != expected {
|
||||
t.Errorf("Saturday should adjust to Monday %s, got %s", expected, adj)
|
||||
}
|
||||
|
||||
// Regular Wednesday -> no adjustment
|
||||
wed := time.Date(2026, 3, 25, 0, 0, 0, 0, time.UTC)
|
||||
adj, _, adjusted = svc.AdjustForNonWorkingDays(wed)
|
||||
if adjusted {
|
||||
t.Error("Wednesday should not be adjusted")
|
||||
}
|
||||
if adj != wed {
|
||||
t.Error("non-adjusted date should be unchanged")
|
||||
}
|
||||
}
|
||||
124
backend/internal/services/note_service.go
Normal file
124
backend/internal/services/note_service.go
Normal file
@@ -0,0 +1,124 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// NoteService provides CRUD operations for notes attached to cases,
// deadlines, appointments, or case events. Every query is tenant-scoped.
type NoteService struct {
	db    *sqlx.DB      // application database handle
	audit *AuditService // receives create/update/delete audit events
}

// NewNoteService wires a NoteService with its database handle and audit logger.
func NewNoteService(db *sqlx.DB, audit *AuditService) *NoteService {
	return &NoteService{db: db, audit: audit}
}
|
||||
|
||||
// ListByParent returns all notes for a given parent entity, scoped to tenant.
|
||||
func (s *NoteService) ListByParent(ctx context.Context, tenantID uuid.UUID, parentType string, parentID uuid.UUID) ([]models.Note, error) {
|
||||
col, err := parentColumn(parentType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(
|
||||
`SELECT id, tenant_id, case_id, deadline_id, appointment_id, case_event_id,
|
||||
content, created_by, created_at, updated_at
|
||||
FROM notes
|
||||
WHERE tenant_id = $1 AND %s = $2
|
||||
ORDER BY created_at DESC`, col)
|
||||
|
||||
var notes []models.Note
|
||||
if err := s.db.SelectContext(ctx, ¬es, query, tenantID, parentID); err != nil {
|
||||
return nil, fmt.Errorf("listing notes by %s: %w", parentType, err)
|
||||
}
|
||||
if notes == nil {
|
||||
notes = []models.Note{}
|
||||
}
|
||||
return notes, nil
|
||||
}
|
||||
|
||||
// CreateNoteInput carries the payload for creating a note. The parent ID
// fields select which entity the note hangs off.
// NOTE(review): nothing in this service enforces that exactly one parent ID
// is set — presumably the handler or DB schema validates this; confirm.
type CreateNoteInput struct {
	CaseID        *uuid.UUID `json:"case_id,omitempty"`
	DeadlineID    *uuid.UUID `json:"deadline_id,omitempty"`
	AppointmentID *uuid.UUID `json:"appointment_id,omitempty"`
	CaseEventID   *uuid.UUID `json:"case_event_id,omitempty"`
	Content       string     `json:"content"`
}
|
||||
|
||||
// Create inserts a new note.
|
||||
func (s *NoteService) Create(ctx context.Context, tenantID uuid.UUID, createdBy *uuid.UUID, input CreateNoteInput) (*models.Note, error) {
|
||||
id := uuid.New()
|
||||
now := time.Now().UTC()
|
||||
|
||||
query := `INSERT INTO notes (id, tenant_id, case_id, deadline_id, appointment_id, case_event_id, content, created_by, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $9)
|
||||
RETURNING id, tenant_id, case_id, deadline_id, appointment_id, case_event_id, content, created_by, created_at, updated_at`
|
||||
|
||||
var n models.Note
|
||||
err := s.db.GetContext(ctx, &n, query,
|
||||
id, tenantID, input.CaseID, input.DeadlineID, input.AppointmentID, input.CaseEventID,
|
||||
input.Content, createdBy, now)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating note: %w", err)
|
||||
}
|
||||
s.audit.Log(ctx, "create", "note", &id, nil, n)
|
||||
return &n, nil
|
||||
}
|
||||
|
||||
// Update modifies a note's content.
|
||||
func (s *NoteService) Update(ctx context.Context, tenantID, noteID uuid.UUID, content string) (*models.Note, error) {
|
||||
query := `UPDATE notes SET content = $1, updated_at = $2
|
||||
WHERE id = $3 AND tenant_id = $4
|
||||
RETURNING id, tenant_id, case_id, deadline_id, appointment_id, case_event_id, content, created_by, created_at, updated_at`
|
||||
|
||||
var n models.Note
|
||||
err := s.db.GetContext(ctx, &n, query, content, time.Now().UTC(), noteID, tenantID)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, fmt.Errorf("updating note: %w", err)
|
||||
}
|
||||
s.audit.Log(ctx, "update", "note", ¬eID, nil, n)
|
||||
return &n, nil
|
||||
}
|
||||
|
||||
// Delete removes a note.
|
||||
func (s *NoteService) Delete(ctx context.Context, tenantID, noteID uuid.UUID) error {
|
||||
result, err := s.db.ExecContext(ctx, "DELETE FROM notes WHERE id = $1 AND tenant_id = $2", noteID, tenantID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("deleting note: %w", err)
|
||||
}
|
||||
rows, err := result.RowsAffected()
|
||||
if err != nil {
|
||||
return fmt.Errorf("checking delete result: %w", err)
|
||||
}
|
||||
if rows == 0 {
|
||||
return fmt.Errorf("note not found")
|
||||
}
|
||||
s.audit.Log(ctx, "delete", "note", ¬eID, nil, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
// parentColumn maps a parent entity type to its notes-table column name.
// It doubles as a whitelist: only these values may ever be interpolated
// into SQL by callers; anything else yields an error.
func parentColumn(parentType string) (string, error) {
	columns := map[string]string{
		"case":        "case_id",
		"deadline":    "deadline_id",
		"appointment": "appointment_id",
		"case_event":  "case_event_id",
	}
	if col, ok := columns[parentType]; ok {
		return col, nil
	}
	return "", fmt.Errorf("invalid parent type: %s", parentType)
}
|
||||
@@ -14,10 +14,11 @@ import (
|
||||
|
||||
type PartyService struct {
|
||||
db *sqlx.DB
|
||||
audit *AuditService
|
||||
}
|
||||
|
||||
func NewPartyService(db *sqlx.DB) *PartyService {
|
||||
return &PartyService{db: db}
|
||||
func NewPartyService(db *sqlx.DB, audit *AuditService) *PartyService {
|
||||
return &PartyService{db: db, audit: audit}
|
||||
}
|
||||
|
||||
type CreatePartyInput struct {
|
||||
@@ -79,6 +80,7 @@ func (s *PartyService) Create(ctx context.Context, tenantID, caseID uuid.UUID, u
|
||||
if err := s.db.GetContext(ctx, &party, "SELECT * FROM parties WHERE id = $1", id); err != nil {
|
||||
return nil, fmt.Errorf("fetching created party: %w", err)
|
||||
}
|
||||
s.audit.Log(ctx, "create", "party", &id, nil, party)
|
||||
return &party, nil
|
||||
}
|
||||
|
||||
@@ -135,6 +137,7 @@ func (s *PartyService) Update(ctx context.Context, tenantID, partyID uuid.UUID,
|
||||
if err := s.db.GetContext(ctx, &updated, "SELECT * FROM parties WHERE id = $1", partyID); err != nil {
|
||||
return nil, fmt.Errorf("fetching updated party: %w", err)
|
||||
}
|
||||
s.audit.Log(ctx, "update", "party", &partyID, current, updated)
|
||||
return &updated, nil
|
||||
}
|
||||
|
||||
@@ -148,5 +151,6 @@ func (s *PartyService) Delete(ctx context.Context, tenantID, partyID uuid.UUID)
|
||||
if rows == 0 {
|
||||
return sql.ErrNoRows
|
||||
}
|
||||
s.audit.Log(ctx, "delete", "party", &partyID, nil, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
112
backend/internal/services/storage.go
Normal file
112
backend/internal/services/storage.go
Normal file
@@ -0,0 +1,112 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// StorageClient interacts with Supabase Storage via REST API.
type StorageClient struct {
	baseURL    string // Supabase project URL; joined with /storage/v1/... paths
	serviceKey string // service-role key, sent as a Bearer token
	httpClient *http.Client
}

// NewStorageClient builds a client for the given Supabase project.
// NOTE(review): the http.Client has no Timeout, so a request whose context
// carries no deadline can hang indefinitely — consider a default timeout.
func NewStorageClient(supabaseURL, serviceKey string) *StorageClient {
	return &StorageClient{
		baseURL:    supabaseURL,
		serviceKey: serviceKey,
		httpClient: &http.Client{},
	}
}
|
||||
|
||||
// Upload stores a file in the given bucket at the specified path.
|
||||
func (s *StorageClient) Upload(ctx context.Context, bucket, path, contentType string, data io.Reader) error {
|
||||
url := fmt.Sprintf("%s/storage/v1/object/%s/%s", s.baseURL, bucket, path)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", url, data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating upload request: %w", err)
|
||||
}
|
||||
|
||||
req.Header.Set("Authorization", "Bearer "+s.serviceKey)
|
||||
req.Header.Set("Content-Type", contentType)
|
||||
req.Header.Set("x-upsert", "true")
|
||||
|
||||
resp, err := s.httpClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("uploading to storage: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("storage upload failed (status %d): %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Download retrieves a file from storage. Caller must close the returned ReadCloser.
|
||||
func (s *StorageClient) Download(ctx context.Context, bucket, path string) (io.ReadCloser, string, error) {
|
||||
url := fmt.Sprintf("%s/storage/v1/object/%s/%s", s.baseURL, bucket, path)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, "", fmt.Errorf("creating download request: %w", err)
|
||||
}
|
||||
|
||||
req.Header.Set("Authorization", "Bearer "+s.serviceKey)
|
||||
|
||||
resp, err := s.httpClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, "", fmt.Errorf("downloading from storage: %w", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
resp.Body.Close()
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
return nil, "", fmt.Errorf("file not found in storage")
|
||||
}
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return nil, "", fmt.Errorf("storage download failed (status %d): %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
ct := resp.Header.Get("Content-Type")
|
||||
return resp.Body, ct, nil
|
||||
}
|
||||
|
||||
// Delete removes files from storage by their paths.
|
||||
func (s *StorageClient) Delete(ctx context.Context, bucket string, paths []string) error {
|
||||
url := fmt.Sprintf("%s/storage/v1/object/%s", s.baseURL, bucket)
|
||||
|
||||
body, err := json.Marshal(map[string][]string{"prefixes": paths})
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshaling delete request: %w", err)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "DELETE", url, bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating delete request: %w", err)
|
||||
}
|
||||
|
||||
req.Header.Set("Authorization", "Bearer "+s.serviceKey)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := s.httpClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("deleting from storage: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent {
|
||||
respBody, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("storage delete failed (status %d): %s", resp.StatusCode, string(respBody))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
232
backend/internal/services/tenant_service.go
Normal file
232
backend/internal/services/tenant_service.go
Normal file
@@ -0,0 +1,232 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"mgit.msbls.de/m/KanzlAI-mGMT/internal/models"
|
||||
)
|
||||
|
||||
// TenantService manages tenants and tenant memberships (user_tenants rows).
type TenantService struct {
	db    *sqlx.DB      // application database handle
	audit *AuditService // receives create/update/delete audit events
}

// NewTenantService wires a TenantService with its database handle and audit logger.
func NewTenantService(db *sqlx.DB, audit *AuditService) *TenantService {
	return &TenantService{db: db, audit: audit}
}
|
||||
|
||||
// Create creates a new tenant and assigns the creator as owner.
// Both inserts run in one transaction, so a failed membership insert rolls
// the tenant row back as well.
func (s *TenantService) Create(ctx context.Context, userID uuid.UUID, name, slug string) (*models.Tenant, error) {
	tx, err := s.db.BeginTxx(ctx, nil)
	if err != nil {
		return nil, fmt.Errorf("begin transaction: %w", err)
	}
	// Rollback is a harmless no-op after a successful Commit.
	defer tx.Rollback()

	var tenant models.Tenant
	err = tx.QueryRowxContext(ctx,
		`INSERT INTO tenants (name, slug) VALUES ($1, $2) RETURNING id, name, slug, settings, created_at, updated_at`,
		name, slug,
	).StructScan(&tenant)
	if err != nil {
		return nil, fmt.Errorf("insert tenant: %w", err)
	}

	_, err = tx.ExecContext(ctx,
		`INSERT INTO user_tenants (user_id, tenant_id, role) VALUES ($1, $2, 'owner')`,
		userID, tenant.ID,
	)
	if err != nil {
		return nil, fmt.Errorf("assign owner: %w", err)
	}

	if err := tx.Commit(); err != nil {
		return nil, fmt.Errorf("commit: %w", err)
	}

	// Audited only after commit, so a rolled-back create is never logged.
	s.audit.Log(ctx, "create", "tenant", &tenant.ID, nil, tenant)
	return &tenant, nil
}
|
||||
|
||||
// ListForUser returns all tenants the user belongs to.
|
||||
func (s *TenantService) ListForUser(ctx context.Context, userID uuid.UUID) ([]models.TenantWithRole, error) {
|
||||
var tenants []models.TenantWithRole
|
||||
err := s.db.SelectContext(ctx, &tenants,
|
||||
`SELECT t.id, t.name, t.slug, t.settings, t.created_at, t.updated_at, ut.role
|
||||
FROM tenants t
|
||||
JOIN user_tenants ut ON ut.tenant_id = t.id
|
||||
WHERE ut.user_id = $1
|
||||
ORDER BY t.name`,
|
||||
userID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("list tenants: %w", err)
|
||||
}
|
||||
return tenants, nil
|
||||
}
|
||||
|
||||
// GetByID returns a single tenant. The caller must verify the user has access.
|
||||
func (s *TenantService) GetByID(ctx context.Context, tenantID uuid.UUID) (*models.Tenant, error) {
|
||||
var tenant models.Tenant
|
||||
err := s.db.GetContext(ctx, &tenant,
|
||||
`SELECT id, name, slug, settings, created_at, updated_at FROM tenants WHERE id = $1`,
|
||||
tenantID,
|
||||
)
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get tenant: %w", err)
|
||||
}
|
||||
return &tenant, nil
|
||||
}
|
||||
|
||||
// GetUserRole returns the user's role in a tenant, or empty string if not a member.
|
||||
func (s *TenantService) GetUserRole(ctx context.Context, userID, tenantID uuid.UUID) (string, error) {
|
||||
var role string
|
||||
err := s.db.GetContext(ctx, &role,
|
||||
`SELECT role FROM user_tenants WHERE user_id = $1 AND tenant_id = $2`,
|
||||
userID, tenantID,
|
||||
)
|
||||
if err == sql.ErrNoRows {
|
||||
return "", nil
|
||||
}
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("get user role: %w", err)
|
||||
}
|
||||
return role, nil
|
||||
}
|
||||
|
||||
// FirstTenantForUser returns the user's first tenant (by name), used as default.
|
||||
func (s *TenantService) FirstTenantForUser(ctx context.Context, userID uuid.UUID) (*uuid.UUID, error) {
|
||||
var tenantID uuid.UUID
|
||||
err := s.db.GetContext(ctx, &tenantID,
|
||||
`SELECT t.id FROM tenants t
|
||||
JOIN user_tenants ut ON ut.tenant_id = t.id
|
||||
WHERE ut.user_id = $1
|
||||
ORDER BY t.name LIMIT 1`,
|
||||
userID,
|
||||
)
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("first tenant: %w", err)
|
||||
}
|
||||
return &tenantID, nil
|
||||
}
|
||||
|
||||
// ListMembers returns all members of a tenant.
|
||||
func (s *TenantService) ListMembers(ctx context.Context, tenantID uuid.UUID) ([]models.UserTenant, error) {
|
||||
var members []models.UserTenant
|
||||
err := s.db.SelectContext(ctx, &members,
|
||||
`SELECT user_id, tenant_id, role, created_at FROM user_tenants WHERE tenant_id = $1 ORDER BY created_at`,
|
||||
tenantID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("list members: %w", err)
|
||||
}
|
||||
return members, nil
|
||||
}
|
||||
|
||||
// InviteByEmail looks up a user by email in auth.users and adds them to the tenant.
// NOTE(review): the membership check and the insert below are separate
// statements, not a transaction — a concurrent invite can race past the
// EXISTS check; presumably a unique constraint on user_tenants then surfaces
// as a raw "invite user" error instead of the friendly message. Confirm.
// NOTE(review): role is passed through unvalidated here — confirm the
// handler restricts it to known role values.
func (s *TenantService) InviteByEmail(ctx context.Context, tenantID uuid.UUID, email, role string) (*models.UserTenant, error) {
	// Look up user in Supabase auth.users
	var userID uuid.UUID
	err := s.db.GetContext(ctx, &userID,
		`SELECT id FROM auth.users WHERE email = $1`,
		email,
	)
	if err == sql.ErrNoRows {
		return nil, fmt.Errorf("no user found with email %s", email)
	}
	if err != nil {
		return nil, fmt.Errorf("lookup user: %w", err)
	}

	// Check if already a member
	var exists bool
	err = s.db.GetContext(ctx, &exists,
		`SELECT EXISTS(SELECT 1 FROM user_tenants WHERE user_id = $1 AND tenant_id = $2)`,
		userID, tenantID,
	)
	if err != nil {
		return nil, fmt.Errorf("check membership: %w", err)
	}
	if exists {
		return nil, fmt.Errorf("user is already a member of this tenant")
	}

	// Insert the membership and return the created row.
	var ut models.UserTenant
	err = s.db.QueryRowxContext(ctx,
		`INSERT INTO user_tenants (user_id, tenant_id, role) VALUES ($1, $2, $3)
		RETURNING user_id, tenant_id, role, created_at`,
		userID, tenantID, role,
	).StructScan(&ut)
	if err != nil {
		return nil, fmt.Errorf("invite user: %w", err)
	}

	s.audit.Log(ctx, "create", "membership", &tenantID, nil, ut)
	return &ut, nil
}
|
||||
|
||||
// UpdateSettings merges new settings into the tenant's existing settings JSONB.
|
||||
func (s *TenantService) UpdateSettings(ctx context.Context, tenantID uuid.UUID, settings json.RawMessage) (*models.Tenant, error) {
|
||||
var tenant models.Tenant
|
||||
err := s.db.QueryRowxContext(ctx,
|
||||
`UPDATE tenants SET settings = COALESCE(settings, '{}'::jsonb) || $1::jsonb, updated_at = NOW()
|
||||
WHERE id = $2
|
||||
RETURNING id, name, slug, settings, created_at, updated_at`,
|
||||
settings, tenantID,
|
||||
).StructScan(&tenant)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("update settings: %w", err)
|
||||
}
|
||||
s.audit.Log(ctx, "update", "settings", &tenantID, nil, settings)
|
||||
return &tenant, nil
|
||||
}
|
||||
|
||||
// RemoveMember removes a user from a tenant. Cannot remove the last owner.
// NOTE(review): the owner-count check and the DELETE are separate statements,
// not a transaction — two concurrent removals of the final two owners could
// both pass the count check and leave the tenant ownerless. Confirm whether
// this needs a transactional guard or a row lock.
func (s *TenantService) RemoveMember(ctx context.Context, tenantID, userID uuid.UUID) error {
	// Check if the user being removed is an owner
	role, err := s.GetUserRole(ctx, userID, tenantID)
	if err != nil {
		return fmt.Errorf("check role: %w", err)
	}
	if role == "" {
		return fmt.Errorf("user is not a member of this tenant")
	}

	if role == "owner" {
		// Count owners — prevent removing the last one
		var ownerCount int
		err := s.db.GetContext(ctx, &ownerCount,
			`SELECT COUNT(*) FROM user_tenants WHERE tenant_id = $1 AND role = 'owner'`,
			tenantID,
		)
		if err != nil {
			return fmt.Errorf("count owners: %w", err)
		}
		if ownerCount <= 1 {
			return fmt.Errorf("cannot remove the last owner of a tenant")
		}
	}

	_, err = s.db.ExecContext(ctx,
		`DELETE FROM user_tenants WHERE user_id = $1 AND tenant_id = $2`,
		userID, tenantID,
	)
	if err != nil {
		return fmt.Errorf("remove member: %w", err)
	}

	// Audit carries the removed user's prior role for traceability.
	s.audit.Log(ctx, "delete", "membership", &tenantID, map[string]any{"user_id": userID, "role": role}, nil)
	return nil
}
|
||||
167
backend/seed/demo_data.sql
Normal file
167
backend/seed/demo_data.sql
Normal file
@@ -0,0 +1,167 @@
|
||||
-- KanzlAI Demo Data
-- Creates 1 test tenant, 5 cases with deadlines and appointments
-- Run with: psql $DATABASE_URL -f demo_data.sql

SET search_path TO kanzlai, public;

-- Demo tenant (fixed UUID + ON CONFLICT makes this row idempotent)
INSERT INTO tenants (id, name, slug, settings) VALUES
('a0000000-0000-0000-0000-000000000001', 'Kanzlei Siebels & Partner', 'siebels-partner', '{}')
ON CONFLICT (id) DO NOTHING;

-- Link both users to the demo tenant
-- NOTE(review): these user UUIDs are hard-coded and must already exist in
-- auth.users of the target environment — confirm before running elsewhere.
INSERT INTO user_tenants (user_id, tenant_id, role) VALUES
('1da9374d-a8a6-49fc-a2ec-5ddfa91d522d', 'a0000000-0000-0000-0000-000000000001', 'owner'),
('ac6c9501-3757-4a6d-8b97-2cff4288382b', 'a0000000-0000-0000-0000-000000000001', 'member')
ON CONFLICT DO NOTHING;

-- NOTE(review): the case INSERTs below use fixed ids but no ON CONFLICT
-- clause, so re-running the script fails on duplicate keys — confirm
-- whether the script is meant to be one-shot only.

-- ============================================================
-- Case 1: Patentverletzung (patent infringement) — active
-- ============================================================
INSERT INTO cases (id, tenant_id, case_number, title, case_type, court, court_ref, status) VALUES
('c0000000-0000-0000-0000-000000000001',
 'a0000000-0000-0000-0000-000000000001',
 '2026/001', 'TechCorp GmbH ./. InnovatAG — Patentverletzung EP 1234567',
 'patent', 'UPC München (Lokalkammer)', 'UPC_CFI-123/2026',
 'active');

INSERT INTO parties (id, tenant_id, case_id, name, role, representative) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
 'TechCorp GmbH', 'claimant', 'RA Dr. Siebels'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
 'InnovatAG', 'defendant', 'RA Müller');

-- NOTE(review): the third deadline below is already overdue (due 2 days ago,
-- status still 'pending') — presumably intentional demo data to exercise
-- overdue handling in the UI; confirm.
INSERT INTO deadlines (id, tenant_id, case_id, title, due_date, warning_date, status, source) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
 'Klageerwiderung einreichen', CURRENT_DATE + INTERVAL '3 days', CURRENT_DATE + INTERVAL '1 day', 'pending', 'manual'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
 'Beweisangebote nachreichen', CURRENT_DATE + INTERVAL '14 days', CURRENT_DATE + INTERVAL '10 days', 'pending', 'manual'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
 'Schriftsatz Anspruch 3', CURRENT_DATE - INTERVAL '2 days', CURRENT_DATE - INTERVAL '5 days', 'pending', 'manual');

INSERT INTO appointments (id, tenant_id, case_id, title, start_at, end_at, location, appointment_type) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
 'Mündliche Verhandlung', CURRENT_DATE + INTERVAL '21 days' + TIME '10:00', CURRENT_DATE + INTERVAL '21 days' + TIME '12:00',
 'UPC München, Saal 4', 'hearing');

-- ============================================================
-- Case 2: Markenrecht (trademark) — active
-- ============================================================
INSERT INTO cases (id, tenant_id, case_number, title, case_type, court, court_ref, status) VALUES
('c0000000-0000-0000-0000-000000000002',
 'a0000000-0000-0000-0000-000000000001',
 '2026/002', 'BrandHouse ./. CopyShop UG — Markenverletzung DE 30201234',
 'trademark', 'LG Hamburg', '315 O 78/26',
 'active');

INSERT INTO parties (id, tenant_id, case_id, name, role, representative) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000002',
 'BrandHouse SE', 'claimant', 'RA Dr. Siebels'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000002',
 'CopyShop UG', 'defendant', 'RA Weber');

INSERT INTO deadlines (id, tenant_id, case_id, title, due_date, warning_date, status, source) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000002',
 'Antrag einstweilige Verfügung', CURRENT_DATE + INTERVAL '5 days', CURRENT_DATE + INTERVAL '2 days', 'pending', 'manual'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000002',
 'Abmahnung Fristablauf', CURRENT_DATE + INTERVAL '30 days', CURRENT_DATE + INTERVAL '25 days', 'pending', 'manual');

INSERT INTO appointments (id, tenant_id, case_id, title, start_at, end_at, location, appointment_type) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000002',
 'Mandantenbesprechung BrandHouse', CURRENT_DATE + INTERVAL '2 days' + TIME '14:00', CURRENT_DATE + INTERVAL '2 days' + TIME '15:30',
 'Kanzlei, Besprechungsraum 1', 'consultation');

-- ============================================================
-- Case 3: Arbeitsgericht (labor law) — active
-- ============================================================
INSERT INTO cases (id, tenant_id, case_number, title, case_type, court, court_ref, status) VALUES
('c0000000-0000-0000-0000-000000000003',
 'a0000000-0000-0000-0000-000000000001',
 '2026/003', 'Schmidt ./. AutoWerk Bayern GmbH — Kündigungsschutz',
 'labor', 'ArbG München', '12 Ca 456/26',
 'active');

INSERT INTO parties (id, tenant_id, case_id, name, role, representative) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000003',
 'Klaus Schmidt', 'claimant', 'RA Dr. Siebels'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000003',
 'AutoWerk Bayern GmbH', 'defendant', 'RA Fischer');

INSERT INTO deadlines (id, tenant_id, case_id, title, due_date, warning_date, status, source) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000003',
 'Kündigungsschutzklage einreichen (3-Wochen-Frist)', CURRENT_DATE + INTERVAL '7 days', CURRENT_DATE + INTERVAL '4 days', 'pending', 'manual'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000003',
 'Stellungnahme Arbeitgeber', CURRENT_DATE + INTERVAL '28 days', CURRENT_DATE + INTERVAL '21 days', 'pending', 'manual');

INSERT INTO appointments (id, tenant_id, case_id, title, start_at, end_at, location, appointment_type) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000003',
 'Güteverhandlung', CURRENT_DATE + INTERVAL '35 days' + TIME '09:00', CURRENT_DATE + INTERVAL '35 days' + TIME '10:00',
 'ArbG München, Saal 12', 'hearing');

-- ============================================================
-- Case 4: Mietrecht (tenancy) — active
-- ============================================================
INSERT INTO cases (id, tenant_id, case_number, title, case_type, court, court_ref, status) VALUES
('c0000000-0000-0000-0000-000000000004',
 'a0000000-0000-0000-0000-000000000001',
 '2026/004', 'Hausverwaltung Zentral ./. Meier — Mietrückstand',
 'civil', 'AG München', '432 C 1234/26',
 'active');

INSERT INTO parties (id, tenant_id, case_id, name, role, representative) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000004',
 'Hausverwaltung Zentral GmbH', 'claimant', 'RA Dr. Siebels'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000004',
 'Thomas Meier', 'defendant', NULL);

INSERT INTO deadlines (id, tenant_id, case_id, title, due_date, warning_date, status, source) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000004',
 'Mahnbescheid beantragen', CURRENT_DATE + INTERVAL '10 days', CURRENT_DATE + INTERVAL '7 days', 'pending', 'manual'),
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000004',
 'Räumungsfrist prüfen', CURRENT_DATE + INTERVAL '60 days', CURRENT_DATE + INTERVAL '50 days', 'pending', 'manual');

INSERT INTO appointments (id, tenant_id, case_id, title, start_at, end_at, location, appointment_type) VALUES
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000004',
 'Besprechung Hausverwaltung', CURRENT_DATE + INTERVAL '4 days' + TIME '11:00', CURRENT_DATE + INTERVAL '4 days' + TIME '12:00',
 'Kanzlei, Besprechungsraum 2', 'meeting');
|
||||
|
||||
-- ============================================================
|
||||
-- Case 5: Erbrecht (inheritance) — closed
|
||||
-- ============================================================
|
||||
INSERT INTO cases (id, tenant_id, case_number, title, case_type, court, court_ref, status) VALUES
|
||||
('c0000000-0000-0000-0000-000000000005',
|
||||
'a0000000-0000-0000-0000-000000000001',
|
||||
'2025/042', 'Nachlass Wagner — Erbauseinandersetzung',
|
||||
'civil', 'AG Starnberg', '3 VI 891/25',
|
||||
'closed');
|
||||
|
||||
INSERT INTO parties (id, tenant_id, case_id, name, role, representative) VALUES
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000005',
|
||||
'Maria Wagner', 'claimant', 'RA Dr. Siebels'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000005',
|
||||
'Peter Wagner', 'defendant', 'RA Braun');
|
||||
|
||||
INSERT INTO deadlines (id, tenant_id, case_id, title, due_date, warning_date, status, source, completed_at) VALUES
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000005',
|
||||
'Erbscheinsantrag einreichen', CURRENT_DATE - INTERVAL '30 days', CURRENT_DATE - INTERVAL '37 days', 'completed', 'manual', CURRENT_DATE - INTERVAL '32 days');
|
||||
|
||||
-- ============================================================
|
||||
-- Case events for realistic activity feed
|
||||
-- ============================================================
|
||||
INSERT INTO case_events (id, tenant_id, case_id, event_type, title, description, created_at, updated_at) VALUES
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
|
||||
'case_created', 'Akte angelegt', 'Patentverletzungsklage TechCorp ./. InnovatAG eröffnet', NOW() - INTERVAL '10 days', NOW() - INTERVAL '10 days'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
|
||||
'party_added', 'Partei hinzugefügt', 'TechCorp GmbH als Kläger eingetragen', NOW() - INTERVAL '10 days', NOW() - INTERVAL '10 days'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000002',
|
||||
'case_created', 'Akte angelegt', 'Markenrechtsstreit BrandHouse ./. CopyShop eröffnet', NOW() - INTERVAL '7 days', NOW() - INTERVAL '7 days'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000003',
|
||||
'case_created', 'Akte angelegt', 'Kündigungsschutzklage Schmidt eröffnet', NOW() - INTERVAL '5 days', NOW() - INTERVAL '5 days'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000004',
|
||||
'case_created', 'Akte angelegt', 'Mietrückstand Hausverwaltung ./. Meier eröffnet', NOW() - INTERVAL '3 days', NOW() - INTERVAL '3 days'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000001',
|
||||
'status_changed', 'Fristablauf überschritten', 'Schriftsatz Anspruch 3 ist überfällig', NOW() - INTERVAL '1 day', NOW() - INTERVAL '1 day'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000005',
|
||||
'case_created', 'Akte angelegt', 'Erbauseinandersetzung Wagner eröffnet', NOW() - INTERVAL '60 days', NOW() - INTERVAL '60 days'),
|
||||
(gen_random_uuid(), 'a0000000-0000-0000-0000-000000000001', 'c0000000-0000-0000-0000-000000000005',
|
||||
'status_changed', 'Akte geschlossen', 'Erbscheinsverfahren abgeschlossen', NOW() - INTERVAL '20 days', NOW() - INTERVAL '20 days');
|
||||
@@ -6,6 +6,12 @@ services:
|
||||
- "8080"
|
||||
environment:
|
||||
- PORT=8080
|
||||
- DATABASE_URL=${DATABASE_URL}
|
||||
- SUPABASE_URL=${SUPABASE_URL}
|
||||
- SUPABASE_ANON_KEY=${SUPABASE_ANON_KEY}
|
||||
- SUPABASE_SERVICE_KEY=${SUPABASE_SERVICE_KEY}
|
||||
- SUPABASE_JWT_SECRET=${SUPABASE_JWT_SECRET}
|
||||
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--spider", "-q", "http://localhost:8080/health"]
|
||||
interval: 30s
|
||||
@@ -16,6 +22,9 @@ services:
|
||||
frontend:
|
||||
build:
|
||||
context: ./frontend
|
||||
args:
|
||||
NEXT_PUBLIC_SUPABASE_URL: ${SUPABASE_URL}
|
||||
NEXT_PUBLIC_SUPABASE_ANON_KEY: ${SUPABASE_ANON_KEY}
|
||||
expose:
|
||||
- "3000"
|
||||
depends_on:
|
||||
@@ -23,6 +32,8 @@ services:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
- API_URL=http://backend:8080
|
||||
- NEXT_PUBLIC_SUPABASE_URL=${SUPABASE_URL}
|
||||
- NEXT_PUBLIC_SUPABASE_ANON_KEY=${SUPABASE_ANON_KEY}
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "fetch('http://localhost:3000').then(r=>{if(!r.ok)throw r.status;process.exit(0)}).catch(()=>process.exit(1))"]
|
||||
interval: 30s
|
||||
|
||||
0
frontend/.m/spawn.lock
Normal file
0
frontend/.m/spawn.lock
Normal file
@@ -10,6 +10,10 @@ WORKDIR /app
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
ENV API_URL=http://backend:8080
|
||||
ARG NEXT_PUBLIC_SUPABASE_URL
|
||||
ARG NEXT_PUBLIC_SUPABASE_ANON_KEY
|
||||
ENV NEXT_PUBLIC_SUPABASE_URL=$NEXT_PUBLIC_SUPABASE_URL
|
||||
ENV NEXT_PUBLIC_SUPABASE_ANON_KEY=$NEXT_PUBLIC_SUPABASE_ANON_KEY
|
||||
RUN mkdir -p public
|
||||
RUN bun run build
|
||||
|
||||
|
||||
@@ -5,32 +5,111 @@
|
||||
"": {
|
||||
"name": "frontend",
|
||||
"dependencies": {
|
||||
"@supabase/ssr": "^0.9.0",
|
||||
"@supabase/supabase-js": "^2.100.0",
|
||||
"@tanstack/react-query": "^5.95.2",
|
||||
"date-fns": "^4.1.0",
|
||||
"lucide-react": "^1.6.0",
|
||||
"next": "15.5.14",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-dropzone": "^15.0.0",
|
||||
"sonner": "^2.0.7",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3",
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
"eslint": "^9",
|
||||
"eslint-config-next": "15.5.14",
|
||||
"jsdom": "24.1.3",
|
||||
"msw": "^2.12.14",
|
||||
"tailwindcss": "^4",
|
||||
"typescript": "^5",
|
||||
"vitest": "2.1.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@adobe/css-tools": ["@adobe/css-tools@4.4.4", "", {}, "sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg=="],
|
||||
|
||||
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
|
||||
|
||||
"@asamuzakjp/css-color": ["@asamuzakjp/css-color@3.2.0", "", { "dependencies": { "@csstools/css-calc": "^2.1.3", "@csstools/css-color-parser": "^3.0.9", "@csstools/css-parser-algorithms": "^3.0.4", "@csstools/css-tokenizer": "^3.0.3", "lru-cache": "^10.4.3" } }, "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw=="],
|
||||
|
||||
"@babel/code-frame": ["@babel/code-frame@7.29.0", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw=="],
|
||||
|
||||
"@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q=="],
|
||||
|
||||
"@babel/runtime": ["@babel/runtime@7.29.2", "", {}, "sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g=="],
|
||||
|
||||
"@csstools/color-helpers": ["@csstools/color-helpers@5.1.0", "", {}, "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA=="],
|
||||
|
||||
"@csstools/css-calc": ["@csstools/css-calc@2.1.4", "", { "peerDependencies": { "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ=="],
|
||||
|
||||
"@csstools/css-color-parser": ["@csstools/css-color-parser@3.1.0", "", { "dependencies": { "@csstools/color-helpers": "^5.1.0", "@csstools/css-calc": "^2.1.4" }, "peerDependencies": { "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA=="],
|
||||
|
||||
"@csstools/css-parser-algorithms": ["@csstools/css-parser-algorithms@3.0.5", "", { "peerDependencies": { "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ=="],
|
||||
|
||||
"@csstools/css-tokenizer": ["@csstools/css-tokenizer@3.0.4", "", {}, "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw=="],
|
||||
|
||||
"@emnapi/core": ["@emnapi/core@1.9.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.2.0", "tslib": "^2.4.0" } }, "sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA=="],
|
||||
|
||||
"@emnapi/runtime": ["@emnapi/runtime@1.9.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA=="],
|
||||
|
||||
"@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.2.0", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg=="],
|
||||
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
|
||||
|
||||
"@esbuild/android-arm": ["@esbuild/android-arm@0.21.5", "", { "os": "android", "cpu": "arm" }, "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg=="],
|
||||
|
||||
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.21.5", "", { "os": "android", "cpu": "arm64" }, "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A=="],
|
||||
|
||||
"@esbuild/android-x64": ["@esbuild/android-x64@0.21.5", "", { "os": "android", "cpu": "x64" }, "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA=="],
|
||||
|
||||
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.21.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ=="],
|
||||
|
||||
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.21.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw=="],
|
||||
|
||||
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.21.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g=="],
|
||||
|
||||
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.21.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ=="],
|
||||
|
||||
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.21.5", "", { "os": "linux", "cpu": "arm" }, "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA=="],
|
||||
|
||||
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.21.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q=="],
|
||||
|
||||
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.21.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg=="],
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg=="],
|
||||
|
||||
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg=="],
|
||||
|
||||
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.21.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w=="],
|
||||
|
||||
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA=="],
|
||||
|
||||
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.21.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A=="],
|
||||
|
||||
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.21.5", "", { "os": "linux", "cpu": "x64" }, "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ=="],
|
||||
|
||||
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.21.5", "", { "os": "none", "cpu": "x64" }, "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg=="],
|
||||
|
||||
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.21.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow=="],
|
||||
|
||||
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.21.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg=="],
|
||||
|
||||
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.21.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A=="],
|
||||
|
||||
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.21.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA=="],
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
|
||||
|
||||
"@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.9.1", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ=="],
|
||||
|
||||
"@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.2", "", {}, "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew=="],
|
||||
@@ -107,6 +186,16 @@
|
||||
|
||||
"@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.5", "", { "os": "win32", "cpu": "x64" }, "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw=="],
|
||||
|
||||
"@inquirer/ansi": ["@inquirer/ansi@1.0.2", "", {}, "sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ=="],
|
||||
|
||||
"@inquirer/confirm": ["@inquirer/confirm@5.1.21", "", { "dependencies": { "@inquirer/core": "^10.3.2", "@inquirer/type": "^3.0.10" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ=="],
|
||||
|
||||
"@inquirer/core": ["@inquirer/core@10.3.2", "", { "dependencies": { "@inquirer/ansi": "^1.0.2", "@inquirer/figures": "^1.0.15", "@inquirer/type": "^3.0.10", "cli-width": "^4.1.0", "mute-stream": "^2.0.0", "signal-exit": "^4.1.0", "wrap-ansi": "^6.2.0", "yoctocolors-cjs": "^2.1.3" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A=="],
|
||||
|
||||
"@inquirer/figures": ["@inquirer/figures@1.0.15", "", {}, "sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g=="],
|
||||
|
||||
"@inquirer/type": ["@inquirer/type@3.0.10", "", { "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
|
||||
|
||||
"@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
|
||||
@@ -117,6 +206,8 @@
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@mswjs/interceptors": ["@mswjs/interceptors@0.41.3", "", { "dependencies": { "@open-draft/deferred-promise": "^2.2.0", "@open-draft/logger": "^0.3.0", "@open-draft/until": "^2.0.0", "is-node-process": "^1.2.0", "outvariant": "^1.4.3", "strict-event-emitter": "^0.5.1" } }, "sha512-cXu86tF4VQVfwz8W1SPbhoRyHJkti6mjH/XJIxp40jhO4j2k1m4KYrEykxqWPkFF3vrK4rgQppBh//AwyGSXPA=="],
|
||||
|
||||
"@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@0.2.12", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@tybys/wasm-util": "^0.10.0" } }, "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ=="],
|
||||
|
||||
"@next/env": ["@next/env@15.5.14", "", {}, "sha512-aXeirLYuASxEgi4X4WhfXsShCFxWDfNn/8ZeC5YXAS2BB4A8FJi1kwwGL6nvMVboE7fZCzmJPNdMvVHc8JpaiA=="],
|
||||
@@ -147,10 +238,82 @@
|
||||
|
||||
"@nolyfill/is-core-module": ["@nolyfill/is-core-module@1.0.39", "", {}, "sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA=="],
|
||||
|
||||
"@open-draft/deferred-promise": ["@open-draft/deferred-promise@2.2.0", "", {}, "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA=="],
|
||||
|
||||
"@open-draft/logger": ["@open-draft/logger@0.3.0", "", { "dependencies": { "is-node-process": "^1.2.0", "outvariant": "^1.4.0" } }, "sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ=="],
|
||||
|
||||
"@open-draft/until": ["@open-draft/until@2.1.0", "", {}, "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg=="],
|
||||
|
||||
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.60.0", "", { "os": "android", "cpu": "arm" }, "sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A=="],
|
||||
|
||||
"@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.60.0", "", { "os": "android", "cpu": "arm64" }, "sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw=="],
|
||||
|
||||
"@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.60.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA=="],
|
||||
|
||||
"@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.60.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw=="],
|
||||
|
||||
"@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.60.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw=="],
|
||||
|
||||
"@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.60.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA=="],
|
||||
|
||||
"@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.60.0", "", { "os": "linux", "cpu": "arm" }, "sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g=="],
|
||||
|
||||
"@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.60.0", "", { "os": "linux", "cpu": "arm" }, "sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ=="],
|
||||
|
||||
"@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.60.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A=="],
|
||||
|
||||
"@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.60.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ=="],
|
||||
|
||||
"@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.60.0", "", { "os": "linux", "cpu": "none" }, "sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw=="],
|
||||
|
||||
"@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.60.0", "", { "os": "linux", "cpu": "none" }, "sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog=="],
|
||||
|
||||
"@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.60.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ=="],
|
||||
|
||||
"@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.60.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg=="],
|
||||
|
||||
"@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.60.0", "", { "os": "linux", "cpu": "none" }, "sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA=="],
|
||||
|
||||
"@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.60.0", "", { "os": "linux", "cpu": "none" }, "sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ=="],
|
||||
|
||||
"@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.60.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ=="],
|
||||
|
||||
"@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.60.0", "", { "os": "linux", "cpu": "x64" }, "sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg=="],
|
||||
|
||||
"@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.60.0", "", { "os": "linux", "cpu": "x64" }, "sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw=="],
|
||||
|
||||
"@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.60.0", "", { "os": "openbsd", "cpu": "x64" }, "sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw=="],
|
||||
|
||||
"@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.60.0", "", { "os": "none", "cpu": "arm64" }, "sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA=="],
|
||||
|
||||
"@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.60.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ=="],
|
||||
|
||||
"@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.60.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w=="],
|
||||
|
||||
"@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.60.0", "", { "os": "win32", "cpu": "x64" }, "sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA=="],
|
||||
|
||||
"@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.60.0", "", { "os": "win32", "cpu": "x64" }, "sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w=="],
|
||||
|
||||
"@rtsao/scc": ["@rtsao/scc@1.1.0", "", {}, "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g=="],
|
||||
|
||||
"@rushstack/eslint-patch": ["@rushstack/eslint-patch@1.16.1", "", {}, "sha512-TvZbIpeKqGQQ7X0zSCvPH9riMSFQFSggnfBjFZ1mEoILW+UuXCKwOoPcgjMwiUtRqFZ8jWhPJc4um14vC6I4ag=="],
|
||||
|
||||
"@supabase/auth-js": ["@supabase/auth-js@2.100.0", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-pdT3ye3UVRN1Cg0wom6BmyY+XTtp5DiJaYnPi6j8ht5i8Lq8kfqxJMJz9GI9YDKk3w1nhGOPnh6Qz5qpyYm+1w=="],
|
||||
|
||||
"@supabase/functions-js": ["@supabase/functions-js@2.100.0", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-keLg79RPwP+uiwHuxFPTFgDRxPV46LM4j/swjyR2GKJgWniTVSsgiBHfbIBDcrQwehLepy09b/9QSHUywtKRWQ=="],
|
||||
|
||||
"@supabase/phoenix": ["@supabase/phoenix@0.4.0", "", {}, "sha512-RHSx8bHS02xwfHdAbX5Lpbo6PXbgyf7lTaXTlwtFDPwOIw64NnVRwFAXGojHhjtVYI+PEPNSWwkL90f4agN3bw=="],
|
||||
|
||||
"@supabase/postgrest-js": ["@supabase/postgrest-js@2.100.0", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-xYNvNbBJaXOGcrZ44wxwp5830uo1okMHGS8h8dm3u4f0xcZ39yzbryUsubTJW41MG2gbL/6U57cA4Pi6YMZ9pA=="],
|
||||
|
||||
"@supabase/realtime-js": ["@supabase/realtime-js@2.100.0", "", { "dependencies": { "@supabase/phoenix": "^0.4.0", "@types/ws": "^8.18.1", "tslib": "2.8.1", "ws": "^8.18.2" } }, "sha512-2AZs00zzEF0HuCKY8grz5eCYlwEfVi5HONLZFoNR6aDfxQivl8zdQYNjyFoqN2MZiVhQHD7u6XV/xHwM8mCEHw=="],
|
||||
|
||||
"@supabase/ssr": ["@supabase/ssr@0.9.0", "", { "dependencies": { "cookie": "^1.0.2" }, "peerDependencies": { "@supabase/supabase-js": "^2.97.0" } }, "sha512-UFY6otYV3yqCgV+AyHj80vNkTvbf1Gas2LW4dpbQ4ap6p6v3eB2oaDfcI99jsuJzwVBCFU4BJI+oDYyhNk1z0Q=="],
|
||||
|
||||
"@supabase/storage-js": ["@supabase/storage-js@2.100.0", "", { "dependencies": { "iceberg-js": "^0.8.1", "tslib": "2.8.1" } }, "sha512-d4EeuK6RNIgYNA2MU9kj8lQrLm5AzZ+WwpWjGkii6SADQNIGTC/uiaTRu02XJ5AmFALQfo8fLl9xuCkO6Xw+iQ=="],
|
||||
|
||||
"@supabase/supabase-js": ["@supabase/supabase-js@2.100.0", "", { "dependencies": { "@supabase/auth-js": "2.100.0", "@supabase/functions-js": "2.100.0", "@supabase/postgrest-js": "2.100.0", "@supabase/realtime-js": "2.100.0", "@supabase/storage-js": "2.100.0" } }, "sha512-r0tlcukejJXJ1m/2eG/Ya5eYs4W8AC7oZfShpG3+SIo/eIU9uIt76ZeYI1SoUwUmcmzlAbgch+HDZDR/toVQPQ=="],
|
||||
|
||||
"@swc/helpers": ["@swc/helpers@0.5.15", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g=="],
|
||||
|
||||
"@tailwindcss/node": ["@tailwindcss/node@4.2.2", "", { "dependencies": { "@jridgewell/remapping": "^2.3.5", "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", "lightningcss": "1.32.0", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.2.2" } }, "sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA=="],
|
||||
@@ -183,8 +346,22 @@
|
||||
|
||||
"@tailwindcss/postcss": ["@tailwindcss/postcss@4.2.2", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "@tailwindcss/node": "4.2.2", "@tailwindcss/oxide": "4.2.2", "postcss": "^8.5.6", "tailwindcss": "4.2.2" } }, "sha512-n4goKQbW8RVXIbNKRB/45LzyUqN451deQK0nzIeauVEqjlI49slUlgKYJM2QyUzap/PcpnS7kzSUmPb1sCRvYQ=="],
|
||||
|
||||
"@tanstack/query-core": ["@tanstack/query-core@5.95.2", "", {}, "sha512-o4T8vZHZET4Bib3jZ/tCW9/7080urD4c+0/AUaYVpIqOsr7y0reBc1oX3ttNaSW5mYyvZHctiQ/UOP2PfdmFEQ=="],
|
||||
|
||||
"@tanstack/react-query": ["@tanstack/react-query@5.95.2", "", { "dependencies": { "@tanstack/query-core": "5.95.2" }, "peerDependencies": { "react": "^18 || ^19" } }, "sha512-/wGkvLj/st5Ud1Q76KF1uFxScV7WeqN1slQx5280ycwAyYkIPGaRZAEgHxe3bjirSd5Zpwkj6zNcR4cqYni/ZA=="],
|
||||
|
||||
"@testing-library/dom": ["@testing-library/dom@10.4.1", "", { "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", "@types/aria-query": "^5.0.1", "aria-query": "5.3.0", "dom-accessibility-api": "^0.5.9", "lz-string": "^1.5.0", "picocolors": "1.1.1", "pretty-format": "^27.0.2" } }, "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg=="],
|
||||
|
||||
"@testing-library/jest-dom": ["@testing-library/jest-dom@6.9.1", "", { "dependencies": { "@adobe/css-tools": "^4.4.0", "aria-query": "^5.0.0", "css.escape": "^1.5.1", "dom-accessibility-api": "^0.6.3", "picocolors": "^1.1.1", "redent": "^3.0.0" } }, "sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA=="],
|
||||
|
||||
"@testing-library/react": ["@testing-library/react@16.3.2", "", { "dependencies": { "@babel/runtime": "^7.12.5" }, "peerDependencies": { "@testing-library/dom": "^10.0.0", "@types/react": "^18.0.0 || ^19.0.0", "@types/react-dom": "^18.0.0 || ^19.0.0", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-XU5/SytQM+ykqMnAnvB2umaJNIOsLF3PVv//1Ew4CTcpz0/BRyy/af40qqrt7SjKpDdT1saBMc42CUok5gaw+g=="],
|
||||
|
||||
"@testing-library/user-event": ["@testing-library/user-event@14.6.1", "", { "peerDependencies": { "@testing-library/dom": ">=7.21.4" } }, "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw=="],
|
||||
|
||||
"@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="],
|
||||
|
||||
"@types/aria-query": ["@types/aria-query@5.0.4", "", {}, "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw=="],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
|
||||
|
||||
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
|
||||
@@ -197,6 +374,10 @@
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@19.2.3", "", { "peerDependencies": { "@types/react": "^19.2.0" } }, "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ=="],
|
||||
|
||||
"@types/statuses": ["@types/statuses@2.0.6", "", {}, "sha512-xMAgYwceFhRA2zY+XbEA7mxYbA093wdiW8Vu6gZPGWy9cmOyU9XesH1tNcEWsKFd5Vzrqx5T3D38PWx1FIIXkA=="],
|
||||
|
||||
"@types/ws": ["@types/ws@8.18.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg=="],
|
||||
|
||||
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.57.2", "", { "dependencies": { "@eslint-community/regexpp": "^4.12.2", "@typescript-eslint/scope-manager": "8.57.2", "@typescript-eslint/type-utils": "8.57.2", "@typescript-eslint/utils": "8.57.2", "@typescript-eslint/visitor-keys": "8.57.2", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.57.2", "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-NZZgp0Fm2IkD+La5PR81sd+g+8oS6JwJje+aRWsDocxHkjyRw0J5L5ZTlN3LI1LlOcGL7ph3eaIUmTXMIjLk0w=="],
|
||||
|
||||
"@typescript-eslint/parser": ["@typescript-eslint/parser@8.57.2", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.57.2", "@typescript-eslint/types": "8.57.2", "@typescript-eslint/typescript-estree": "8.57.2", "@typescript-eslint/visitor-keys": "8.57.2", "debug": "^4.4.3" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-30ScMRHIAD33JJQkgfGW1t8CURZtjc2JpTrq5n2HFhOefbAhb7ucc7xJwdWcrEtqUIYJ73Nybpsggii6GtAHjA=="],
|
||||
@@ -255,12 +436,30 @@
|
||||
|
||||
"@unrs/resolver-binding-win32-x64-msvc": ["@unrs/resolver-binding-win32-x64-msvc@1.11.1", "", { "os": "win32", "cpu": "x64" }, "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g=="],
|
||||
|
||||
"@vitest/expect": ["@vitest/expect@2.1.8", "", { "dependencies": { "@vitest/spy": "2.1.8", "@vitest/utils": "2.1.8", "chai": "^5.1.2", "tinyrainbow": "^1.2.0" } }, "sha512-8ytZ/fFHq2g4PJVAtDX57mayemKgDR6X3Oa2Foro+EygiOJHUXhCqBAAKQYYajZpFoIfvBCF1j6R6IYRSIUFuw=="],
|
||||
|
||||
"@vitest/mocker": ["@vitest/mocker@2.1.8", "", { "dependencies": { "@vitest/spy": "2.1.8", "estree-walker": "^3.0.3", "magic-string": "^0.30.12" }, "peerDependencies": { "msw": "^2.4.9", "vite": "^5.0.0" }, "optionalPeers": ["msw", "vite"] }, "sha512-7guJ/47I6uqfttp33mgo6ga5Gr1VnL58rcqYKyShoRK9ebu8T5Rs6HN3s1NABiBeVTdWNrwUMcHH54uXZBN4zA=="],
|
||||
|
||||
"@vitest/pretty-format": ["@vitest/pretty-format@2.1.9", "", { "dependencies": { "tinyrainbow": "^1.2.0" } }, "sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ=="],
|
||||
|
||||
"@vitest/runner": ["@vitest/runner@2.1.8", "", { "dependencies": { "@vitest/utils": "2.1.8", "pathe": "^1.1.2" } }, "sha512-17ub8vQstRnRlIU5k50bG+QOMLHRhYPAna5tw8tYbj+jzjcspnwnwtPtiOlkuKC4+ixDPTuLZiqiWWQ2PSXHVg=="],
|
||||
|
||||
"@vitest/snapshot": ["@vitest/snapshot@2.1.8", "", { "dependencies": { "@vitest/pretty-format": "2.1.8", "magic-string": "^0.30.12", "pathe": "^1.1.2" } }, "sha512-20T7xRFbmnkfcmgVEz+z3AU/3b0cEzZOt/zmnvZEctg64/QZbSDJEVm9fLnnlSi74KibmRsO9/Qabi+t0vCRPg=="],
|
||||
|
||||
"@vitest/spy": ["@vitest/spy@2.1.8", "", { "dependencies": { "tinyspy": "^3.0.2" } }, "sha512-5swjf2q95gXeYPevtW0BLk6H8+bPlMb4Vw/9Em4hFxDcaOxS+e0LOX4yqNxoHzMR2akEB2xfpnWUzkZokmgWDg=="],
|
||||
|
||||
"@vitest/utils": ["@vitest/utils@2.1.8", "", { "dependencies": { "@vitest/pretty-format": "2.1.8", "loupe": "^3.1.2", "tinyrainbow": "^1.2.0" } }, "sha512-dwSoui6djdwbfFmIgbIjX2ZhIoG7Ex/+xpxyiEgIGzjliY8xGkcpITKTlp6B4MgtGkF2ilvm97cPM96XZaAgcA=="],
|
||||
|
||||
"acorn": ["acorn@8.16.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw=="],
|
||||
|
||||
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
|
||||
|
||||
"agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
|
||||
|
||||
"ajv": ["ajv@6.14.0", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw=="],
|
||||
|
||||
"ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
|
||||
|
||||
"ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
|
||||
|
||||
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
|
||||
@@ -283,10 +482,16 @@
|
||||
|
||||
"arraybuffer.prototype.slice": ["arraybuffer.prototype.slice@1.0.4", "", { "dependencies": { "array-buffer-byte-length": "^1.0.1", "call-bind": "^1.0.8", "define-properties": "^1.2.1", "es-abstract": "^1.23.5", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "is-array-buffer": "^3.0.4" } }, "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ=="],
|
||||
|
||||
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
|
||||
|
||||
"ast-types-flow": ["ast-types-flow@0.0.8", "", {}, "sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ=="],
|
||||
|
||||
"async-function": ["async-function@1.0.0", "", {}, "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA=="],
|
||||
|
||||
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
|
||||
|
||||
"attr-accept": ["attr-accept@2.2.5", "", {}, "sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ=="],
|
||||
|
||||
"available-typed-arrays": ["available-typed-arrays@1.0.7", "", { "dependencies": { "possible-typed-array-names": "^1.0.0" } }, "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ=="],
|
||||
|
||||
"axe-core": ["axe-core@4.11.1", "", {}, "sha512-BASOg+YwO2C+346x3LZOeoovTIoTrRqEsqMa6fmfAV0P+U9mFr9NsyOEpiYvFjbc64NMrSswhV50WdXzdb/Z5A=="],
|
||||
@@ -299,6 +504,8 @@
|
||||
|
||||
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
|
||||
|
||||
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
|
||||
|
||||
"call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="],
|
||||
|
||||
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
|
||||
@@ -309,46 +516,78 @@
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001781", "", {}, "sha512-RdwNCyMsNBftLjW6w01z8bKEvT6e/5tpPVEgtn22TiLGlstHOVecsX2KHFkD5e/vRnIE4EGzpuIODb3mtswtkw=="],
|
||||
|
||||
"chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="],
|
||||
|
||||
"chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="],
|
||||
|
||||
"check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="],
|
||||
|
||||
"cli-width": ["cli-width@4.1.0", "", {}, "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ=="],
|
||||
|
||||
"client-only": ["client-only@0.0.1", "", {}, "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="],
|
||||
|
||||
"cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="],
|
||||
|
||||
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
|
||||
|
||||
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
|
||||
|
||||
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
|
||||
|
||||
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
|
||||
|
||||
"cookie": ["cookie@1.1.1", "", {}, "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ=="],
|
||||
|
||||
"cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="],
|
||||
|
||||
"css.escape": ["css.escape@1.5.1", "", {}, "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg=="],
|
||||
|
||||
"cssstyle": ["cssstyle@4.6.0", "", { "dependencies": { "@asamuzakjp/css-color": "^3.2.0", "rrweb-cssom": "^0.8.0" } }, "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg=="],
|
||||
|
||||
"csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="],
|
||||
|
||||
"damerau-levenshtein": ["damerau-levenshtein@1.0.8", "", {}, "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA=="],
|
||||
|
||||
"data-urls": ["data-urls@5.0.0", "", { "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0" } }, "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg=="],
|
||||
|
||||
"data-view-buffer": ["data-view-buffer@1.0.2", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "is-data-view": "^1.0.2" } }, "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ=="],
|
||||
|
||||
"data-view-byte-length": ["data-view-byte-length@1.0.2", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "is-data-view": "^1.0.2" } }, "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ=="],
|
||||
|
||||
"data-view-byte-offset": ["data-view-byte-offset@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-data-view": "^1.0.1" } }, "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ=="],
|
||||
|
||||
"date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="],
|
||||
|
||||
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
||||
|
||||
"decimal.js": ["decimal.js@10.6.0", "", {}, "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg=="],
|
||||
|
||||
"deep-eql": ["deep-eql@5.0.2", "", {}, "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q=="],
|
||||
|
||||
"deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="],
|
||||
|
||||
"define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="],
|
||||
|
||||
"define-properties": ["define-properties@1.2.1", "", { "dependencies": { "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" } }, "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg=="],
|
||||
|
||||
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
||||
|
||||
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
|
||||
"doctrine": ["doctrine@2.1.0", "", { "dependencies": { "esutils": "^2.0.2" } }, "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw=="],
|
||||
|
||||
"dom-accessibility-api": ["dom-accessibility-api@0.6.3", "", {}, "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w=="],
|
||||
|
||||
"dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
|
||||
|
||||
"emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="],
|
||||
|
||||
"enhanced-resolve": ["enhanced-resolve@5.20.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.3.0" } }, "sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA=="],
|
||||
|
||||
"entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="],
|
||||
|
||||
"es-abstract": ["es-abstract@1.24.1", "", { "dependencies": { "array-buffer-byte-length": "^1.0.2", "arraybuffer.prototype.slice": "^1.0.4", "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", "call-bound": "^1.0.4", "data-view-buffer": "^1.0.2", "data-view-byte-length": "^1.0.2", "data-view-byte-offset": "^1.0.1", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "es-set-tostringtag": "^2.1.0", "es-to-primitive": "^1.3.0", "function.prototype.name": "^1.1.8", "get-intrinsic": "^1.3.0", "get-proto": "^1.0.1", "get-symbol-description": "^1.1.0", "globalthis": "^1.0.4", "gopd": "^1.2.0", "has-property-descriptors": "^1.0.2", "has-proto": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "internal-slot": "^1.1.0", "is-array-buffer": "^3.0.5", "is-callable": "^1.2.7", "is-data-view": "^1.0.2", "is-negative-zero": "^2.0.3", "is-regex": "^1.2.1", "is-set": "^2.0.3", "is-shared-array-buffer": "^1.0.4", "is-string": "^1.1.1", "is-typed-array": "^1.1.15", "is-weakref": "^1.1.1", "math-intrinsics": "^1.1.0", "object-inspect": "^1.13.4", "object-keys": "^1.1.1", "object.assign": "^4.1.7", "own-keys": "^1.0.1", "regexp.prototype.flags": "^1.5.4", "safe-array-concat": "^1.1.3", "safe-push-apply": "^1.0.0", "safe-regex-test": "^1.1.0", "set-proto": "^1.0.0", "stop-iteration-iterator": "^1.1.0", "string.prototype.trim": "^1.2.10", "string.prototype.trimend": "^1.0.9", "string.prototype.trimstart": "^1.0.8", "typed-array-buffer": "^1.0.3", "typed-array-byte-length": "^1.0.3", "typed-array-byte-offset": "^1.0.4", "typed-array-length": "^1.0.7", "unbox-primitive": "^1.1.0", "which-typed-array": "^1.1.19" } }, "sha512-zHXBLhP+QehSSbsS9Pt23Gg964240DPd6QCf8WpkqEXxQ7fhdZzYsocOr5u7apWonsS5EjZDmTF+/slGMyasvw=="],
|
||||
|
||||
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
|
||||
@@ -357,6 +596,8 @@
|
||||
|
||||
"es-iterator-helpers": ["es-iterator-helpers@1.3.1", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.4", "define-properties": "^1.2.1", "es-abstract": "^1.24.1", "es-errors": "^1.3.0", "es-set-tostringtag": "^2.1.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.3.0", "globalthis": "^1.0.4", "gopd": "^1.2.0", "has-property-descriptors": "^1.0.2", "has-proto": "^1.2.0", "has-symbols": "^1.1.0", "internal-slot": "^1.1.0", "iterator.prototype": "^1.1.5", "math-intrinsics": "^1.1.0", "safe-array-concat": "^1.1.3" } }, "sha512-zWwRvqWiuBPr0muUG/78cW3aHROFCNIQ3zpmYDpwdbnt2m+xlNyRWpHBpa2lJjSBit7BQ+RXA1iwbSmu5yJ/EQ=="],
|
||||
|
||||
"es-module-lexer": ["es-module-lexer@1.7.0", "", {}, "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA=="],
|
||||
|
||||
"es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
|
||||
|
||||
"es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="],
|
||||
@@ -365,6 +606,10 @@
|
||||
|
||||
"es-to-primitive": ["es-to-primitive@1.3.0", "", { "dependencies": { "is-callable": "^1.2.7", "is-date-object": "^1.0.5", "is-symbol": "^1.0.4" } }, "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g=="],
|
||||
|
||||
"esbuild": ["esbuild@0.21.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.21.5", "@esbuild/android-arm": "0.21.5", "@esbuild/android-arm64": "0.21.5", "@esbuild/android-x64": "0.21.5", "@esbuild/darwin-arm64": "0.21.5", "@esbuild/darwin-x64": "0.21.5", "@esbuild/freebsd-arm64": "0.21.5", "@esbuild/freebsd-x64": "0.21.5", "@esbuild/linux-arm": "0.21.5", "@esbuild/linux-arm64": "0.21.5", "@esbuild/linux-ia32": "0.21.5", "@esbuild/linux-loong64": "0.21.5", "@esbuild/linux-mips64el": "0.21.5", "@esbuild/linux-ppc64": "0.21.5", "@esbuild/linux-riscv64": "0.21.5", "@esbuild/linux-s390x": "0.21.5", "@esbuild/linux-x64": "0.21.5", "@esbuild/netbsd-x64": "0.21.5", "@esbuild/openbsd-x64": "0.21.5", "@esbuild/sunos-x64": "0.21.5", "@esbuild/win32-arm64": "0.21.5", "@esbuild/win32-ia32": "0.21.5", "@esbuild/win32-x64": "0.21.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw=="],
|
||||
|
||||
"escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
|
||||
|
||||
"escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="],
|
||||
|
||||
"eslint": ["eslint@9.39.4", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.2", "@eslint/config-helpers": "^0.4.2", "@eslint/core": "^0.17.0", "@eslint/eslintrc": "^3.3.5", "@eslint/js": "9.39.4", "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "ajv": "^6.14.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.4.0", "eslint-visitor-keys": "^4.2.1", "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.5", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ=="],
|
||||
@@ -397,8 +642,12 @@
|
||||
|
||||
"estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="],
|
||||
|
||||
"estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="],
|
||||
|
||||
"esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="],
|
||||
|
||||
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
|
||||
"fast-glob": ["fast-glob@3.3.1", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" } }, "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg=="],
|
||||
@@ -413,6 +662,8 @@
|
||||
|
||||
"file-entry-cache": ["file-entry-cache@8.0.0", "", { "dependencies": { "flat-cache": "^4.0.0" } }, "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="],
|
||||
|
||||
"file-selector": ["file-selector@2.1.2", "", { "dependencies": { "tslib": "^2.7.0" } }, "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig=="],
|
||||
|
||||
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
|
||||
|
||||
"find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="],
|
||||
@@ -423,6 +674,10 @@
|
||||
|
||||
"for-each": ["for-each@0.3.5", "", { "dependencies": { "is-callable": "^1.2.7" } }, "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg=="],
|
||||
|
||||
"form-data": ["form-data@4.0.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w=="],
|
||||
|
||||
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
|
||||
|
||||
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
|
||||
|
||||
"function.prototype.name": ["function.prototype.name@1.1.8", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "functions-have-names": "^1.2.3", "hasown": "^2.0.2", "is-callable": "^1.2.7" } }, "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q=="],
|
||||
@@ -431,6 +686,8 @@
|
||||
|
||||
"generator-function": ["generator-function@2.0.1", "", {}, "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g=="],
|
||||
|
||||
"get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],
|
||||
|
||||
"get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="],
|
||||
|
||||
"get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
|
||||
@@ -449,6 +706,8 @@
|
||||
|
||||
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
|
||||
|
||||
"graphql": ["graphql@16.13.2", "", {}, "sha512-5bJ+nf/UCpAjHM8i06fl7eLyVC9iuNAjm9qzkiu2ZGhM0VscSvS6WDPfAwkdkBuoXGM9FJSbKl6wylMwP9Ktig=="],
|
||||
|
||||
"has-bigints": ["has-bigints@1.1.0", "", {}, "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg=="],
|
||||
|
||||
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
|
||||
@@ -463,12 +722,26 @@
|
||||
|
||||
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
|
||||
|
||||
"headers-polyfill": ["headers-polyfill@4.0.3", "", {}, "sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ=="],
|
||||
|
||||
"html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="],
|
||||
|
||||
"http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="],
|
||||
|
||||
"https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="],
|
||||
|
||||
"iceberg-js": ["iceberg-js@0.8.1", "", {}, "sha512-1dhVQZXhcHje7798IVM+xoo/1ZdVfzOMIc8/rgVSijRK38EDqOJoGula9N/8ZI5RD8QTxNQtK/Gozpr+qUqRRA=="],
|
||||
|
||||
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
||||
|
||||
"ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
|
||||
|
||||
"import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="],
|
||||
|
||||
"imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="],
|
||||
|
||||
"indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="],
|
||||
|
||||
"internal-slot": ["internal-slot@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "hasown": "^2.0.2", "side-channel": "^1.1.0" } }, "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw=="],
|
||||
|
||||
"is-array-buffer": ["is-array-buffer@3.0.5", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "get-intrinsic": "^1.2.6" } }, "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A=="],
|
||||
@@ -493,6 +766,8 @@
|
||||
|
||||
"is-finalizationregistry": ["is-finalizationregistry@1.1.1", "", { "dependencies": { "call-bound": "^1.0.3" } }, "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg=="],
|
||||
|
||||
"is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
|
||||
|
||||
"is-generator-function": ["is-generator-function@1.1.2", "", { "dependencies": { "call-bound": "^1.0.4", "generator-function": "^2.0.0", "get-proto": "^1.0.1", "has-tostringtag": "^1.0.2", "safe-regex-test": "^1.1.0" } }, "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA=="],
|
||||
|
||||
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
|
||||
@@ -501,10 +776,14 @@
|
||||
|
||||
"is-negative-zero": ["is-negative-zero@2.0.3", "", {}, "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw=="],
|
||||
|
||||
"is-node-process": ["is-node-process@1.2.0", "", {}, "sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw=="],
|
||||
|
||||
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
|
||||
|
||||
"is-number-object": ["is-number-object@1.1.1", "", { "dependencies": { "call-bound": "^1.0.3", "has-tostringtag": "^1.0.2" } }, "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw=="],
|
||||
|
||||
"is-potential-custom-element-name": ["is-potential-custom-element-name@1.0.1", "", {}, "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="],
|
||||
|
||||
"is-regex": ["is-regex@1.2.1", "", { "dependencies": { "call-bound": "^1.0.2", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g=="],
|
||||
|
||||
"is-set": ["is-set@2.0.3", "", {}, "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg=="],
|
||||
@@ -535,6 +814,8 @@
|
||||
|
||||
"js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="],
|
||||
|
||||
"jsdom": ["jsdom@24.1.3", "", { "dependencies": { "cssstyle": "^4.0.1", "data-urls": "^5.0.0", "decimal.js": "^10.4.3", "form-data": "^4.0.0", "html-encoding-sniffer": "^4.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.5", "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.12", "parse5": "^7.1.2", "rrweb-cssom": "^0.7.1", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", "tough-cookie": "^4.1.4", "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^7.0.0", "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0", "ws": "^8.18.0", "xml-name-validator": "^5.0.0" }, "peerDependencies": { "canvas": "^2.11.2" }, "optionalPeers": ["canvas"] }, "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ=="],
|
||||
|
||||
"json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="],
|
||||
|
||||
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
|
||||
@@ -583,6 +864,14 @@
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
|
||||
|
||||
"lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
|
||||
|
||||
"lucide-react": ["lucide-react@1.6.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-YxLKVCOF5ZDI1AhKQE5IBYMY9y/Nr4NT15+7QEWpsTSVCdn4vmZhww+6BP76jWYjQx8rSz1Z+gGme1f+UycWEw=="],
|
||||
|
||||
"lz-string": ["lz-string@1.5.0", "", { "bin": { "lz-string": "bin/bin.js" } }, "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ=="],
|
||||
|
||||
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
|
||||
|
||||
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
|
||||
@@ -591,12 +880,22 @@
|
||||
|
||||
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
|
||||
|
||||
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||
|
||||
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||
|
||||
"min-indent": ["min-indent@1.0.1", "", {}, "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg=="],
|
||||
|
||||
"minimatch": ["minimatch@3.1.5", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w=="],
|
||||
|
||||
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"msw": ["msw@2.12.14", "", { "dependencies": { "@inquirer/confirm": "^5.0.0", "@mswjs/interceptors": "^0.41.2", "@open-draft/deferred-promise": "^2.2.0", "@types/statuses": "^2.0.6", "cookie": "^1.0.2", "graphql": "^16.12.0", "headers-polyfill": "^4.0.2", "is-node-process": "^1.2.0", "outvariant": "^1.4.3", "path-to-regexp": "^6.3.0", "picocolors": "^1.1.1", "rettime": "^0.10.1", "statuses": "^2.0.2", "strict-event-emitter": "^0.5.1", "tough-cookie": "^6.0.0", "type-fest": "^5.2.0", "until-async": "^3.0.2", "yargs": "^17.7.2" }, "peerDependencies": { "typescript": ">= 4.8.x" }, "optionalPeers": ["typescript"], "bin": { "msw": "cli/index.js" } }, "sha512-4KXa4nVBIBjbDbd7vfQNuQ25eFxug0aropCQFoI0JdOBuJWamkT1yLVIWReFI8SiTRc+H1hKzaNk+cLk2N9rtQ=="],
|
||||
|
||||
"mute-stream": ["mute-stream@2.0.0", "", {}, "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA=="],
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"napi-postinstall": ["napi-postinstall@0.3.4", "", { "bin": { "napi-postinstall": "lib/cli.js" } }, "sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ=="],
|
||||
@@ -607,6 +906,8 @@
|
||||
|
||||
"node-exports-info": ["node-exports-info@1.6.0", "", { "dependencies": { "array.prototype.flatmap": "^1.3.3", "es-errors": "^1.3.0", "object.entries": "^1.1.9", "semver": "^6.3.1" } }, "sha512-pyFS63ptit/P5WqUkt+UUfe+4oevH+bFeIiPPdfb0pFeYEu/1ELnJu5l+5EcTKYL5M7zaAa7S8ddywgXypqKCw=="],
|
||||
|
||||
"nwsapi": ["nwsapi@2.2.23", "", {}, "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ=="],
|
||||
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
|
||||
"object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="],
|
||||
@@ -625,6 +926,8 @@
|
||||
|
||||
"optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="],
|
||||
|
||||
"outvariant": ["outvariant@1.4.3", "", {}, "sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA=="],
|
||||
|
||||
"own-keys": ["own-keys@1.0.1", "", { "dependencies": { "get-intrinsic": "^1.2.6", "object-keys": "^1.1.1", "safe-push-apply": "^1.0.0" } }, "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg=="],
|
||||
|
||||
"p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="],
|
||||
@@ -633,12 +936,20 @@
|
||||
|
||||
"parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="],
|
||||
|
||||
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
|
||||
|
||||
"path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="],
|
||||
|
||||
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||
|
||||
"path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="],
|
||||
|
||||
"path-to-regexp": ["path-to-regexp@6.3.0", "", {}, "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ=="],
|
||||
|
||||
"pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
|
||||
|
||||
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
|
||||
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"picomatch": ["picomatch@4.0.4", "", {}, "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A=="],
|
||||
@@ -649,30 +960,50 @@
|
||||
|
||||
"prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="],
|
||||
|
||||
"pretty-format": ["pretty-format@27.5.1", "", { "dependencies": { "ansi-regex": "^5.0.1", "ansi-styles": "^5.0.0", "react-is": "^17.0.1" } }, "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ=="],
|
||||
|
||||
"prop-types": ["prop-types@15.8.1", "", { "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.13.1" } }, "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg=="],
|
||||
|
||||
"psl": ["psl@1.15.0", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w=="],
|
||||
|
||||
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
|
||||
|
||||
"querystringify": ["querystringify@2.2.0", "", {}, "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="],
|
||||
|
||||
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||
|
||||
"react": ["react@19.1.0", "", {}, "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg=="],
|
||||
|
||||
"react-dom": ["react-dom@19.1.0", "", { "dependencies": { "scheduler": "^0.26.0" }, "peerDependencies": { "react": "^19.1.0" } }, "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g=="],
|
||||
|
||||
"react-dropzone": ["react-dropzone@15.0.0", "", { "dependencies": { "attr-accept": "^2.2.4", "file-selector": "^2.1.0", "prop-types": "^15.8.1" }, "peerDependencies": { "react": ">= 16.8 || 18.0.0" } }, "sha512-lGjYV/EoqEjEWPnmiSvH4v5IoIAwQM2W4Z1C0Q/Pw2xD0eVzKPS359BQTUMum+1fa0kH2nrKjuavmTPOGhpLPg=="],
|
||||
|
||||
"react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
|
||||
|
||||
"redent": ["redent@3.0.0", "", { "dependencies": { "indent-string": "^4.0.0", "strip-indent": "^3.0.0" } }, "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg=="],
|
||||
|
||||
"reflect.getprototypeof": ["reflect.getprototypeof@1.0.10", "", { "dependencies": { "call-bind": "^1.0.8", "define-properties": "^1.2.1", "es-abstract": "^1.23.9", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "get-intrinsic": "^1.2.7", "get-proto": "^1.0.1", "which-builtin-type": "^1.2.1" } }, "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw=="],
|
||||
|
||||
"regexp.prototype.flags": ["regexp.prototype.flags@1.5.4", "", { "dependencies": { "call-bind": "^1.0.8", "define-properties": "^1.2.1", "es-errors": "^1.3.0", "get-proto": "^1.0.1", "gopd": "^1.2.0", "set-function-name": "^2.0.2" } }, "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA=="],
|
||||
|
||||
"require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
|
||||
|
||||
"requires-port": ["requires-port@1.0.0", "", {}, "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="],
|
||||
|
||||
"resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="],
|
||||
|
||||
"resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="],
|
||||
|
||||
"resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
|
||||
|
||||
"rettime": ["rettime@0.10.1", "", {}, "sha512-uyDrIlUEH37cinabq0AX4QbgV4HbFZ/gqoiunWQ1UqBtRvTTytwhNYjE++pO/MjPTZL5KQCf2bEoJ/BJNVQ5Kw=="],
|
||||
|
||||
"reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="],
|
||||
|
||||
"rollup": ["rollup@4.60.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.60.0", "@rollup/rollup-android-arm64": "4.60.0", "@rollup/rollup-darwin-arm64": "4.60.0", "@rollup/rollup-darwin-x64": "4.60.0", "@rollup/rollup-freebsd-arm64": "4.60.0", "@rollup/rollup-freebsd-x64": "4.60.0", "@rollup/rollup-linux-arm-gnueabihf": "4.60.0", "@rollup/rollup-linux-arm-musleabihf": "4.60.0", "@rollup/rollup-linux-arm64-gnu": "4.60.0", "@rollup/rollup-linux-arm64-musl": "4.60.0", "@rollup/rollup-linux-loong64-gnu": "4.60.0", "@rollup/rollup-linux-loong64-musl": "4.60.0", "@rollup/rollup-linux-ppc64-gnu": "4.60.0", "@rollup/rollup-linux-ppc64-musl": "4.60.0", "@rollup/rollup-linux-riscv64-gnu": "4.60.0", "@rollup/rollup-linux-riscv64-musl": "4.60.0", "@rollup/rollup-linux-s390x-gnu": "4.60.0", "@rollup/rollup-linux-x64-gnu": "4.60.0", "@rollup/rollup-linux-x64-musl": "4.60.0", "@rollup/rollup-openbsd-x64": "4.60.0", "@rollup/rollup-openharmony-arm64": "4.60.0", "@rollup/rollup-win32-arm64-msvc": "4.60.0", "@rollup/rollup-win32-ia32-msvc": "4.60.0", "@rollup/rollup-win32-x64-gnu": "4.60.0", "@rollup/rollup-win32-x64-msvc": "4.60.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ=="],
|
||||
|
||||
"rrweb-cssom": ["rrweb-cssom@0.7.1", "", {}, "sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg=="],
|
||||
|
||||
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
|
||||
|
||||
"safe-array-concat": ["safe-array-concat@1.1.3", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.2", "get-intrinsic": "^1.2.6", "has-symbols": "^1.1.0", "isarray": "^2.0.5" } }, "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q=="],
|
||||
@@ -681,6 +1012,10 @@
|
||||
|
||||
"safe-regex-test": ["safe-regex-test@1.1.0", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-regex": "^1.2.1" } }, "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw=="],
|
||||
|
||||
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
|
||||
|
||||
"saxes": ["saxes@6.0.0", "", { "dependencies": { "xmlchars": "^2.2.0" } }, "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA=="],
|
||||
|
||||
"scheduler": ["scheduler@0.26.0", "", {}, "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA=="],
|
||||
|
||||
"semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],
|
||||
@@ -705,12 +1040,28 @@
|
||||
|
||||
"side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="],
|
||||
|
||||
"siginfo": ["siginfo@2.0.0", "", {}, "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g=="],
|
||||
|
||||
"signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="],
|
||||
|
||||
"sonner": ["sonner@2.0.7", "", { "peerDependencies": { "react": "^18.0.0 || ^19.0.0 || ^19.0.0-rc", "react-dom": "^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-W6ZN4p58k8aDKA4XPcx2hpIQXBRAgyiWVkYhT7CvK6D3iAu7xjvVyhQHg2/iaKJZ1XVJ4r7XuwGL+WGEK37i9w=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"stable-hash": ["stable-hash@0.0.5", "", {}, "sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA=="],
|
||||
|
||||
"stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="],
|
||||
|
||||
"statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="],
|
||||
|
||||
"std-env": ["std-env@3.10.0", "", {}, "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg=="],
|
||||
|
||||
"stop-iteration-iterator": ["stop-iteration-iterator@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "internal-slot": "^1.1.0" } }, "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ=="],
|
||||
|
||||
"strict-event-emitter": ["strict-event-emitter@0.5.1", "", {}, "sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ=="],
|
||||
|
||||
"string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
|
||||
|
||||
"string.prototype.includes": ["string.prototype.includes@2.0.1", "", { "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", "es-abstract": "^1.23.3" } }, "sha512-o7+c9bW6zpAdJHTtujeePODAhkuicdAryFsfVKwA+wGw89wJ4GTY484WTucM9hLtDEOpOvI+aHnzqnC5lHp4Rg=="],
|
||||
|
||||
"string.prototype.matchall": ["string.prototype.matchall@4.0.12", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "es-abstract": "^1.23.6", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "get-intrinsic": "^1.2.6", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "internal-slot": "^1.1.0", "regexp.prototype.flags": "^1.5.3", "set-function-name": "^2.0.2", "side-channel": "^1.1.0" } }, "sha512-6CC9uyBL+/48dYizRf7H7VAYCMCNTBeM78x/VTUe9bFEaxBepPJDa1Ow99LqI/1yF7kuy7Q3cQsYMrcjGUcskA=="],
|
||||
@@ -723,8 +1074,12 @@
|
||||
|
||||
"string.prototype.trimstart": ["string.prototype.trimstart@1.0.8", "", { "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" } }, "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg=="],
|
||||
|
||||
"strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
|
||||
|
||||
"strip-bom": ["strip-bom@3.0.0", "", {}, "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA=="],
|
||||
|
||||
"strip-indent": ["strip-indent@3.0.0", "", { "dependencies": { "min-indent": "^1.0.0" } }, "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ=="],
|
||||
|
||||
"strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="],
|
||||
|
||||
"styled-jsx": ["styled-jsx@5.1.6", "", { "dependencies": { "client-only": "0.0.1" }, "peerDependencies": { "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0" } }, "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA=="],
|
||||
@@ -733,14 +1088,36 @@
|
||||
|
||||
"supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="],
|
||||
|
||||
"symbol-tree": ["symbol-tree@3.2.4", "", {}, "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw=="],
|
||||
|
||||
"tagged-tag": ["tagged-tag@1.0.0", "", {}, "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng=="],
|
||||
|
||||
"tailwindcss": ["tailwindcss@4.2.2", "", {}, "sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q=="],
|
||||
|
||||
"tapable": ["tapable@2.3.2", "", {}, "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA=="],
|
||||
|
||||
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
|
||||
|
||||
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
|
||||
|
||||
"tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="],
|
||||
|
||||
"tinypool": ["tinypool@1.1.1", "", {}, "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg=="],
|
||||
|
||||
"tinyrainbow": ["tinyrainbow@1.2.0", "", {}, "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ=="],
|
||||
|
||||
"tinyspy": ["tinyspy@3.0.2", "", {}, "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q=="],
|
||||
|
||||
"tldts": ["tldts@7.0.27", "", { "dependencies": { "tldts-core": "^7.0.27" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-I4FZcVFcqCRuT0ph6dCDpPuO4Xgzvh+spkcTr1gK7peIvxWauoloVO0vuy1FQnijT63ss6AsHB6+OIM4aXHbPg=="],
|
||||
|
||||
"tldts-core": ["tldts-core@7.0.27", "", {}, "sha512-YQ7uPjgWUibIK6DW5lrKujGwUKhLevU4hcGbP5O6TcIUb+oTjJYJVWPS4nZsIHrEEEG6myk/oqAJUEQmpZrHsg=="],
|
||||
|
||||
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
|
||||
|
||||
"tough-cookie": ["tough-cookie@4.1.4", "", { "dependencies": { "psl": "^1.1.33", "punycode": "^2.1.1", "universalify": "^0.2.0", "url-parse": "^1.5.3" } }, "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag=="],
|
||||
|
||||
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
|
||||
|
||||
"ts-api-utils": ["ts-api-utils@2.5.0", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA=="],
|
||||
|
||||
"tsconfig-paths": ["tsconfig-paths@3.15.0", "", { "dependencies": { "@types/json5": "^0.0.29", "json5": "^1.0.2", "minimist": "^1.2.6", "strip-bom": "^3.0.0" } }, "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg=="],
|
||||
@@ -749,6 +1126,8 @@
|
||||
|
||||
"type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="],
|
||||
|
||||
"type-fest": ["type-fest@5.5.0", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g=="],
|
||||
|
||||
"typed-array-buffer": ["typed-array-buffer@1.0.3", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "is-typed-array": "^1.1.14" } }, "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw=="],
|
||||
|
||||
"typed-array-byte-length": ["typed-array-byte-length@1.0.3", "", { "dependencies": { "call-bind": "^1.0.8", "for-each": "^0.3.3", "gopd": "^1.2.0", "has-proto": "^1.2.0", "is-typed-array": "^1.1.14" } }, "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg=="],
|
||||
@@ -763,10 +1142,32 @@
|
||||
|
||||
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||
|
||||
"universalify": ["universalify@0.2.0", "", {}, "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="],
|
||||
|
||||
"unrs-resolver": ["unrs-resolver@1.11.1", "", { "dependencies": { "napi-postinstall": "^0.3.0" }, "optionalDependencies": { "@unrs/resolver-binding-android-arm-eabi": "1.11.1", "@unrs/resolver-binding-android-arm64": "1.11.1", "@unrs/resolver-binding-darwin-arm64": "1.11.1", "@unrs/resolver-binding-darwin-x64": "1.11.1", "@unrs/resolver-binding-freebsd-x64": "1.11.1", "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", "@unrs/resolver-binding-linux-x64-musl": "1.11.1", "@unrs/resolver-binding-wasm32-wasi": "1.11.1", "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" } }, "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg=="],
|
||||
|
||||
"until-async": ["until-async@3.0.2", "", {}, "sha512-IiSk4HlzAMqTUseHHe3VhIGyuFmN90zMTpD3Z3y8jeQbzLIq500MVM7Jq2vUAnTKAFPJrqwkzr6PoTcPhGcOiw=="],
|
||||
|
||||
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
|
||||
|
||||
"url-parse": ["url-parse@1.5.10", "", { "dependencies": { "querystringify": "^2.1.1", "requires-port": "^1.0.0" } }, "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ=="],
|
||||
|
||||
"vite": ["vite@5.4.21", "", { "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", "rollup": "^4.20.0" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || >=20.0.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.4.0" }, "optionalPeers": ["@types/node", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser"], "bin": { "vite": "bin/vite.js" } }, "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw=="],
|
||||
|
||||
"vite-node": ["vite-node@2.1.8", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.3.7", "es-module-lexer": "^1.5.4", "pathe": "^1.1.2", "vite": "^5.0.0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-uPAwSr57kYjAUux+8E2j0q0Fxpn8M9VoyfGiRI8Kfktz9NcYMCenwY5RnZxnF1WTu3TGiYipirIzacLL3VVGFg=="],
|
||||
|
||||
"vitest": ["vitest@2.1.8", "", { "dependencies": { "@vitest/expect": "2.1.8", "@vitest/mocker": "2.1.8", "@vitest/pretty-format": "^2.1.8", "@vitest/runner": "2.1.8", "@vitest/snapshot": "2.1.8", "@vitest/spy": "2.1.8", "@vitest/utils": "2.1.8", "chai": "^5.1.2", "debug": "^4.3.7", "expect-type": "^1.1.0", "magic-string": "^0.30.12", "pathe": "^1.1.2", "std-env": "^3.8.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.1", "tinypool": "^1.0.1", "tinyrainbow": "^1.2.0", "vite": "^5.0.0", "vite-node": "2.1.8", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@types/node": "^18.0.0 || >=20.0.0", "@vitest/browser": "2.1.8", "@vitest/ui": "2.1.8", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@types/node", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-1vBKTZskHw/aosXqQUlVWWlGUxSJR8YtiyZDJAFeW2kPAeX6S3Sool0mjspO+kXLuxVWlEDDowBAeqeAQefqLQ=="],
|
||||
|
||||
"w3c-xmlserializer": ["w3c-xmlserializer@5.0.0", "", { "dependencies": { "xml-name-validator": "^5.0.0" } }, "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA=="],
|
||||
|
||||
"webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="],
|
||||
|
||||
"whatwg-encoding": ["whatwg-encoding@3.1.1", "", { "dependencies": { "iconv-lite": "0.6.3" } }, "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ=="],
|
||||
|
||||
"whatwg-mimetype": ["whatwg-mimetype@4.0.0", "", {}, "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg=="],
|
||||
|
||||
"whatwg-url": ["whatwg-url@14.2.0", "", { "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } }, "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw=="],
|
||||
|
||||
"which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
|
||||
"which-boxed-primitive": ["which-boxed-primitive@1.1.1", "", { "dependencies": { "is-bigint": "^1.1.0", "is-boolean-object": "^1.2.1", "is-number-object": "^1.1.1", "is-string": "^1.1.1", "is-symbol": "^1.1.1" } }, "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA=="],
|
||||
@@ -777,10 +1178,28 @@
|
||||
|
||||
"which-typed-array": ["which-typed-array@1.1.20", "", { "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", "call-bound": "^1.0.4", "for-each": "^0.3.5", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" } }, "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg=="],
|
||||
|
||||
"why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { "why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="],
|
||||
|
||||
"word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="],
|
||||
|
||||
"wrap-ansi": ["wrap-ansi@6.2.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA=="],
|
||||
|
||||
"ws": ["ws@8.20.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA=="],
|
||||
|
||||
"xml-name-validator": ["xml-name-validator@5.0.0", "", {}, "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg=="],
|
||||
|
||||
"xmlchars": ["xmlchars@2.2.0", "", {}, "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw=="],
|
||||
|
||||
"y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
|
||||
|
||||
"yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="],
|
||||
|
||||
"yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
|
||||
|
||||
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
|
||||
|
||||
"yoctocolors-cjs": ["yoctocolors-cjs@2.1.3", "", {}, "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw=="],
|
||||
|
||||
"@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
|
||||
|
||||
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.9.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.2.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA=="],
|
||||
@@ -795,6 +1214,10 @@
|
||||
|
||||
"@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"@testing-library/dom/aria-query": ["aria-query@5.3.0", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A=="],
|
||||
|
||||
"@testing-library/dom/dom-accessibility-api": ["dom-accessibility-api@0.5.16", "", {}, "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg=="],
|
||||
|
||||
"@typescript-eslint/eslint-plugin/ignore": ["ignore@7.0.5", "", {}, "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="],
|
||||
|
||||
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@10.2.4", "", { "dependencies": { "brace-expansion": "^5.0.2" } }, "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg=="],
|
||||
@@ -803,6 +1226,14 @@
|
||||
|
||||
"@typescript-eslint/visitor-keys/eslint-visitor-keys": ["eslint-visitor-keys@5.0.1", "", {}, "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA=="],
|
||||
|
||||
"@vitest/snapshot/@vitest/pretty-format": ["@vitest/pretty-format@2.1.8", "", { "dependencies": { "tinyrainbow": "^1.2.0" } }, "sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ=="],
|
||||
|
||||
"@vitest/utils/@vitest/pretty-format": ["@vitest/pretty-format@2.1.8", "", { "dependencies": { "tinyrainbow": "^1.2.0" } }, "sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ=="],
|
||||
|
||||
"cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
|
||||
|
||||
"cssstyle/rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
|
||||
|
||||
"eslint-import-resolver-node/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
|
||||
|
||||
"eslint-module-utils/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
|
||||
@@ -817,10 +1248,18 @@
|
||||
|
||||
"micromatch/picomatch": ["picomatch@2.3.2", "", {}, "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA=="],
|
||||
|
||||
"msw/tough-cookie": ["tough-cookie@6.0.1", "", { "dependencies": { "tldts": "^7.0.5" } }, "sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw=="],
|
||||
|
||||
"next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
|
||||
|
||||
"pretty-format/ansi-styles": ["ansi-styles@5.2.0", "", {}, "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA=="],
|
||||
|
||||
"pretty-format/react-is": ["react-is@17.0.2", "", {}, "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w=="],
|
||||
|
||||
"sharp/semver": ["semver@7.7.4", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="],
|
||||
|
||||
"string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
|
||||
|
||||
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@5.0.4", "", { "dependencies": { "balanced-match": "^4.0.2" } }, "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg=="],
|
||||
|
||||
"@typescript-eslint/typescript-estree/minimatch/brace-expansion/balanced-match": ["balanced-match@4.0.4", "", {}, "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA=="],
|
||||
|
||||
6
frontend/next-env.d.ts
vendored
Normal file
6
frontend/next-env.d.ts
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
/// <reference path="./.next/types/routes.d.ts" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
@@ -5,7 +5,7 @@ const nextConfig: NextConfig = {
|
||||
rewrites: async () => [
|
||||
{
|
||||
source: "/api/:path*",
|
||||
destination: `${process.env.API_URL || "http://localhost:8080"}/:path*`,
|
||||
destination: `${process.env.API_URL || "http://localhost:8080"}/api/:path*`,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
@@ -6,22 +6,37 @@
|
||||
"dev": "next dev --turbopack",
|
||||
"build": "next build --turbopack",
|
||||
"start": "next start",
|
||||
"lint": "eslint"
|
||||
"lint": "eslint",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"dependencies": {
|
||||
"@supabase/ssr": "^0.9.0",
|
||||
"@supabase/supabase-js": "^2.100.0",
|
||||
"@tanstack/react-query": "^5.95.2",
|
||||
"date-fns": "^4.1.0",
|
||||
"lucide-react": "^1.6.0",
|
||||
"next": "15.5.14",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"next": "15.5.14"
|
||||
"react-dropzone": "^15.0.0",
|
||||
"sonner": "^2.0.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5",
|
||||
"@eslint/eslintrc": "^3",
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"tailwindcss": "^4",
|
||||
"eslint": "^9",
|
||||
"eslint-config-next": "15.5.14",
|
||||
"@eslint/eslintrc": "^3"
|
||||
"jsdom": "24.1.3",
|
||||
"msw": "^2.12.14",
|
||||
"tailwindcss": "^4",
|
||||
"typescript": "^5",
|
||||
"vitest": "2.1.8"
|
||||
}
|
||||
}
|
||||
|
||||
47
frontend/src/__tests__/CaseOverviewGrid.test.tsx
Normal file
47
frontend/src/__tests__/CaseOverviewGrid.test.tsx
Normal file
@@ -0,0 +1,47 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { render, screen } from "@testing-library/react";
|
||||
import { CaseOverviewGrid } from "@/components/dashboard/CaseOverviewGrid";
|
||||
import type { CaseSummary } from "@/lib/types";
|
||||
|
||||
describe("CaseOverviewGrid", () => {
|
||||
const defaultData: CaseSummary = {
|
||||
active_count: 15,
|
||||
new_this_month: 4,
|
||||
closed_count: 8,
|
||||
};
|
||||
|
||||
it("renders all three case categories", () => {
|
||||
render(<CaseOverviewGrid data={defaultData} />);
|
||||
|
||||
expect(screen.getByText("Aktive Akten")).toBeInTheDocument();
|
||||
expect(screen.getByText("Neu (Monat)")).toBeInTheDocument();
|
||||
expect(screen.getByText("Abgeschlossen")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("displays correct counts", () => {
|
||||
render(<CaseOverviewGrid data={defaultData} />);
|
||||
|
||||
expect(screen.getByText("15")).toBeInTheDocument();
|
||||
expect(screen.getByText("4")).toBeInTheDocument();
|
||||
expect(screen.getByText("8")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("renders the section header", () => {
|
||||
render(<CaseOverviewGrid data={defaultData} />);
|
||||
|
||||
expect(screen.getByText("Aktenübersicht")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("handles zero counts", () => {
|
||||
const zeroData: CaseSummary = {
|
||||
active_count: 0,
|
||||
new_this_month: 0,
|
||||
closed_count: 0,
|
||||
};
|
||||
|
||||
render(<CaseOverviewGrid data={zeroData} />);
|
||||
|
||||
const zeros = screen.getAllByText("0");
|
||||
expect(zeros).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
67
frontend/src/__tests__/DeadlineTrafficLights.test.tsx
Normal file
67
frontend/src/__tests__/DeadlineTrafficLights.test.tsx
Normal file
@@ -0,0 +1,67 @@
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
import { render, screen, fireEvent } from "@testing-library/react";
|
||||
import { DeadlineTrafficLights } from "@/components/dashboard/DeadlineTrafficLights";
|
||||
import type { DeadlineSummary } from "@/lib/types";
|
||||
|
||||
describe("DeadlineTrafficLights", () => {
|
||||
const defaultData: DeadlineSummary = {
|
||||
overdue_count: 3,
|
||||
due_this_week: 5,
|
||||
due_next_week: 2,
|
||||
ok_count: 10,
|
||||
};
|
||||
|
||||
it("renders all three traffic light cards", () => {
|
||||
render(<DeadlineTrafficLights data={defaultData} />);
|
||||
|
||||
expect(screen.getByText("Überfällig")).toBeInTheDocument();
|
||||
expect(screen.getByText("Diese Woche")).toBeInTheDocument();
|
||||
expect(screen.getByText("Im Zeitplan")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("displays correct counts", () => {
|
||||
render(<DeadlineTrafficLights data={defaultData} />);
|
||||
|
||||
// Overdue: 3
|
||||
expect(screen.getByText("3")).toBeInTheDocument();
|
||||
// This week: 5
|
||||
expect(screen.getByText("5")).toBeInTheDocument();
|
||||
// OK: ok_count + due_next_week = 10 + 2 = 12
|
||||
expect(screen.getByText("12")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("displays zero counts correctly", () => {
|
||||
const zeroData: DeadlineSummary = {
|
||||
overdue_count: 0,
|
||||
due_this_week: 0,
|
||||
due_next_week: 0,
|
||||
ok_count: 0,
|
||||
};
|
||||
|
||||
render(<DeadlineTrafficLights data={zeroData} />);
|
||||
|
||||
const zeros = screen.getAllByText("0");
|
||||
expect(zeros).toHaveLength(3);
|
||||
});
|
||||
|
||||
it("calls onFilter with correct key when clicked", () => {
|
||||
const onFilter = vi.fn();
|
||||
render(<DeadlineTrafficLights data={defaultData} onFilter={onFilter} />);
|
||||
|
||||
fireEvent.click(screen.getByText("Überfällig"));
|
||||
expect(onFilter).toHaveBeenCalledWith("overdue");
|
||||
|
||||
fireEvent.click(screen.getByText("Diese Woche"));
|
||||
expect(onFilter).toHaveBeenCalledWith("this_week");
|
||||
|
||||
fireEvent.click(screen.getByText("Im Zeitplan"));
|
||||
expect(onFilter).toHaveBeenCalledWith("ok");
|
||||
});
|
||||
|
||||
it("renders without onFilter prop (no crash)", () => {
|
||||
expect(() => {
|
||||
render(<DeadlineTrafficLights data={defaultData} />);
|
||||
fireEvent.click(screen.getByText("Überfällig"));
|
||||
}).not.toThrow();
|
||||
});
|
||||
});
|
||||
143
frontend/src/__tests__/LoginPage.test.tsx
Normal file
143
frontend/src/__tests__/LoginPage.test.tsx
Normal file
@@ -0,0 +1,143 @@
|
||||
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||
import { render, screen, fireEvent, waitFor } from "@testing-library/react";
|
||||
|
||||
// Mock next/navigation
|
||||
const mockPush = vi.fn();
|
||||
const mockRefresh = vi.fn();
|
||||
vi.mock("next/navigation", () => ({
|
||||
useRouter: () => ({ push: mockPush, refresh: mockRefresh }),
|
||||
}));
|
||||
|
||||
// Mock Supabase
|
||||
const mockSignInWithPassword = vi.fn();
|
||||
const mockSignInWithOtp = vi.fn();
|
||||
vi.mock("@/lib/supabase/client", () => ({
|
||||
createClient: () => ({
|
||||
auth: {
|
||||
signInWithPassword: mockSignInWithPassword,
|
||||
signInWithOtp: mockSignInWithOtp,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
// Import after mocks
|
||||
const { default: LoginPage } = await import(
|
||||
"@/app/(auth)/login/page"
|
||||
);
|
||||
|
||||
describe("LoginPage", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("renders login form with email and password fields", () => {
|
||||
render(<LoginPage />);
|
||||
|
||||
expect(screen.getByText("KanzlAI")).toBeInTheDocument();
|
||||
expect(screen.getByText("Melden Sie sich an")).toBeInTheDocument();
|
||||
expect(screen.getByLabelText("E-Mail")).toBeInTheDocument();
|
||||
expect(screen.getByLabelText("Passwort")).toBeInTheDocument();
|
||||
expect(screen.getByText("Anmelden")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("renders mode toggle between Passwort and Magic Link", () => {
|
||||
render(<LoginPage />);
|
||||
|
||||
// "Passwort" appears twice (toggle button + label), so use getAllByText
|
||||
const passwortElements = screen.getAllByText("Passwort");
|
||||
expect(passwortElements.length).toBeGreaterThanOrEqual(1);
|
||||
expect(screen.getByText("Magic Link")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("switches to magic link mode and hides password field", () => {
|
||||
render(<LoginPage />);
|
||||
|
||||
fireEvent.click(screen.getByText("Magic Link"));
|
||||
|
||||
expect(screen.queryByLabelText("Passwort")).not.toBeInTheDocument();
|
||||
expect(screen.getByText("Link senden")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("submits password login to Supabase", async () => {
|
||||
mockSignInWithPassword.mockResolvedValue({ error: null });
|
||||
render(<LoginPage />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText("E-Mail"), {
|
||||
target: { value: "test@kanzlei.de" },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText("Passwort"), {
|
||||
target: { value: "geheim123" },
|
||||
});
|
||||
fireEvent.click(screen.getByText("Anmelden"));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockSignInWithPassword).toHaveBeenCalledWith({
|
||||
email: "test@kanzlei.de",
|
||||
password: "geheim123",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("redirects to / on successful login", async () => {
|
||||
mockSignInWithPassword.mockResolvedValue({ error: null });
|
||||
render(<LoginPage />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText("E-Mail"), {
|
||||
target: { value: "test@kanzlei.de" },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText("Passwort"), {
|
||||
target: { value: "geheim123" },
|
||||
});
|
||||
fireEvent.click(screen.getByText("Anmelden"));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockPush).toHaveBeenCalledWith("/");
|
||||
expect(mockRefresh).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it("displays error on failed login", async () => {
|
||||
mockSignInWithPassword.mockResolvedValue({
|
||||
error: { message: "Ungültige Anmeldedaten" },
|
||||
});
|
||||
render(<LoginPage />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText("E-Mail"), {
|
||||
target: { value: "bad@email.de" },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText("Passwort"), {
|
||||
target: { value: "wrong" },
|
||||
});
|
||||
fireEvent.click(screen.getByText("Anmelden"));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText("Ungültige Anmeldedaten")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it("shows magic link sent confirmation", async () => {
|
||||
mockSignInWithOtp.mockResolvedValue({ error: null });
|
||||
render(<LoginPage />);
|
||||
|
||||
// Switch to magic link mode
|
||||
fireEvent.click(screen.getByText("Magic Link"));
|
||||
|
||||
fireEvent.change(screen.getByLabelText("E-Mail"), {
|
||||
target: { value: "test@kanzlei.de" },
|
||||
});
|
||||
fireEvent.click(screen.getByText("Link senden"));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText("Link gesendet")).toBeInTheDocument();
|
||||
expect(screen.getByText("Zurueck zum Login")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it("has link to registration page", () => {
|
||||
render(<LoginPage />);
|
||||
|
||||
const registerLink = screen.getByText("Registrieren");
|
||||
expect(registerLink).toBeInTheDocument();
|
||||
expect(registerLink.closest("a")).toHaveAttribute("href", "/register");
|
||||
});
|
||||
});
|
||||
182
frontend/src/__tests__/api.test.ts
Normal file
182
frontend/src/__tests__/api.test.ts
Normal file
@@ -0,0 +1,182 @@
|
||||
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||
|
||||
// Mock Supabase client
|
||||
const mockGetSession = vi.fn();
|
||||
vi.mock("@/lib/supabase/client", () => ({
|
||||
createClient: () => ({
|
||||
auth: {
|
||||
getSession: mockGetSession,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
// Must import after mock setup
|
||||
const { api } = await import("@/lib/api");
|
||||
|
||||
describe("ApiClient", () => {
|
||||
beforeEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
localStorage.clear();
|
||||
mockGetSession.mockResolvedValue({
|
||||
data: { session: { access_token: "test-token-123" } },
|
||||
});
|
||||
});
|
||||
|
||||
it("constructs correct URL with /api base", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({ cases: [], total: 0 }), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.get("/cases");
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
"/api/cases",
|
||||
expect.objectContaining({ method: "GET" }),
|
||||
);
|
||||
});
|
||||
|
||||
it("does not double-prefix /api/", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.get("/deadlines");
|
||||
|
||||
const url = fetchSpy.mock.calls[0][0] as string;
|
||||
expect(url).toBe("/api/deadlines");
|
||||
expect(url).not.toContain("/api/api/");
|
||||
});
|
||||
|
||||
it("sets Authorization header from Supabase session", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.get("/cases");
|
||||
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
const headers = requestInit.headers as Record<string, string>;
|
||||
expect(headers["Authorization"]).toBe("Bearer test-token-123");
|
||||
});
|
||||
|
||||
it("sets X-Tenant-ID header from localStorage", async () => {
|
||||
localStorage.setItem("kanzlai_tenant_id", "tenant-uuid-123");
|
||||
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.get("/cases");
|
||||
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
const headers = requestInit.headers as Record<string, string>;
|
||||
expect(headers["X-Tenant-ID"]).toBe("tenant-uuid-123");
|
||||
});
|
||||
|
||||
it("omits X-Tenant-ID when not in localStorage", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.get("/cases");
|
||||
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
const headers = requestInit.headers as Record<string, string>;
|
||||
expect(headers["X-Tenant-ID"]).toBeUndefined();
|
||||
});
|
||||
|
||||
it("omits Authorization when no session", async () => {
|
||||
mockGetSession.mockResolvedValue({
|
||||
data: { session: null },
|
||||
});
|
||||
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.get("/cases");
|
||||
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
const headers = requestInit.headers as Record<string, string>;
|
||||
expect(headers["Authorization"]).toBeUndefined();
|
||||
});
|
||||
|
||||
it("sends POST with JSON body", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({ id: "new-id" }), { status: 201 }),
|
||||
);
|
||||
|
||||
const body = { case_number: "TEST/001", title: "Test Case" };
|
||||
await api.post("/cases", body);
|
||||
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
expect(requestInit.method).toBe("POST");
|
||||
expect(requestInit.body).toBe(JSON.stringify(body));
|
||||
const headers = requestInit.headers as Record<string, string>;
|
||||
expect(headers["Content-Type"]).toBe("application/json");
|
||||
});
|
||||
|
||||
it("sends PUT with JSON body", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.put("/cases/123", { title: "Updated" });
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
expect(requestInit.method).toBe("PUT");
|
||||
});
|
||||
|
||||
it("sends PATCH with JSON body", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.patch("/deadlines/123/complete", {});
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
expect(requestInit.method).toBe("PATCH");
|
||||
});
|
||||
|
||||
it("sends DELETE", async () => {
|
||||
const fetchSpy = vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({}), { status: 200 }),
|
||||
);
|
||||
|
||||
await api.delete("/cases/123");
|
||||
const requestInit = fetchSpy.mock.calls[0][1] as RequestInit;
|
||||
expect(requestInit.method).toBe("DELETE");
|
||||
});
|
||||
|
||||
it("throws ApiError on non-ok response", async () => {
|
||||
vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(JSON.stringify({ error: "not found" }), { status: 404 }),
|
||||
);
|
||||
|
||||
await expect(api.get("/cases/nonexistent")).rejects.toEqual({
|
||||
error: "not found",
|
||||
status: 404,
|
||||
});
|
||||
});
|
||||
|
||||
it("handles 204 No Content response", async () => {
|
||||
vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response(null, { status: 204 }),
|
||||
);
|
||||
|
||||
const result = await api.delete("/appointments/123");
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("handles error response without JSON body", async () => {
|
||||
vi.spyOn(globalThis, "fetch").mockResolvedValue(
|
||||
new Response("Internal Server Error", {
|
||||
status: 500,
|
||||
statusText: "Internal Server Error",
|
||||
}),
|
||||
);
|
||||
|
||||
await expect(api.get("/broken")).rejects.toEqual({
|
||||
error: "Internal Server Error",
|
||||
status: 500,
|
||||
});
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user