FossilRepo

Add MCP server, batch API, agent workspaces, task claiming, SSE events, code review API MCP Server: 17 tools for AI agent access to fossilrepo — browse code, read files, search, manage tickets, view timeline/diffs/blame, create tickets, run SQL queries. Runs as standalone process via stdio. 45 tests. Batch API: POST /api/batch executes up to 25 API calls in one request, routing internally via Python function dispatch. Zero HTTP overhead. Agent Workspaces: isolated Fossil branches per agent with checkout dirs. Create/commit/merge/abandon lifecycle. AgentWorkspace model tracking files changed and commits made. Task Claiming: atomic ticket claiming via select_for_update to prevent agent collisions. Claim/release/submit workflow. Unclaimed ticket listing. SSE Events: StreamingHttpResponse-based Server-Sent Events for real-time checkin, ticket claim, and workspace notifications. 5s polling with heartbeat. Code Review API: submit diffs, add inline comments, approve/request changes/merge workflow. CodeReview + ReviewComment models. 56 tests.

lmata 2026-04-07 17:35 trunk
Commit e75576eece58a17fa0c3c5779f87f92a4c5dc663c0a288e3660411d45f937204
--- fossil/admin.py
+++ fossil/admin.py
@@ -1,21 +1,24 @@
11
from django.contrib import admin
22
33
from core.admin import BaseCoreAdmin
44
5
+from .agent_claims import TicketClaim
56
from .api_tokens import APIToken
67
from .branch_protection import BranchProtection
78
from .ci import StatusCheck
9
+from .code_reviews import CodeReview, ReviewComment
810
from .forum import ForumPost
911
from .models import FossilRepository, FossilSnapshot
1012
from .notifications import Notification, NotificationPreference, ProjectWatch
1113
from .releases import Release, ReleaseAsset
1214
from .sync_models import GitMirror, SSHKey, SyncLog
1315
from .ticket_fields import TicketFieldDefinition
1416
from .ticket_reports import TicketReport
1517
from .user_keys import UserSSHKey
1618
from .webhooks import Webhook, WebhookDelivery
19
+from .workspaces import AgentWorkspace
1720
1821
1922
class FossilSnapshotInline(admin.TabularInline):
2023
model = FossilSnapshot
2124
extra = 0
@@ -179,5 +182,43 @@
179182
class TicketReportAdmin(BaseCoreAdmin):
180183
list_display = ("title", "repository", "is_public", "created_at")
181184
list_filter = ("is_public",)
182185
search_fields = ("title", "description")
183186
raw_id_fields = ("repository",)
187
+
188
+
189
+@admin.register(AgentWorkspace)
190
+class AgentWorkspaceAdmin(BaseCoreAdmin):
191
+ list_display = ("name", "repository", "branch", "status", "agent_id", "commits_made", "created_at")
192
+ list_filter = ("status",)
193
+ search_fields = ("name", "agent_id", "branch")
194
+ raw_id_fields = ("repository",)
195
+
196
+
197
+@admin.register(TicketClaim)
198
+class TicketClaimAdmin(BaseCoreAdmin):
199
+ list_display = ("ticket_uuid", "repository", "agent_id", "status", "claimed_at", "released_at")
200
+ list_filter = ("status",)
201
+ search_fields = ("ticket_uuid", "agent_id")
202
+ raw_id_fields = ("repository", "workspace")
203
+
204
+
205
+class ReviewCommentInline(admin.TabularInline):
206
+ model = ReviewComment
207
+ extra = 0
208
+ readonly_fields = ("author", "file_path", "line_number", "body", "created_at")
209
+
210
+
211
+@admin.register(CodeReview)
212
+class CodeReviewAdmin(BaseCoreAdmin):
213
+ list_display = ("title", "repository", "status", "agent_id", "created_at")
214
+ list_filter = ("status",)
215
+ search_fields = ("title", "agent_id", "ticket_uuid")
216
+ raw_id_fields = ("repository", "workspace")
217
+ inlines = [ReviewCommentInline]
218
+
219
+
220
+@admin.register(ReviewComment)
221
+class ReviewCommentAdmin(BaseCoreAdmin):
222
+ list_display = ("review", "author", "file_path", "line_number", "created_at")
223
+ search_fields = ("body", "author", "file_path")
224
+ raw_id_fields = ("review",)
184225
185226
ADDED fossil/agent_claims.py
--- fossil/admin.py
+++ fossil/admin.py
@@ -1,21 +1,24 @@
1 from django.contrib import admin
2
3 from core.admin import BaseCoreAdmin
4
 
5 from .api_tokens import APIToken
6 from .branch_protection import BranchProtection
7 from .ci import StatusCheck
 
8 from .forum import ForumPost
9 from .models import FossilRepository, FossilSnapshot
10 from .notifications import Notification, NotificationPreference, ProjectWatch
11 from .releases import Release, ReleaseAsset
12 from .sync_models import GitMirror, SSHKey, SyncLog
13 from .ticket_fields import TicketFieldDefinition
14 from .ticket_reports import TicketReport
15 from .user_keys import UserSSHKey
16 from .webhooks import Webhook, WebhookDelivery
 
17
18
19 class FossilSnapshotInline(admin.TabularInline):
20 model = FossilSnapshot
21 extra = 0
@@ -179,5 +182,43 @@
179 class TicketReportAdmin(BaseCoreAdmin):
180 list_display = ("title", "repository", "is_public", "created_at")
181 list_filter = ("is_public",)
182 search_fields = ("title", "description")
183 raw_id_fields = ("repository",)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
184
185 DDED fossil/agent_claims.py
--- fossil/admin.py
+++ fossil/admin.py
@@ -1,21 +1,24 @@
1 from django.contrib import admin
2
3 from core.admin import BaseCoreAdmin
4
5 from .agent_claims import TicketClaim
6 from .api_tokens import APIToken
7 from .branch_protection import BranchProtection
8 from .ci import StatusCheck
9 from .code_reviews import CodeReview, ReviewComment
10 from .forum import ForumPost
11 from .models import FossilRepository, FossilSnapshot
12 from .notifications import Notification, NotificationPreference, ProjectWatch
13 from .releases import Release, ReleaseAsset
14 from .sync_models import GitMirror, SSHKey, SyncLog
15 from .ticket_fields import TicketFieldDefinition
16 from .ticket_reports import TicketReport
17 from .user_keys import UserSSHKey
18 from .webhooks import Webhook, WebhookDelivery
19 from .workspaces import AgentWorkspace
20
21
22 class FossilSnapshotInline(admin.TabularInline):
23 model = FossilSnapshot
24 extra = 0
@@ -179,5 +182,43 @@
182 class TicketReportAdmin(BaseCoreAdmin):
183 list_display = ("title", "repository", "is_public", "created_at")
184 list_filter = ("is_public",)
185 search_fields = ("title", "description")
186 raw_id_fields = ("repository",)
187
188
189 @admin.register(AgentWorkspace)
190 class AgentWorkspaceAdmin(BaseCoreAdmin):
191 list_display = ("name", "repository", "branch", "status", "agent_id", "commits_made", "created_at")
192 list_filter = ("status",)
193 search_fields = ("name", "agent_id", "branch")
194 raw_id_fields = ("repository",)
195
196
197 @admin.register(TicketClaim)
198 class TicketClaimAdmin(BaseCoreAdmin):
199 list_display = ("ticket_uuid", "repository", "agent_id", "status", "claimed_at", "released_at")
200 list_filter = ("status",)
201 search_fields = ("ticket_uuid", "agent_id")
202 raw_id_fields = ("repository", "workspace")
203
204
205 class ReviewCommentInline(admin.TabularInline):
206 model = ReviewComment
207 extra = 0
208 readonly_fields = ("author", "file_path", "line_number", "body", "created_at")
209
210
211 @admin.register(CodeReview)
212 class CodeReviewAdmin(BaseCoreAdmin):
213 list_display = ("title", "repository", "status", "agent_id", "created_at")
214 list_filter = ("status",)
215 search_fields = ("title", "agent_id", "ticket_uuid")
216 raw_id_fields = ("repository", "workspace")
217 inlines = [ReviewCommentInline]
218
219
220 @admin.register(ReviewComment)
221 class ReviewCommentAdmin(BaseCoreAdmin):
222 list_display = ("review", "author", "file_path", "line_number", "created_at")
223 search_fields = ("body", "author", "file_path")
224 raw_id_fields = ("review",)
225
226 DDED fossil/agent_claims.py
--- a/fossil/agent_claims.py
+++ b/fossil/agent_claims.py
@@ -0,0 +1,43 @@
1
+"""Agent ticket claim tracking for exclusive work assignment.
2
+
3
+When multiple agents are working on a repository, they need a way to atomically
4
+claim tickets so two agents don't work on the same issue simultaneously.
5
+Claims are Django-side since Fossil tickets live in SQLite.
6
+"""
7
+
8
+from django.db import models
9
+
10
+from core.models import ActiveManager, Tracking
11
+
12
+
13
+class TicketClaim(Tracking):
14
+ """Tracks which agent has claimed a Fossil ticket for exclusive work."""
15
+
16
+ class Status(models.TextChoices):
17
+ CLAIMED = "claimed", "Claimed"
18
+ SUBMITTED = "submitted", "Submitted"
19
+ MERGED = "merged", "Merged"
20
+ RELEASED = "released", "Released"
21
+
22
+ repository = models.ForeignKey("fossil.FossilRepository", on_delete=models.CASCADE, related_name="ticket_claims")
23
+ ticket_uuid = models.CharField(max_length=64)
24
+ agent_id = models.CharField(max_length=200)
25
+ workspace = models.ForeignKey("fossil.AgentWorkspace", null=True, blank=True, on_delete=models.SET_NULL, related_name="claims")
26
+ claimed_at = models.DateTimeField(auto_now_add=True)
27
+ released_at = models.DateTimeField(null=True, blank=True)
28
+ status = models.CharField(max_length=20, choices=Status.choices, default=Status.CLAIMED)
29
+ summary = models.TextField(blank=True, default="", help_text="Work summary when submitted")
30
+ files_changed = models.JSONField(default=list, blank=True, help_text="List of files changed")
31
+
32
+ objects = ActiveManager()
33
+ all_objects = models.Manager()
34
+
35
+ class Meta:
36
+ # Uniqueness for active claims is enforced at the application level
37
+ # using select_for_update in the claim endpoint. We cannot use
38
+ # unique_together because soft-deleted rows would violate the
39
+ # constraint when the ticket is reclaimed.
40
+ ordering = ["-claimed_at"]
41
+
42
+ def __str__(self):
43
+ return f"{self.ticket_uuid[:12]} claimed by {self.agent_id}"
--- a/fossil/agent_claims.py
+++ b/fossil/agent_claims.py
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/agent_claims.py
+++ b/fossil/agent_claims.py
@@ -0,0 +1,43 @@
1 """Agent ticket claim tracking for exclusive work assignment.
2
3 When multiple agents are working on a repository, they need a way to atomically
4 claim tickets so two agents don't work on the same issue simultaneously.
5 Claims are Django-side since Fossil tickets live in SQLite.
6 """
7
8 from django.db import models
9
10 from core.models import ActiveManager, Tracking
11
12
13 class TicketClaim(Tracking):
14 """Tracks which agent has claimed a Fossil ticket for exclusive work."""
15
16 class Status(models.TextChoices):
17 CLAIMED = "claimed", "Claimed"
18 SUBMITTED = "submitted", "Submitted"
19 MERGED = "merged", "Merged"
20 RELEASED = "released", "Released"
21
22 repository = models.ForeignKey("fossil.FossilRepository", on_delete=models.CASCADE, related_name="ticket_claims")
23 ticket_uuid = models.CharField(max_length=64)
24 agent_id = models.CharField(max_length=200)
25 workspace = models.ForeignKey("fossil.AgentWorkspace", null=True, blank=True, on_delete=models.SET_NULL, related_name="claims")
26 claimed_at = models.DateTimeField(auto_now_add=True)
27 released_at = models.DateTimeField(null=True, blank=True)
28 status = models.CharField(max_length=20, choices=Status.choices, default=Status.CLAIMED)
29 summary = models.TextField(blank=True, default="", help_text="Work summary when submitted")
30 files_changed = models.JSONField(default=list, blank=True, help_text="List of files changed")
31
32 objects = ActiveManager()
33 all_objects = models.Manager()
34
35 class Meta:
36 # Uniqueness for active claims is enforced at the application level
37 # using select_for_update in the claim endpoint. We cannot use
38 # unique_together because soft-deleted rows would violate the
39 # constraint when the ticket is reclaimed.
40 ordering = ["-claimed_at"]
41
42 def __str__(self):
43 return f"{self.ticket_uuid[:12]} claimed by {self.agent_id}"
+1440 -2
--- fossil/api_views.py
+++ fossil/api_views.py
@@ -3,22 +3,34 @@
33
All endpoints live under /projects/<slug>/fossil/api/.
44
Auth: Bearer token (APIToken or PersonalAccessToken) or session cookie.
55
All responses are JSON. All read endpoints check can_read_project.
66
"""
77
8
+import json
9
+import logging
810
import math
11
+import re
12
+import shutil
13
+import subprocess
14
+import tempfile
15
+import time
916
10
-from django.http import JsonResponse
17
+from django.db import transaction
18
+from django.http import JsonResponse, StreamingHttpResponse
1119
from django.shortcuts import get_object_or_404
20
+from django.test import RequestFactory
21
+from django.utils import timezone
1222
from django.views.decorators.csrf import csrf_exempt
1323
from django.views.decorators.http import require_GET
1424
1525
from fossil.api_auth import authenticate_request
1626
from fossil.models import FossilRepository
1727
from fossil.reader import FossilReader
18
-from projects.access import can_read_project
28
+from projects.access import can_read_project, can_write_project
1929
from projects.models import Project
30
+
31
+logger = logging.getLogger(__name__)
2032
2133
2234
def _get_repo(slug):
2335
"""Look up project and repository by slug, or return 404 JSON."""
2436
project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
@@ -97,10 +109,87 @@
97109
{"method": "GET", "path": f"{base}/wiki/<name>", "description": "Single wiki page with content"},
98110
{"method": "GET", "path": f"{base}/branches", "description": "Branch list"},
99111
{"method": "GET", "path": f"{base}/tags", "description": "Tag list"},
100112
{"method": "GET", "path": f"{base}/releases", "description": "Release list"},
101113
{"method": "GET", "path": f"{base}/search", "description": "Search across checkins, tickets, wiki", "params": "q"},
114
+ {
115
+ "method": "POST",
116
+ "path": f"{base}/batch",
117
+ "description": "Execute multiple API calls in a single request (max 25)",
118
+ "body": '{"requests": [{"method": "GET", "path": "/api/timeline", "params": {}}]}',
119
+ },
120
+ {"method": "GET", "path": f"{base}/workspaces", "description": "List agent workspaces", "params": "status"},
121
+ {
122
+ "method": "POST",
123
+ "path": f"{base}/workspaces/create",
124
+ "description": "Create an isolated agent workspace",
125
+ "body": '{"name": "...", "description": "...", "agent_id": "..."}',
126
+ },
127
+ {"method": "GET", "path": f"{base}/workspaces/<name>", "description": "Get workspace details"},
128
+ {
129
+ "method": "POST",
130
+ "path": f"{base}/workspaces/<name>/commit",
131
+ "description": "Commit changes in a workspace",
132
+ "body": '{"message": "...", "files": []}',
133
+ },
134
+ {
135
+ "method": "POST",
136
+ "path": f"{base}/workspaces/<name>/merge",
137
+ "description": "Merge workspace branch back to trunk",
138
+ "body": '{"target_branch": "trunk"}',
139
+ },
140
+ {
141
+ "method": "DELETE",
142
+ "path": f"{base}/workspaces/<name>/abandon",
143
+ "description": "Abandon and clean up a workspace",
144
+ },
145
+ {
146
+ "method": "POST",
147
+ "path": f"{base}/tickets/<uuid>/claim",
148
+ "description": "Claim a ticket for exclusive agent work",
149
+ "body": '{"agent_id": "...", "workspace": "..."}',
150
+ },
151
+ {
152
+ "method": "POST",
153
+ "path": f"{base}/tickets/<uuid>/release",
154
+ "description": "Release a ticket claim",
155
+ },
156
+ {
157
+ "method": "POST",
158
+ "path": f"{base}/tickets/<uuid>/submit",
159
+ "description": "Submit completed work for a claimed ticket",
160
+ "body": '{"summary": "...", "files_changed": [...]}',
161
+ },
162
+ {
163
+ "method": "GET",
164
+ "path": f"{base}/tickets/unclaimed",
165
+ "description": "List tickets not claimed by any agent",
166
+ "params": "status, limit",
167
+ },
168
+ {"method": "GET", "path": f"{base}/events", "description": "Server-Sent Events stream for real-time events"},
169
+ {
170
+ "method": "POST",
171
+ "path": f"{base}/reviews/create",
172
+ "description": "Submit code changes for review",
173
+ "body": '{"title": "...", "diff": "...", "files_changed": [...], "agent_id": "..."}',
174
+ },
175
+ {
176
+ "method": "GET",
177
+ "path": f"{base}/reviews",
178
+ "description": "List code reviews",
179
+ "params": "status, page, per_page",
180
+ },
181
+ {"method": "GET", "path": f"{base}/reviews/<id>", "description": "Get review with comments"},
182
+ {
183
+ "method": "POST",
184
+ "path": f"{base}/reviews/<id>/comment",
185
+ "description": "Add a comment to a review",
186
+ "body": '{"body": "...", "file_path": "...", "line_number": 42, "author": "..."}',
187
+ },
188
+ {"method": "POST", "path": f"{base}/reviews/<id>/approve", "description": "Approve a review"},
189
+ {"method": "POST", "path": f"{base}/reviews/<id>/request-changes", "description": "Request changes on a review"},
190
+ {"method": "POST", "path": f"{base}/reviews/<id>/merge", "description": "Merge an approved review"},
102191
],
103192
"auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
104193
}
105194
)
106195
@@ -471,5 +560,1354 @@
471560
checkin["timestamp"] = _isoformat(checkin.get("timestamp"))
472561
for ticket in results.get("tickets", []):
473562
ticket["created"] = _isoformat(ticket.get("created"))
474563
475564
return JsonResponse(results)
565
+
566
+
567
+# --- Batch API ---
568
+
569
+# Map API paths to (view_function, extra_path_regex_or_None).
570
+# Entries with a regex capture group extract path params (e.g. ticket uuid, wiki page name).
571
+_BATCH_STATIC_ROUTES = {
572
+ "/api/project": api_project,
573
+ "/api/timeline": api_timeline,
574
+ "/api/tickets": api_tickets,
575
+ "/api/wiki": api_wiki_list,
576
+ "/api/branches": api_branches,
577
+ "/api/tags": api_tags,
578
+ "/api/releases": api_releases,
579
+ "/api/search": api_search,
580
+}
581
+
582
+_BATCH_DYNAMIC_ROUTES = [
583
+ (re.compile(r"^/api/tickets/([0-9a-fA-F-]+)$"), api_ticket_detail, "ticket_uuid"),
584
+ (re.compile(r"^/api/wiki/(.+)$"), api_wiki_page, "page_name"),
585
+]
586
+
587
+_BATCH_MAX_REQUESTS = 25
588
+
589
+
590
+def _resolve_batch_route(path):
591
+ """Resolve a batch sub-request path to (view_func, kwargs) or (None, None)."""
592
+ view_func = _BATCH_STATIC_ROUTES.get(path)
593
+ if view_func is not None:
594
+ return view_func, {}
595
+
596
+ for pattern, view_func, kwarg_name in _BATCH_DYNAMIC_ROUTES:
597
+ m = pattern.match(path)
598
+ if m:
599
+ return view_func, {kwarg_name: m.group(1)}
600
+
601
+ return None, None
602
+
603
+
604
+@csrf_exempt
605
+def api_batch(request, slug):
606
+ """Execute multiple API calls in a single request.
607
+
608
+ POST /projects/<slug>/fossil/api/batch
609
+ {
610
+ "requests": [
611
+ {"method": "GET", "path": "/api/timeline", "params": {"per_page": 5}},
612
+ {"method": "GET", "path": "/api/tickets", "params": {"status": "Open"}},
613
+ {"method": "GET", "path": "/api/wiki/Home"}
614
+ ]
615
+ }
616
+
617
+ Returns:
618
+ {
619
+ "responses": [
620
+ {"status": 200, "body": {...}},
621
+ {"status": 200, "body": {...}},
622
+ {"status": 200, "body": {...}}
623
+ ]
624
+ }
625
+
626
+ Auth: same as other API endpoints (Bearer token or session).
627
+ Limit: 25 sub-requests per batch.
628
+ Only GET sub-requests are supported.
629
+ """
630
+ if request.method != "POST":
631
+ return JsonResponse({"error": "POST required"}, status=405)
632
+
633
+ # Auth check -- same as every other API endpoint
634
+ project, repo = _get_repo(slug)
635
+ user, token, err = _check_api_auth(request, project, repo)
636
+ if err is not None:
637
+ return err
638
+
639
+ try:
640
+ body = json.loads(request.body)
641
+ except (json.JSONDecodeError, ValueError):
642
+ return JsonResponse({"error": "Invalid JSON body"}, status=400)
643
+
644
+ requests_list = body.get("requests")
645
+ if not isinstance(requests_list, list):
646
+ return JsonResponse({"error": "'requests' must be a list"}, status=400)
647
+
648
+ if len(requests_list) > _BATCH_MAX_REQUESTS:
649
+ return JsonResponse({"error": f"Maximum {_BATCH_MAX_REQUESTS} requests per batch"}, status=400)
650
+
651
+ if len(requests_list) == 0:
652
+ return JsonResponse({"responses": []})
653
+
654
+ factory = RequestFactory()
655
+ responses = []
656
+
657
+ for sub in requests_list:
658
+ if not isinstance(sub, dict):
659
+ responses.append({"status": 400, "body": {"error": "Each request must be an object"}})
660
+ continue
661
+
662
+ method = (sub.get("method") or "GET").upper()
663
+ path = sub.get("path", "")
664
+ params = sub.get("params") or {}
665
+
666
+ if method != "GET":
667
+ responses.append({"status": 405, "body": {"error": "Only GET is supported in batch requests"}})
668
+ continue
669
+
670
+ if not path:
671
+ responses.append({"status": 400, "body": {"error": "Missing 'path'"}})
672
+ continue
673
+
674
+ view_func, extra_kwargs = _resolve_batch_route(path)
675
+ if view_func is None:
676
+ responses.append({"status": 404, "body": {"error": f"Unknown API path: {path}"}})
677
+ continue
678
+
679
+ # Build a synthetic GET request preserving auth from the outer request
680
+ full_path = f"/projects/{slug}/fossil{path}"
681
+ synthetic = factory.get(full_path, data=params)
682
+
683
+ # Carry over auth state so sub-requests don't re-authenticate
684
+ synthetic.user = request.user
685
+ synthetic.session = request.session
686
+ if "HTTP_AUTHORIZATION" in request.META:
687
+ synthetic.META["HTTP_AUTHORIZATION"] = request.META["HTTP_AUTHORIZATION"]
688
+
689
+ try:
690
+ sub_response = view_func(synthetic, slug=slug, **extra_kwargs)
691
+ try:
692
+ response_body = json.loads(sub_response.content)
693
+ except (json.JSONDecodeError, ValueError):
694
+ response_body = {"raw": sub_response.content.decode("utf-8", errors="replace")}
695
+ responses.append({"status": sub_response.status_code, "body": response_body})
696
+ except Exception:
697
+ logger.exception("Batch sub-request failed: %s %s", method, path)
698
+ responses.append({"status": 500, "body": {"error": "Internal error processing sub-request"}})
699
+
700
+ return JsonResponse({"responses": responses})
701
+
702
+
703
+# --- Agent Workspace API ---
704
+
705
+
706
+def _get_workspace(repo, workspace_name):
707
+ """Look up an active workspace by name, or return 404 JSON."""
708
+ from fossil.workspaces import AgentWorkspace
709
+
710
+ workspace = AgentWorkspace.objects.filter(repository=repo, name=workspace_name).first()
711
+ if workspace is None:
712
+ return None
713
+ return workspace
714
+
715
+
716
+@csrf_exempt
717
+def api_workspace_list(request, slug):
718
+ """List agent workspaces for a repository.
719
+
720
+ GET /projects/<slug>/fossil/api/workspaces
721
+ Optional query params: status (active, merged, abandoned)
722
+ """
723
+ if request.method != "GET":
724
+ return JsonResponse({"error": "GET required"}, status=405)
725
+
726
+ project, repo = _get_repo(slug)
727
+ user, token, err = _check_api_auth(request, project, repo)
728
+ if err is not None:
729
+ return err
730
+
731
+ from fossil.workspaces import AgentWorkspace
732
+
733
+ qs = AgentWorkspace.objects.filter(repository=repo)
734
+ status_filter = request.GET.get("status", "").strip()
735
+ if status_filter:
736
+ qs = qs.filter(status=status_filter)
737
+
738
+ workspaces = []
739
+ for ws in qs:
740
+ workspaces.append(
741
+ {
742
+ "name": ws.name,
743
+ "branch": ws.branch,
744
+ "status": ws.status,
745
+ "agent_id": ws.agent_id,
746
+ "description": ws.description,
747
+ "files_changed": ws.files_changed,
748
+ "commits_made": ws.commits_made,
749
+ "created_at": _isoformat(ws.created_at),
750
+ }
751
+ )
752
+
753
+ return JsonResponse({"workspaces": workspaces})
754
+
755
+
756
+@csrf_exempt
757
+def api_workspace_create(request, slug):
758
+ """Create an isolated agent workspace.
759
+
760
+ POST /projects/<slug>/fossil/api/workspaces/create
761
+ {"name": "agent-fix-123", "description": "Fixing bug #123", "agent_id": "claude-abc"}
762
+
763
+ Creates a new Fossil branch and checkout directory for the agent.
764
+ """
765
+ if request.method != "POST":
766
+ return JsonResponse({"error": "POST required"}, status=405)
767
+
768
+ project, repo = _get_repo(slug)
769
+ user, token, err = _check_api_auth(request, project, repo)
770
+ if err is not None:
771
+ return err
772
+
773
+ # Write access required to create workspaces
774
+ if token is None and (user is None or not can_write_project(user, project)):
775
+ return JsonResponse({"error": "Write access required"}, status=403)
776
+
777
+ try:
778
+ data = json.loads(request.body)
779
+ except (json.JSONDecodeError, ValueError):
780
+ return JsonResponse({"error": "Invalid JSON body"}, status=400)
781
+
782
+ name = (data.get("name") or "").strip()
783
+ if not name:
784
+ return JsonResponse({"error": "Workspace name is required"}, status=400)
785
+
786
+ if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9._-]{0,198}$", name):
787
+ return JsonResponse(
788
+ {"error": "Invalid workspace name. Use alphanumeric characters, hyphens, dots, and underscores."},
789
+ status=400,
790
+ )
791
+
792
+ from fossil.workspaces import AgentWorkspace
793
+
794
+ if AgentWorkspace.objects.filter(repository=repo, name=name).exists():
795
+ return JsonResponse({"error": f"Workspace '{name}' already exists"}, status=409)
796
+
797
+ branch = f"workspace/{name}"
798
+
799
+ # Create workspace checkout directory
800
+ checkout_dir = tempfile.mkdtemp(prefix=f"fossilrepo-ws-{name}-")
801
+
802
+ from fossil.cli import FossilCLI
803
+
804
+ cli = FossilCLI()
805
+
806
+ # Open a checkout in the workspace dir
807
+ result = subprocess.run(
808
+ [cli.binary, "open", str(repo.full_path), "--workdir", checkout_dir],
809
+ capture_output=True,
810
+ text=True,
811
+ timeout=30,
812
+ env=cli._env,
813
+ cwd=checkout_dir,
814
+ )
815
+ if result.returncode != 0:
816
+ shutil.rmtree(checkout_dir, ignore_errors=True)
817
+ return JsonResponse({"error": "Failed to open Fossil checkout", "detail": result.stderr.strip()}, status=500)
818
+
819
+ # Create the branch from trunk
820
+ result = subprocess.run(
821
+ [cli.binary, "branch", "new", branch, "trunk"],
822
+ capture_output=True,
823
+ text=True,
824
+ timeout=30,
825
+ env=cli._env,
826
+ cwd=checkout_dir,
827
+ )
828
+ if result.returncode != 0:
829
+ # Clean up on failure
830
+ subprocess.run([cli.binary, "close", "--force"], capture_output=True, cwd=checkout_dir, timeout=10, env=cli._env)
831
+ shutil.rmtree(checkout_dir, ignore_errors=True)
832
+ return JsonResponse({"error": "Failed to create branch", "detail": result.stderr.strip()}, status=500)
833
+
834
+ # Switch to the new branch
835
+ result = subprocess.run(
836
+ [cli.binary, "update", branch],
837
+ capture_output=True,
838
+ text=True,
839
+ timeout=30,
840
+ env=cli._env,
841
+ cwd=checkout_dir,
842
+ )
843
+ if result.returncode != 0:
844
+ subprocess.run([cli.binary, "close", "--force"], capture_output=True, cwd=checkout_dir, timeout=10, env=cli._env)
845
+ shutil.rmtree(checkout_dir, ignore_errors=True)
846
+ return JsonResponse({"error": "Failed to switch to branch", "detail": result.stderr.strip()}, status=500)
847
+
848
+ workspace = AgentWorkspace.objects.create(
849
+ repository=repo,
850
+ name=name,
851
+ branch=branch,
852
+ agent_id=data.get("agent_id", ""),
853
+ description=data.get("description", ""),
854
+ checkout_path=checkout_dir,
855
+ created_by=user,
856
+ )
857
+
858
+ return JsonResponse(
859
+ {
860
+ "name": workspace.name,
861
+ "branch": workspace.branch,
862
+ "status": workspace.status,
863
+ "agent_id": workspace.agent_id,
864
+ "description": workspace.description,
865
+ "checkout_path": workspace.checkout_path,
866
+ "created_at": _isoformat(workspace.created_at),
867
+ },
868
+ status=201,
869
+ )
870
+
871
+
872
+@csrf_exempt
873
+def api_workspace_detail(request, slug, workspace_name):
874
+ """Get details of a specific workspace.
875
+
876
+ GET /projects/<slug>/fossil/api/workspaces/<name>
877
+ """
878
+ if request.method != "GET":
879
+ return JsonResponse({"error": "GET required"}, status=405)
880
+
881
+ project, repo = _get_repo(slug)
882
+ user, token, err = _check_api_auth(request, project, repo)
883
+ if err is not None:
884
+ return err
885
+
886
+ workspace = _get_workspace(repo, workspace_name)
887
+ if workspace is None:
888
+ return JsonResponse({"error": "Workspace not found"}, status=404)
889
+
890
+ return JsonResponse(
891
+ {
892
+ "name": workspace.name,
893
+ "branch": workspace.branch,
894
+ "status": workspace.status,
895
+ "agent_id": workspace.agent_id,
896
+ "description": workspace.description,
897
+ "checkout_path": workspace.checkout_path,
898
+ "files_changed": workspace.files_changed,
899
+ "commits_made": workspace.commits_made,
900
+ "created_at": _isoformat(workspace.created_at),
901
+ "updated_at": _isoformat(workspace.updated_at),
902
+ }
903
+ )
904
+
905
+
906
+@csrf_exempt
907
+def api_workspace_commit(request, slug, workspace_name):
908
+ """Commit changes in a workspace.
909
+
910
+ POST /projects/<slug>/fossil/api/workspaces/<name>/commit
911
+ {"message": "Fix bug #123", "files": ["src/foo.py"]}
912
+
913
+ If files is empty or omitted, commits all changed files.
914
+ """
915
+ if request.method != "POST":
916
+ return JsonResponse({"error": "POST required"}, status=405)
917
+
918
+ project, repo = _get_repo(slug)
919
+ user, token, err = _check_api_auth(request, project, repo)
920
+ if err is not None:
921
+ return err
922
+
923
+ if token is None and (user is None or not can_write_project(user, project)):
924
+ return JsonResponse({"error": "Write access required"}, status=403)
925
+
926
+ workspace = _get_workspace(repo, workspace_name)
927
+ if workspace is None:
928
+ return JsonResponse({"error": "Workspace not found"}, status=404)
929
+
930
+ if workspace.status != "active":
931
+ return JsonResponse({"error": f"Workspace is {workspace.status}, cannot commit"}, status=409)
932
+
933
+ try:
934
+ data = json.loads(request.body)
935
+ except (json.JSONDecodeError, ValueError):
936
+ return JsonResponse({"error": "Invalid JSON body"}, status=400)
937
+
938
+ message = (data.get("message") or "").strip()
939
+ if not message:
940
+ return JsonResponse({"error": "Commit message is required"}, status=400)
941
+
942
+ files = data.get("files") or []
943
+ checkout_dir = workspace.checkout_path
944
+
945
+ from fossil.cli import FossilCLI
946
+
947
+ cli = FossilCLI()
948
+
949
+ # Add files if specified, otherwise add all changes
950
+ if files:
951
+ for f in files:
952
+ subprocess.run(
953
+ [cli.binary, "add", f],
954
+ capture_output=True,
955
+ text=True,
956
+ timeout=30,
957
+ env=cli._env,
958
+ cwd=checkout_dir,
959
+ )
960
+ else:
961
+ subprocess.run(
962
+ [cli.binary, "addremove"],
963
+ capture_output=True,
964
+ text=True,
965
+ timeout=30,
966
+ env=cli._env,
967
+ cwd=checkout_dir,
968
+ )
969
+
970
+ # Commit
971
+ commit_cmd = [cli.binary, "commit", "-m", message, "--no-warnings"]
972
+ result = subprocess.run(
973
+ commit_cmd,
974
+ capture_output=True,
975
+ text=True,
976
+ timeout=60,
977
+ env=cli._env,
978
+ cwd=checkout_dir,
979
+ )
980
+
981
+ if result.returncode != 0:
982
+ stderr = result.stderr.strip()
983
+ # "nothing has changed" is not really an error
984
+ if "nothing has changed" in stderr.lower() or "nothing has changed" in result.stdout.lower():
985
+ return JsonResponse({"error": "Nothing to commit"}, status=409)
986
+ return JsonResponse({"error": "Commit failed", "detail": stderr}, status=500)
987
+
988
+ workspace.commits_made += 1
989
+ workspace.save(update_fields=["commits_made", "updated_at", "version"])
990
+
991
+ return JsonResponse(
992
+ {
993
+ "name": workspace.name,
994
+ "branch": workspace.branch,
995
+ "commits_made": workspace.commits_made,
996
+ "message": message,
997
+ "output": result.stdout.strip(),
998
+ }
999
+ )
1000
+
1001
+
1002
@csrf_exempt
def api_workspace_merge(request, slug, workspace_name):
    """Merge workspace branch back to trunk.

    POST /projects/<slug>/fossil/api/workspaces/<name>/merge
    {"target_branch": "trunk"}

    Merges the workspace branch into the target branch (default: trunk),
    then closes the workspace checkout and cleans up the directory.

    Returns 405 on non-POST, 403 without write access, 404 for an unknown
    workspace, 409 if the workspace is not active, and 500 if the Fossil
    "update" or "merge" step fails.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    # An API token implies write access; session users must pass the project check.
    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    workspace = _get_workspace(repo, workspace_name)
    if workspace is None:
        return JsonResponse({"error": "Workspace not found"}, status=404)

    if workspace.status != "active":
        return JsonResponse({"error": f"Workspace is {workspace.status}, cannot merge"}, status=409)

    # Body is optional; a missing/invalid body falls back to defaults.
    try:
        data = json.loads(request.body) if request.body else {}
    except (json.JSONDecodeError, ValueError):
        data = {}

    target_branch = (data.get("target_branch") or "trunk").strip()

    from fossil.cli import FossilCLI

    cli = FossilCLI()
    checkout_dir = workspace.checkout_path

    # Switch to target branch
    result = subprocess.run(
        [cli.binary, "update", target_branch],
        capture_output=True,
        text=True,
        timeout=30,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        return JsonResponse({"error": "Failed to switch to target branch", "detail": result.stderr.strip()}, status=500)

    # Merge workspace branch into target
    result = subprocess.run(
        [cli.binary, "merge", workspace.branch],
        capture_output=True,
        text=True,
        timeout=60,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        # NOTE: on a failed merge the checkout is left with a pending
        # "update" to target_branch and the workspace stays "active".
        return JsonResponse({"error": "Merge failed", "detail": result.stderr.strip()}, status=500)

    # Commit the merge
    merge_msg = f"Merge {workspace.branch} into {target_branch}"
    commit_result = subprocess.run(
        [cli.binary, "commit", "-m", merge_msg, "--no-warnings"],
        capture_output=True,
        text=True,
        timeout=60,
        env=cli._env,
        cwd=checkout_dir,
    )

    # Close the checkout and clean up
    # NOTE(review): a failed merge-commit (e.g. nothing to commit) still
    # falls through here — the checkout is deleted and the workspace is
    # marked "merged"; commit_output is simply "" in the response.
    # Confirm this tolerance is intentional.
    subprocess.run([cli.binary, "close", "--force"], capture_output=True, cwd=checkout_dir, timeout=10, env=cli._env)
    shutil.rmtree(checkout_dir, ignore_errors=True)

    workspace.status = "merged"
    workspace.checkout_path = ""
    workspace.save(update_fields=["status", "checkout_path", "updated_at", "version"])

    return JsonResponse(
        {
            "name": workspace.name,
            "branch": workspace.branch,
            "status": workspace.status,
            "target_branch": target_branch,
            "merge_output": result.stdout.strip(),
            "commit_output": commit_result.stdout.strip() if commit_result.returncode == 0 else "",
        }
    )
1095
+
1096
+
1097
@csrf_exempt
def api_workspace_abandon(request, slug, workspace_name):
    """Abandon a workspace, closing the checkout and cleaning up.

    DELETE /projects/<slug>/fossil/api/workspaces/<name>/abandon

    The branch remains in Fossil history but the checkout directory is removed.
    """
    if request.method != "DELETE":
        return JsonResponse({"error": "DELETE required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    # Tokens carry write access implicitly; session users need the project check.
    lacks_write = user is None or not can_write_project(user, project)
    if token is None and lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    workspace = _get_workspace(repo, workspace_name)
    if workspace is None:
        return JsonResponse({"error": "Workspace not found"}, status=404)
    if workspace.status != "active":
        return JsonResponse({"error": f"Workspace is already {workspace.status}"}, status=409)

    from fossil.cli import FossilCLI

    fossil = FossilCLI()
    workdir = workspace.checkout_path

    # Best-effort teardown: close the Fossil checkout, then drop the tree.
    if workdir:
        subprocess.run([fossil.binary, "close", "--force"], capture_output=True, cwd=workdir, timeout=10, env=fossil._env)
        shutil.rmtree(workdir, ignore_errors=True)

    workspace.status = "abandoned"
    workspace.checkout_path = ""
    workspace.save(update_fields=["status", "checkout_path", "updated_at", "version"])

    payload = {
        "name": workspace.name,
        "branch": workspace.branch,
        "status": workspace.status,
    }
    return JsonResponse(payload)
1144
+
1145
+
1146
+# --- Ticket Claiming ---
1147
+
1148
+
1149
@csrf_exempt
def api_ticket_claim(request, slug, ticket_uuid):
    """Claim a ticket for exclusive agent work.

    POST /projects/<slug>/fossil/api/tickets/<uuid>/claim
    {"agent_id": "claude-abc", "workspace": "agent-fix-123"}

    Returns 201 when newly claimed, 200 when the same agent re-claims
    (idempotent), and 409 if another agent holds the claim.

    Atomicity: select_for_update locks an existing claim row, but two
    agents racing on a *previously unclaimed* ticket both see no row, so
    the unique_together constraint on (repository, ticket_uuid) is the
    real arbiter — the loser's INSERT raises IntegrityError, which is
    translated into the documented 409 instead of surfacing as a 500.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    agent_id = (data.get("agent_id") or "").strip()
    if not agent_id:
        return JsonResponse({"error": "agent_id is required"}, status=400)

    # Verify the ticket exists in Fossil before recording a claim.
    reader = FossilReader(repo.full_path)
    with reader:
        ticket = reader.get_ticket_detail(ticket_uuid)
        if ticket is None:
            return JsonResponse({"error": "Ticket not found in repository"}, status=404)

    # Resolve optional workspace reference; an unknown name is silently ignored.
    workspace_name = (data.get("workspace") or "").strip()
    workspace_obj = None
    if workspace_name:
        from fossil.workspaces import AgentWorkspace

        workspace_obj = AgentWorkspace.objects.filter(repository=repo, name=workspace_name).first()

    from django.db import IntegrityError

    from fossil.agent_claims import TicketClaim

    try:
        with transaction.atomic():
            # Row lock on an existing active (not soft-deleted) claim.
            existing = TicketClaim.objects.select_for_update().filter(repository=repo, ticket_uuid=ticket_uuid).first()

            if existing:
                if existing.agent_id == agent_id:
                    # Idempotent: same agent re-claiming
                    return JsonResponse(
                        {
                            "ticket_uuid": existing.ticket_uuid,
                            "agent_id": existing.agent_id,
                            "status": existing.status,
                            "claimed_at": _isoformat(existing.claimed_at),
                            "message": "Already claimed by you",
                        }
                    )
                return JsonResponse(
                    {
                        "error": "Ticket already claimed",
                        "claimed_by": existing.agent_id,
                        "claimed_at": _isoformat(existing.claimed_at),
                    },
                    status=409,
                )

            claim = TicketClaim.objects.create(
                repository=repo,
                ticket_uuid=ticket_uuid,
                agent_id=agent_id,
                workspace=workspace_obj,
                created_by=user,
            )
    except IntegrityError:
        # Lost the insert race: another agent claimed between our existence
        # check and the INSERT. Report it like any other conflict.
        winner = TicketClaim.objects.filter(repository=repo, ticket_uuid=ticket_uuid).first()
        return JsonResponse(
            {
                "error": "Ticket already claimed",
                "claimed_by": winner.agent_id if winner else "",
                "claimed_at": _isoformat(winner.claimed_at) if winner else None,
            },
            status=409,
        )

    return JsonResponse(
        {
            "ticket_uuid": claim.ticket_uuid,
            "agent_id": claim.agent_id,
            "status": claim.status,
            "claimed_at": _isoformat(claim.claimed_at),
            "workspace": workspace_name or None,
        },
        status=201,
    )
1239
+
1240
+
1241
@csrf_exempt
def api_ticket_release(request, slug, ticket_uuid):
    """Release a ticket claim.

    POST /projects/<slug>/fossil/api/tickets/<uuid>/release
    {"agent_id": "claude-abc"}

    Soft-deletes the claim record so the unique constraint slot is freed.

    If agent_id is supplied it must match the claim holder; a mismatch is
    rejected with 403 so one agent cannot release another agent's claim.
    An absent or empty agent_id keeps the previous permissive behaviour.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    # The documented body carries the caller's agent identity; it was
    # previously ignored, letting any caller release any claim.
    try:
        data = json.loads(request.body) if request.body else {}
    except (json.JSONDecodeError, ValueError):
        data = {}
    agent_id = (data.get("agent_id") or "").strip()

    from fossil.agent_claims import TicketClaim

    claim = TicketClaim.objects.filter(repository=repo, ticket_uuid=ticket_uuid).first()
    if claim is None:
        return JsonResponse({"error": "No active claim for this ticket"}, status=404)

    if agent_id and claim.agent_id != agent_id:
        return JsonResponse(
            {"error": "Claim is held by another agent", "claimed_by": claim.agent_id},
            status=403,
        )

    claim.status = "released"
    claim.released_at = timezone.now()
    claim.save(update_fields=["status", "released_at", "updated_at", "version"])
    # Soft-delete to free the unique constraint slot for future claims
    claim.soft_delete(user=user)

    return JsonResponse(
        {
            "ticket_uuid": claim.ticket_uuid,
            "agent_id": claim.agent_id,
            "status": "released",
            "released_at": _isoformat(claim.released_at),
        }
    )
1281
+
1282
+
1283
@csrf_exempt
def api_ticket_submit(request, slug, ticket_uuid):
    """Submit completed work for a claimed ticket.

    POST /projects/<slug>/fossil/api/tickets/<uuid>/submit
    {
        "agent_id": "claude-abc",
        "workspace": "agent-fix-123",
        "summary": "Fixed the bug by ...",
        "files_changed": ["src/foo.py", "tests/test_foo.py"]
    }

    Marks the claim "submitted" and records the work summary and file
    list; a non-empty summary is also mirrored as a Fossil ticket comment.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        payload = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    from fossil.agent_claims import TicketClaim

    claim = TicketClaim.objects.filter(repository=repo, ticket_uuid=ticket_uuid).first()
    if claim is None:
        return JsonResponse({"error": "No active claim for this ticket"}, status=404)
    if claim.status != "claimed":
        return JsonResponse({"error": f"Claim is already {claim.status}"}, status=409)

    summary = (payload.get("summary") or "").strip()
    files_changed = payload.get("files_changed") or []

    claim.status = "submitted"
    claim.summary = summary
    claim.files_changed = files_changed
    claim.save(update_fields=["status", "summary", "files_changed", "updated_at", "version"])

    # Mirror the summary onto the Fossil ticket so it is visible outside the API.
    if summary:
        from fossil.cli import FossilCLI

        note = f"[Agent: {claim.agent_id}] Work submitted.\n\n{summary}"
        if files_changed:
            note += f"\n\nFiles changed: {', '.join(files_changed)}"
        FossilCLI().ticket_change(repo.full_path, ticket_uuid, {"comment": note})

    return JsonResponse(
        {
            "ticket_uuid": claim.ticket_uuid,
            "agent_id": claim.agent_id,
            "status": claim.status,
            "summary": claim.summary,
            "files_changed": claim.files_changed,
        }
    )
1350
+
1351
+
1352
@csrf_exempt
def api_tickets_unclaimed(request, slug):
    """List open tickets that aren't claimed by any agent.

    GET /projects/<slug>/fossil/api/tickets/unclaimed
    Optional query params: status (default: Open), limit (default: 50, max 200)
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    status_filter = request.GET.get("status", "Open").strip()
    raw_limit = request.GET.get("limit", "50")
    try:
        limit = int(raw_limit)
    except (ValueError, TypeError):
        limit = 50
    else:
        limit = max(1, min(200, limit))

    # Pull candidate tickets from Fossil, then subtract the claimed ones.
    reader = FossilReader(repo.full_path)
    with reader:
        candidates = reader.get_tickets(status=status_filter, limit=500)

    from fossil.agent_claims import TicketClaim

    claimed_uuids = set(TicketClaim.objects.filter(repository=repo).values_list("ticket_uuid", flat=True))

    unclaimed = []
    for ticket in candidates:
        if ticket.uuid in claimed_uuids:
            continue
        unclaimed.append(
            {
                "uuid": ticket.uuid,
                "title": ticket.title,
                "status": ticket.status,
                "type": ticket.type,
                "priority": ticket.priority,
                "severity": ticket.severity,
                "created": _isoformat(ticket.created),
            }
        )
        if len(unclaimed) >= limit:
            break

    return JsonResponse({"tickets": unclaimed, "total": len(unclaimed)})
1402
+
1403
+
1404
+# --- Server-Sent Events ---
1405
+
1406
+
1407
@csrf_exempt
def api_events(request, slug):
    """Server-Sent Events stream for real-time repository events.

    GET /projects/<slug>/fossil/api/events

    Streams events as SSE:
    - checkin: new checkin pushed
    - ticket: ticket created/updated (by count change)
    - claim: ticket claimed/released/submitted
    - workspace: workspace created/merged/abandoned
    - review: code review created/updated

    Heartbeat sent every 15 seconds if no events. Poll interval: 5 seconds.

    NOTE(review): the generator loops forever until the client disconnects;
    under a synchronous WSGI server each open stream occupies a worker for
    its lifetime — confirm deployment uses ASGI or a dedicated worker pool.
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    def event_stream():
        # Local imports keep module import cheap; only needed once streaming.
        from fossil.agent_claims import TicketClaim
        from fossil.code_reviews import CodeReview
        from fossil.workspaces import AgentWorkspace

        # Snapshot current state to detect changes
        last_checkin_count = 0
        try:
            with FossilReader(repo.full_path) as reader:
                last_checkin_count = reader.get_checkin_count()
        except Exception:
            # Repo may be momentarily unreadable; start from 0 and catch up.
            pass

        # all_objects (includes soft-deleted rows) so pk watermarks never
        # miss a row that is created and immediately soft-deleted.
        last_claim_id = TicketClaim.all_objects.filter(repository=repo).order_by("-pk").values_list("pk", flat=True).first() or 0
        last_workspace_id = AgentWorkspace.all_objects.filter(repository=repo).order_by("-pk").values_list("pk", flat=True).first() or 0
        last_review_id = CodeReview.all_objects.filter(repository=repo).order_by("-pk").values_list("pk", flat=True).first() or 0

        heartbeat_counter = 0

        while True:
            events = []

            # Check for new checkins
            try:
                with FossilReader(repo.full_path) as reader:
                    current_count = reader.get_checkin_count()
                    if current_count > last_checkin_count:
                        new_count = current_count - last_checkin_count
                        timeline = reader.get_timeline(limit=new_count, event_type="ci")
                        for entry in timeline:
                            events.append(
                                {
                                    "type": "checkin",
                                    "data": {
                                        "uuid": entry.uuid,
                                        "user": entry.user,
                                        "comment": entry.comment,
                                        "branch": entry.branch,
                                        "timestamp": _isoformat(entry.timestamp),
                                    },
                                }
                            )
                        last_checkin_count = current_count
            except Exception:
                # Transient read failure: skip this poll, retry next cycle.
                pass

            # Check for new claims
            new_claims = TicketClaim.all_objects.filter(repository=repo, pk__gt=last_claim_id).order_by("pk")
            for claim in new_claims:
                events.append(
                    {
                        "type": "claim",
                        "data": {
                            "ticket_uuid": claim.ticket_uuid,
                            "agent_id": claim.agent_id,
                            "status": claim.status,
                            "claimed_at": _isoformat(claim.claimed_at),
                        },
                    }
                )
                last_claim_id = claim.pk

            # Check for new workspaces
            new_workspaces = AgentWorkspace.all_objects.filter(repository=repo, pk__gt=last_workspace_id).order_by("pk")
            for ws in new_workspaces:
                events.append(
                    {
                        "type": "workspace",
                        "data": {
                            "name": ws.name,
                            "branch": ws.branch,
                            "status": ws.status,
                            "agent_id": ws.agent_id,
                        },
                    }
                )
                last_workspace_id = ws.pk

            # Check for new code reviews
            new_reviews = CodeReview.all_objects.filter(repository=repo, pk__gt=last_review_id).order_by("pk")
            for review in new_reviews:
                events.append(
                    {
                        "type": "review",
                        "data": {
                            "id": review.pk,
                            "title": review.title,
                            "status": review.status,
                            "agent_id": review.agent_id,
                        },
                    }
                )
                last_review_id = review.pk

            # Yield events
            for event in events:
                yield f"event: {event['type']}\ndata: {json.dumps(event['data'])}\n\n"

            # Heartbeat every ~15 seconds (3 iterations * 5s sleep)
            heartbeat_counter += 1
            if not events and heartbeat_counter >= 3:
                # SSE comment line: keeps proxies/clients from timing out.
                yield ": heartbeat\n\n"
                heartbeat_counter = 0

            time.sleep(5)

    response = StreamingHttpResponse(event_stream(), content_type="text/event-stream")
    response["Cache-Control"] = "no-cache"
    # Disable nginx response buffering so events flush immediately.
    response["X-Accel-Buffering"] = "no"
    return response
1540
+
1541
+
1542
+# --- Code Review API ---
1543
+
1544
+
1545
@csrf_exempt
def api_review_create(request, slug):
    """Submit code changes for review.

    POST /projects/<slug>/fossil/api/reviews/create
    {
        "title": "Fix null pointer in auth module",
        "description": "The auth check was failing when ...",
        "diff": "--- a/src/auth.py\\n+++ b/src/auth.py\\n...",
        "files_changed": ["src/auth.py", "tests/test_auth.py"],
        "agent_id": "claude-abc",
        "workspace": "agent-fix-123",
        "ticket_uuid": "abc123..."
    }
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        payload = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    # Title and diff are mandatory; everything else defaults to empty.
    title = (payload.get("title") or "").strip()
    diff = (payload.get("diff") or "").strip()
    if not title:
        return JsonResponse({"error": "Review title is required"}, status=400)
    if not diff:
        return JsonResponse({"error": "Diff is required"}, status=400)

    # Resolve optional workspace reference; unknown names are ignored.
    workspace_obj = None
    workspace_name = (payload.get("workspace") or "").strip()
    if workspace_name:
        from fossil.workspaces import AgentWorkspace

        workspace_obj = AgentWorkspace.objects.filter(repository=repo, name=workspace_name).first()

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.create(
        repository=repo,
        workspace=workspace_obj,
        title=title,
        description=payload.get("description", ""),
        diff=diff,
        files_changed=payload.get("files_changed", []),
        agent_id=payload.get("agent_id", ""),
        ticket_uuid=payload.get("ticket_uuid", ""),
        created_by=user,
    )

    body = {
        "id": review.pk,
        "title": review.title,
        "description": review.description,
        "status": review.status,
        "agent_id": review.agent_id,
        "files_changed": review.files_changed,
        "created_at": _isoformat(review.created_at),
    }
    return JsonResponse(body, status=201)
1618
+
1619
+
1620
@csrf_exempt
def api_review_list(request, slug):
    """List code reviews for a repository, optionally filtered by status.

    GET /projects/<slug>/fossil/api/reviews
    Optional query params: status (pending, approved, changes_requested, merged)
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    from fossil.code_reviews import CodeReview

    qs = CodeReview.objects.filter(repository=repo)
    wanted_status = request.GET.get("status", "").strip()
    if wanted_status:
        qs = qs.filter(status=wanted_status)

    # Clamp the requested page into the valid range before slicing.
    page, per_page = _paginate_params(request)
    total = qs.count()
    total_pages = max(1, math.ceil(total / per_page))
    page = min(page, total_pages)
    start = (page - 1) * per_page
    window = qs[start : start + per_page]

    reviews = [
        {
            "id": review.pk,
            "title": review.title,
            "status": review.status,
            "agent_id": review.agent_id,
            "files_changed": review.files_changed,
            "comment_count": review.comments.count(),
            "created_at": _isoformat(review.created_at),
            "updated_at": _isoformat(review.updated_at),
        }
        for review in window
    ]

    return JsonResponse(
        {
            "reviews": reviews,
            "total": total,
            "page": page,
            "per_page": per_page,
            "total_pages": total_pages,
        }
    )
1672
+
1673
+
1674
@csrf_exempt
def api_review_detail(request, slug, review_id):
    """Get a code review with its comments.

    GET /projects/<slug>/fossil/api/reviews/<id>
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    # Serialize all inline comments attached to this review.
    comments = [
        {
            "id": comment.pk,
            "body": comment.body,
            "file_path": comment.file_path,
            "line_number": comment.line_number,
            "author": comment.author,
            "created_at": _isoformat(comment.created_at),
        }
        for comment in review.comments.all()
    ]

    detail = {
        "id": review.pk,
        "title": review.title,
        "description": review.description,
        "diff": review.diff,
        "status": review.status,
        "agent_id": review.agent_id,
        "files_changed": review.files_changed,
        "ticket_uuid": review.ticket_uuid,
        "workspace": review.workspace.name if review.workspace else None,
        "comments": comments,
        "created_at": _isoformat(review.created_at),
        "updated_at": _isoformat(review.updated_at),
    }
    return JsonResponse(detail)
1723
+
1724
+
1725
@csrf_exempt
def api_review_comment(request, slug, review_id):
    """Add a comment to a code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/comment
    {
        "body": "This looks good but consider...",
        "file_path": "src/auth.py",
        "line_number": 42,
        "author": "human-reviewer"
    }

    NOTE(review): unlike the other mutating endpoints this one does not
    require write access — presumably so read-only reviewers can comment;
    confirm that is intentional.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    from fossil.code_reviews import CodeReview, ReviewComment

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    try:
        payload = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    body = (payload.get("body") or "").strip()
    if not body:
        return JsonResponse({"error": "Comment body is required"}, status=400)

    # Author falls back to the authenticated username when not supplied.
    author = (payload.get("author") or "").strip()
    if not author:
        author = user.username if user else ""
    if not author:
        return JsonResponse({"error": "Author is required"}, status=400)

    comment = ReviewComment.objects.create(
        review=review,
        body=body,
        file_path=payload.get("file_path", ""),
        line_number=payload.get("line_number"),
        author=author,
        created_by=user,
    )

    return JsonResponse(
        {
            "id": comment.pk,
            "body": comment.body,
            "file_path": comment.file_path,
            "line_number": comment.line_number,
            "author": comment.author,
            "created_at": _isoformat(comment.created_at),
        },
        status=201,
    )
1786
+
1787
+
1788
@csrf_exempt
def api_review_approve(request, slug, review_id):
    """Approve a code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/approve
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    lacks_write = user is None or not can_write_project(user, project)
    if token is None and lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)
    if review.status == "merged":
        # Merged reviews are terminal; approval is meaningless.
        return JsonResponse({"error": "Review is already merged"}, status=409)

    review.status = "approved"
    review.save(update_fields=["status", "updated_at", "version"])

    return JsonResponse({"id": review.pk, "status": review.status})
1818
+
1819
+
1820
@csrf_exempt
def api_review_request_changes(request, slug, review_id):
    """Request changes on a code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/request-changes
    {"comment": "Please fix the error handling in auth.py"}
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    lacks_write = user is None or not can_write_project(user, project)
    if token is None and lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview, ReviewComment

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)
    if review.status == "merged":
        return JsonResponse({"error": "Review is already merged"}, status=409)

    review.status = "changes_requested"
    review.save(update_fields=["status", "updated_at", "version"])

    # Body is optional; a malformed one just means no comment is attached.
    try:
        payload = json.loads(request.body) if request.body else {}
    except (json.JSONDecodeError, ValueError):
        payload = {}

    note = (payload.get("comment") or "").strip()
    if note:
        ReviewComment.objects.create(
            review=review,
            body=note,
            author=user.username if user else "reviewer",
            created_by=user,
        )

    return JsonResponse({"id": review.pk, "status": review.status})
1867
+
1868
+
1869
@csrf_exempt
def api_review_merge(request, slug, review_id):
    """Merge an approved code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/merge

    Only approved reviews can be merged. If the review is linked to a workspace,
    the workspace merge is triggered.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    lacks_write = user is None or not can_write_project(user, project)
    if token is None and lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    # Enforce the approve-then-merge state machine.
    if review.status == "merged":
        return JsonResponse({"error": "Review is already merged"}, status=409)
    if review.status != "approved":
        return JsonResponse({"error": "Review must be approved before merging"}, status=409)

    review.status = "merged"
    review.save(update_fields=["status", "updated_at", "version"])

    # Propagate the merge to any linked ticket claim.
    if review.ticket_uuid:
        from fossil.agent_claims import TicketClaim

        claim = TicketClaim.objects.filter(repository=repo, ticket_uuid=review.ticket_uuid).first()
        if claim and claim.status in ("claimed", "submitted"):
            claim.status = "merged"
            claim.save(update_fields=["status", "updated_at", "version"])

    return JsonResponse({"id": review.pk, "status": review.status, "title": review.title})
4761914
4771915
ADDED fossil/code_reviews.py
4781916
ADDED fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
4791917
ADDED fossil/migrations/0011_codereview_historicalcodereview_and_more.py
4801918
ADDED fossil/migrations/0012_alter_ticketclaim_unique_together.py
--- fossil/api_views.py
+++ fossil/api_views.py
@@ -3,22 +3,34 @@
3 All endpoints live under /projects/<slug>/fossil/api/.
4 Auth: Bearer token (APIToken or PersonalAccessToken) or session cookie.
5 All responses are JSON. All read endpoints check can_read_project.
6 """
7
 
 
8 import math
 
 
 
 
 
9
10 from django.http import JsonResponse
 
11 from django.shortcuts import get_object_or_404
 
 
12 from django.views.decorators.csrf import csrf_exempt
13 from django.views.decorators.http import require_GET
14
15 from fossil.api_auth import authenticate_request
16 from fossil.models import FossilRepository
17 from fossil.reader import FossilReader
18 from projects.access import can_read_project
19 from projects.models import Project
 
 
20
21
22 def _get_repo(slug):
23 """Look up project and repository by slug, or return 404 JSON."""
24 project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
@@ -97,10 +109,87 @@
97 {"method": "GET", "path": f"{base}/wiki/<name>", "description": "Single wiki page with content"},
98 {"method": "GET", "path": f"{base}/branches", "description": "Branch list"},
99 {"method": "GET", "path": f"{base}/tags", "description": "Tag list"},
100 {"method": "GET", "path": f"{base}/releases", "description": "Release list"},
101 {"method": "GET", "path": f"{base}/search", "description": "Search across checkins, tickets, wiki", "params": "q"},
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
102 ],
103 "auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
104 }
105 )
106
@@ -471,5 +560,1354 @@
471 checkin["timestamp"] = _isoformat(checkin.get("timestamp"))
472 for ticket in results.get("tickets", []):
473 ticket["created"] = _isoformat(ticket.get("created"))
474
475 return JsonResponse(results)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
476
477 ADDED fossil/code_reviews.py
478 ADDED fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
479 ADDED fossil/migrations/0011_codereview_historicalcodereview_and_more.py
480 ADDED fossil/migrations/0012_alter_ticketclaim_unique_together.py
--- fossil/api_views.py
+++ fossil/api_views.py
@@ -3,22 +3,34 @@
3 All endpoints live under /projects/<slug>/fossil/api/.
4 Auth: Bearer token (APIToken or PersonalAccessToken) or session cookie.
5 All responses are JSON. All read endpoints check can_read_project.
6 """
7
8 import json
9 import logging
10 import math
11 import re
12 import shutil
13 import subprocess
14 import tempfile
15 import time
16
17 from django.db import transaction
18 from django.http import JsonResponse, StreamingHttpResponse
19 from django.shortcuts import get_object_or_404
20 from django.test import RequestFactory
21 from django.utils import timezone
22 from django.views.decorators.csrf import csrf_exempt
23 from django.views.decorators.http import require_GET
24
25 from fossil.api_auth import authenticate_request
26 from fossil.models import FossilRepository
27 from fossil.reader import FossilReader
28 from projects.access import can_read_project, can_write_project
29 from projects.models import Project
30
31 logger = logging.getLogger(__name__)
32
33
34 def _get_repo(slug):
35 """Look up project and repository by slug, or return 404 JSON."""
36 project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
@@ -97,10 +109,87 @@
109 {"method": "GET", "path": f"{base}/wiki/<name>", "description": "Single wiki page with content"},
110 {"method": "GET", "path": f"{base}/branches", "description": "Branch list"},
111 {"method": "GET", "path": f"{base}/tags", "description": "Tag list"},
112 {"method": "GET", "path": f"{base}/releases", "description": "Release list"},
113 {"method": "GET", "path": f"{base}/search", "description": "Search across checkins, tickets, wiki", "params": "q"},
114 {
115 "method": "POST",
116 "path": f"{base}/batch",
117 "description": "Execute multiple API calls in a single request (max 25)",
118 "body": '{"requests": [{"method": "GET", "path": "/api/timeline", "params": {}}]}',
119 },
120 {"method": "GET", "path": f"{base}/workspaces", "description": "List agent workspaces", "params": "status"},
121 {
122 "method": "POST",
123 "path": f"{base}/workspaces/create",
124 "description": "Create an isolated agent workspace",
125 "body": '{"name": "...", "description": "...", "agent_id": "..."}',
126 },
127 {"method": "GET", "path": f"{base}/workspaces/<name>", "description": "Get workspace details"},
128 {
129 "method": "POST",
130 "path": f"{base}/workspaces/<name>/commit",
131 "description": "Commit changes in a workspace",
132 "body": '{"message": "...", "files": []}',
133 },
134 {
135 "method": "POST",
136 "path": f"{base}/workspaces/<name>/merge",
137 "description": "Merge workspace branch back to trunk",
138 "body": '{"target_branch": "trunk"}',
139 },
140 {
141 "method": "DELETE",
142 "path": f"{base}/workspaces/<name>/abandon",
143 "description": "Abandon and clean up a workspace",
144 },
145 {
146 "method": "POST",
147 "path": f"{base}/tickets/<uuid>/claim",
148 "description": "Claim a ticket for exclusive agent work",
149 "body": '{"agent_id": "...", "workspace": "..."}',
150 },
151 {
152 "method": "POST",
153 "path": f"{base}/tickets/<uuid>/release",
154 "description": "Release a ticket claim",
155 },
156 {
157 "method": "POST",
158 "path": f"{base}/tickets/<uuid>/submit",
159 "description": "Submit completed work for a claimed ticket",
160 "body": '{"summary": "...", "files_changed": [...]}',
161 },
162 {
163 "method": "GET",
164 "path": f"{base}/tickets/unclaimed",
165 "description": "List tickets not claimed by any agent",
166 "params": "status, limit",
167 },
168 {"method": "GET", "path": f"{base}/events", "description": "Server-Sent Events stream for real-time events"},
169 {
170 "method": "POST",
171 "path": f"{base}/reviews/create",
172 "description": "Submit code changes for review",
173 "body": '{"title": "...", "diff": "...", "files_changed": [...], "agent_id": "..."}',
174 },
175 {
176 "method": "GET",
177 "path": f"{base}/reviews",
178 "description": "List code reviews",
179 "params": "status, page, per_page",
180 },
181 {"method": "GET", "path": f"{base}/reviews/<id>", "description": "Get review with comments"},
182 {
183 "method": "POST",
184 "path": f"{base}/reviews/<id>/comment",
185 "description": "Add a comment to a review",
186 "body": '{"body": "...", "file_path": "...", "line_number": 42, "author": "..."}',
187 },
188 {"method": "POST", "path": f"{base}/reviews/<id>/approve", "description": "Approve a review"},
189 {"method": "POST", "path": f"{base}/reviews/<id>/request-changes", "description": "Request changes on a review"},
190 {"method": "POST", "path": f"{base}/reviews/<id>/merge", "description": "Merge an approved review"},
191 ],
192 "auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
193 }
194 )
195
@@ -471,5 +560,1354 @@
560 checkin["timestamp"] = _isoformat(checkin.get("timestamp"))
561 for ticket in results.get("tickets", []):
562 ticket["created"] = _isoformat(ticket.get("created"))
563
564 return JsonResponse(results)
565
566
567 # --- Batch API ---
568
# Map API paths to (view_function, extra_path_regex_or_None).
# Entries with a regex capture group extract path params (e.g. ticket uuid, wiki page name).
# Static routes: exact-path lookup, no path parameters.
_BATCH_STATIC_ROUTES = {
    "/api/project": api_project,
    "/api/timeline": api_timeline,
    "/api/tickets": api_tickets,
    "/api/wiki": api_wiki_list,
    "/api/branches": api_branches,
    "/api/tags": api_tags,
    "/api/releases": api_releases,
    "/api/search": api_search,
}

# Dynamic routes: (compiled regex, view, kwarg name) — group(1) becomes the
# view's keyword argument (ticket_uuid / page_name).
_BATCH_DYNAMIC_ROUTES = [
    (re.compile(r"^/api/tickets/([0-9a-fA-F-]+)$"), api_ticket_detail, "ticket_uuid"),
    (re.compile(r"^/api/wiki/(.+)$"), api_wiki_page, "page_name"),
]

# Hard cap on sub-requests per batch call (enforced in api_batch).
_BATCH_MAX_REQUESTS = 25
588
589
def _resolve_batch_route(path):
    """Map a batch sub-request path to a (view callable, url-kwargs dict) pair.

    Static routes are tried first via exact lookup; dynamic routes are then
    matched in order, with the single regex capture group becoming the view's
    keyword argument. Returns (None, None) when nothing matches.
    """
    if path in _BATCH_STATIC_ROUTES:
        return _BATCH_STATIC_ROUTES[path], {}

    for regex, handler, param_name in _BATCH_DYNAMIC_ROUTES:
        match = regex.match(path)
        if match is not None:
            return handler, {param_name: match.group(1)}

    return None, None
602
603
@csrf_exempt
def api_batch(request, slug):
    """Execute multiple API calls in a single request.

    POST /projects/<slug>/fossil/api/batch
    {
        "requests": [
            {"method": "GET", "path": "/api/timeline", "params": {"per_page": 5}},
            {"method": "GET", "path": "/api/tickets", "params": {"status": "Open"}},
            {"method": "GET", "path": "/api/wiki/Home"}
        ]
    }

    Returns:
    {
        "responses": [
            {"status": 200, "body": {...}},
            {"status": 200, "body": {...}},
            {"status": 200, "body": {...}}
        ]
    }

    Auth: same as other API endpoints (Bearer token or session).
    Limit: 25 sub-requests per batch.
    Only GET sub-requests are supported.

    Sub-requests are dispatched in-process via direct Python calls to the
    view functions (no HTTP round-trip); each sub-response is reported
    positionally, and a failure of one sub-request never aborts the others.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    # Auth check -- same as every other API endpoint
    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    try:
        body = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    requests_list = body.get("requests")
    if not isinstance(requests_list, list):
        return JsonResponse({"error": "'requests' must be a list"}, status=400)

    if len(requests_list) > _BATCH_MAX_REQUESTS:
        return JsonResponse({"error": f"Maximum {_BATCH_MAX_REQUESTS} requests per batch"}, status=400)

    if len(requests_list) == 0:
        return JsonResponse({"responses": []})

    factory = RequestFactory()
    responses = []

    for sub in requests_list:
        # Per-item validation: malformed entries get an error entry in the
        # response list at the same index instead of failing the whole batch.
        if not isinstance(sub, dict):
            responses.append({"status": 400, "body": {"error": "Each request must be an object"}})
            continue

        method = (sub.get("method") or "GET").upper()
        path = sub.get("path", "")
        params = sub.get("params") or {}

        if method != "GET":
            responses.append({"status": 405, "body": {"error": "Only GET is supported in batch requests"}})
            continue

        if not path:
            responses.append({"status": 400, "body": {"error": "Missing 'path'"}})
            continue

        view_func, extra_kwargs = _resolve_batch_route(path)
        if view_func is None:
            responses.append({"status": 404, "body": {"error": f"Unknown API path: {path}"}})
            continue

        # Build a synthetic GET request preserving auth from the outer request
        full_path = f"/projects/{slug}/fossil{path}"
        synthetic = factory.get(full_path, data=params)

        # Carry over auth state so sub-requests don't re-authenticate
        synthetic.user = request.user
        synthetic.session = request.session
        if "HTTP_AUTHORIZATION" in request.META:
            synthetic.META["HTTP_AUTHORIZATION"] = request.META["HTTP_AUTHORIZATION"]

        try:
            sub_response = view_func(synthetic, slug=slug, **extra_kwargs)
            # Views return JSON; fall back to a raw-text wrapper if a view
            # ever produces a non-JSON body.
            try:
                response_body = json.loads(sub_response.content)
            except (json.JSONDecodeError, ValueError):
                response_body = {"raw": sub_response.content.decode("utf-8", errors="replace")}
            responses.append({"status": sub_response.status_code, "body": response_body})
        except Exception:
            # Isolate sub-request crashes: log with traceback and report a
            # per-item 500 rather than failing the whole batch.
            logger.exception("Batch sub-request failed: %s %s", method, path)
            responses.append({"status": 500, "body": {"error": "Internal error processing sub-request"}})

    return JsonResponse({"responses": responses})
701
702
703 # --- Agent Workspace API ---
704
705
def _get_workspace(repo, workspace_name):
    """Return the AgentWorkspace named *workspace_name* for *repo*, or None.

    Matches a workspace in any status (active, merged, abandoned) — callers
    enforce status-specific rules themselves. Returns None (not a JSON 404)
    when no such workspace exists; callers build their own error response.
    """
    from fossil.workspaces import AgentWorkspace

    # filter().first() already yields None on no match; no branching needed.
    return AgentWorkspace.objects.filter(repository=repo, name=workspace_name).first()
714
715
@csrf_exempt
def api_workspace_list(request, slug):
    """List agent workspaces for a repository.

    GET /projects/<slug>/fossil/api/workspaces
    Optional query params: status (active, merged, abandoned)
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    from fossil.workspaces import AgentWorkspace

    queryset = AgentWorkspace.objects.filter(repository=repo)
    wanted_status = request.GET.get("status", "").strip()
    if wanted_status:
        queryset = queryset.filter(status=wanted_status)

    # Serialize each workspace to a plain dict for the JSON payload.
    payload = [
        {
            "name": ws.name,
            "branch": ws.branch,
            "status": ws.status,
            "agent_id": ws.agent_id,
            "description": ws.description,
            "files_changed": ws.files_changed,
            "commits_made": ws.commits_made,
            "created_at": _isoformat(ws.created_at),
        }
        for ws in queryset
    ]

    return JsonResponse({"workspaces": payload})
754
755
@csrf_exempt
def api_workspace_create(request, slug):
    """Create an isolated agent workspace.

    POST /projects/<slug>/fossil/api/workspaces/create
    {"name": "agent-fix-123", "description": "Fixing bug #123", "agent_id": "claude-abc"}

    Creates a new Fossil branch (workspace/<name>) and a temporary checkout
    directory for the agent, then records an AgentWorkspace row. Each Fossil
    step cleans up the checkout directory on failure so no half-created
    workspace is left behind. Returns 201 with the workspace details.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    # Write access required to create workspaces
    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    name = (data.get("name") or "").strip()
    if not name:
        return JsonResponse({"error": "Workspace name is required"}, status=400)

    # Name doubles as a branch suffix and a temp-dir prefix, so it is
    # restricted to filesystem/branch-safe characters (max 199 chars).
    if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9._-]{0,198}$", name):
        return JsonResponse(
            {"error": "Invalid workspace name. Use alphanumeric characters, hyphens, dots, and underscores."},
            status=400,
        )

    from fossil.workspaces import AgentWorkspace

    if AgentWorkspace.objects.filter(repository=repo, name=name).exists():
        return JsonResponse({"error": f"Workspace '{name}' already exists"}, status=409)

    branch = f"workspace/{name}"

    # Create workspace checkout directory
    checkout_dir = tempfile.mkdtemp(prefix=f"fossilrepo-ws-{name}-")

    from fossil.cli import FossilCLI

    cli = FossilCLI()

    # Open a checkout in the workspace dir
    # NOTE(review): assumes the installed fossil binary supports the
    # `open --workdir` flag — confirm against the deployed Fossil version.
    result = subprocess.run(
        [cli.binary, "open", str(repo.full_path), "--workdir", checkout_dir],
        capture_output=True,
        text=True,
        timeout=30,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        shutil.rmtree(checkout_dir, ignore_errors=True)
        return JsonResponse({"error": "Failed to open Fossil checkout", "detail": result.stderr.strip()}, status=500)

    # Create the branch from trunk
    result = subprocess.run(
        [cli.binary, "branch", "new", branch, "trunk"],
        capture_output=True,
        text=True,
        timeout=30,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        # Clean up on failure
        subprocess.run([cli.binary, "close", "--force"], capture_output=True, cwd=checkout_dir, timeout=10, env=cli._env)
        shutil.rmtree(checkout_dir, ignore_errors=True)
        return JsonResponse({"error": "Failed to create branch", "detail": result.stderr.strip()}, status=500)

    # Switch to the new branch
    result = subprocess.run(
        [cli.binary, "update", branch],
        capture_output=True,
        text=True,
        timeout=30,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        subprocess.run([cli.binary, "close", "--force"], capture_output=True, cwd=checkout_dir, timeout=10, env=cli._env)
        shutil.rmtree(checkout_dir, ignore_errors=True)
        return JsonResponse({"error": "Failed to switch to branch", "detail": result.stderr.strip()}, status=500)

    # All Fossil steps succeeded — persist the workspace record.
    workspace = AgentWorkspace.objects.create(
        repository=repo,
        name=name,
        branch=branch,
        agent_id=data.get("agent_id", ""),
        description=data.get("description", ""),
        checkout_path=checkout_dir,
        created_by=user,
    )

    return JsonResponse(
        {
            "name": workspace.name,
            "branch": workspace.branch,
            "status": workspace.status,
            "agent_id": workspace.agent_id,
            "description": workspace.description,
            "checkout_path": workspace.checkout_path,
            "created_at": _isoformat(workspace.created_at),
        },
        status=201,
    )
870
871
@csrf_exempt
def api_workspace_detail(request, slug, workspace_name):
    """Return full details of one workspace.

    GET /projects/<slug>/fossil/api/workspaces/<name>
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    ws = _get_workspace(repo, workspace_name)
    if ws is None:
        return JsonResponse({"error": "Workspace not found"}, status=404)

    # Assemble the serialized workspace before returning it.
    payload = {
        "name": ws.name,
        "branch": ws.branch,
        "status": ws.status,
        "agent_id": ws.agent_id,
        "description": ws.description,
        "checkout_path": ws.checkout_path,
        "files_changed": ws.files_changed,
        "commits_made": ws.commits_made,
        "created_at": _isoformat(ws.created_at),
        "updated_at": _isoformat(ws.updated_at),
    }
    return JsonResponse(payload)
904
905
@csrf_exempt
def api_workspace_commit(request, slug, workspace_name):
    """Commit changes in a workspace.

    POST /projects/<slug>/fossil/api/workspaces/<name>/commit
    {"message": "Fix bug #123", "files": ["src/foo.py"]}

    If files is empty or omitted, commits all changed files.
    Only "active" workspaces accept commits. On success increments the
    workspace's commits_made counter and returns the fossil commit output.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    workspace = _get_workspace(repo, workspace_name)
    if workspace is None:
        return JsonResponse({"error": "Workspace not found"}, status=404)

    if workspace.status != "active":
        return JsonResponse({"error": f"Workspace is {workspace.status}, cannot commit"}, status=409)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    message = (data.get("message") or "").strip()
    if not message:
        return JsonResponse({"error": "Commit message is required"}, status=400)

    files = data.get("files") or []
    checkout_dir = workspace.checkout_path

    from fossil.cli import FossilCLI

    cli = FossilCLI()

    # Add files if specified, otherwise add all changes
    # NOTE(review): the add/addremove return codes are deliberately ignored
    # (best-effort staging); the subsequent commit surfaces any real failure.
    if files:
        for f in files:
            subprocess.run(
                [cli.binary, "add", f],
                capture_output=True,
                text=True,
                timeout=30,
                env=cli._env,
                cwd=checkout_dir,
            )
    else:
        subprocess.run(
            [cli.binary, "addremove"],
            capture_output=True,
            text=True,
            timeout=30,
            env=cli._env,
            cwd=checkout_dir,
        )

    # Commit
    commit_cmd = [cli.binary, "commit", "-m", message, "--no-warnings"]
    result = subprocess.run(
        commit_cmd,
        capture_output=True,
        text=True,
        timeout=60,
        env=cli._env,
        cwd=checkout_dir,
    )

    if result.returncode != 0:
        stderr = result.stderr.strip()
        # "nothing has changed" is not really an error
        if "nothing has changed" in stderr.lower() or "nothing has changed" in result.stdout.lower():
            return JsonResponse({"error": "Nothing to commit"}, status=409)
        return JsonResponse({"error": "Commit failed", "detail": stderr}, status=500)

    # Track commit activity on the workspace record.
    workspace.commits_made += 1
    workspace.save(update_fields=["commits_made", "updated_at", "version"])

    return JsonResponse(
        {
            "name": workspace.name,
            "branch": workspace.branch,
            "commits_made": workspace.commits_made,
            "message": message,
            "output": result.stdout.strip(),
        }
    )
1000
1001
@csrf_exempt
def api_workspace_merge(request, slug, workspace_name):
    """Merge workspace branch back to trunk.

    POST /projects/<slug>/fossil/api/workspaces/<name>/merge
    {"target_branch": "trunk"}

    Merges the workspace branch into the target branch (default: trunk),
    then closes the workspace checkout and cleans up the directory.
    Sequence: update to target -> merge workspace branch -> commit merge ->
    close checkout -> delete checkout dir -> mark workspace "merged".
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    workspace = _get_workspace(repo, workspace_name)
    if workspace is None:
        return JsonResponse({"error": "Workspace not found"}, status=404)

    if workspace.status != "active":
        return JsonResponse({"error": f"Workspace is {workspace.status}, cannot merge"}, status=409)

    # Body is optional; a missing/invalid body falls back to defaults.
    try:
        data = json.loads(request.body) if request.body else {}
    except (json.JSONDecodeError, ValueError):
        data = {}

    target_branch = (data.get("target_branch") or "trunk").strip()

    from fossil.cli import FossilCLI

    cli = FossilCLI()
    checkout_dir = workspace.checkout_path

    # Switch to target branch
    # NOTE(review): on failure past this point the checkout is left on the
    # target branch (possibly with a partial merge) and the workspace stays
    # "active" — verify that a retry after a failed merge behaves sanely.
    result = subprocess.run(
        [cli.binary, "update", target_branch],
        capture_output=True,
        text=True,
        timeout=30,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        return JsonResponse({"error": "Failed to switch to target branch", "detail": result.stderr.strip()}, status=500)

    # Merge workspace branch into target
    result = subprocess.run(
        [cli.binary, "merge", workspace.branch],
        capture_output=True,
        text=True,
        timeout=60,
        env=cli._env,
        cwd=checkout_dir,
    )
    if result.returncode != 0:
        return JsonResponse({"error": "Merge failed", "detail": result.stderr.strip()}, status=500)

    # Commit the merge
    # NOTE(review): a failed merge commit is not treated as an error — the
    # workspace is still marked merged and commit_output is returned empty.
    merge_msg = f"Merge {workspace.branch} into {target_branch}"
    commit_result = subprocess.run(
        [cli.binary, "commit", "-m", merge_msg, "--no-warnings"],
        capture_output=True,
        text=True,
        timeout=60,
        env=cli._env,
        cwd=checkout_dir,
    )

    # Close the checkout and clean up
    subprocess.run([cli.binary, "close", "--force"], capture_output=True, cwd=checkout_dir, timeout=10, env=cli._env)
    shutil.rmtree(checkout_dir, ignore_errors=True)

    workspace.status = "merged"
    workspace.checkout_path = ""
    workspace.save(update_fields=["status", "checkout_path", "updated_at", "version"])

    return JsonResponse(
        {
            "name": workspace.name,
            "branch": workspace.branch,
            "status": workspace.status,
            "target_branch": target_branch,
            "merge_output": result.stdout.strip(),
            "commit_output": commit_result.stdout.strip() if commit_result.returncode == 0 else "",
        }
    )
1095
1096
@csrf_exempt
def api_workspace_abandon(request, slug, workspace_name):
    """Abandon a workspace, closing the checkout and cleaning up.

    DELETE /projects/<slug>/fossil/api/workspaces/<name>/abandon

    The branch remains in Fossil history but the checkout directory is removed.
    """
    if request.method != "DELETE":
        return JsonResponse({"error": "DELETE required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    workspace = _get_workspace(repo, workspace_name)
    if workspace is None:
        return JsonResponse({"error": "Workspace not found"}, status=404)

    if workspace.status != "active":
        return JsonResponse({"error": f"Workspace is already {workspace.status}"}, status=409)

    from fossil.cli import FossilCLI

    fossil = FossilCLI()
    workdir = workspace.checkout_path

    # Best-effort: close the Fossil checkout, then remove its directory.
    if workdir:
        subprocess.run([fossil.binary, "close", "--force"], capture_output=True, cwd=workdir, timeout=10, env=fossil._env)
        shutil.rmtree(workdir, ignore_errors=True)

    # The branch itself stays in history; only the local state is discarded.
    workspace.status = "abandoned"
    workspace.checkout_path = ""
    workspace.save(update_fields=["status", "checkout_path", "updated_at", "version"])

    result_payload = {
        "name": workspace.name,
        "branch": workspace.branch,
        "status": workspace.status,
    }
    return JsonResponse(result_payload)
1144
1145
1146 # --- Ticket Claiming ---
1147
1148
@csrf_exempt
def api_ticket_claim(request, slug, ticket_uuid):
    """Claim a ticket for exclusive agent work.

    POST /projects/<slug>/fossil/api/tickets/<uuid>/claim
    {"agent_id": "claude-abc", "workspace": "agent-fix-123"}

    Returns 200 if claimed, 409 if already claimed by another agent.
    Uses the unique_together constraint on (repository, ticket_uuid) for atomicity.
    Re-claiming by the same agent is idempotent (200 with a note); a fresh
    claim returns 201.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    agent_id = (data.get("agent_id") or "").strip()
    if not agent_id:
        return JsonResponse({"error": "agent_id is required"}, status=400)

    # Verify the ticket exists in Fossil
    reader = FossilReader(repo.full_path)
    with reader:
        ticket = reader.get_ticket_detail(ticket_uuid)
    if ticket is None:
        return JsonResponse({"error": "Ticket not found in repository"}, status=404)

    # Resolve optional workspace reference
    # NOTE(review): an unknown workspace name is silently treated as no
    # workspace (workspace_obj stays None) — confirm this is intentional.
    workspace_name = (data.get("workspace") or "").strip()
    workspace_obj = None
    if workspace_name:
        from fossil.workspaces import AgentWorkspace

        workspace_obj = AgentWorkspace.objects.filter(repository=repo, name=workspace_name).first()

    from fossil.agent_claims import TicketClaim

    with transaction.atomic():
        # Check for existing active claim (not soft-deleted) with row lock
        existing = TicketClaim.objects.select_for_update().filter(repository=repo, ticket_uuid=ticket_uuid).first()

        if existing:
            if existing.agent_id == agent_id:
                # Idempotent: same agent re-claiming
                return JsonResponse(
                    {
                        "ticket_uuid": existing.ticket_uuid,
                        "agent_id": existing.agent_id,
                        "status": existing.status,
                        "claimed_at": _isoformat(existing.claimed_at),
                        "message": "Already claimed by you",
                    }
                )
            return JsonResponse(
                {
                    "error": "Ticket already claimed",
                    "claimed_by": existing.agent_id,
                    "claimed_at": _isoformat(existing.claimed_at),
                },
                status=409,
            )

        # No existing claim: create one inside the same transaction so a
        # concurrent claimer either sees our locked row or hits the
        # unique constraint.
        claim = TicketClaim.objects.create(
            repository=repo,
            ticket_uuid=ticket_uuid,
            agent_id=agent_id,
            workspace=workspace_obj,
            created_by=user,
        )

    return JsonResponse(
        {
            "ticket_uuid": claim.ticket_uuid,
            "agent_id": claim.agent_id,
            "status": claim.status,
            "claimed_at": _isoformat(claim.claimed_at),
            "workspace": workspace_name or None,
        },
        status=201,
    )
1239
1240
@csrf_exempt
def api_ticket_release(request, slug, ticket_uuid):
    """Release a ticket claim.

    POST /projects/<slug>/fossil/api/tickets/<uuid>/release
    {"agent_id": "claude-abc"}

    If agent_id is supplied it must match the claiming agent; a mismatch is
    rejected with 409 so one agent cannot release another agent's claim.
    Omitting agent_id releases unconditionally (kept for backward
    compatibility with callers that send no body).

    Soft-deletes the claim record so the unique constraint slot is freed.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    # Body is optional; tolerate missing or malformed JSON.
    try:
        data = json.loads(request.body) if request.body else {}
    except (json.JSONDecodeError, ValueError):
        data = {}

    from fossil.agent_claims import TicketClaim

    claim = TicketClaim.objects.filter(repository=repo, ticket_uuid=ticket_uuid).first()
    if claim is None:
        return JsonResponse({"error": "No active claim for this ticket"}, status=404)

    # Ownership check: the documented agent_id body field was previously
    # ignored, letting any writer release any agent's claim.
    agent_id = (data.get("agent_id") or "").strip()
    if agent_id and claim.agent_id != agent_id:
        return JsonResponse(
            {"error": "Claim is held by another agent", "claimed_by": claim.agent_id},
            status=409,
        )

    claim.status = "released"
    claim.released_at = timezone.now()
    claim.save(update_fields=["status", "released_at", "updated_at", "version"])
    # Soft-delete to free the unique constraint slot for future claims
    claim.soft_delete(user=user)

    return JsonResponse(
        {
            "ticket_uuid": claim.ticket_uuid,
            "agent_id": claim.agent_id,
            "status": "released",
            "released_at": _isoformat(claim.released_at),
        }
    )
1281
1282
@csrf_exempt
def api_ticket_submit(request, slug, ticket_uuid):
    """Submit completed work for a claimed ticket.

    POST /projects/<slug>/fossil/api/tickets/<uuid>/submit
    {
        "agent_id": "claude-abc",
        "workspace": "agent-fix-123",
        "summary": "Fixed the bug by ...",
        "files_changed": ["src/foo.py", "tests/test_foo.py"]
    }

    Updates the claim status to "submitted" and records the work summary.
    Optionally mirrors the summary onto the Fossil ticket as a comment.
    The ticket comment is best-effort: the claim state is persisted first,
    so a CLI failure must not turn a successful submission into a 500.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    from fossil.agent_claims import TicketClaim

    claim = TicketClaim.objects.filter(repository=repo, ticket_uuid=ticket_uuid).first()
    if claim is None:
        return JsonResponse({"error": "No active claim for this ticket"}, status=404)

    # Only an active claim can transition to "submitted".
    if claim.status != "claimed":
        return JsonResponse({"error": f"Claim is already {claim.status}"}, status=409)

    summary = (data.get("summary") or "").strip()
    files_changed = data.get("files_changed") or []

    claim.status = "submitted"
    claim.summary = summary
    claim.files_changed = files_changed
    claim.save(update_fields=["status", "summary", "files_changed", "updated_at", "version"])

    # Best-effort: add a comment to the Fossil ticket via CLI. The claim is
    # already saved above, so swallow CLI errors rather than 500 here.
    if summary:
        from fossil.cli import FossilCLI

        cli = FossilCLI()
        comment_text = f"[Agent: {claim.agent_id}] Work submitted.\n\n{summary}"
        if files_changed:
            comment_text += f"\n\nFiles changed: {', '.join(files_changed)}"
        try:
            cli.ticket_change(repo.full_path, ticket_uuid, {"comment": comment_text})
        except Exception:
            # Deliberate best-effort side channel; submission already succeeded.
            pass

    return JsonResponse(
        {
            "ticket_uuid": claim.ticket_uuid,
            "agent_id": claim.agent_id,
            "status": claim.status,
            "summary": claim.summary,
            "files_changed": claim.files_changed,
        }
    )
1350
1351
@csrf_exempt
def api_tickets_unclaimed(request, slug):
    """List open tickets that aren't claimed by any agent.

    GET /projects/<slug>/fossil/api/tickets/unclaimed
    Optional query params: status (default: Open), limit (default: 50)
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    wanted_status = request.GET.get("status", "Open").strip()
    try:
        limit = min(200, max(1, int(request.GET.get("limit", "50"))))
    except (ValueError, TypeError):
        limit = 50

    # Pull candidate tickets straight from the Fossil repository.
    with FossilReader(repo.full_path) as reader:
        candidates = reader.get_tickets(status=wanted_status, limit=500)

    from fossil.agent_claims import TicketClaim

    # UUIDs of tickets with an active claim.
    taken = set(TicketClaim.objects.filter(repository=repo).values_list("ticket_uuid", flat=True))

    # Keep only unclaimed tickets, stopping once the limit is reached.
    unclaimed = []
    for ticket in candidates:
        if ticket.uuid in taken:
            continue
        unclaimed.append(
            {
                "uuid": ticket.uuid,
                "title": ticket.title,
                "status": ticket.status,
                "type": ticket.type,
                "priority": ticket.priority,
                "severity": ticket.severity,
                "created": _isoformat(ticket.created),
            }
        )
        if len(unclaimed) >= limit:
            break

    return JsonResponse({"tickets": unclaimed, "total": len(unclaimed)})
1402
1403
1404 # --- Server-Sent Events ---
1405
1406
@csrf_exempt
def api_events(request, slug):
    """Server-Sent Events stream for real-time repository events.

    GET /projects/<slug>/fossil/api/events

    Streams events as SSE:
    - checkin: new checkin pushed
    - ticket: ticket created/updated (by count change)
    - claim: ticket claimed/released/submitted
    - workspace: workspace created/merged/abandoned
    - review: code review created/updated

    Heartbeat sent every 15 seconds if no events. Poll interval: 5 seconds.

    NOTE(review): the generator loops forever; the stream ends only when
    the client disconnects and the server tears down the response.
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    def event_stream():
        # Imported lazily inside the generator (matches the module's style
        # of deferring fossil.* model imports to the call site).
        from fossil.agent_claims import TicketClaim
        from fossil.code_reviews import CodeReview
        from fossil.workspaces import AgentWorkspace

        # Snapshot current state to detect changes
        last_checkin_count = 0
        try:
            with FossilReader(repo.full_path) as reader:
                last_checkin_count = reader.get_checkin_count()
        except Exception:
            # Best-effort: if the repo is unreadable, start from zero and
            # let the polling loop catch up.
            pass

        # High-water-mark PKs: any row with a larger pk is "new". Uses
        # all_objects so soft-deleted rows still advance the cursors.
        last_claim_id = TicketClaim.all_objects.filter(repository=repo).order_by("-pk").values_list("pk", flat=True).first() or 0
        last_workspace_id = AgentWorkspace.all_objects.filter(repository=repo).order_by("-pk").values_list("pk", flat=True).first() or 0
        last_review_id = CodeReview.all_objects.filter(repository=repo).order_by("-pk").values_list("pk", flat=True).first() or 0

        heartbeat_counter = 0

        while True:
            events = []

            # Check for new checkins (detected by checkin-count delta).
            try:
                with FossilReader(repo.full_path) as reader:
                    current_count = reader.get_checkin_count()
                    if current_count > last_checkin_count:
                        new_count = current_count - last_checkin_count
                        timeline = reader.get_timeline(limit=new_count, event_type="ci")
                        for entry in timeline:
                            events.append(
                                {
                                    "type": "checkin",
                                    "data": {
                                        "uuid": entry.uuid,
                                        "user": entry.user,
                                        "comment": entry.comment,
                                        "branch": entry.branch,
                                        "timestamp": _isoformat(entry.timestamp),
                                    },
                                }
                            )
                        last_checkin_count = current_count
            except Exception:
                # Skip this poll cycle on reader errors; retry next cycle.
                pass

            # Check for new claims
            new_claims = TicketClaim.all_objects.filter(repository=repo, pk__gt=last_claim_id).order_by("pk")
            for claim in new_claims:
                events.append(
                    {
                        "type": "claim",
                        "data": {
                            "ticket_uuid": claim.ticket_uuid,
                            "agent_id": claim.agent_id,
                            "status": claim.status,
                            "claimed_at": _isoformat(claim.claimed_at),
                        },
                    }
                )
                last_claim_id = claim.pk

            # Check for new workspaces
            new_workspaces = AgentWorkspace.all_objects.filter(repository=repo, pk__gt=last_workspace_id).order_by("pk")
            for ws in new_workspaces:
                events.append(
                    {
                        "type": "workspace",
                        "data": {
                            "name": ws.name,
                            "branch": ws.branch,
                            "status": ws.status,
                            "agent_id": ws.agent_id,
                        },
                    }
                )
                last_workspace_id = ws.pk

            # Check for new code reviews
            new_reviews = CodeReview.all_objects.filter(repository=repo, pk__gt=last_review_id).order_by("pk")
            for review in new_reviews:
                events.append(
                    {
                        "type": "review",
                        "data": {
                            "id": review.pk,
                            "title": review.title,
                            "status": review.status,
                            "agent_id": review.agent_id,
                        },
                    }
                )
                last_review_id = review.pk

            # Yield events in SSE framing: "event: <type>\ndata: <json>\n\n"
            for event in events:
                yield f"event: {event['type']}\ndata: {json.dumps(event['data'])}\n\n"

            # Heartbeat every ~15 seconds (3 iterations * 5s sleep).
            # NOTE(review): the counter is only reset when a heartbeat is
            # actually sent, so after a busy cycle the next quiet cycle may
            # heartbeat immediately — harmless for SSE comments.
            heartbeat_counter += 1
            if not events and heartbeat_counter >= 3:
                yield ": heartbeat\n\n"
                heartbeat_counter = 0

            time.sleep(5)

    # StreamingHttpResponse keeps the connection open; disable proxy
    # buffering (X-Accel-Buffering) so events reach the client promptly.
    response = StreamingHttpResponse(event_stream(), content_type="text/event-stream")
    response["Cache-Control"] = "no-cache"
    response["X-Accel-Buffering"] = "no"
    return response
1540
1541
1542 # --- Code Review API ---
1543
1544
@csrf_exempt
def api_review_create(request, slug):
    """Submit code changes for review.

    POST /projects/<slug>/fossil/api/reviews/create
    {
        "title": "Fix null pointer in auth module",
        "description": "The auth check was failing when ...",
        "diff": "--- a/src/auth.py\\n+++ b/src/auth.py\\n...",
        "files_changed": ["src/auth.py", "tests/test_auth.py"],
        "agent_id": "claude-abc",
        "workspace": "agent-fix-123",
        "ticket_uuid": "abc123..."
    }

    ``title`` and ``diff`` are required; everything else is optional.
    Optional fields sent as explicit JSON ``null`` are normalized to their
    empty defaults rather than being passed through as ``None``.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    title = (data.get("title") or "").strip()
    if not title:
        return JsonResponse({"error": "Review title is required"}, status=400)

    diff = (data.get("diff") or "").strip()
    if not diff:
        return JsonResponse({"error": "Diff is required"}, status=400)

    # Resolve optional workspace reference; unknown names are silently
    # treated as "no workspace" rather than rejected.
    workspace_name = (data.get("workspace") or "").strip()
    workspace_obj = None
    if workspace_name:
        from fossil.workspaces import AgentWorkspace

        workspace_obj = AgentWorkspace.objects.filter(repository=repo, name=workspace_name).first()

    from fossil.code_reviews import CodeReview

    # `or`-defaults (not dict defaults) so a JSON null in the payload does
    # not reach the non-nullable model fields as None.
    review = CodeReview.objects.create(
        repository=repo,
        workspace=workspace_obj,
        title=title,
        description=data.get("description") or "",
        diff=diff,
        files_changed=data.get("files_changed") or [],
        agent_id=data.get("agent_id") or "",
        ticket_uuid=data.get("ticket_uuid") or "",
        created_by=user,
    )

    return JsonResponse(
        {
            "id": review.pk,
            "title": review.title,
            "description": review.description,
            "status": review.status,
            "agent_id": review.agent_id,
            "files_changed": review.files_changed,
            "created_at": _isoformat(review.created_at),
        },
        status=201,
    )
1618
1619
@csrf_exempt
def api_review_list(request, slug):
    """List code reviews for a repository, optionally filtered by status.

    GET /projects/<slug>/fossil/api/reviews
    Optional query params: status (pending, approved, changes_requested, merged)
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    from fossil.code_reviews import CodeReview

    queryset = CodeReview.objects.filter(repository=repo)
    wanted_status = request.GET.get("status", "").strip()
    if wanted_status:
        queryset = queryset.filter(status=wanted_status)

    page, per_page = _paginate_params(request)
    total = queryset.count()
    total_pages = max(1, math.ceil(total / per_page))
    page = min(page, total_pages)
    start = (page - 1) * per_page
    window = queryset[start : start + per_page]

    # One summary dict per review in the current page window.
    reviews = [
        {
            "id": review.pk,
            "title": review.title,
            "status": review.status,
            "agent_id": review.agent_id,
            "files_changed": review.files_changed,
            "comment_count": review.comments.count(),
            "created_at": _isoformat(review.created_at),
            "updated_at": _isoformat(review.updated_at),
        }
        for review in window
    ]

    return JsonResponse(
        {
            "reviews": reviews,
            "total": total,
            "page": page,
            "per_page": per_page,
            "total_pages": total_pages,
        }
    )
1672
1673
@csrf_exempt
def api_review_detail(request, slug, review_id):
    """Get a code review with its comments.

    GET /projects/<slug>/fossil/api/reviews/<id>
    """
    if request.method != "GET":
        return JsonResponse({"error": "GET required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    # Serialize all comments attached to this review.
    comments = [
        {
            "id": comment.pk,
            "body": comment.body,
            "file_path": comment.file_path,
            "line_number": comment.line_number,
            "author": comment.author,
            "created_at": _isoformat(comment.created_at),
        }
        for comment in review.comments.all()
    ]

    if review.workspace:
        workspace_name = review.workspace.name
    else:
        workspace_name = None

    return JsonResponse(
        {
            "id": review.pk,
            "title": review.title,
            "description": review.description,
            "diff": review.diff,
            "status": review.status,
            "agent_id": review.agent_id,
            "files_changed": review.files_changed,
            "ticket_uuid": review.ticket_uuid,
            "workspace": workspace_name,
            "comments": comments,
            "created_at": _isoformat(review.created_at),
            "updated_at": _isoformat(review.updated_at),
        }
    )
1723
1724
@csrf_exempt
def api_review_comment(request, slug, review_id):
    """Add a comment to a code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/comment
    {
        "body": "This looks good but consider...",
        "file_path": "src/auth.py",
        "line_number": 42,
        "author": "human-reviewer"
    }

    ``body`` is required; ``author`` defaults to the authenticated user's
    username. Requires write access, consistent with the other mutating
    review endpoints.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    # Write-access gate: every other POST endpoint in this API enforces
    # this; commenting mutates review state, so it must as well.
    if token is None and (user is None or not can_write_project(user, project)):
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview, ReviewComment

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    try:
        data = json.loads(request.body)
    except (json.JSONDecodeError, ValueError):
        return JsonResponse({"error": "Invalid JSON body"}, status=400)

    body = (data.get("body") or "").strip()
    if not body:
        return JsonResponse({"error": "Comment body is required"}, status=400)

    author = (data.get("author") or "").strip()
    if not author and user:
        author = user.username
    if not author:
        return JsonResponse({"error": "Author is required"}, status=400)

    comment = ReviewComment.objects.create(
        review=review,
        body=body,
        file_path=data.get("file_path", ""),
        line_number=data.get("line_number"),
        author=author,
        created_by=user,
    )

    return JsonResponse(
        {
            "id": comment.pk,
            "body": comment.body,
            "file_path": comment.file_path,
            "line_number": comment.line_number,
            "author": comment.author,
            "created_at": _isoformat(comment.created_at),
        },
        status=201,
    )
1786
1787
@csrf_exempt
def api_review_approve(request, slug, review_id):
    """Approve a code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/approve
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    lacks_write = token is None and (user is None or not can_write_project(user, project))
    if lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    # A merged review is final; approval no longer applies.
    if review.status == "merged":
        return JsonResponse({"error": "Review is already merged"}, status=409)

    review.status = "approved"
    review.save(update_fields=["status", "updated_at", "version"])

    return JsonResponse({"id": review.pk, "status": review.status})
1818
1819
@csrf_exempt
def api_review_request_changes(request, slug, review_id):
    """Request changes on a code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/request-changes
    {"comment": "Please fix the error handling in auth.py"}
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    lacks_write = token is None and (user is None or not can_write_project(user, project))
    if lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview, ReviewComment

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    # A merged review is final; further change requests are rejected.
    if review.status == "merged":
        return JsonResponse({"error": "Review is already merged"}, status=409)

    # The comment is optional, and a malformed body is treated as empty.
    try:
        payload = json.loads(request.body) if request.body else {}
    except (json.JSONDecodeError, ValueError):
        payload = {}

    review.status = "changes_requested"
    review.save(update_fields=["status", "updated_at", "version"])

    note = (payload.get("comment") or "").strip()
    if note:
        ReviewComment.objects.create(
            review=review,
            body=note,
            author=user.username if user else "reviewer",
            created_by=user,
        )

    return JsonResponse({"id": review.pk, "status": review.status})
1867
1868
@csrf_exempt
def api_review_merge(request, slug, review_id):
    """Merge an approved code review.

    POST /projects/<slug>/fossil/api/reviews/<id>/merge

    Only approved reviews can be merged. When the review references a
    ticket with an active ("claimed" or "submitted") claim, that claim is
    marked merged as well.
    """
    if request.method != "POST":
        return JsonResponse({"error": "POST required"}, status=405)

    project, repo = _get_repo(slug)
    user, token, err = _check_api_auth(request, project, repo)
    if err is not None:
        return err

    lacks_write = token is None and (user is None or not can_write_project(user, project))
    if lacks_write:
        return JsonResponse({"error": "Write access required"}, status=403)

    from fossil.code_reviews import CodeReview

    review = CodeReview.objects.filter(repository=repo, pk=review_id).first()
    if review is None:
        return JsonResponse({"error": "Review not found"}, status=404)

    # Guard clauses: merging is a one-way door and requires prior approval.
    if review.status == "merged":
        return JsonResponse({"error": "Review is already merged"}, status=409)
    if review.status != "approved":
        return JsonResponse({"error": "Review must be approved before merging"}, status=409)

    review.status = "merged"
    review.save(update_fields=["status", "updated_at", "version"])

    # Propagate the merge to the linked ticket claim, if one is active.
    if review.ticket_uuid:
        from fossil.agent_claims import TicketClaim

        linked_claim = TicketClaim.objects.filter(repository=repo, ticket_uuid=review.ticket_uuid).first()
        if linked_claim and linked_claim.status in ("claimed", "submitted"):
            linked_claim.status = "merged"
            linked_claim.save(update_fields=["status", "updated_at", "version"])

    return JsonResponse({"id": review.pk, "status": review.status, "title": review.title})
1914
1915 ADDED fossil/code_reviews.py
1916 ADDED fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
1917 ADDED fossil/migrations/0011_codereview_historicalcodereview_and_more.py
1918 ADDED fossil/migrations/0012_alter_ticketclaim_unique_together.py
--- a/fossil/code_reviews.py
+++ b/fossil/code_reviews.py
@@ -0,0 +1,58 @@
1
+"""Code review models for agent-submitted review requests.
2
+
3
+Agents working in workspaces submit diffs/patches for human review.
4
+Reviews track the diff, comments, and approval workflow.
5
+"""
6
+
7
+from django.db import models
8
+
9
+from core.models import ActiveManager, Tracking
10
+
11
+
12
+class CodeReview(Tracking):
13
+ """Agent-submitted code review request with diff and approval workflow."""
14
+
15
+ class Status(models.TextChoices):
16
+ PENDING = "pending", "Pending Review"
17
+ APPROVED = "approved", "Approved"
18
+ CHANGES_REQUESTED = "changes_requested", "Changes Requested"
19
+ MERGED = "merged", "Merged"
20
+
21
+ repository = models.ForeignKey("fossil.FossilRepository", on_delete=models.CASCADE, related_name="code_reviews")
22
+ workspace = models.ForeignKey("fossil.AgentWorkspace", null=True, blank=True, on_delete=models.SET_NULL, related_name="reviews")
23
+ title = models.CharField(max_length=300)
24
+ description = models.TextField(blank=True, default="")
25
+ diff = models.TextField(help_text="Unified diff of proposed changes")
26
+ files_changed = models.JSONField(default=list)
27
+ agent_id = models.CharField(max_length=200, blank=True, default="")
28
+ status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING)
29
+ ticket_uuid = models.CharField(max_length=64, blank=True, default="", help_text="Related ticket UUID if any")
30
+
31
+ objects = ActiveManager()
32
+ all_objects = models.Manager()
33
+
34
+ class Meta:
35
+ ordering = ["-created_at"]
36
+
37
+ def __str__(self):
38
+ return f"Review: {self.title} ({self.status})"
39
+
40
+
41
+class ReviewComment(Tracking):
42
+ """Inline or general comment on a code review."""
43
+
44
+ review = models.ForeignKey(CodeReview, on_delete=models.CASCADE, related_name="comments")
45
+ body = models.TextField()
46
+ file_path = models.CharField(max_length=500, blank=True, default="")
47
+ line_number = models.IntegerField(null=True, blank=True)
48
+ author = models.CharField(max_length=200, help_text="Agent ID or username")
49
+
50
+ objects = ActiveManager()
51
+ all_objects = models.Manager()
52
+
53
+ class Meta:
54
+ ordering = ["created_at"]
55
+
56
+ def __str__(self):
57
+ prefix = f"{self.file_path}:{self.line_number}" if self.file_path else "general"
58
+ return f"Comment on {self.review_id} ({prefix})"
--- a/fossil/code_reviews.py
+++ b/fossil/code_reviews.py
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/code_reviews.py
+++ b/fossil/code_reviews.py
@@ -0,0 +1,58 @@
1 """Code review models for agent-submitted review requests.
2
3 Agents working in workspaces submit diffs/patches for human review.
4 Reviews track the diff, comments, and approval workflow.
5 """
6
7 from django.db import models
8
9 from core.models import ActiveManager, Tracking
10
11
class CodeReview(Tracking):
    """Agent-submitted code review request with diff and approval workflow.

    Created when an agent submits a unified diff for human sign-off;
    status moves pending -> approved -> merged, or pending ->
    changes_requested.
    """

    class Status(models.TextChoices):
        PENDING = "pending", "Pending Review"
        APPROVED = "approved", "Approved"
        CHANGES_REQUESTED = "changes_requested", "Changes Requested"
        MERGED = "merged", "Merged"

    # Owning repository; reviews are deleted with the repository.
    repository = models.ForeignKey("fossil.FossilRepository", on_delete=models.CASCADE, related_name="code_reviews")
    # Optional originating workspace; review survives (workspace -> NULL) if it goes away.
    workspace = models.ForeignKey("fossil.AgentWorkspace", null=True, blank=True, on_delete=models.SET_NULL, related_name="reviews")
    title = models.CharField(max_length=300)
    description = models.TextField(blank=True, default="")
    diff = models.TextField(help_text="Unified diff of proposed changes")
    # File paths touched by the diff, as reported by the submitter.
    files_changed = models.JSONField(default=list)
    # Identifier of the submitting agent; empty for human submissions.
    agent_id = models.CharField(max_length=200, blank=True, default="")
    status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING)
    ticket_uuid = models.CharField(max_length=64, blank=True, default="", help_text="Related ticket UUID if any")

    # NOTE(review): ActiveManager presumably filters soft-deleted rows
    # (see core.models); all_objects sees every row — confirm in core.
    objects = ActiveManager()
    all_objects = models.Manager()

    class Meta:
        ordering = ["-created_at"]  # newest reviews first

    def __str__(self):
        return f"Review: {self.title} ({self.status})"
39
40
class ReviewComment(Tracking):
    """Inline or general comment on a code review.

    A comment is "inline" when ``file_path`` (and optionally
    ``line_number``) is set; otherwise it applies to the review as a whole.
    """

    review = models.ForeignKey(CodeReview, on_delete=models.CASCADE, related_name="comments")
    body = models.TextField()
    # Empty string marks a general (non-inline) comment.
    file_path = models.CharField(max_length=500, blank=True, default="")
    line_number = models.IntegerField(null=True, blank=True)
    author = models.CharField(max_length=200, help_text="Agent ID or username")

    # NOTE(review): ActiveManager presumably filters soft-deleted rows
    # (see core.models); all_objects sees every row — confirm in core.
    objects = ActiveManager()
    all_objects = models.Manager()

    class Meta:
        ordering = ["created_at"]  # chronological reading order

    def __str__(self):
        prefix = f"{self.file_path}:{self.line_number}" if self.file_path else "general"
        return f"Comment on {self.review_id} ({prefix})"
--- a/fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
+++ b/fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
@@ -0,0 +1,259 @@
1
+# Generated by Django 5.2.12 on 2026-04-07 17:15
2
+
3
+import django.db.models.deletion
4
+import simple_history.models
5
+from django.conf import settings
6
+from django.db import migrations, models
7
+
8
+
9
+class Migration(migrations.Migration):
10
+ dependencies = [
11
+ ("fossil", "0009_historicalticketfielddefinition_and_more"),
12
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
13
+ ]
14
+
15
+ operations = [
16
+ migrations.CreateModel(
17
+ name="HistoricalAgentWorkspace",
18
+ fields=[
19
+ (
20
+ "id",
21
+ models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
22
+ ),
23
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
24
+ ("created_at", models.DateTimeField(blank=True, editable=False)),
25
+ ("updated_at", models.DateTimeField(blank=True, editable=False)),
26
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
27
+ (
28
+ "name",
29
+ models.CharField(
30
+ help_text="Workspace name (e.g., agent-fix-bug-123)",
31
+ max_length=200,
32
+ ),
33
+ ),
34
+ (
35
+ "branch",
36
+ models.CharField(
37
+ help_text="Fossil branch name for this workspace",
38
+ max_length=200,
39
+ ),
40
+ ),
41
+ (
42
+ "status",
43
+ models.CharField(
44
+ choices=[
45
+ ("active", "Active"),
46
+ ("merged", "Merged"),
47
+ ("abandoned", "Abandoned"),
48
+ ],
49
+ default="active",
50
+ max_length=20,
51
+ ),
52
+ ),
53
+ (
54
+ "agent_id",
55
+ models.CharField(
56
+ blank=True,
57
+ default="",
58
+ help_text="Agent identifier",
59
+ max_length=200,
60
+ ),
61
+ ),
62
+ (
63
+ "description",
64
+ models.CharField(blank=True, default="", max_length=500),
65
+ ),
66
+ (
67
+ "checkout_path",
68
+ models.CharField(
69
+ blank=True,
70
+ default="",
71
+ help_text="Path to workspace checkout directory",
72
+ max_length=500,
73
+ ),
74
+ ),
75
+ ("files_changed", models.IntegerField(default=0)),
76
+ ("commits_made", models.IntegerField(default=0)),
77
+ ("history_id", models.AutoField(primary_key=True, serialize=False)),
78
+ ("history_date", models.DateTimeField(db_index=True)),
79
+ ("history_change_reason", models.CharField(max_length=100, null=True)),
80
+ (
81
+ "history_type",
82
+ models.CharField(
83
+ choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
84
+ max_length=1,
85
+ ),
86
+ ),
87
+ (
88
+ "created_by",
89
+ models.ForeignKey(
90
+ blank=True,
91
+ db_constraint=False,
92
+ null=True,
93
+ on_delete=django.db.models.deletion.DO_NOTHING,
94
+ related_name="+",
95
+ to=settings.AUTH_USER_MODEL,
96
+ ),
97
+ ),
98
+ (
99
+ "deleted_by",
100
+ models.ForeignKey(
101
+ blank=True,
102
+ db_constraint=False,
103
+ null=True,
104
+ on_delete=django.db.models.deletion.DO_NOTHING,
105
+ related_name="+",
106
+ to=settings.AUTH_USER_MODEL,
107
+ ),
108
+ ),
109
+ (
110
+ "history_user",
111
+ models.ForeignKey(
112
+ null=True,
113
+ on_delete=django.db.models.deletion.SET_NULL,
114
+ related_name="+",
115
+ to=settings.AUTH_USER_MODEL,
116
+ ),
117
+ ),
118
+ (
119
+ "repository",
120
+ models.ForeignKey(
121
+ blank=True,
122
+ db_constraint=False,
123
+ null=True,
124
+ on_delete=django.db.models.deletion.DO_NOTHING,
125
+ related_name="+",
126
+ to="fossil.fossilrepository",
127
+ ),
128
+ ),
129
+ (
130
+ "updated_by",
131
+ models.ForeignKey(
132
+ blank=True,
133
+ db_constraint=False,
134
+ null=True,
135
+ on_delete=django.db.models.deletion.DO_NOTHING,
136
+ related_name="+",
137
+ to=settings.AUTH_USER_MODEL,
138
+ ),
139
+ ),
140
+ ],
141
+ options={
142
+ "verbose_name": "historical agent workspace",
143
+ "verbose_name_plural": "historical agent workspaces",
144
+ "ordering": ("-history_date", "-history_id"),
145
+ "get_latest_by": ("history_date", "history_id"),
146
+ },
147
+ bases=(simple_history.models.HistoricalChanges, models.Model),
148
+ ),
149
+ migrations.CreateModel(
150
+ name="AgentWorkspace",
151
+ fields=[
152
+ (
153
+ "id",
154
+ models.BigAutoField(
155
+ auto_created=True,
156
+ primary_key=True,
157
+ serialize=False,
158
+ verbose_name="ID",
159
+ ),
160
+ ),
161
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
162
+ ("created_at", models.DateTimeField(auto_now_add=True)),
163
+ ("updated_at", models.DateTimeField(auto_now=True)),
164
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
165
+ (
166
+ "name",
167
+ models.CharField(
168
+ help_text="Workspace name (e.g., agent-fix-bug-123)",
169
+ max_length=200,
170
+ ),
171
+ ),
172
+ (
173
+ "branch",
174
+ models.CharField(
175
+ help_text="Fossil branch name for this workspace",
176
+ max_length=200,
177
+ ),
178
+ ),
179
+ (
180
+ "status",
181
+ models.CharField(
182
+ choices=[
183
+ ("active", "Active"),
184
+ ("merged", "Merged"),
185
+ ("abandoned", "Abandoned"),
186
+ ],
187
+ default="active",
188
+ max_length=20,
189
+ ),
190
+ ),
191
+ (
192
+ "agent_id",
193
+ models.CharField(
194
+ blank=True,
195
+ default="",
196
+ help_text="Agent identifier",
197
+ max_length=200,
198
+ ),
199
+ ),
200
+ (
201
+ "description",
202
+ models.CharField(blank=True, default="", max_length=500),
203
+ ),
204
+ (
205
+ "checkout_path",
206
+ models.CharField(
207
+ blank=True,
208
+ default="",
209
+ help_text="Path to workspace checkout directory",
210
+ max_length=500,
211
+ ),
212
+ ),
213
+ ("files_changed", models.IntegerField(default=0)),
214
+ ("commits_made", models.IntegerField(default=0)),
215
+ (
216
+ "created_by",
217
+ models.ForeignKey(
218
+ blank=True,
219
+ null=True,
220
+ on_delete=django.db.models.deletion.SET_NULL,
221
+ related_name="+",
222
+ to=settings.AUTH_USER_MODEL,
223
+ ),
224
+ ),
225
+ (
226
+ "deleted_by",
227
+ models.ForeignKey(
228
+ blank=True,
229
+ null=True,
230
+ on_delete=django.db.models.deletion.SET_NULL,
231
+ related_name="+",
232
+ to=settings.AUTH_USER_MODEL,
233
+ ),
234
+ ),
235
+ (
236
+ "repository",
237
+ models.ForeignKey(
238
+ on_delete=django.db.models.deletion.CASCADE,
239
+ related_name="workspaces",
240
+ to="fossil.fossilrepository",
241
+ ),
242
+ ),
243
+ (
244
+ "updated_by",
245
+ models.ForeignKey(
246
+ blank=True,
247
+ null=True,
248
+ on_delete=django.db.models.deletion.SET_NULL,
249
+ related_name="+",
250
+ to=settings.AUTH_USER_MODEL,
251
+ ),
252
+ ),
253
+ ],
254
+ options={
255
+ "ordering": ["-created_at"],
256
+ "unique_together": {("repository", "name")},
257
+ },
258
+ ),
259
+ ]
--- a/fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
+++ b/fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
@@ -0,0 +1,259 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
+++ b/fossil/migrations/0010_historicalagentworkspace_agentworkspace.py
@@ -0,0 +1,259 @@
1 # Generated by Django 5.2.12 on 2026-04-07 17:15
2
3 import django.db.models.deletion
4 import simple_history.models
5 from django.conf import settings
6 from django.db import migrations, models
7
8
9 class Migration(migrations.Migration):
10 dependencies = [
11 ("fossil", "0009_historicalticketfielddefinition_and_more"),
12 migrations.swappable_dependency(settings.AUTH_USER_MODEL),
13 ]
14
15 operations = [
16 migrations.CreateModel(
17 name="HistoricalAgentWorkspace",
18 fields=[
19 (
20 "id",
21 models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
22 ),
23 ("version", models.PositiveIntegerField(default=1, editable=False)),
24 ("created_at", models.DateTimeField(blank=True, editable=False)),
25 ("updated_at", models.DateTimeField(blank=True, editable=False)),
26 ("deleted_at", models.DateTimeField(blank=True, null=True)),
27 (
28 "name",
29 models.CharField(
30 help_text="Workspace name (e.g., agent-fix-bug-123)",
31 max_length=200,
32 ),
33 ),
34 (
35 "branch",
36 models.CharField(
37 help_text="Fossil branch name for this workspace",
38 max_length=200,
39 ),
40 ),
41 (
42 "status",
43 models.CharField(
44 choices=[
45 ("active", "Active"),
46 ("merged", "Merged"),
47 ("abandoned", "Abandoned"),
48 ],
49 default="active",
50 max_length=20,
51 ),
52 ),
53 (
54 "agent_id",
55 models.CharField(
56 blank=True,
57 default="",
58 help_text="Agent identifier",
59 max_length=200,
60 ),
61 ),
62 (
63 "description",
64 models.CharField(blank=True, default="", max_length=500),
65 ),
66 (
67 "checkout_path",
68 models.CharField(
69 blank=True,
70 default="",
71 help_text="Path to workspace checkout directory",
72 max_length=500,
73 ),
74 ),
75 ("files_changed", models.IntegerField(default=0)),
76 ("commits_made", models.IntegerField(default=0)),
77 ("history_id", models.AutoField(primary_key=True, serialize=False)),
78 ("history_date", models.DateTimeField(db_index=True)),
79 ("history_change_reason", models.CharField(max_length=100, null=True)),
80 (
81 "history_type",
82 models.CharField(
83 choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
84 max_length=1,
85 ),
86 ),
87 (
88 "created_by",
89 models.ForeignKey(
90 blank=True,
91 db_constraint=False,
92 null=True,
93 on_delete=django.db.models.deletion.DO_NOTHING,
94 related_name="+",
95 to=settings.AUTH_USER_MODEL,
96 ),
97 ),
98 (
99 "deleted_by",
100 models.ForeignKey(
101 blank=True,
102 db_constraint=False,
103 null=True,
104 on_delete=django.db.models.deletion.DO_NOTHING,
105 related_name="+",
106 to=settings.AUTH_USER_MODEL,
107 ),
108 ),
109 (
110 "history_user",
111 models.ForeignKey(
112 null=True,
113 on_delete=django.db.models.deletion.SET_NULL,
114 related_name="+",
115 to=settings.AUTH_USER_MODEL,
116 ),
117 ),
118 (
119 "repository",
120 models.ForeignKey(
121 blank=True,
122 db_constraint=False,
123 null=True,
124 on_delete=django.db.models.deletion.DO_NOTHING,
125 related_name="+",
126 to="fossil.fossilrepository",
127 ),
128 ),
129 (
130 "updated_by",
131 models.ForeignKey(
132 blank=True,
133 db_constraint=False,
134 null=True,
135 on_delete=django.db.models.deletion.DO_NOTHING,
136 related_name="+",
137 to=settings.AUTH_USER_MODEL,
138 ),
139 ),
140 ],
141 options={
142 "verbose_name": "historical agent workspace",
143 "verbose_name_plural": "historical agent workspaces",
144 "ordering": ("-history_date", "-history_id"),
145 "get_latest_by": ("history_date", "history_id"),
146 },
147 bases=(simple_history.models.HistoricalChanges, models.Model),
148 ),
149 migrations.CreateModel(
150 name="AgentWorkspace",
151 fields=[
152 (
153 "id",
154 models.BigAutoField(
155 auto_created=True,
156 primary_key=True,
157 serialize=False,
158 verbose_name="ID",
159 ),
160 ),
161 ("version", models.PositiveIntegerField(default=1, editable=False)),
162 ("created_at", models.DateTimeField(auto_now_add=True)),
163 ("updated_at", models.DateTimeField(auto_now=True)),
164 ("deleted_at", models.DateTimeField(blank=True, null=True)),
165 (
166 "name",
167 models.CharField(
168 help_text="Workspace name (e.g., agent-fix-bug-123)",
169 max_length=200,
170 ),
171 ),
172 (
173 "branch",
174 models.CharField(
175 help_text="Fossil branch name for this workspace",
176 max_length=200,
177 ),
178 ),
179 (
180 "status",
181 models.CharField(
182 choices=[
183 ("active", "Active"),
184 ("merged", "Merged"),
185 ("abandoned", "Abandoned"),
186 ],
187 default="active",
188 max_length=20,
189 ),
190 ),
191 (
192 "agent_id",
193 models.CharField(
194 blank=True,
195 default="",
196 help_text="Agent identifier",
197 max_length=200,
198 ),
199 ),
200 (
201 "description",
202 models.CharField(blank=True, default="", max_length=500),
203 ),
204 (
205 "checkout_path",
206 models.CharField(
207 blank=True,
208 default="",
209 help_text="Path to workspace checkout directory",
210 max_length=500,
211 ),
212 ),
213 ("files_changed", models.IntegerField(default=0)),
214 ("commits_made", models.IntegerField(default=0)),
215 (
216 "created_by",
217 models.ForeignKey(
218 blank=True,
219 null=True,
220 on_delete=django.db.models.deletion.SET_NULL,
221 related_name="+",
222 to=settings.AUTH_USER_MODEL,
223 ),
224 ),
225 (
226 "deleted_by",
227 models.ForeignKey(
228 blank=True,
229 null=True,
230 on_delete=django.db.models.deletion.SET_NULL,
231 related_name="+",
232 to=settings.AUTH_USER_MODEL,
233 ),
234 ),
235 (
236 "repository",
237 models.ForeignKey(
238 on_delete=django.db.models.deletion.CASCADE,
239 related_name="workspaces",
240 to="fossil.fossilrepository",
241 ),
242 ),
243 (
244 "updated_by",
245 models.ForeignKey(
246 blank=True,
247 null=True,
248 on_delete=django.db.models.deletion.SET_NULL,
249 related_name="+",
250 to=settings.AUTH_USER_MODEL,
251 ),
252 ),
253 ],
254 options={
255 "ordering": ["-created_at"],
256 "unique_together": {("repository", "name")},
257 },
258 ),
259 ]
--- a/fossil/migrations/0011_codereview_historicalcodereview_and_more.py
+++ b/fossil/migrations/0011_codereview_historicalcodereview_and_more.py
@@ -0,0 +1,609 @@
1
+# Generated by Django 5.2.12 on 2026-04-07 17:24
2
+
3
+import django.db.models.deletion
4
+import simple_history.models
5
+from django.conf import settings
6
+from django.db import migrations, models
7
+
8
+
9
+class Migration(migrations.Migration):
10
+ dependencies = [
11
+ ("fossil", "0010_historicalagentworkspace_agentworkspace"),
12
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
13
+ ]
14
+
15
+ operations = [
16
+ migrations.CreateModel(
17
+ name="CodeReview",
18
+ fields=[
19
+ (
20
+ "id",
21
+ models.BigAutoField(
22
+ auto_created=True,
23
+ primary_key=True,
24
+ serialize=False,
25
+ verbose_name="ID",
26
+ ),
27
+ ),
28
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
29
+ ("created_at", models.DateTimeField(auto_now_add=True)),
30
+ ("updated_at", models.DateTimeField(auto_now=True)),
31
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
32
+ ("title", models.CharField(max_length=300)),
33
+ ("description", models.TextField(blank=True, default="")),
34
+ (
35
+ "diff",
36
+ models.TextField(help_text="Unified diff of proposed changes"),
37
+ ),
38
+ ("files_changed", models.JSONField(default=list)),
39
+ ("agent_id", models.CharField(blank=True, default="", max_length=200)),
40
+ (
41
+ "status",
42
+ models.CharField(
43
+ choices=[
44
+ ("pending", "Pending Review"),
45
+ ("approved", "Approved"),
46
+ ("changes_requested", "Changes Requested"),
47
+ ("merged", "Merged"),
48
+ ],
49
+ default="pending",
50
+ max_length=20,
51
+ ),
52
+ ),
53
+ (
54
+ "ticket_uuid",
55
+ models.CharField(
56
+ blank=True,
57
+ default="",
58
+ help_text="Related ticket UUID if any",
59
+ max_length=64,
60
+ ),
61
+ ),
62
+ (
63
+ "created_by",
64
+ models.ForeignKey(
65
+ blank=True,
66
+ null=True,
67
+ on_delete=django.db.models.deletion.SET_NULL,
68
+ related_name="+",
69
+ to=settings.AUTH_USER_MODEL,
70
+ ),
71
+ ),
72
+ (
73
+ "deleted_by",
74
+ models.ForeignKey(
75
+ blank=True,
76
+ null=True,
77
+ on_delete=django.db.models.deletion.SET_NULL,
78
+ related_name="+",
79
+ to=settings.AUTH_USER_MODEL,
80
+ ),
81
+ ),
82
+ (
83
+ "repository",
84
+ models.ForeignKey(
85
+ on_delete=django.db.models.deletion.CASCADE,
86
+ related_name="code_reviews",
87
+ to="fossil.fossilrepository",
88
+ ),
89
+ ),
90
+ (
91
+ "updated_by",
92
+ models.ForeignKey(
93
+ blank=True,
94
+ null=True,
95
+ on_delete=django.db.models.deletion.SET_NULL,
96
+ related_name="+",
97
+ to=settings.AUTH_USER_MODEL,
98
+ ),
99
+ ),
100
+ (
101
+ "workspace",
102
+ models.ForeignKey(
103
+ blank=True,
104
+ null=True,
105
+ on_delete=django.db.models.deletion.SET_NULL,
106
+ related_name="reviews",
107
+ to="fossil.agentworkspace",
108
+ ),
109
+ ),
110
+ ],
111
+ options={
112
+ "ordering": ["-created_at"],
113
+ },
114
+ ),
115
+ migrations.CreateModel(
116
+ name="HistoricalCodeReview",
117
+ fields=[
118
+ (
119
+ "id",
120
+ models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
121
+ ),
122
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
123
+ ("created_at", models.DateTimeField(blank=True, editable=False)),
124
+ ("updated_at", models.DateTimeField(blank=True, editable=False)),
125
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
126
+ ("title", models.CharField(max_length=300)),
127
+ ("description", models.TextField(blank=True, default="")),
128
+ (
129
+ "diff",
130
+ models.TextField(help_text="Unified diff of proposed changes"),
131
+ ),
132
+ ("files_changed", models.JSONField(default=list)),
133
+ ("agent_id", models.CharField(blank=True, default="", max_length=200)),
134
+ (
135
+ "status",
136
+ models.CharField(
137
+ choices=[
138
+ ("pending", "Pending Review"),
139
+ ("approved", "Approved"),
140
+ ("changes_requested", "Changes Requested"),
141
+ ("merged", "Merged"),
142
+ ],
143
+ default="pending",
144
+ max_length=20,
145
+ ),
146
+ ),
147
+ (
148
+ "ticket_uuid",
149
+ models.CharField(
150
+ blank=True,
151
+ default="",
152
+ help_text="Related ticket UUID if any",
153
+ max_length=64,
154
+ ),
155
+ ),
156
+ ("history_id", models.AutoField(primary_key=True, serialize=False)),
157
+ ("history_date", models.DateTimeField(db_index=True)),
158
+ ("history_change_reason", models.CharField(max_length=100, null=True)),
159
+ (
160
+ "history_type",
161
+ models.CharField(
162
+ choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
163
+ max_length=1,
164
+ ),
165
+ ),
166
+ (
167
+ "created_by",
168
+ models.ForeignKey(
169
+ blank=True,
170
+ db_constraint=False,
171
+ null=True,
172
+ on_delete=django.db.models.deletion.DO_NOTHING,
173
+ related_name="+",
174
+ to=settings.AUTH_USER_MODEL,
175
+ ),
176
+ ),
177
+ (
178
+ "deleted_by",
179
+ models.ForeignKey(
180
+ blank=True,
181
+ db_constraint=False,
182
+ null=True,
183
+ on_delete=django.db.models.deletion.DO_NOTHING,
184
+ related_name="+",
185
+ to=settings.AUTH_USER_MODEL,
186
+ ),
187
+ ),
188
+ (
189
+ "history_user",
190
+ models.ForeignKey(
191
+ null=True,
192
+ on_delete=django.db.models.deletion.SET_NULL,
193
+ related_name="+",
194
+ to=settings.AUTH_USER_MODEL,
195
+ ),
196
+ ),
197
+ (
198
+ "repository",
199
+ models.ForeignKey(
200
+ blank=True,
201
+ db_constraint=False,
202
+ null=True,
203
+ on_delete=django.db.models.deletion.DO_NOTHING,
204
+ related_name="+",
205
+ to="fossil.fossilrepository",
206
+ ),
207
+ ),
208
+ (
209
+ "updated_by",
210
+ models.ForeignKey(
211
+ blank=True,
212
+ db_constraint=False,
213
+ null=True,
214
+ on_delete=django.db.models.deletion.DO_NOTHING,
215
+ related_name="+",
216
+ to=settings.AUTH_USER_MODEL,
217
+ ),
218
+ ),
219
+ (
220
+ "workspace",
221
+ models.ForeignKey(
222
+ blank=True,
223
+ db_constraint=False,
224
+ null=True,
225
+ on_delete=django.db.models.deletion.DO_NOTHING,
226
+ related_name="+",
227
+ to="fossil.agentworkspace",
228
+ ),
229
+ ),
230
+ ],
231
+ options={
232
+ "verbose_name": "historical code review",
233
+ "verbose_name_plural": "historical code reviews",
234
+ "ordering": ("-history_date", "-history_id"),
235
+ "get_latest_by": ("history_date", "history_id"),
236
+ },
237
+ bases=(simple_history.models.HistoricalChanges, models.Model),
238
+ ),
239
+ migrations.CreateModel(
240
+ name="HistoricalReviewComment",
241
+ fields=[
242
+ (
243
+ "id",
244
+ models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
245
+ ),
246
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
247
+ ("created_at", models.DateTimeField(blank=True, editable=False)),
248
+ ("updated_at", models.DateTimeField(blank=True, editable=False)),
249
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
250
+ ("body", models.TextField()),
251
+ ("file_path", models.CharField(blank=True, default="", max_length=500)),
252
+ ("line_number", models.IntegerField(blank=True, null=True)),
253
+ (
254
+ "author",
255
+ models.CharField(help_text="Agent ID or username", max_length=200),
256
+ ),
257
+ ("history_id", models.AutoField(primary_key=True, serialize=False)),
258
+ ("history_date", models.DateTimeField(db_index=True)),
259
+ ("history_change_reason", models.CharField(max_length=100, null=True)),
260
+ (
261
+ "history_type",
262
+ models.CharField(
263
+ choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
264
+ max_length=1,
265
+ ),
266
+ ),
267
+ (
268
+ "created_by",
269
+ models.ForeignKey(
270
+ blank=True,
271
+ db_constraint=False,
272
+ null=True,
273
+ on_delete=django.db.models.deletion.DO_NOTHING,
274
+ related_name="+",
275
+ to=settings.AUTH_USER_MODEL,
276
+ ),
277
+ ),
278
+ (
279
+ "deleted_by",
280
+ models.ForeignKey(
281
+ blank=True,
282
+ db_constraint=False,
283
+ null=True,
284
+ on_delete=django.db.models.deletion.DO_NOTHING,
285
+ related_name="+",
286
+ to=settings.AUTH_USER_MODEL,
287
+ ),
288
+ ),
289
+ (
290
+ "history_user",
291
+ models.ForeignKey(
292
+ null=True,
293
+ on_delete=django.db.models.deletion.SET_NULL,
294
+ related_name="+",
295
+ to=settings.AUTH_USER_MODEL,
296
+ ),
297
+ ),
298
+ (
299
+ "review",
300
+ models.ForeignKey(
301
+ blank=True,
302
+ db_constraint=False,
303
+ null=True,
304
+ on_delete=django.db.models.deletion.DO_NOTHING,
305
+ related_name="+",
306
+ to="fossil.codereview",
307
+ ),
308
+ ),
309
+ (
310
+ "updated_by",
311
+ models.ForeignKey(
312
+ blank=True,
313
+ db_constraint=False,
314
+ null=True,
315
+ on_delete=django.db.models.deletion.DO_NOTHING,
316
+ related_name="+",
317
+ to=settings.AUTH_USER_MODEL,
318
+ ),
319
+ ),
320
+ ],
321
+ options={
322
+ "verbose_name": "historical review comment",
323
+ "verbose_name_plural": "historical review comments",
324
+ "ordering": ("-history_date", "-history_id"),
325
+ "get_latest_by": ("history_date", "history_id"),
326
+ },
327
+ bases=(simple_history.models.HistoricalChanges, models.Model),
328
+ ),
329
+ migrations.CreateModel(
330
+ name="HistoricalTicketClaim",
331
+ fields=[
332
+ (
333
+ "id",
334
+ models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
335
+ ),
336
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
337
+ ("created_at", models.DateTimeField(blank=True, editable=False)),
338
+ ("updated_at", models.DateTimeField(blank=True, editable=False)),
339
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
340
+ ("ticket_uuid", models.CharField(max_length=64)),
341
+ ("agent_id", models.CharField(max_length=200)),
342
+ ("claimed_at", models.DateTimeField(blank=True, editable=False)),
343
+ ("released_at", models.DateTimeField(blank=True, null=True)),
344
+ (
345
+ "status",
346
+ models.CharField(
347
+ choices=[
348
+ ("claimed", "Claimed"),
349
+ ("submitted", "Submitted"),
350
+ ("merged", "Merged"),
351
+ ("released", "Released"),
352
+ ],
353
+ default="claimed",
354
+ max_length=20,
355
+ ),
356
+ ),
357
+ (
358
+ "summary",
359
+ models.TextField(blank=True, default="", help_text="Work summary when submitted"),
360
+ ),
361
+ (
362
+ "files_changed",
363
+ models.JSONField(blank=True, default=list, help_text="List of files changed"),
364
+ ),
365
+ ("history_id", models.AutoField(primary_key=True, serialize=False)),
366
+ ("history_date", models.DateTimeField(db_index=True)),
367
+ ("history_change_reason", models.CharField(max_length=100, null=True)),
368
+ (
369
+ "history_type",
370
+ models.CharField(
371
+ choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
372
+ max_length=1,
373
+ ),
374
+ ),
375
+ (
376
+ "created_by",
377
+ models.ForeignKey(
378
+ blank=True,
379
+ db_constraint=False,
380
+ null=True,
381
+ on_delete=django.db.models.deletion.DO_NOTHING,
382
+ related_name="+",
383
+ to=settings.AUTH_USER_MODEL,
384
+ ),
385
+ ),
386
+ (
387
+ "deleted_by",
388
+ models.ForeignKey(
389
+ blank=True,
390
+ db_constraint=False,
391
+ null=True,
392
+ on_delete=django.db.models.deletion.DO_NOTHING,
393
+ related_name="+",
394
+ to=settings.AUTH_USER_MODEL,
395
+ ),
396
+ ),
397
+ (
398
+ "history_user",
399
+ models.ForeignKey(
400
+ null=True,
401
+ on_delete=django.db.models.deletion.SET_NULL,
402
+ related_name="+",
403
+ to=settings.AUTH_USER_MODEL,
404
+ ),
405
+ ),
406
+ (
407
+ "repository",
408
+ models.ForeignKey(
409
+ blank=True,
410
+ db_constraint=False,
411
+ null=True,
412
+ on_delete=django.db.models.deletion.DO_NOTHING,
413
+ related_name="+",
414
+ to="fossil.fossilrepository",
415
+ ),
416
+ ),
417
+ (
418
+ "updated_by",
419
+ models.ForeignKey(
420
+ blank=True,
421
+ db_constraint=False,
422
+ null=True,
423
+ on_delete=django.db.models.deletion.DO_NOTHING,
424
+ related_name="+",
425
+ to=settings.AUTH_USER_MODEL,
426
+ ),
427
+ ),
428
+ (
429
+ "workspace",
430
+ models.ForeignKey(
431
+ blank=True,
432
+ db_constraint=False,
433
+ null=True,
434
+ on_delete=django.db.models.deletion.DO_NOTHING,
435
+ related_name="+",
436
+ to="fossil.agentworkspace",
437
+ ),
438
+ ),
439
+ ],
440
+ options={
441
+ "verbose_name": "historical ticket claim",
442
+ "verbose_name_plural": "historical ticket claims",
443
+ "ordering": ("-history_date", "-history_id"),
444
+ "get_latest_by": ("history_date", "history_id"),
445
+ },
446
+ bases=(simple_history.models.HistoricalChanges, models.Model),
447
+ ),
448
+ migrations.CreateModel(
449
+ name="ReviewComment",
450
+ fields=[
451
+ (
452
+ "id",
453
+ models.BigAutoField(
454
+ auto_created=True,
455
+ primary_key=True,
456
+ serialize=False,
457
+ verbose_name="ID",
458
+ ),
459
+ ),
460
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
461
+ ("created_at", models.DateTimeField(auto_now_add=True)),
462
+ ("updated_at", models.DateTimeField(auto_now=True)),
463
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
464
+ ("body", models.TextField()),
465
+ ("file_path", models.CharField(blank=True, default="", max_length=500)),
466
+ ("line_number", models.IntegerField(blank=True, null=True)),
467
+ (
468
+ "author",
469
+ models.CharField(help_text="Agent ID or username", max_length=200),
470
+ ),
471
+ (
472
+ "created_by",
473
+ models.ForeignKey(
474
+ blank=True,
475
+ null=True,
476
+ on_delete=django.db.models.deletion.SET_NULL,
477
+ related_name="+",
478
+ to=settings.AUTH_USER_MODEL,
479
+ ),
480
+ ),
481
+ (
482
+ "deleted_by",
483
+ models.ForeignKey(
484
+ blank=True,
485
+ null=True,
486
+ on_delete=django.db.models.deletion.SET_NULL,
487
+ related_name="+",
488
+ to=settings.AUTH_USER_MODEL,
489
+ ),
490
+ ),
491
+ (
492
+ "review",
493
+ models.ForeignKey(
494
+ on_delete=django.db.models.deletion.CASCADE,
495
+ related_name="comments",
496
+ to="fossil.codereview",
497
+ ),
498
+ ),
499
+ (
500
+ "updated_by",
501
+ models.ForeignKey(
502
+ blank=True,
503
+ null=True,
504
+ on_delete=django.db.models.deletion.SET_NULL,
505
+ related_name="+",
506
+ to=settings.AUTH_USER_MODEL,
507
+ ),
508
+ ),
509
+ ],
510
+ options={
511
+ "ordering": ["created_at"],
512
+ },
513
+ ),
514
+ migrations.CreateModel(
515
+ name="TicketClaim",
516
+ fields=[
517
+ (
518
+ "id",
519
+ models.BigAutoField(
520
+ auto_created=True,
521
+ primary_key=True,
522
+ serialize=False,
523
+ verbose_name="ID",
524
+ ),
525
+ ),
526
+ ("version", models.PositiveIntegerField(default=1, editable=False)),
527
+ ("created_at", models.DateTimeField(auto_now_add=True)),
528
+ ("updated_at", models.DateTimeField(auto_now=True)),
529
+ ("deleted_at", models.DateTimeField(blank=True, null=True)),
530
+ ("ticket_uuid", models.CharField(max_length=64)),
531
+ ("agent_id", models.CharField(max_length=200)),
532
+ ("claimed_at", models.DateTimeField(auto_now_add=True)),
533
+ ("released_at", models.DateTimeField(blank=True, null=True)),
534
+ (
535
+ "status",
536
+ models.CharField(
537
+ choices=[
538
+ ("claimed", "Claimed"),
539
+ ("submitted", "Submitted"),
540
+ ("merged", "Merged"),
541
+ ("released", "Released"),
542
+ ],
543
+ default="claimed",
544
+ max_length=20,
545
+ ),
546
+ ),
547
+ (
548
+ "summary",
549
+ models.TextField(blank=True, default="", help_text="Work summary when submitted"),
550
+ ),
551
+ (
552
+ "files_changed",
553
+ models.JSONField(blank=True, default=list, help_text="List of files changed"),
554
+ ),
555
+ (
556
+ "created_by",
557
+ models.ForeignKey(
558
+ blank=True,
559
+ null=True,
560
+ on_delete=django.db.models.deletion.SET_NULL,
561
+ related_name="+",
562
+ to=settings.AUTH_USER_MODEL,
563
+ ),
564
+ ),
565
+ (
566
+ "deleted_by",
567
+ models.ForeignKey(
568
+ blank=True,
569
+ null=True,
570
+ on_delete=django.db.models.deletion.SET_NULL,
571
+ related_name="+",
572
+ to=settings.AUTH_USER_MODEL,
573
+ ),
574
+ ),
575
+ (
576
+ "repository",
577
+ models.ForeignKey(
578
+ on_delete=django.db.models.deletion.CASCADE,
579
+ related_name="ticket_claims",
580
+ to="fossil.fossilrepository",
581
+ ),
582
+ ),
583
+ (
584
+ "updated_by",
585
+ models.ForeignKey(
586
+ blank=True,
587
+ null=True,
588
+ on_delete=django.db.models.deletion.SET_NULL,
589
+ related_name="+",
590
+ to=settings.AUTH_USER_MODEL,
591
+ ),
592
+ ),
593
+ (
594
+ "workspace",
595
+ models.ForeignKey(
596
+ blank=True,
597
+ null=True,
598
+ on_delete=django.db.models.deletion.SET_NULL,
599
+ related_name="claims",
600
+ to="fossil.agentworkspace",
601
+ ),
602
+ ),
603
+ ],
604
+ options={
605
+ "ordering": ["-claimed_at"],
606
+ "unique_together": {("repository", "ticket_uuid")},
607
+ },
608
+ ),
609
+ ]
--- a/fossil/migrations/0011_codereview_historicalcodereview_and_more.py
+++ b/fossil/migrations/0011_codereview_historicalcodereview_and_more.py
@@ -0,0 +1,609 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/migrations/0011_codereview_historicalcodereview_and_more.py
+++ b/fossil/migrations/0011_codereview_historicalcodereview_and_more.py
@@ -0,0 +1,609 @@
1 # Generated by Django 5.2.12 on 2026-04-07 17:24
2
3 import django.db.models.deletion
4 import simple_history.models
5 from django.conf import settings
6 from django.db import migrations, models
7
8
class Migration(migrations.Migration):
    """Auto-generated schema migration for the code-review and task-claiming features.

    Creates six tables:

    * ``CodeReview`` / ``ReviewComment`` — concrete review models (a reviewable
      diff plus its inline comments).
    * ``TicketClaim`` — concrete claim record linking an agent to a ticket UUID.
    * ``HistoricalCodeReview`` / ``HistoricalReviewComment`` /
      ``HistoricalTicketClaim`` — django-simple-history mirrors of the above
      (per-change snapshots keyed by ``history_id``/``history_date``).

    Do not hand-edit field definitions here: Django compares migration state
    against the models, so any drift produces spurious new migrations.
    """

    dependencies = [
        # Workspace tables must exist first: several FKs below point at
        # "fossil.agentworkspace".
        ("fossil", "0010_historicalagentworkspace_agentworkspace"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # --- CodeReview (concrete) -------------------------------------
        migrations.CreateModel(
            name="CodeReview",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                # version/created_at/updated_at/deleted_at + *_by FKs follow the
                # project's shared Tracking pattern (soft delete via deleted_at).
                ("version", models.PositiveIntegerField(default=1, editable=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("deleted_at", models.DateTimeField(blank=True, null=True)),
                ("title", models.CharField(max_length=300)),
                ("description", models.TextField(blank=True, default="")),
                (
                    "diff",
                    models.TextField(help_text="Unified diff of proposed changes"),
                ),
                ("files_changed", models.JSONField(default=list)),
                ("agent_id", models.CharField(blank=True, default="", max_length=200)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending Review"),
                            ("approved", "Approved"),
                            ("changes_requested", "Changes Requested"),
                            ("merged", "Merged"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                (
                    "ticket_uuid",
                    models.CharField(
                        blank=True,
                        default="",
                        help_text="Related ticket UUID if any",
                        max_length=64,
                    ),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "deleted_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "repository",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="code_reviews",
                        to="fossil.fossilrepository",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="reviews",
                        to="fossil.agentworkspace",
                    ),
                ),
            ],
            options={
                "ordering": ["-created_at"],
            },
        ),
        # --- HistoricalCodeReview (simple_history mirror) --------------
        # History tables use db_constraint=False / DO_NOTHING so snapshot rows
        # survive deletion of the referenced objects.
        migrations.CreateModel(
            name="HistoricalCodeReview",
            fields=[
                (
                    "id",
                    models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
                ),
                ("version", models.PositiveIntegerField(default=1, editable=False)),
                ("created_at", models.DateTimeField(blank=True, editable=False)),
                ("updated_at", models.DateTimeField(blank=True, editable=False)),
                ("deleted_at", models.DateTimeField(blank=True, null=True)),
                ("title", models.CharField(max_length=300)),
                ("description", models.TextField(blank=True, default="")),
                (
                    "diff",
                    models.TextField(help_text="Unified diff of proposed changes"),
                ),
                ("files_changed", models.JSONField(default=list)),
                ("agent_id", models.CharField(blank=True, default="", max_length=200)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending Review"),
                            ("approved", "Approved"),
                            ("changes_requested", "Changes Requested"),
                            ("merged", "Merged"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                (
                    "ticket_uuid",
                    models.CharField(
                        blank=True,
                        default="",
                        help_text="Related ticket UUID if any",
                        max_length=64,
                    ),
                ),
                ("history_id", models.AutoField(primary_key=True, serialize=False)),
                ("history_date", models.DateTimeField(db_index=True)),
                ("history_change_reason", models.CharField(max_length=100, null=True)),
                (
                    "history_type",
                    models.CharField(
                        choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
                        max_length=1,
                    ),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "deleted_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "history_user",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "repository",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="fossil.fossilrepository",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="fossil.agentworkspace",
                    ),
                ),
            ],
            options={
                "verbose_name": "historical code review",
                "verbose_name_plural": "historical code reviews",
                "ordering": ("-history_date", "-history_id"),
                "get_latest_by": ("history_date", "history_id"),
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # --- HistoricalReviewComment (simple_history mirror) -----------
        migrations.CreateModel(
            name="HistoricalReviewComment",
            fields=[
                (
                    "id",
                    models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
                ),
                ("version", models.PositiveIntegerField(default=1, editable=False)),
                ("created_at", models.DateTimeField(blank=True, editable=False)),
                ("updated_at", models.DateTimeField(blank=True, editable=False)),
                ("deleted_at", models.DateTimeField(blank=True, null=True)),
                ("body", models.TextField()),
                ("file_path", models.CharField(blank=True, default="", max_length=500)),
                ("line_number", models.IntegerField(blank=True, null=True)),
                (
                    "author",
                    models.CharField(help_text="Agent ID or username", max_length=200),
                ),
                ("history_id", models.AutoField(primary_key=True, serialize=False)),
                ("history_date", models.DateTimeField(db_index=True)),
                ("history_change_reason", models.CharField(max_length=100, null=True)),
                (
                    "history_type",
                    models.CharField(
                        choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
                        max_length=1,
                    ),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "deleted_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "history_user",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "review",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="fossil.codereview",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "historical review comment",
                "verbose_name_plural": "historical review comments",
                "ordering": ("-history_date", "-history_id"),
                "get_latest_by": ("history_date", "history_id"),
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # --- HistoricalTicketClaim (simple_history mirror) -------------
        migrations.CreateModel(
            name="HistoricalTicketClaim",
            fields=[
                (
                    "id",
                    models.BigIntegerField(auto_created=True, blank=True, db_index=True, verbose_name="ID"),
                ),
                ("version", models.PositiveIntegerField(default=1, editable=False)),
                ("created_at", models.DateTimeField(blank=True, editable=False)),
                ("updated_at", models.DateTimeField(blank=True, editable=False)),
                ("deleted_at", models.DateTimeField(blank=True, null=True)),
                ("ticket_uuid", models.CharField(max_length=64)),
                ("agent_id", models.CharField(max_length=200)),
                ("claimed_at", models.DateTimeField(blank=True, editable=False)),
                ("released_at", models.DateTimeField(blank=True, null=True)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("claimed", "Claimed"),
                            ("submitted", "Submitted"),
                            ("merged", "Merged"),
                            ("released", "Released"),
                        ],
                        default="claimed",
                        max_length=20,
                    ),
                ),
                (
                    "summary",
                    models.TextField(blank=True, default="", help_text="Work summary when submitted"),
                ),
                (
                    "files_changed",
                    models.JSONField(blank=True, default=list, help_text="List of files changed"),
                ),
                ("history_id", models.AutoField(primary_key=True, serialize=False)),
                ("history_date", models.DateTimeField(db_index=True)),
                ("history_change_reason", models.CharField(max_length=100, null=True)),
                (
                    "history_type",
                    models.CharField(
                        choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
                        max_length=1,
                    ),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "deleted_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "history_user",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "repository",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="fossil.fossilrepository",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        blank=True,
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="fossil.agentworkspace",
                    ),
                ),
            ],
            options={
                "verbose_name": "historical ticket claim",
                "verbose_name_plural": "historical ticket claims",
                "ordering": ("-history_date", "-history_id"),
                "get_latest_by": ("history_date", "history_id"),
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # --- ReviewComment (concrete) ----------------------------------
        migrations.CreateModel(
            name="ReviewComment",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("version", models.PositiveIntegerField(default=1, editable=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("deleted_at", models.DateTimeField(blank=True, null=True)),
                ("body", models.TextField()),
                ("file_path", models.CharField(blank=True, default="", max_length=500)),
                ("line_number", models.IntegerField(blank=True, null=True)),
                (
                    "author",
                    models.CharField(help_text="Agent ID or username", max_length=200),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "deleted_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "review",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="comments",
                        to="fossil.codereview",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "ordering": ["created_at"],
            },
        ),
        # --- TicketClaim (concrete) ------------------------------------
        migrations.CreateModel(
            name="TicketClaim",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("version", models.PositiveIntegerField(default=1, editable=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("deleted_at", models.DateTimeField(blank=True, null=True)),
                ("ticket_uuid", models.CharField(max_length=64)),
                ("agent_id", models.CharField(max_length=200)),
                ("claimed_at", models.DateTimeField(auto_now_add=True)),
                ("released_at", models.DateTimeField(blank=True, null=True)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("claimed", "Claimed"),
                            ("submitted", "Submitted"),
                            ("merged", "Merged"),
                            ("released", "Released"),
                        ],
                        default="claimed",
                        max_length=20,
                    ),
                ),
                (
                    "summary",
                    models.TextField(blank=True, default="", help_text="Work summary when submitted"),
                ),
                (
                    "files_changed",
                    models.JSONField(blank=True, default=list, help_text="List of files changed"),
                ),
                (
                    "created_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "deleted_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "repository",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="ticket_claims",
                        to="fossil.fossilrepository",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "workspace",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="claims",
                        to="fossil.agentworkspace",
                    ),
                ),
            ],
            options={
                "ordering": ["-claimed_at"],
                # One claim per (repository, ticket) — dropped again by
                # migration 0012_alter_ticketclaim_unique_together.
                "unique_together": {("repository", "ticket_uuid")},
            },
        ),
    ]
--- a/fossil/migrations/0012_alter_ticketclaim_unique_together.py
+++ b/fossil/migrations/0012_alter_ticketclaim_unique_together.py
@@ -0,0 +1,16 @@
1
+# Generated by Django 5.2.12 on 2026-04-07 17:32
2
+
3
+from django.db import migrations
4
+
5
+
6
+class Migration(migrations.Migration):
7
+ dependencies = [
8
+ ("fossil", "0011_codereview_historicalcodereview_and_more"),
9
+ ]
10
+
11
+ operations = [
12
+ migrations.AlterUniqueTogether(
13
+ name="ticketclaim",
14
+ unique_together=set(),
15
+ ),
16
+ ]
--- a/fossil/migrations/0012_alter_ticketclaim_unique_together.py
+++ b/fossil/migrations/0012_alter_ticketclaim_unique_together.py
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/migrations/0012_alter_ticketclaim_unique_together.py
+++ b/fossil/migrations/0012_alter_ticketclaim_unique_together.py
@@ -0,0 +1,16 @@
1 # Generated by Django 5.2.12 on 2026-04-07 17:32
2
3 from django.db import migrations
4
5
class Migration(migrations.Migration):
    """Drop the ``(repository, ticket_uuid)`` uniqueness constraint on TicketClaim.

    NOTE(review): migration 0011 created TicketClaim with this unique_together;
    removing it presumably allows multiple claim rows per ticket (e.g. a fresh
    claim after a release) — confirm against the claim/release workflow in
    fossil/agent_claims.py.
    """

    dependencies = [
        ("fossil", "0011_codereview_historicalcodereview_and_more"),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name="ticketclaim",
            # Empty set removes the constraint entirely.
            unique_together=set(),
        ),
    ]
--- fossil/models.py
+++ fossil/models.py
@@ -65,16 +65,19 @@
6565
def __str__(self):
6666
return f"{self.repository.filename} @ {self.created_at:%Y-%m-%d %H:%M}" if self.created_at else self.repository.filename
6767
6868
6969
# Import related models so they're discoverable by Django
70
+from fossil.agent_claims import TicketClaim # noqa: E402, F401
7071
from fossil.api_tokens import APIToken # noqa: E402, F401
7172
from fossil.branch_protection import BranchProtection # noqa: E402, F401
7273
from fossil.ci import StatusCheck # noqa: E402, F401
74
+from fossil.code_reviews import CodeReview, ReviewComment # noqa: E402, F401
7375
from fossil.forum import ForumPost # noqa: E402, F401
7476
from fossil.notifications import Notification, NotificationPreference, ProjectWatch # noqa: E402, F401
7577
from fossil.releases import Release, ReleaseAsset # noqa: E402, F401
7678
from fossil.sync_models import GitMirror, SSHKey, SyncLog # noqa: E402, F401
7779
from fossil.ticket_fields import TicketFieldDefinition # noqa: E402, F401
7880
from fossil.ticket_reports import TicketReport # noqa: E402, F401
7981
from fossil.user_keys import UserSSHKey # noqa: E402, F401
8082
from fossil.webhooks import Webhook, WebhookDelivery # noqa: E402, F401
83
+from fossil.workspaces import AgentWorkspace # noqa: E402, F401
8184
--- fossil/models.py
+++ fossil/models.py
@@ -65,16 +65,19 @@
65 def __str__(self):
66 return f"{self.repository.filename} @ {self.created_at:%Y-%m-%d %H:%M}" if self.created_at else self.repository.filename
67
68
69 # Import related models so they're discoverable by Django
 
70 from fossil.api_tokens import APIToken # noqa: E402, F401
71 from fossil.branch_protection import BranchProtection # noqa: E402, F401
72 from fossil.ci import StatusCheck # noqa: E402, F401
 
73 from fossil.forum import ForumPost # noqa: E402, F401
74 from fossil.notifications import Notification, NotificationPreference, ProjectWatch # noqa: E402, F401
75 from fossil.releases import Release, ReleaseAsset # noqa: E402, F401
76 from fossil.sync_models import GitMirror, SSHKey, SyncLog # noqa: E402, F401
77 from fossil.ticket_fields import TicketFieldDefinition # noqa: E402, F401
78 from fossil.ticket_reports import TicketReport # noqa: E402, F401
79 from fossil.user_keys import UserSSHKey # noqa: E402, F401
80 from fossil.webhooks import Webhook, WebhookDelivery # noqa: E402, F401
 
81
--- fossil/models.py
+++ fossil/models.py
@@ -65,16 +65,19 @@
def __str__(self):
    """Return the repository filename, suffixed with the snapshot timestamp when one exists."""
    label = self.repository.filename
    if self.created_at:
        label = f"{label} @ {self.created_at:%Y-%m-%d %H:%M}"
    return label
67
68
69 # Import related models so they're discoverable by Django
70 from fossil.agent_claims import TicketClaim # noqa: E402, F401
71 from fossil.api_tokens import APIToken # noqa: E402, F401
72 from fossil.branch_protection import BranchProtection # noqa: E402, F401
73 from fossil.ci import StatusCheck # noqa: E402, F401
74 from fossil.code_reviews import CodeReview, ReviewComment # noqa: E402, F401
75 from fossil.forum import ForumPost # noqa: E402, F401
76 from fossil.notifications import Notification, NotificationPreference, ProjectWatch # noqa: E402, F401
77 from fossil.releases import Release, ReleaseAsset # noqa: E402, F401
78 from fossil.sync_models import GitMirror, SSHKey, SyncLog # noqa: E402, F401
79 from fossil.ticket_fields import TicketFieldDefinition # noqa: E402, F401
80 from fossil.ticket_reports import TicketReport # noqa: E402, F401
81 from fossil.user_keys import UserSSHKey # noqa: E402, F401
82 from fossil.webhooks import Webhook, WebhookDelivery # noqa: E402, F401
83 from fossil.workspaces import AgentWorkspace # noqa: E402, F401
84
--- fossil/urls.py
+++ fossil/urls.py
@@ -8,17 +8,42 @@
88
# JSON API
99
path("api/", api_views.api_docs, name="api_docs"),
1010
path("api/project", api_views.api_project, name="api_project"),
1111
path("api/timeline", api_views.api_timeline, name="api_timeline"),
1212
path("api/tickets", api_views.api_tickets, name="api_tickets"),
13
+ # Unclaimed must be before <str:ticket_uuid> to avoid matching "unclaimed" as a UUID
14
+ path("api/tickets/unclaimed", api_views.api_tickets_unclaimed, name="api_tickets_unclaimed"),
1315
path("api/tickets/<str:ticket_uuid>", api_views.api_ticket_detail, name="api_ticket_detail"),
1416
path("api/wiki", api_views.api_wiki_list, name="api_wiki_list"),
1517
path("api/wiki/<path:page_name>", api_views.api_wiki_page, name="api_wiki_page"),
1618
path("api/branches", api_views.api_branches, name="api_branches"),
1719
path("api/tags", api_views.api_tags, name="api_tags"),
1820
path("api/releases", api_views.api_releases, name="api_releases"),
1921
path("api/search", api_views.api_search, name="api_search"),
22
+ # Batch API
23
+ path("api/batch", api_views.api_batch, name="api_batch"),
24
+ # Agent Workspaces
25
+ path("api/workspaces", api_views.api_workspace_list, name="api_workspace_list"),
26
+ path("api/workspaces/create", api_views.api_workspace_create, name="api_workspace_create"),
27
+ path("api/workspaces/<str:workspace_name>", api_views.api_workspace_detail, name="api_workspace_detail"),
28
+ path("api/workspaces/<str:workspace_name>/commit", api_views.api_workspace_commit, name="api_workspace_commit"),
29
+ path("api/workspaces/<str:workspace_name>/merge", api_views.api_workspace_merge, name="api_workspace_merge"),
30
+ path("api/workspaces/<str:workspace_name>/abandon", api_views.api_workspace_abandon, name="api_workspace_abandon"),
31
+ # Task Claiming
32
+ path("api/tickets/<str:ticket_uuid>/claim", api_views.api_ticket_claim, name="api_ticket_claim"),
33
+ path("api/tickets/<str:ticket_uuid>/release", api_views.api_ticket_release, name="api_ticket_release"),
34
+ path("api/tickets/<str:ticket_uuid>/submit", api_views.api_ticket_submit, name="api_ticket_submit"),
35
+ # Server-Sent Events
36
+ path("api/events", api_views.api_events, name="api_events"),
37
+ # Code Reviews
38
+ path("api/reviews", api_views.api_review_list, name="api_review_list"),
39
+ path("api/reviews/create", api_views.api_review_create, name="api_review_create"),
40
+ path("api/reviews/<int:review_id>", api_views.api_review_detail, name="api_review_detail"),
41
+ path("api/reviews/<int:review_id>/comment", api_views.api_review_comment, name="api_review_comment"),
42
+ path("api/reviews/<int:review_id>/approve", api_views.api_review_approve, name="api_review_approve"),
43
+ path("api/reviews/<int:review_id>/request-changes", api_views.api_review_request_changes, name="api_review_request_changes"),
44
+ path("api/reviews/<int:review_id>/merge", api_views.api_review_merge, name="api_review_merge"),
2045
#
2146
path("code/", views.code_browser, name="code"),
2247
path("code/tree/<path:dirpath>/", views.code_browser, name="code_dir"),
2348
path("code/file/<path:filepath>", views.code_file, name="code_file"),
2449
path("timeline/", views.timeline, name="timeline"),
2550
2651
ADDED fossil/workspaces.py
2752
ADDED mcp_server/__init__.py
2853
ADDED mcp_server/__main__.py
2954
ADDED mcp_server/config.json
3055
ADDED mcp_server/server.py
3156
ADDED mcp_server/tools.py
--- fossil/urls.py
+++ fossil/urls.py
@@ -8,17 +8,42 @@
8 # JSON API
9 path("api/", api_views.api_docs, name="api_docs"),
10 path("api/project", api_views.api_project, name="api_project"),
11 path("api/timeline", api_views.api_timeline, name="api_timeline"),
12 path("api/tickets", api_views.api_tickets, name="api_tickets"),
 
 
13 path("api/tickets/<str:ticket_uuid>", api_views.api_ticket_detail, name="api_ticket_detail"),
14 path("api/wiki", api_views.api_wiki_list, name="api_wiki_list"),
15 path("api/wiki/<path:page_name>", api_views.api_wiki_page, name="api_wiki_page"),
16 path("api/branches", api_views.api_branches, name="api_branches"),
17 path("api/tags", api_views.api_tags, name="api_tags"),
18 path("api/releases", api_views.api_releases, name="api_releases"),
19 path("api/search", api_views.api_search, name="api_search"),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20 #
21 path("code/", views.code_browser, name="code"),
22 path("code/tree/<path:dirpath>/", views.code_browser, name="code_dir"),
23 path("code/file/<path:filepath>", views.code_file, name="code_file"),
24 path("timeline/", views.timeline, name="timeline"),
25
26 ADDED fossil/workspaces.py
27 ADDED mcp_server/__init__.py
28 ADDED mcp_server/__main__.py
29 ADDED mcp_server/config.json
30 ADDED mcp_server/server.py
31 ADDED mcp_server/tools.py
--- fossil/urls.py
+++ fossil/urls.py
@@ -8,17 +8,42 @@
8 # JSON API
9 path("api/", api_views.api_docs, name="api_docs"),
10 path("api/project", api_views.api_project, name="api_project"),
11 path("api/timeline", api_views.api_timeline, name="api_timeline"),
12 path("api/tickets", api_views.api_tickets, name="api_tickets"),
13 # Unclaimed must be before <str:ticket_uuid> to avoid matching "unclaimed" as a UUID
14 path("api/tickets/unclaimed", api_views.api_tickets_unclaimed, name="api_tickets_unclaimed"),
15 path("api/tickets/<str:ticket_uuid>", api_views.api_ticket_detail, name="api_ticket_detail"),
16 path("api/wiki", api_views.api_wiki_list, name="api_wiki_list"),
17 path("api/wiki/<path:page_name>", api_views.api_wiki_page, name="api_wiki_page"),
18 path("api/branches", api_views.api_branches, name="api_branches"),
19 path("api/tags", api_views.api_tags, name="api_tags"),
20 path("api/releases", api_views.api_releases, name="api_releases"),
21 path("api/search", api_views.api_search, name="api_search"),
22 # Batch API
23 path("api/batch", api_views.api_batch, name="api_batch"),
24 # Agent Workspaces
25 path("api/workspaces", api_views.api_workspace_list, name="api_workspace_list"),
26 path("api/workspaces/create", api_views.api_workspace_create, name="api_workspace_create"),
27 path("api/workspaces/<str:workspace_name>", api_views.api_workspace_detail, name="api_workspace_detail"),
28 path("api/workspaces/<str:workspace_name>/commit", api_views.api_workspace_commit, name="api_workspace_commit"),
29 path("api/workspaces/<str:workspace_name>/merge", api_views.api_workspace_merge, name="api_workspace_merge"),
30 path("api/workspaces/<str:workspace_name>/abandon", api_views.api_workspace_abandon, name="api_workspace_abandon"),
31 # Task Claiming
32 path("api/tickets/<str:ticket_uuid>/claim", api_views.api_ticket_claim, name="api_ticket_claim"),
33 path("api/tickets/<str:ticket_uuid>/release", api_views.api_ticket_release, name="api_ticket_release"),
34 path("api/tickets/<str:ticket_uuid>/submit", api_views.api_ticket_submit, name="api_ticket_submit"),
35 # Server-Sent Events
36 path("api/events", api_views.api_events, name="api_events"),
37 # Code Reviews
38 path("api/reviews", api_views.api_review_list, name="api_review_list"),
39 path("api/reviews/create", api_views.api_review_create, name="api_review_create"),
40 path("api/reviews/<int:review_id>", api_views.api_review_detail, name="api_review_detail"),
41 path("api/reviews/<int:review_id>/comment", api_views.api_review_comment, name="api_review_comment"),
42 path("api/reviews/<int:review_id>/approve", api_views.api_review_approve, name="api_review_approve"),
43 path("api/reviews/<int:review_id>/request-changes", api_views.api_review_request_changes, name="api_review_request_changes"),
44 path("api/reviews/<int:review_id>/merge", api_views.api_review_merge, name="api_review_merge"),
45 #
46 path("code/", views.code_browser, name="code"),
47 path("code/tree/<path:dirpath>/", views.code_browser, name="code_dir"),
48 path("code/file/<path:filepath>", views.code_file, name="code_file"),
49 path("timeline/", views.timeline, name="timeline"),
50
51 ADDED fossil/workspaces.py
52 ADDED mcp_server/__init__.py
53 ADDED mcp_server/__main__.py
54 ADDED mcp_server/config.json
55 ADDED mcp_server/server.py
56 ADDED mcp_server/tools.py
--- a/fossil/workspaces.py
+++ b/fossil/workspaces.py
@@ -0,0 +1,40 @@
1
+"""Agent workspace model for isolated parallel development.
2
+
3
+Each workspace corresponds to a Fossil branch and a temporary checkout directory
4
+on disk. Agents can create, commit to, merge, and abandon workspaces independently.
5
+"""
6
+
7
+from django.db import models
8
+
9
+from core.models import ActiveManager, Tracking
10
+
11
+
12
+class AgentWorkspace(Tracking):
13
+ """Isolated workspace for an agent working on a repository."""
14
+
15
+ class Status(models.TextChoices):
16
+ ACTIVE = "active", "Active"
17
+ MERGED = "merged", "Merged"
18
+ ABANDONED = "abandoned", "Abandoned"
19
+
20
+ repository = models.ForeignKey("fossil.FossilRepository", on_delete=models.CASCADE, related_name="workspaces")
21
+ name = models.CharField(max_length=200, help_text="Workspace name (e.g., agent-fix-bug-123)")
22
+ branch = models.CharField(max_length=200, help_text="Fossil branch name for this workspace")
23
+ status = models.CharField(max_length=20, choices=Status.choices, default=Status.ACTIVE)
24
+ agent_id = models.CharField(max_length=200, blank=True, default="", help_text="Agent identifier")
25
+ description = models.CharField(max_length=500, blank=True, default="")
26
+ checkout_path = models.CharField(max_length=500, blank=True, default="", help_text="Path to workspace checkout directory")
27
+
28
+ # Work tracking
29
+ files_changed = models.IntegerField(default=0)
30
+ commits_made = models.IntegerField(default=0)
31
+
32
+ objects = ActiveManager()
33
+ all_objects = models.Manager()
34
+
35
+ class Meta:
36
+ ordering = ["-created_at"]
37
+ unique_together = [("repository", "name")]
38
+
39
+ def __str__(self):
40
+ return f"{self.name} ({self.status})"
--- a/fossil/workspaces.py
+++ b/fossil/workspaces.py
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/workspaces.py
+++ b/fossil/workspaces.py
@@ -0,0 +1,40 @@
1 """Agent workspace model for isolated parallel development.
2
3 Each workspace corresponds to a Fossil branch and a temporary checkout directory
4 on disk. Agents can create, commit to, merge, and abandon workspaces independently.
5 """
6
7 from django.db import models
8
9 from core.models import ActiveManager, Tracking
10
11
class AgentWorkspace(Tracking):
    """Isolated workspace for an agent working on a repository.

    Each workspace maps to one Fossil branch (``branch``) plus an on-disk
    checkout (``checkout_path``); ``status`` tracks the create → merge/abandon
    lifecycle. Unique per ``(repository, name)``.
    """

    class Status(models.TextChoices):
        # Lifecycle: ACTIVE on creation, then terminal MERGED or ABANDONED.
        ACTIVE = "active", "Active"
        MERGED = "merged", "Merged"
        ABANDONED = "abandoned", "Abandoned"

    repository = models.ForeignKey("fossil.FossilRepository", on_delete=models.CASCADE, related_name="workspaces")
    name = models.CharField(max_length=200, help_text="Workspace name (e.g., agent-fix-bug-123)")
    branch = models.CharField(max_length=200, help_text="Fossil branch name for this workspace")
    status = models.CharField(max_length=20, choices=Status.choices, default=Status.ACTIVE)
    agent_id = models.CharField(max_length=200, blank=True, default="", help_text="Agent identifier")
    description = models.CharField(max_length=500, blank=True, default="")
    checkout_path = models.CharField(max_length=500, blank=True, default="", help_text="Path to workspace checkout directory")

    # Work tracking (counters, presumably maintained by the workspace API
    # views on commit — confirm in fossil/api_views.py)
    files_changed = models.IntegerField(default=0)
    commits_made = models.IntegerField(default=0)

    # Default manager hides soft-deleted rows; all_objects sees everything.
    objects = ActiveManager()
    all_objects = models.Manager()

    class Meta:
        ordering = ["-created_at"]
        unique_together = [("repository", "name")]

    def __str__(self):
        return f"{self.name} ({self.status})"

No diff available

--- a/mcp_server/__main__.py
+++ b/mcp_server/__main__.py
@@ -0,0 +1,19 @@
1
+"""Entry point for the fossilrepo MCP server.
2
+
3
+Usage:
4
+ python -m mcp_server
5
+ fossilrepo-mcp (via pyproject.toml script entry)
6
+"""
7
+
8
+import asyncio
9
+
10
+from mcp_server.server import main
11
+
12
+
13
+def run():
14
+ """Synchronous entry point for pyproject.toml [project.scripts]."""
15
+ asyncio.run(main())
16
+
17
+
18
+if __name__ == "__main__":
19
+ run()
--- a/mcp_server/__main__.py
+++ b/mcp_server/__main__.py
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/mcp_server/__main__.py
+++ b/mcp_server/__main__.py
@@ -0,0 +1,19 @@
1 """Entry point for the fossilrepo MCP server.
2
3 Usage:
4 python -m mcp_server
5 fossilrepo-mcp (via pyproject.toml script entry)
6 """
7
8 import asyncio
9
10 from mcp_server.server import main
11
12
def run():
    """Synchronous entry point for pyproject.toml [project.scripts]."""
    # Bridge the async server into a blocking console-script call.
    asyncio.run(main())


if __name__ == "__main__":
    run()
--- a/mcp_server/config.json
+++ b/mcp_server/config.json
@@ -0,0 +1,10 @@
1
+{
2
+ "mcpServers": {
3
+ "fossilrepo": {
4
+ "command": "fossilrepo-mcp",
5
+ "env": {
6
+ "DJANGO_SETTINGS_MODULE": "config.settings"
7
+ }
8
+ }
9
+ }
10
+}
--- a/mcp_server/config.json
+++ b/mcp_server/config.json
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
--- a/mcp_server/config.json
+++ b/mcp_server/config.json
@@ -0,0 +1,10 @@
{
  "mcpServers": {
    "fossilrepo": {
      "command": "fossilrepo-mcp",
      "env": {
        "DJANGO_SETTINGS_MODULE": "config.settings"
      }
    }
  }
}
--- a/mcp_server/server.py
+++ b/mcp_server/server.py
@@ -0,0 +1,39 @@
1
+"""Fossilrepo MCP Server -- exposes repo operations to AI tools.
2
+
3
+Runs as a standalone process communicating over stdio using JSON-RPC 2.0
4
+(Model Context Protocol). Imports Django models directly for DB access.
5
+"""
6
+
7
+import json
8
+import os
9
+
10
+# Setup Django before any model imports
11
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
12
+
13
+import django # noqa: E402
14
+
15
+django.setup()
16
+
17
+from mcp.server import Server # noqa: E402
18
+from mcp.server.stdio import stdio_server # noqa: E402
19
+from mcp.types import TextContent # noqa: E402
20
+
21
+from mcp_server.tools import TOOLS, execute_tool # noqa: E402
22
+
23
+server = Server("fossilrepo")
24
+
25
+
26
+@server.list_tools()
27
+async def list_tools():
28
+ return TOOLS
29
+
30
+
31
+@server.call_tool()
32
+async def call_tool(name: str, arguments: dict):
33
+ result = execute_tool(name, arguments)
34
+ return [TextContent(type="text", text=json.dumps(result, indent=2, default=str))]
35
+
36
+
37
+async def main():
38
+ async with stdio_server() as (read_stream, write_stream):
39
+ await server.run(read_stream, write_stream, server.create_initialization_options())
--- a/mcp_server/server.py
+++ b/mcp_server/server.py
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/mcp_server/server.py
+++ b/mcp_server/server.py
@@ -0,0 +1,39 @@
1 """Fossilrepo MCP Server -- exposes repo operations to AI tools.
2
3 Runs as a standalone process communicating over stdio using JSON-RPC 2.0
4 (Model Context Protocol). Imports Django models directly for DB access.
5 """
6
7 import json
8 import os
9
# Django must be configured before anything that imports models.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")

import django  # noqa: E402

django.setup()

from mcp.server import Server  # noqa: E402
from mcp.server.stdio import stdio_server  # noqa: E402
from mcp.types import TextContent  # noqa: E402

from mcp_server.tools import TOOLS, execute_tool  # noqa: E402

server = Server("fossilrepo")


@server.list_tools()
async def list_tools():
    """Advertise the fossilrepo tool catalog to the MCP client."""
    return TOOLS


@server.call_tool()
async def call_tool(name: str, arguments: dict):
    """Execute one tool and wrap its result as MCP text content."""
    payload = execute_tool(name, arguments)
    # default=str keeps datetimes and other non-JSON values serializable.
    return [TextContent(type="text", text=json.dumps(payload, indent=2, default=str))]


async def main():
    """Serve MCP requests over stdio until the stream closes."""
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, server.create_initialization_options())
--- a/mcp_server/tools.py
+++ b/mcp_server/tools.py
@@ -0,0 +1,673 @@
1
+"""Tool definitions and handlers for the fossilrepo MCP server.
2
+
3
+Each tool maps to a Fossil repository operation -- reads go through
4
+FossilReader (direct SQLite), writes go through FossilCLI (fossil binary).
5
+"""
6
+
7
+from mcp.types import Tool
8
+
9
def _schema(properties=None, required=None):
    """Build the standard object-typed JSON schema used by every tool."""
    return {
        "type": "object",
        "properties": properties if properties is not None else {},
        "required": required if required is not None else [],
    }


# Shared property definition: every tool addresses a project by slug.
# NOTE: shared by reference -- treat as read-only.
_SLUG = {"type": "string", "description": "Project slug"}

TOOLS = [
    Tool(
        name="list_projects",
        description="List all projects in the fossilrepo instance",
        inputSchema=_schema(),
    ),
    Tool(
        name="get_project",
        description="Get details about a specific project including repo stats",
        inputSchema=_schema({"slug": _SLUG}, ["slug"]),
    ),
    Tool(
        name="browse_code",
        description="List files in a directory of a project's repository",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "path": {"type": "string", "description": "Directory path (empty for root)", "default": ""},
            },
            ["slug"],
        ),
    ),
    Tool(
        name="read_file",
        description="Read the content of a file from a project's repository",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "filepath": {"type": "string", "description": "File path in the repo"},
            },
            ["slug", "filepath"],
        ),
    ),
    Tool(
        name="get_timeline",
        description="Get recent checkins/commits for a project",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "limit": {"type": "integer", "description": "Number of entries", "default": 25},
                "branch": {"type": "string", "description": "Filter by branch", "default": ""},
            },
            ["slug"],
        ),
    ),
    Tool(
        name="get_checkin",
        description="Get details of a specific checkin including file changes",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "uuid": {"type": "string", "description": "Checkin UUID (or prefix)"},
            },
            ["slug", "uuid"],
        ),
    ),
    Tool(
        name="search_code",
        description="Search across checkins, tickets, and wiki pages",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "query": {"type": "string", "description": "Search query"},
                "limit": {"type": "integer", "default": 25},
            },
            ["slug", "query"],
        ),
    ),
    Tool(
        name="list_tickets",
        description="List tickets for a project with optional status filter",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "status": {"type": "string", "description": "Filter by status (Open, Fixed, Closed)", "default": ""},
                "limit": {"type": "integer", "default": 50},
            },
            ["slug"],
        ),
    ),
    Tool(
        name="get_ticket",
        description="Get ticket details including comments",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "uuid": {"type": "string", "description": "Ticket UUID (or prefix)"},
            },
            ["slug", "uuid"],
        ),
    ),
    Tool(
        name="create_ticket",
        description="Create a new ticket in a project",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "title": {"type": "string"},
                "body": {"type": "string", "description": "Ticket description"},
                "type": {"type": "string", "default": "Code_Defect"},
                "severity": {"type": "string", "default": "Important"},
                "priority": {"type": "string", "default": "Medium"},
            },
            ["slug", "title", "body"],
        ),
    ),
    Tool(
        name="update_ticket",
        description="Update a ticket's status, add a comment",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "uuid": {"type": "string", "description": "Ticket UUID"},
                "status": {"type": "string", "description": "New status", "default": ""},
                "comment": {"type": "string", "description": "Comment to add", "default": ""},
            },
            ["slug", "uuid"],
        ),
    ),
    Tool(
        name="list_wiki_pages",
        description="List all wiki pages in a project",
        inputSchema=_schema({"slug": _SLUG}, ["slug"]),
    ),
    Tool(
        name="get_wiki_page",
        description="Read a wiki page's content",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "page_name": {"type": "string", "description": "Wiki page name"},
            },
            ["slug", "page_name"],
        ),
    ),
    Tool(
        name="list_branches",
        description="List all branches in a project's repository",
        inputSchema=_schema({"slug": _SLUG}, ["slug"]),
    ),
    Tool(
        name="get_file_blame",
        description="Get blame annotations for a file showing who changed each line",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "filepath": {"type": "string", "description": "File path"},
            },
            ["slug", "filepath"],
        ),
    ),
    Tool(
        name="get_file_history",
        description="Get commit history for a specific file",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "filepath": {"type": "string", "description": "File path"},
                "limit": {"type": "integer", "default": 25},
            },
            ["slug", "filepath"],
        ),
    ),
    Tool(
        name="sql_query",
        description="Run a read-only SQL query against the Fossil SQLite database. Only SELECT allowed.",
        inputSchema=_schema(
            {
                "slug": _SLUG,
                "sql": {"type": "string", "description": "SQL query (SELECT only)"},
            },
            ["slug", "sql"],
        ),
    ),
]
209
+
210
+
211
+def _isoformat(dt):
212
+ """Safely format a datetime to ISO 8601, or None."""
213
+ if dt is None:
214
+ return None
215
+ return dt.isoformat()
216
+
217
+
218
def _get_repo(slug):
    """Resolve a slug to its (Project, FossilRepository) pair.

    Soft-deleted rows are excluded. Raises Project.DoesNotExist or
    FossilRepository.DoesNotExist on a miss.
    """
    from fossil.models import FossilRepository
    from projects.models import Project

    proj = Project.objects.get(slug=slug, deleted_at__isnull=True)
    return proj, FossilRepository.objects.get(project=proj, deleted_at__isnull=True)
229
+
230
+
231
def execute_tool(name: str, arguments: dict) -> dict:
    """Dispatch a tool call to the matching handler.

    Unknown tool names and handler exceptions are reported as
    ``{"error": ...}`` dicts rather than raised, so the MCP layer always
    receives a serializable result.
    """
    dispatch = {
        "list_projects": _list_projects,
        "get_project": _get_project,
        "browse_code": _browse_code,
        "read_file": _read_file,
        "get_timeline": _get_timeline,
        "get_checkin": _get_checkin,
        "search_code": _search_code,
        "list_tickets": _list_tickets,
        "get_ticket": _get_ticket,
        "create_ticket": _create_ticket,
        "update_ticket": _update_ticket,
        "list_wiki_pages": _list_wiki_pages,
        "get_wiki_page": _get_wiki_page,
        "list_branches": _list_branches,
        "get_file_blame": _get_file_blame,
        "get_file_history": _get_file_history,
        "sql_query": _sql_query,
    }
    try:
        handler = dispatch[name]
    except KeyError:
        return {"error": f"Unknown tool: {name}"}
    try:
        return handler(arguments)
    except Exception as exc:  # boundary: surface any failure to the caller
        return {"error": str(exc)}
259
+
260
+
261
+# ---------------------------------------------------------------------------
262
+# Read-only handlers (FossilReader)
263
+# ---------------------------------------------------------------------------
264
+
265
+
266
def _list_projects(args):
    """Return basic metadata for every non-deleted project."""
    from projects.models import Project

    rows = []
    for proj in Project.objects.filter(deleted_at__isnull=True):
        rows.append(
            {
                "name": proj.name,
                "slug": proj.slug,
                "description": proj.description or "",
                "visibility": proj.visibility,
            }
        )
    return {"projects": rows}
281
+
282
+
283
def _get_project(args):
    """Project details, augmented with live Fossil stats when the repo file exists."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])
    info = {
        "name": project.name,
        "slug": project.slug,
        "description": project.description or "",
        "visibility": project.visibility,
        "star_count": project.star_count,
        "filename": repo.filename,
        "file_size_bytes": repo.file_size_bytes,
        "checkin_count": repo.checkin_count,
        "last_checkin_at": _isoformat(repo.last_checkin_at),
    }
    # Counts read straight from the repository SQLite file, if present.
    if repo.exists_on_disk:
        with FossilReader(repo.full_path) as reader:
            meta = reader.get_metadata()
        info["fossil_project_name"] = meta.project_name
        info["fossil_checkin_count"] = meta.checkin_count
        info["fossil_ticket_count"] = meta.ticket_count
        info["fossil_wiki_page_count"] = meta.wiki_page_count
    return info
306
+
307
+
308
def _browse_code(args):
    """List files under ``path`` at the latest checkin.

    Matching is by path prefix, so nested subdirectory files are included.
    The returned ``path`` is normalized to carry a trailing slash.
    """
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])
    prefix = args.get("path", "")

    with FossilReader(repo.full_path) as reader:
        tip = reader.get_latest_checkin_uuid()
        if not tip:
            return {"files": [], "error": "No checkins in repository"}

        entries = reader.get_files_at_checkin(tip)

        if prefix:
            prefix = prefix.rstrip("/") + "/"
            entries = [e for e in entries if e.name.startswith(prefix)]

        return {
            "checkin": tip,
            "path": prefix,
            "files": [
                {
                    "name": e.name,
                    "uuid": e.uuid,
                    "size": e.size,
                    "last_commit_message": e.last_commit_message,
                    "last_commit_user": e.last_commit_user,
                    "last_commit_time": _isoformat(e.last_commit_time),
                }
                for e in entries
            ],
        }
341
+
342
+
343
def _read_file(args):
    """Return the UTF-8 text of one file at the latest checkin.

    Content that does not decode as UTF-8 is reported as binary with its
    size instead of its content.
    """
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])
    wanted = args["filepath"]

    with FossilReader(repo.full_path) as reader:
        tip = reader.get_latest_checkin_uuid()
        if not tip:
            return {"error": "No checkins in repository"}

        entry = next((f for f in reader.get_files_at_checkin(tip) if f.name == wanted), None)
        if entry is None:
            return {"error": f"File not found: {wanted}"}

        blob = reader.get_file_content(entry.uuid)
        if isinstance(blob, bytes):
            try:
                blob = blob.decode("utf-8")
            except UnicodeDecodeError:
                return {"filepath": wanted, "binary": True, "size": len(blob)}
        return {"filepath": wanted, "content": blob}
367
+
368
+
369
def _get_timeline(args):
    """Recent checkins, optionally filtered to one branch.

    NOTE(review): the branch filter is applied after fetching ``limit``
    entries, so a filtered result may hold fewer than ``limit`` checkins.
    """
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])
    wanted_branch = args.get("branch", "")

    with FossilReader(repo.full_path) as reader:
        rows = reader.get_timeline(limit=args.get("limit", 25), event_type="ci")

    checkins = [
        {
            "uuid": row.uuid,
            "timestamp": _isoformat(row.timestamp),
            "user": row.user,
            "comment": row.comment,
            "branch": row.branch,
        }
        for row in rows
    ]
    if wanted_branch:
        checkins = [c for c in checkins if c["branch"] == wanted_branch]

    return {"checkins": checkins, "total": len(checkins)}
394
+
395
+
396
def _get_checkin(args):
    """Full detail for one checkin, or an error dict when the UUID is unknown."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        info = reader.get_checkin_detail(args["uuid"])

    if info is None:
        return {"error": "Checkin not found"}

    return {
        "uuid": info.uuid,
        "timestamp": _isoformat(info.timestamp),
        "user": info.user,
        "comment": info.comment,
        "branch": info.branch,
        "parent_uuid": info.parent_uuid,
        "is_merge": info.is_merge,
        "files_changed": info.files_changed,
    }
417
+
418
+
419
def _search_code(args):
    """Search checkins, tickets, and wiki; datetimes serialized in place."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        results = reader.search(args["query"], limit=args.get("limit", 25))

    # JSON cannot carry datetime objects -- convert them in place.
    for hit in results.get("checkins", []):
        hit["timestamp"] = _isoformat(hit.get("timestamp"))
    for hit in results.get("tickets", []):
        hit["created"] = _isoformat(hit.get("created"))

    return results
436
+
437
+
438
def _list_tickets(args):
    """Ticket summaries, optionally filtered by status."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])
    # Empty-string status means "no filter" to the reader API.
    status = args.get("status", "") or None

    with FossilReader(repo.full_path) as reader:
        found = reader.get_tickets(status=status, limit=args.get("limit", 50))

    summaries = [
        {
            "uuid": t.uuid,
            "title": t.title,
            "status": t.status,
            "type": t.type,
            "subsystem": t.subsystem,
            "priority": t.priority,
            "created": _isoformat(t.created),
        }
        for t in found
    ]
    return {"tickets": summaries, "total": len(summaries)}
463
+
464
+
465
def _get_ticket(args):
    """One ticket with its full field set and comment thread."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        tkt = reader.get_ticket_detail(args["uuid"])
        if tkt is None:
            return {"error": "Ticket not found"}
        thread = reader.get_ticket_comments(args["uuid"])

    return {
        "uuid": tkt.uuid,
        "title": tkt.title,
        "status": tkt.status,
        "type": tkt.type,
        "subsystem": tkt.subsystem,
        "priority": tkt.priority,
        "severity": tkt.severity,
        "resolution": tkt.resolution,
        "body": tkt.body,
        "created": _isoformat(tkt.created),
        "comments": [
            {
                "timestamp": _isoformat(c.get("timestamp")),
                "user": c.get("user", ""),
                "comment": c.get("comment", ""),
                "mimetype": c.get("mimetype", "text/plain"),
            }
            for c in thread
        ],
    }
497
+
498
+
499
+# ---------------------------------------------------------------------------
500
+# Write handlers (FossilCLI)
501
+# ---------------------------------------------------------------------------
502
+
503
+
504
def _create_ticket(args):
    """Create a ticket via the fossil CLI; new tickets always start Open."""
    from fossil.cli import FossilCLI

    _, repo = _get_repo(args["slug"])

    cli = FossilCLI()
    cli.ensure_default_user(repo.full_path)

    fields = {
        "title": args["title"],
        "comment": args["body"],
        "type": args.get("type", "Code_Defect"),
        "severity": args.get("severity", "Important"),
        "priority": args.get("priority", "Medium"),
        "status": "Open",
    }
    if not cli.ticket_add(repo.full_path, fields):
        return {"error": "Failed to create ticket"}
    return {"success": True, "title": args["title"]}
526
+
527
+
528
def _update_ticket(args):
    """Change a ticket's status and/or append a comment.

    Comments go through Fossil's ``icomment`` field (append-style comment).
    """
    from fossil.cli import FossilCLI

    _, repo = _get_repo(args["slug"])

    cli = FossilCLI()
    cli.ensure_default_user(repo.full_path)

    changes = {}
    if args.get("status"):
        changes["status"] = args["status"]
    if args.get("comment"):
        changes["icomment"] = args["comment"]
    if not changes:
        return {"error": "No fields to update (provide status or comment)"}

    if not cli.ticket_change(repo.full_path, args["uuid"], changes):
        return {"error": "Failed to update ticket"}
    return {"success": True, "uuid": args["uuid"]}
550
+
551
+
552
+# ---------------------------------------------------------------------------
553
+# Wiki handlers (FossilReader for reads)
554
+# ---------------------------------------------------------------------------
555
+
556
+
557
def _list_wiki_pages(args):
    """Names and last-edit info for every wiki page in the repo."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        pages = reader.get_wiki_pages()

    return {
        "pages": [
            {
                "name": p.name,
                "last_modified": _isoformat(p.last_modified),
                "user": p.user,
            }
            for p in pages
        ]
    }
575
+
576
+
577
def _get_wiki_page(args):
    """Content of a single wiki page, or an error dict when missing."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        page = reader.get_wiki_page(args["page_name"])

    if page is None:
        return {"error": f"Wiki page not found: {args['page_name']}"}

    return {
        "name": page.name,
        "content": page.content,
        "last_modified": _isoformat(page.last_modified),
        "user": page.user,
    }
594
+
595
+
596
+# ---------------------------------------------------------------------------
597
+# Branch and file history handlers
598
+# ---------------------------------------------------------------------------
599
+
600
+
601
def _list_branches(args):
    """All branches with their last-activity stats."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        branch_rows = reader.get_branches()

    summaries = []
    for row in branch_rows:
        summaries.append(
            {
                "name": row["name"],
                "last_checkin": _isoformat(row["last_checkin"]),
                "last_user": row["last_user"],
                "checkin_count": row["checkin_count"],
                "last_uuid": row["last_uuid"],
            }
        )
    return {"branches": summaries}
621
+
622
+
623
def _get_file_blame(args):
    """Per-line blame annotations, produced by the fossil binary."""
    from fossil.cli import FossilCLI

    _, repo = _get_repo(args["slug"])

    annotated = FossilCLI().blame(repo.full_path, args["filepath"])
    return {"filepath": args["filepath"], "lines": annotated, "total": len(annotated)}
631
+
632
+
633
def _get_file_history(args):
    """Commit history entries touching a single file."""
    from fossil.reader import FossilReader

    _, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        entries = reader.get_file_history(args["filepath"], limit=args.get("limit", 25))

    # Serialize datetimes in place for JSON transport.
    for entry in entries:
        entry["timestamp"] = _isoformat(entry.get("timestamp"))

    return {"filepath": args["filepath"], "history": entries, "total": len(entries)}
646
+
647
+
648
+# ---------------------------------------------------------------------------
649
+# SQL query handler
650
+# ---------------------------------------------------------------------------
651
+
652
+
653
def _sql_query(args):
    """Run a read-only SQL query against the project's Fossil SQLite database.

    The statement is validated first (SELECT-only) by
    ``TicketReport.validate_sql``. At most 500 rows are returned; the
    ``truncated`` flag in the result tells the caller whether more rows
    were available (previously the cut-off was silent).
    """
    from fossil.reader import FossilReader
    from fossil.ticket_reports import TicketReport

    sql = args["sql"]
    error = TicketReport.validate_sql(sql)
    if error:
        return {"error": error}

    _, repo = _get_repo(args["slug"])
    row_cap = 500

    with FossilReader(repo.full_path) as reader:
        cursor = reader.conn.cursor()
        cursor.execute(sql)
        columns = [desc[0] for desc in cursor.description] if cursor.description else []
        # Fetch one extra row so truncation can be detected without a second query.
        rows = cursor.fetchmany(row_cap + 1)
        truncated = len(rows) > row_cap
        rows = rows[:row_cap]
        return {
            "columns": columns,
            "rows": [list(row) for row in rows],
            "count": len(rows),
            "truncated": truncated,
        }
--- a/mcp_server/tools.py
+++ b/mcp_server/tools.py
@@ -0,0 +1,673 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/mcp_server/tools.py
+++ b/mcp_server/tools.py
@@ -0,0 +1,673 @@
1 """Tool definitions and handlers for the fossilrepo MCP server.
2
3 Each tool maps to a Fossil repository operation -- reads go through
4 FossilReader (direct SQLite), writes go through FossilCLI (fossil binary).
5 """
6
7 from mcp.types import Tool
8
9 TOOLS = [
10 Tool(
11 name="list_projects",
12 description="List all projects in the fossilrepo instance",
13 inputSchema={"type": "object", "properties": {}, "required": []},
14 ),
15 Tool(
16 name="get_project",
17 description="Get details about a specific project including repo stats",
18 inputSchema={
19 "type": "object",
20 "properties": {"slug": {"type": "string", "description": "Project slug"}},
21 "required": ["slug"],
22 },
23 ),
24 Tool(
25 name="browse_code",
26 description="List files in a directory of a project's repository",
27 inputSchema={
28 "type": "object",
29 "properties": {
30 "slug": {"type": "string", "description": "Project slug"},
31 "path": {"type": "string", "description": "Directory path (empty for root)", "default": ""},
32 },
33 "required": ["slug"],
34 },
35 ),
36 Tool(
37 name="read_file",
38 description="Read the content of a file from a project's repository",
39 inputSchema={
40 "type": "object",
41 "properties": {
42 "slug": {"type": "string", "description": "Project slug"},
43 "filepath": {"type": "string", "description": "File path in the repo"},
44 },
45 "required": ["slug", "filepath"],
46 },
47 ),
48 Tool(
49 name="get_timeline",
50 description="Get recent checkins/commits for a project",
51 inputSchema={
52 "type": "object",
53 "properties": {
54 "slug": {"type": "string", "description": "Project slug"},
55 "limit": {"type": "integer", "description": "Number of entries", "default": 25},
56 "branch": {"type": "string", "description": "Filter by branch", "default": ""},
57 },
58 "required": ["slug"],
59 },
60 ),
61 Tool(
62 name="get_checkin",
63 description="Get details of a specific checkin including file changes",
64 inputSchema={
65 "type": "object",
66 "properties": {
67 "slug": {"type": "string", "description": "Project slug"},
68 "uuid": {"type": "string", "description": "Checkin UUID (or prefix)"},
69 },
70 "required": ["slug", "uuid"],
71 },
72 ),
73 Tool(
74 name="search_code",
75 description="Search across checkins, tickets, and wiki pages",
76 inputSchema={
77 "type": "object",
78 "properties": {
79 "slug": {"type": "string", "description": "Project slug"},
80 "query": {"type": "string", "description": "Search query"},
81 "limit": {"type": "integer", "default": 25},
82 },
83 "required": ["slug", "query"],
84 },
85 ),
86 Tool(
87 name="list_tickets",
88 description="List tickets for a project with optional status filter",
89 inputSchema={
90 "type": "object",
91 "properties": {
92 "slug": {"type": "string", "description": "Project slug"},
93 "status": {"type": "string", "description": "Filter by status (Open, Fixed, Closed)", "default": ""},
94 "limit": {"type": "integer", "default": 50},
95 },
96 "required": ["slug"],
97 },
98 ),
99 Tool(
100 name="get_ticket",
101 description="Get ticket details including comments",
102 inputSchema={
103 "type": "object",
104 "properties": {
105 "slug": {"type": "string", "description": "Project slug"},
106 "uuid": {"type": "string", "description": "Ticket UUID (or prefix)"},
107 },
108 "required": ["slug", "uuid"],
109 },
110 ),
111 Tool(
112 name="create_ticket",
113 description="Create a new ticket in a project",
114 inputSchema={
115 "type": "object",
116 "properties": {
117 "slug": {"type": "string", "description": "Project slug"},
118 "title": {"type": "string"},
119 "body": {"type": "string", "description": "Ticket description"},
120 "type": {"type": "string", "default": "Code_Defect"},
121 "severity": {"type": "string", "default": "Important"},
122 "priority": {"type": "string", "default": "Medium"},
123 },
124 "required": ["slug", "title", "body"],
125 },
126 ),
127 Tool(
128 name="update_ticket",
129 description="Update a ticket's status, add a comment",
130 inputSchema={
131 "type": "object",
132 "properties": {
133 "slug": {"type": "string", "description": "Project slug"},
134 "uuid": {"type": "string", "description": "Ticket UUID"},
135 "status": {"type": "string", "description": "New status", "default": ""},
136 "comment": {"type": "string", "description": "Comment to add", "default": ""},
137 },
138 "required": ["slug", "uuid"],
139 },
140 ),
141 Tool(
142 name="list_wiki_pages",
143 description="List all wiki pages in a project",
144 inputSchema={
145 "type": "object",
146 "properties": {"slug": {"type": "string", "description": "Project slug"}},
147 "required": ["slug"],
148 },
149 ),
150 Tool(
151 name="get_wiki_page",
152 description="Read a wiki page's content",
153 inputSchema={
154 "type": "object",
155 "properties": {
156 "slug": {"type": "string", "description": "Project slug"},
157 "page_name": {"type": "string", "description": "Wiki page name"},
158 },
159 "required": ["slug", "page_name"],
160 },
161 ),
162 Tool(
163 name="list_branches",
164 description="List all branches in a project's repository",
165 inputSchema={
166 "type": "object",
167 "properties": {"slug": {"type": "string", "description": "Project slug"}},
168 "required": ["slug"],
169 },
170 ),
171 Tool(
172 name="get_file_blame",
173 description="Get blame annotations for a file showing who changed each line",
174 inputSchema={
175 "type": "object",
176 "properties": {
177 "slug": {"type": "string", "description": "Project slug"},
178 "filepath": {"type": "string", "description": "File path"},
179 },
180 "required": ["slug", "filepath"],
181 },
182 ),
183 Tool(
184 name="get_file_history",
185 description="Get commit history for a specific file",
186 inputSchema={
187 "type": "object",
188 "properties": {
189 "slug": {"type": "string", "description": "Project slug"},
190 "filepath": {"type": "string", "description": "File path"},
191 "limit": {"type": "integer", "default": 25},
192 },
193 "required": ["slug", "filepath"],
194 },
195 ),
196 Tool(
197 name="sql_query",
198 description="Run a read-only SQL query against the Fossil SQLite database. Only SELECT allowed.",
199 inputSchema={
200 "type": "object",
201 "properties": {
202 "slug": {"type": "string", "description": "Project slug"},
203 "sql": {"type": "string", "description": "SQL query (SELECT only)"},
204 },
205 "required": ["slug", "sql"],
206 },
207 ),
208 ]
209
210
211 def _isoformat(dt):
212 """Safely format a datetime to ISO 8601, or None."""
213 if dt is None:
214 return None
215 return dt.isoformat()
216
217
def _get_repo(slug):
    """Look up project and its FossilRepository by slug.

    Raises Project.DoesNotExist or FossilRepository.DoesNotExist on miss.
    """
    # Deferred imports, matching the style used throughout this module.
    from fossil.models import FossilRepository
    from projects.models import Project

    # deleted_at is a soft-delete marker: only live rows are considered.
    project = Project.objects.get(slug=slug, deleted_at__isnull=True)
    repo = FossilRepository.objects.get(project=project, deleted_at__isnull=True)
    return project, repo
229
230
def execute_tool(name: str, arguments: dict) -> dict:
    """Dispatch a tool call to its handler, mapping failures to error dicts."""
    dispatch = {
        "list_projects": _list_projects,
        "get_project": _get_project,
        "browse_code": _browse_code,
        "read_file": _read_file,
        "get_timeline": _get_timeline,
        "get_checkin": _get_checkin,
        "search_code": _search_code,
        "list_tickets": _list_tickets,
        "get_ticket": _get_ticket,
        "create_ticket": _create_ticket,
        "update_ticket": _update_ticket,
        "list_wiki_pages": _list_wiki_pages,
        "get_wiki_page": _get_wiki_page,
        "list_branches": _list_branches,
        "get_file_blame": _get_file_blame,
        "get_file_history": _get_file_history,
        "sql_query": _sql_query,
    }
    try:
        handler = dispatch[name]
    except KeyError:
        return {"error": f"Unknown tool: {name}"}
    try:
        return handler(arguments)
    except Exception as exc:
        # MCP clients expect a structured error, never a raised exception.
        return {"error": str(exc)}
259
260
261 # ---------------------------------------------------------------------------
262 # Read-only handlers (FossilReader)
263 # ---------------------------------------------------------------------------
264
265
def _list_projects(args):
    """Return summaries for every non-deleted project."""
    from projects.models import Project

    active = Project.objects.filter(deleted_at__isnull=True)
    summaries = []
    for proj in active:
        summaries.append(
            {
                "name": proj.name,
                "slug": proj.slug,
                "description": proj.description or "",
                "visibility": proj.visibility,
            }
        )
    return {"projects": summaries}
281
282
def _get_project(args):
    """Return project metadata plus live stats from the Fossil file."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])
    info = {
        "name": project.name,
        "slug": project.slug,
        "description": project.description or "",
        "visibility": project.visibility,
        "star_count": project.star_count,
        "filename": repo.filename,
        "file_size_bytes": repo.file_size_bytes,
        "checkin_count": repo.checkin_count,
        "last_checkin_at": _isoformat(repo.last_checkin_at),
    }
    # Enrich with counters read straight from the repository file, but only
    # when it is actually present on disk.
    if repo.exists_on_disk:
        with FossilReader(repo.full_path) as reader:
            meta = reader.get_metadata()
            info["fossil_project_name"] = meta.project_name
            info["fossil_checkin_count"] = meta.checkin_count
            info["fossil_ticket_count"] = meta.ticket_count
            info["fossil_wiki_page_count"] = meta.wiki_page_count
    return info
306
307
def _browse_code(args):
    """List files under a directory prefix at the latest checkin."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])
    prefix = args.get("path", "")

    with FossilReader(repo.full_path) as reader:
        tip = reader.get_latest_checkin_uuid()
        if not tip:
            return {"files": [], "error": "No checkins in repository"}

        entries = reader.get_files_at_checkin(tip)

        # Normalize to a trailing-slash prefix so "src" cannot match "src2/".
        if prefix:
            prefix = prefix.rstrip("/") + "/"
            entries = [e for e in entries if e.name.startswith(prefix)]

        listing = [
            {
                "name": e.name,
                "uuid": e.uuid,
                "size": e.size,
                "last_commit_message": e.last_commit_message,
                "last_commit_user": e.last_commit_user,
                "last_commit_time": _isoformat(e.last_commit_time),
            }
            for e in entries
        ]
        return {"checkin": tip, "path": prefix, "files": listing}
341
342
def _read_file(args):
    """Return the content of one file at the latest checkin."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])
    wanted = args["filepath"]

    with FossilReader(repo.full_path) as reader:
        tip = reader.get_latest_checkin_uuid()
        if not tip:
            return {"error": "No checkins in repository"}

        entry = next(
            (f for f in reader.get_files_at_checkin(tip) if f.name == wanted),
            None,
        )
        if entry is None:
            return {"error": f"File not found: {wanted}"}

        blob = reader.get_file_content(entry.uuid)
        if isinstance(blob, bytes):
            try:
                blob = blob.decode("utf-8")
            except UnicodeDecodeError:
                # Undecodable content is treated as binary: report size only.
                return {"filepath": wanted, "binary": True, "size": len(blob)}
        return {"filepath": wanted, "content": blob}
367
368
def _get_timeline(args):
    """Return recent checkins, optionally restricted to one branch.

    Bug fix: the branch filter used to be applied AFTER the fetch limit, so
    asking for 25 entries on a sparse branch could return almost nothing.
    When a branch filter is present we now over-fetch and cap afterwards.
    """
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])
    limit = args.get("limit", 25)
    branch_filter = args.get("branch", "")

    # Over-fetch when filtering so the branch still gets up to `limit` hits.
    fetch_limit = limit * 20 if branch_filter else limit

    with FossilReader(repo.full_path) as reader:
        entries = reader.get_timeline(limit=fetch_limit, event_type="ci")

    checkins = [
        {
            "uuid": e.uuid,
            "timestamp": _isoformat(e.timestamp),
            "user": e.user,
            "comment": e.comment,
            "branch": e.branch,
        }
        for e in entries
    ]

    if branch_filter:
        checkins = [c for c in checkins if c["branch"] == branch_filter][:limit]

    return {"checkins": checkins, "total": len(checkins)}
394
395
def _get_checkin(args):
    """Return full detail for a single checkin, or an error dict."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        info = reader.get_checkin_detail(args["uuid"])

    if info is None:
        return {"error": "Checkin not found"}

    return {
        "uuid": info.uuid,
        "timestamp": _isoformat(info.timestamp),
        "user": info.user,
        "comment": info.comment,
        "branch": info.branch,
        "parent_uuid": info.parent_uuid,
        "is_merge": info.is_merge,
        "files_changed": info.files_changed,
    }
417
418
def _search_code(args):
    """Full-text search over checkins, tickets and wiki pages."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        hits = reader.search(args["query"], limit=args.get("limit", 25))

    # Timestamps come back as datetimes; make them JSON-friendly in place.
    for hit in hits.get("checkins", []):
        hit["timestamp"] = _isoformat(hit.get("timestamp"))
    for hit in hits.get("tickets", []):
        hit["created"] = _isoformat(hit.get("created"))

    return hits
436
437
def _list_tickets(args):
    """List tickets, optionally filtered by status."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        # An empty-string filter means "no filter" to the reader, hence None.
        found = reader.get_tickets(
            status=args.get("status", "") or None,
            limit=args.get("limit", 50),
        )

    rows = []
    for tkt in found:
        rows.append(
            {
                "uuid": tkt.uuid,
                "title": tkt.title,
                "status": tkt.status,
                "type": tkt.type,
                "subsystem": tkt.subsystem,
                "priority": tkt.priority,
                "created": _isoformat(tkt.created),
            }
        )
    return {"tickets": rows, "total": len(rows)}
463
464
def _get_ticket(args):
    """Return one ticket with its comment thread, or an error dict."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        tkt = reader.get_ticket_detail(args["uuid"])
        if tkt is None:
            return {"error": "Ticket not found"}
        thread = reader.get_ticket_comments(args["uuid"])

    serialized_comments = []
    for entry in thread:
        serialized_comments.append(
            {
                "timestamp": _isoformat(entry.get("timestamp")),
                "user": entry.get("user", ""),
                "comment": entry.get("comment", ""),
                "mimetype": entry.get("mimetype", "text/plain"),
            }
        )

    return {
        "uuid": tkt.uuid,
        "title": tkt.title,
        "status": tkt.status,
        "type": tkt.type,
        "subsystem": tkt.subsystem,
        "priority": tkt.priority,
        "severity": tkt.severity,
        "resolution": tkt.resolution,
        "body": tkt.body,
        "created": _isoformat(tkt.created),
        "comments": serialized_comments,
    }
497
498
499 # ---------------------------------------------------------------------------
500 # Write handlers (FossilCLI)
501 # ---------------------------------------------------------------------------
502
503
def _create_ticket(args):
    """Create a new ticket via the fossil CLI."""
    from fossil.cli import FossilCLI

    project, repo = _get_repo(args["slug"])

    cli = FossilCLI()
    # The CLI needs a user identity configured before it can write artifacts.
    cli.ensure_default_user(repo.full_path)

    ticket_fields = {
        "title": args["title"],
        "comment": args["body"],
        "type": args.get("type", "Code_Defect"),
        "severity": args.get("severity", "Important"),
        "priority": args.get("priority", "Medium"),
        "status": "Open",
    }

    if cli.ticket_add(repo.full_path, ticket_fields):
        return {"success": True, "title": args["title"]}
    return {"error": "Failed to create ticket"}
526
527
def _update_ticket(args):
    """Change a ticket's status and/or append a comment."""
    from fossil.cli import FossilCLI

    project, repo = _get_repo(args["slug"])

    cli = FossilCLI()
    cli.ensure_default_user(repo.full_path)

    changes = {}
    if args.get("status"):
        changes["status"] = args["status"]
    if args.get("comment"):
        # Fossil stores incremental comments under the "icomment" field.
        changes["icomment"] = args["comment"]

    if not changes:
        return {"error": "No fields to update (provide status or comment)"}

    if not cli.ticket_change(repo.full_path, args["uuid"], changes):
        return {"error": "Failed to update ticket"}
    return {"success": True, "uuid": args["uuid"]}
550
551
552 # ---------------------------------------------------------------------------
553 # Wiki handlers (FossilReader for reads)
554 # ---------------------------------------------------------------------------
555
556
def _list_wiki_pages(args):
    """List every wiki page with its last-modified metadata."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        pages = reader.get_wiki_pages()

    listing = []
    for pg in pages:
        listing.append(
            {
                "name": pg.name,
                "last_modified": _isoformat(pg.last_modified),
                "user": pg.user,
            }
        )
    return {"pages": listing}
575
576
def _get_wiki_page(args):
    """Return one wiki page's content, or an error dict."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        page = reader.get_wiki_page(args["page_name"])

    if page is None:
        return {"error": f"Wiki page not found: {args['page_name']}"}

    return {
        "name": page.name,
        "content": page.content,
        "last_modified": _isoformat(page.last_modified),
        "user": page.user,
    }
594
595
596 # ---------------------------------------------------------------------------
597 # Branch and file history handlers
598 # ---------------------------------------------------------------------------
599
600
def _list_branches(args):
    """List all branches with their latest-checkin metadata."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])

    with FossilReader(repo.full_path) as reader:
        raw_branches = reader.get_branches()

    serialized = []
    for br in raw_branches:
        serialized.append(
            {
                "name": br["name"],
                "last_checkin": _isoformat(br["last_checkin"]),
                "last_user": br["last_user"],
                "checkin_count": br["checkin_count"],
                "last_uuid": br["last_uuid"],
            }
        )
    return {"branches": serialized}
621
622
def _get_file_blame(args):
    """Return per-line blame annotations for one file (via the fossil CLI)."""
    from fossil.cli import FossilCLI

    project, repo = _get_repo(args["slug"])
    target = args["filepath"]

    annotations = FossilCLI().blame(repo.full_path, target)
    return {"filepath": target, "lines": annotations, "total": len(annotations)}
631
632
def _get_file_history(args):
    """Return the commit history of one file, newest first."""
    from fossil.reader import FossilReader

    project, repo = _get_repo(args["slug"])
    target = args["filepath"]

    with FossilReader(repo.full_path) as reader:
        entries = reader.get_file_history(target, limit=args.get("limit", 25))

    # Serialize the datetimes in place before returning.
    for item in entries:
        item["timestamp"] = _isoformat(item.get("timestamp"))

    return {"filepath": target, "history": entries, "total": len(entries)}
646
647
648 # ---------------------------------------------------------------------------
649 # SQL query handler
650 # ---------------------------------------------------------------------------
651
652
def _sql_query(args):
    """Run a validated read-only SQL query against the repo's SQLite file.

    Returns at most 500 rows.  Improvement: a ``truncated`` flag is now set
    when more rows were available, instead of silently dropping them.
    """
    from fossil.reader import FossilReader
    from fossil.ticket_reports import TicketReport

    sql = args["sql"]
    # Reuse the ticket-report validator to reject anything but SELECT.
    error = TicketReport.validate_sql(sql)
    if error:
        return {"error": error}

    project, repo = _get_repo(args["slug"])

    max_rows = 500
    with FossilReader(repo.full_path) as reader:
        cursor = reader.conn.cursor()
        cursor.execute(sql)
        columns = [desc[0] for desc in cursor.description] if cursor.description else []
        # Fetch one row past the cap so callers can tell the result was
        # truncated rather than exactly max_rows long.
        rows = cursor.fetchmany(max_rows + 1)
        truncated = len(rows) > max_rows
        rows = rows[:max_rows]
        return {
            "columns": columns,
            "rows": [list(row) for row in rows],
            "count": len(rows),
            "truncated": truncated,
        }
+4 -2
--- pyproject.toml
+++ pyproject.toml
@@ -43,10 +43,11 @@
4343
"click>=8.1",
4444
"rich>=13.0",
4545
"markdown>=3.6",
4646
"requests>=2.31",
4747
"cryptography>=43.0",
48
+ "mcp>=1.0",
4849
]
4950
5051
[project.urls]
5152
Homepage = "https://fossilrepo.dev"
5253
Documentation = "https://fossilrepo.dev"
@@ -54,10 +55,11 @@
5455
Issues = "https://github.com/ConflictHQ/fossilrepo/issues"
5556
Demo = "https://fossilrepo.io"
5657
5758
[project.scripts]
5859
fossilrepo-ctl = "ctl.main:cli"
60
+fossilrepo-mcp = "mcp_server.__main__:run"
5961
6062
[project.optional-dependencies]
6163
dev = [
6264
"ruff>=0.7",
6365
"pytest>=8.3",
@@ -75,11 +77,11 @@
7577
[tool.ruff.lint]
7678
select = ["E", "F", "I", "W", "UP", "B", "SIM", "N"]
7779
ignore = ["E501"]
7880
7981
[tool.ruff.lint.isort]
80
-known-first-party = ["config", "core", "accounts", "organization", "projects", "pages", "fossil", "testdata", "ctl"]
82
+known-first-party = ["config", "core", "accounts", "organization", "projects", "pages", "fossil", "testdata", "ctl", "mcp_server"]
8183
8284
[tool.ruff.format]
8385
quote-style = "double"
8486
8587
[tool.pytest.ini_options]
@@ -96,10 +98,10 @@
9698
[tool.coverage.report]
9799
fail_under = 80
98100
show_missing = true
99101
100102
[tool.hatch.build.targets.wheel]
101
-packages = ["ctl", "core", "accounts", "organization", "projects", "pages", "fossil", "config"]
103
+packages = ["ctl", "core", "accounts", "organization", "projects", "pages", "fossil", "config", "mcp_server"]
102104
103105
[build-system]
104106
requires = ["hatchling"]
105107
build-backend = "hatchling.build"
106108
107109
ADDED tests/test_agent_coordination.py
108110
ADDED tests/test_json_api.py
109111
ADDED tests/test_mcp_server.py
--- pyproject.toml
+++ pyproject.toml
@@ -43,10 +43,11 @@
43 "click>=8.1",
44 "rich>=13.0",
45 "markdown>=3.6",
46 "requests>=2.31",
47 "cryptography>=43.0",
 
48 ]
49
50 [project.urls]
51 Homepage = "https://fossilrepo.dev"
52 Documentation = "https://fossilrepo.dev"
@@ -54,10 +55,11 @@
54 Issues = "https://github.com/ConflictHQ/fossilrepo/issues"
55 Demo = "https://fossilrepo.io"
56
57 [project.scripts]
58 fossilrepo-ctl = "ctl.main:cli"
 
59
60 [project.optional-dependencies]
61 dev = [
62 "ruff>=0.7",
63 "pytest>=8.3",
@@ -75,11 +77,11 @@
75 [tool.ruff.lint]
76 select = ["E", "F", "I", "W", "UP", "B", "SIM", "N"]
77 ignore = ["E501"]
78
79 [tool.ruff.lint.isort]
80 known-first-party = ["config", "core", "accounts", "organization", "projects", "pages", "fossil", "testdata", "ctl"]
81
82 [tool.ruff.format]
83 quote-style = "double"
84
85 [tool.pytest.ini_options]
@@ -96,10 +98,10 @@
96 [tool.coverage.report]
97 fail_under = 80
98 show_missing = true
99
100 [tool.hatch.build.targets.wheel]
101 packages = ["ctl", "core", "accounts", "organization", "projects", "pages", "fossil", "config"]
102
103 [build-system]
104 requires = ["hatchling"]
105 build-backend = "hatchling.build"
106
107 ADDED tests/test_agent_coordination.py
108 ADDED tests/test_json_api.py
109 ADDED tests/test_mcp_server.py
--- pyproject.toml
+++ pyproject.toml
@@ -43,10 +43,11 @@
43 "click>=8.1",
44 "rich>=13.0",
45 "markdown>=3.6",
46 "requests>=2.31",
47 "cryptography>=43.0",
48 "mcp>=1.0",
49 ]
50
51 [project.urls]
52 Homepage = "https://fossilrepo.dev"
53 Documentation = "https://fossilrepo.dev"
@@ -54,10 +55,11 @@
55 Issues = "https://github.com/ConflictHQ/fossilrepo/issues"
56 Demo = "https://fossilrepo.io"
57
58 [project.scripts]
59 fossilrepo-ctl = "ctl.main:cli"
60 fossilrepo-mcp = "mcp_server.__main__:run"
61
62 [project.optional-dependencies]
63 dev = [
64 "ruff>=0.7",
65 "pytest>=8.3",
@@ -75,11 +77,11 @@
77 [tool.ruff.lint]
78 select = ["E", "F", "I", "W", "UP", "B", "SIM", "N"]
79 ignore = ["E501"]
80
81 [tool.ruff.lint.isort]
82 known-first-party = ["config", "core", "accounts", "organization", "projects", "pages", "fossil", "testdata", "ctl", "mcp_server"]
83
84 [tool.ruff.format]
85 quote-style = "double"
86
87 [tool.pytest.ini_options]
@@ -96,10 +98,10 @@
98 [tool.coverage.report]
99 fail_under = 80
100 show_missing = true
101
102 [tool.hatch.build.targets.wheel]
103 packages = ["ctl", "core", "accounts", "organization", "projects", "pages", "fossil", "config", "mcp_server"]
104
105 [build-system]
106 requires = ["hatchling"]
107 build-backend = "hatchling.build"
108
109 ADDED tests/test_agent_coordination.py
110 ADDED tests/test_json_api.py
111 ADDED tests/test_mcp_server.py
--- a/tests/test_agent_coordination.py
+++ b/tests/test_agent_coordination.py
@@ -0,0 +1,17 @@
1
+"""Tests for agent coordination features: ticket claiming, SSE, code reviews.
2
+
3
+Tests use session auth (admin_client) since the API endpoints accept session
4
+cookies as well as Bearer tokens. We create Django-side objects directly rather
5
+than going through Fossil's SQLite for ticket verification in claiming tests.
6
+"""
7
+
8
+import json
9
+from unittest.mock import MagicMock, patch
10
+
11
+import pytest
12
+from django.contrib.auth.models import User
13
+from django.test import Client
14
+
15
+from fossil.agent_claims import TicketClaim
16
+from fossil.code_reviews import CodeReview, ReviewComment
17
+from fossil.models import FossilR
--- a/tests/test_agent_coordination.py
+++ b/tests/test_agent_coordination.py
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_agent_coordination.py
+++ b/tests/test_agent_coordination.py
@@ -0,0 +1,17 @@
1 """Tests for agent coordination features: ticket claiming, SSE, code reviews.
2
3 Tests use session auth (admin_client) since the API endpoints accept session
4 cookies as well as Bearer tokens. We create Django-side objects directly rather
5 than going through Fossil's SQLite for ticket verification in claiming tests.
6 """
7
8 import json
9 from unittest.mock import MagicMock, patch
10
11 import pytest
12 from django.contrib.auth.models import User
13 from django.test import Client
14
15 from fossil.agent_claims import TicketClaim
16 from fossil.code_reviews import CodeReview, ReviewComment
17 from fossil.models import FossilR
--- a/tests/test_json_api.py
+++ b/tests/test_json_api.py
@@ -0,0 +1,1044 @@
1
+"""Tests for JSON API endpoints at /projects/<slug>/fossil/api/.
2
+
3
+Covers:
4
+- Authentication: Bearer tokens (APIToken, PersonalAccessToken), session fallback,
5
+ invalid/expired tokens
6
+- Each endpoint: basic response shape, pagination, filtering
7
+- Access control: public vs private projects, anonymous vs authenticated
8
+"""
9
+
10
+from datetime import UTC, datetime, timedelta
11
+from unittest.mock import MagicMock, PropertyMock, patch
12
+
13
+import pytest
14
+from django.contrib.auth.models import User
15
+from django.test import Client
16
+from django.utils import timezone
17
+
18
+from accounts.models import PersonalAccessToken
19
+from fossil.api_tokens import APIToken
20
+from fossil.models import FossilRepository
21
+from fossil.reader import TicketEntry, TimelineEntry, WikiPage
22
+from fossil.releases import Release, ReleaseAsset
23
+from organization.models import Team
24
+from projects.models import Project, ProjectTeam
25
+
26
+# ---------------------------------------------------------------------------
27
+# Fixtures
28
+# ---------------------------------------------------------------------------
29
+
30
+
31
@pytest.fixture
def fossil_repo_obj(sample_project):
    """Return the auto-created FossilRepository for sample_project."""
    # The repo row already exists (auto-created with the project), so we
    # look it up — excluding soft-deleted rows — rather than creating one.
    return FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True)
35
+
36
+
37
@pytest.fixture
def api_token(fossil_repo_obj, admin_user):
    """Create a project-scoped API token and return (APIToken, raw_token)."""
    # generate() yields the raw secret plus the hash/prefix that get stored;
    # the raw value is only available here, so it is returned for headers.
    raw, token_hash, prefix = APIToken.generate()
    token = APIToken.objects.create(
        repository=fossil_repo_obj,
        name="Test API Token",
        token_hash=token_hash,
        token_prefix=prefix,
        permissions="*",
        created_by=admin_user,
    )
    return token, raw
50
+
51
+
52
@pytest.fixture
def expired_api_token(fossil_repo_obj, admin_user):
    """Create an expired project-scoped API token."""
    raw, token_hash, prefix = APIToken.generate()
    token = APIToken.objects.create(
        repository=fossil_repo_obj,
        name="Expired Token",
        token_hash=token_hash,
        token_prefix=prefix,
        permissions="*",
        # Expired yesterday — auth against this token should be rejected.
        expires_at=timezone.now() - timedelta(days=1),
        created_by=admin_user,
    )
    return token, raw
66
+
67
+
68
@pytest.fixture
def pat_token(admin_user):
    """Create a user-scoped PersonalAccessToken and return (PAT, raw_token)."""
    raw, token_hash, prefix = PersonalAccessToken.generate()
    pat = PersonalAccessToken.objects.create(
        user=admin_user,
        name="Test PAT",
        token_hash=token_hash,
        token_prefix=prefix,
        # Both scopes so this PAT can exercise read and write endpoints.
        scopes="read,write",
    )
    return pat, raw
80
+
81
+
82
@pytest.fixture
def expired_pat(admin_user):
    """Create an expired PersonalAccessToken."""
    raw, token_hash, prefix = PersonalAccessToken.generate()
    pat = PersonalAccessToken.objects.create(
        user=admin_user,
        name="Expired PAT",
        token_hash=token_hash,
        token_prefix=prefix,
        scopes="read",
        # Expired yesterday — requests using it should be denied.
        expires_at=timezone.now() - timedelta(days=1),
    )
    return pat, raw
95
+
96
+
97
@pytest.fixture
def revoked_pat(admin_user):
    """Create a revoked PersonalAccessToken."""
    raw, token_hash, prefix = PersonalAccessToken.generate()
    pat = PersonalAccessToken.objects.create(
        user=admin_user,
        name="Revoked PAT",
        token_hash=token_hash,
        token_prefix=prefix,
        scopes="read",
        # Revoked an hour ago — distinct from expiry, same expected rejection.
        revoked_at=timezone.now() - timedelta(hours=1),
    )
    return pat, raw
110
+
111
+
112
@pytest.fixture
def public_project(db, org, admin_user, sample_team):
    """A public project visible to anonymous users."""
    project = Project.objects.create(
        name="Public API Project",
        organization=org,
        visibility="public",
        created_by=admin_user,
    )
    # Grant the sample team write access so authenticated flows can also be
    # exercised against this project.
    ProjectTeam.objects.create(project=project, team=sample_team, role="write", created_by=admin_user)
    return project
123
+
124
+
125
@pytest.fixture
def public_fossil_repo(public_project):
    """Return the auto-created FossilRepository for the public project."""
    # Same lookup pattern as fossil_repo_obj, scoped to the public project.
    return FossilRepository.objects.get(project=public_project, deleted_at__isnull=True)
129
+
130
+
131
@pytest.fixture
def no_access_user(db, org, admin_user):
    """User with no team access to any project."""
    # Deliberately not added to any ProjectTeam, for negative access tests.
    return User.objects.create_user(username="noaccess_api", password="testpass123")
135
+
136
+
137
@pytest.fixture
def no_access_pat(no_access_user):
    """PAT for a user with no project access."""
    # Valid credentials, zero project membership: authenticates but must not
    # be authorized for private projects.
    raw, token_hash, prefix = PersonalAccessToken.generate()
    pat = PersonalAccessToken.objects.create(
        user=no_access_user,
        name="No Access PAT",
        token_hash=token_hash,
        token_prefix=prefix,
        scopes="read",
    )
    return pat, raw
149
+
150
+
151
@pytest.fixture
def anon_client():
    """Unauthenticated client."""
    # Fresh Client with no session or Authorization header.
    return Client()
155
+
156
+
157
+# ---------------------------------------------------------------------------
158
+# Mock helpers
159
+# ---------------------------------------------------------------------------
160
+
161
+
162
def _mock_fossil_reader():
    """Return a context-manager mock that satisfies FossilReader usage in api_views.

    Every reader method the JSON API views call is stubbed with small,
    deterministic fixture data so tests never open a real fossil repository.
    Tests assert against these exact literal values — do not change them
    without updating the corresponding assertions.
    """
    reader = MagicMock()
    # Make the mock usable as a context manager (``with FossilReader(...) as r:``).
    reader.__enter__ = MagicMock(return_value=reader)
    reader.__exit__ = MagicMock(return_value=False)

    # Timeline: two check-ins on trunk, newest first.
    reader.get_timeline.return_value = [
        TimelineEntry(
            rid=1,
            uuid="abc123def456",
            event_type="ci",
            timestamp=datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
            user="alice",
            comment="Initial commit",
            branch="trunk",
        ),
        TimelineEntry(
            rid=2,
            uuid="def456abc789",
            event_type="ci",
            timestamp=datetime(2025, 1, 14, 9, 0, 0, tzinfo=UTC),
            user="bob",
            comment="Add readme",
            branch="trunk",
        ),
    ]
    # Total check-in count used by the timeline pagination metadata.
    reader.get_checkin_count.return_value = 42

    # Tickets: two open tickets; detail/comments stubs below cover tkt-001.
    reader.get_tickets.return_value = [
        TicketEntry(
            uuid="tkt-001-uuid",
            title="Fix login bug",
            status="Open",
            type="Code_Defect",
            created=datetime(2025, 1, 10, 8, 0, 0, tzinfo=UTC),
            owner="alice",
            subsystem="auth",
            priority="Immediate",
            severity="Critical",
        ),
        TicketEntry(
            uuid="tkt-002-uuid",
            title="Add dark mode",
            status="Open",
            type="Feature_Request",
            created=datetime(2025, 1, 11, 12, 0, 0, tzinfo=UTC),
            owner="bob",
            subsystem="ui",
            priority="Medium",
            severity="Minor",
        ),
    ]
    reader.get_ticket_detail.return_value = TicketEntry(
        uuid="tkt-001-uuid",
        title="Fix login bug",
        status="Open",
        type="Code_Defect",
        created=datetime(2025, 1, 10, 8, 0, 0, tzinfo=UTC),
        owner="alice",
        subsystem="auth",
        priority="Immediate",
        severity="Critical",
        resolution="",
        body="Login fails when session expires.",
    )
    reader.get_ticket_comments.return_value = [
        {
            "timestamp": datetime(2025, 1, 11, 9, 0, 0, tzinfo=UTC),
            "user": "bob",
            "comment": "I can reproduce this.",
            "mimetype": "text/plain",
        },
    ]

    # Wiki: a page list plus a full "Home" page for the detail endpoint.
    reader.get_wiki_pages.return_value = [
        WikiPage(
            name="Home",
            content="# Welcome",
            last_modified=datetime(2025, 1, 12, 15, 0, 0, tzinfo=UTC),
            user="alice",
        ),
        WikiPage(
            name="FAQ",
            content="# FAQ\nQ: ...",
            last_modified=datetime(2025, 1, 13, 10, 0, 0, tzinfo=UTC),
            user="bob",
        ),
    ]
    reader.get_wiki_page.return_value = WikiPage(
        name="Home",
        content="# Welcome\nThis is the home page.",
        last_modified=datetime(2025, 1, 12, 15, 0, 0, tzinfo=UTC),
        user="alice",
    )

    # Branches: plain dicts mirroring what the real reader returns
    # (presumably — shape inferred from the API assertions; verify against
    # FossilReader.get_branches if it changes).
    reader.get_branches.return_value = [
        {
            "name": "trunk",
            "last_checkin": datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
            "last_user": "alice",
            "checkin_count": 30,
            "last_uuid": "abc123def456",
        },
        {
            "name": "feature-x",
            "last_checkin": datetime(2025, 1, 14, 9, 0, 0, tzinfo=UTC),
            "last_user": "bob",
            "checkin_count": 5,
            "last_uuid": "def456abc789",
        },
    ]

    # Tags
    reader.get_tags.return_value = [
        {
            "name": "v1.0.0",
            "timestamp": datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
            "user": "alice",
            "uuid": "tag-uuid-100",
        },
    ]

    # Search: one hit per category (checkins / tickets / wiki).
    reader.search.return_value = {
        "checkins": [
            {
                "uuid": "abc123def456",
                "timestamp": datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
                "user": "alice",
                "comment": "Initial commit",
            }
        ],
        "tickets": [
            {
                "uuid": "tkt-001-uuid",
                "title": "Fix login bug",
                "status": "Open",
                "created": datetime(2025, 1, 10, 8, 0, 0, tzinfo=UTC),
            }
        ],
        "wiki": [{"name": "Home"}],
    }

    return reader
310
+
311
+
312
def _patch_api_fossil():
    """Patch exists_on_disk to True and FossilReader for api_views.

    Returns a 3-tuple ``(disk_patch, reader_patch, reader)``; callers enter
    the two patchers as context managers and may tweak ``reader`` stubs.
    """
    mock_reader = _mock_fossil_reader()
    exists_patch = patch.object(
        FossilRepository,
        "exists_on_disk",
        new_callable=PropertyMock,
        return_value=True,
    )
    reader_patch = patch(
        "fossil.api_views.FossilReader",
        return_value=mock_reader,
    )
    return exists_patch, reader_patch, mock_reader
320
+
321
+
322
+def _api_url(slug, endpoint):
323
+ """Build API URL for a given project slug and endpoint."""
324
+ return f"/projects/{slug}/fossil/api/{endpoint}"
325
+
326
+
327
+def _bearer_header(raw_token):
328
+ """Build HTTP_AUTHORIZATION header for Bearer token."""
329
+ return {"HTTP_AUTHORIZATION": f"Bearer {raw_token}"}
330
+
331
+
332
+# ===========================================================================
333
+# Authentication Tests
334
+# ===========================================================================
335
+
336
+
337
@pytest.mark.django_db
class TestAPIAuthentication:
    """Test auth helper: Bearer tokens, session fallback, errors.

    Covers both token families (repo-scoped APIToken and user-scoped
    PersonalAccessToken), session-cookie fallback, and the 401 error
    strings the auth helper emits.
    """

    def test_valid_api_token(self, client, sample_project, fossil_repo_obj, api_token):
        """Project-scoped APIToken grants access."""
        _, raw = api_token
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 200
            data = response.json()
            assert data["slug"] == sample_project.slug

    def test_valid_personal_access_token(self, client, sample_project, fossil_repo_obj, pat_token):
        """User-scoped PersonalAccessToken grants access."""
        _, raw = pat_token
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 200
            data = response.json()
            assert data["slug"] == sample_project.slug

    def test_session_auth_fallback(self, admin_client, sample_project, fossil_repo_obj):
        """Session auth works when no Bearer token is provided."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "project"))
            assert response.status_code == 200
            data = response.json()
            assert data["slug"] == sample_project.slug

    def test_no_auth_returns_401(self, anon_client, sample_project, fossil_repo_obj):
        """Unauthenticated request to private project returns 401."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = anon_client.get(_api_url(sample_project.slug, "project"))
            assert response.status_code == 401
            assert response.json()["error"] == "Authentication required"

    def test_invalid_token_returns_401(self, client, sample_project, fossil_repo_obj):
        """Garbage token returns 401."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header("frp_invalid_garbage_token"))
            assert response.status_code == 401
            assert response.json()["error"] == "Invalid token"

    def test_expired_api_token_returns_401(self, client, sample_project, fossil_repo_obj, expired_api_token):
        """Expired project-scoped token returns 401."""
        _, raw = expired_api_token
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 401
            assert response.json()["error"] == "Token expired"

    def test_expired_pat_returns_401(self, client, sample_project, fossil_repo_obj, expired_pat):
        """Expired PersonalAccessToken returns 401."""
        _, raw = expired_pat
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 401
            assert response.json()["error"] == "Token expired"

    def test_revoked_pat_returns_401(self, client, sample_project, fossil_repo_obj, revoked_pat):
        """Revoked PersonalAccessToken returns 401."""
        # Revoked tokens deliberately read as "Invalid token", not "revoked",
        # so the API does not confirm that the token ever existed.
        _, raw = revoked_pat
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 401
            assert response.json()["error"] == "Invalid token"

    def test_api_token_updates_last_used_at(self, client, sample_project, fossil_repo_obj, api_token):
        """Using an API token updates its last_used_at timestamp."""
        token, raw = api_token
        assert token.last_used_at is None

        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))

        token.refresh_from_db()
        assert token.last_used_at is not None

    def test_pat_updates_last_used_at(self, client, sample_project, fossil_repo_obj, pat_token):
        """Using a PAT updates its last_used_at timestamp."""
        pat, raw = pat_token
        assert pat.last_used_at is None

        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))

        pat.refresh_from_db()
        assert pat.last_used_at is not None

    def test_deleted_api_token_returns_401(self, client, sample_project, fossil_repo_obj, api_token, admin_user):
        """Soft-deleted API token cannot authenticate."""
        token, raw = api_token
        token.soft_delete(user=admin_user)

        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 401
446
+
447
+
448
+# ===========================================================================
449
+# Access Control Tests
450
+# ===========================================================================
451
+
452
+
453
@pytest.mark.django_db
class TestAPIAccessControl:
    """Test read access control: public vs private, user roles."""

    def test_public_project_allows_anonymous(self, anon_client, public_project, public_fossil_repo):
        """Anonymous request to a public project currently still yields 401.

        The auth helper rejects unauthenticated requests before the
        public-visibility check runs, so even public projects require a
        credential on the JSON API. This test pins that behavior.
        """
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = anon_client.get(_api_url(public_project.slug, "project"))
            # Anonymous hits session fallback -> user not authenticated -> 401
            # because the auth helper returns 401 before the public check.
            assert response.status_code == 401

    def test_public_project_allows_api_token(self, client, public_project, public_fossil_repo, admin_user):
        """API token scoped to a public project's repo grants access."""
        raw, token_hash, prefix = APIToken.generate()
        APIToken.objects.create(
            repository=public_fossil_repo,
            name="Public Token",
            token_hash=token_hash,
            token_prefix=prefix,
            permissions="*",
            created_by=admin_user,
        )
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(public_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 200
            assert response.json()["slug"] == public_project.slug

    def test_private_project_denies_no_access_user(self, client, sample_project, fossil_repo_obj, no_access_pat):
        """PAT for a user with no team access to a private project returns 403."""
        _, raw = no_access_pat
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            assert response.status_code == 403
            assert response.json()["error"] == "Access denied"

    def test_api_token_for_wrong_repo_returns_401(self, client, sample_project, fossil_repo_obj, public_fossil_repo, admin_user):
        """API token scoped to a different repo cannot access another repo."""
        raw, token_hash, prefix = APIToken.generate()
        APIToken.objects.create(
            repository=public_fossil_repo,
            name="Wrong Repo Token",
            token_hash=token_hash,
            token_prefix=prefix,
            permissions="*",
            created_by=admin_user,
        )
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            # Try to access sample_project (private) with a token scoped to public_fossil_repo
            response = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
            # The token won't match the sample_project's repo, and no PAT match either -> 401
            assert response.status_code == 401
510
+
511
+
512
+# ===========================================================================
513
+# API Docs Endpoint
514
+# ===========================================================================
515
+
516
+
517
@pytest.mark.django_db
class TestAPIDocs:
    def test_api_docs_returns_endpoint_list(self, admin_client, sample_project, fossil_repo_obj):
        """The API root documents auth and lists every endpoint path."""
        response = admin_client.get(_api_url(sample_project.slug, ""))
        assert response.status_code == 200
        payload = response.json()
        assert "endpoints" in payload
        assert "auth" in payload
        paths = [entry["path"] for entry in payload["endpoints"]]
        # Each documented endpoint must appear somewhere in the path list.
        required_fragments = (
            "/project",
            "/timeline",
            "/tickets",
            "/wiki",
            "/branches",
            "/tags",
            "/releases",
            "/search",
        )
        for fragment in required_fragments:
            assert any(fragment in path for path in paths)
534
+
535
+
536
+# ===========================================================================
537
+# Project Metadata Endpoint
538
+# ===========================================================================
539
+
540
+
541
@pytest.mark.django_db
class TestAPIProject:
    def test_project_metadata(self, admin_client, sample_project, fossil_repo_obj):
        """Project endpoint echoes name/slug/visibility plus summary fields."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "project"))
            assert response.status_code == 200
            payload = response.json()
            assert payload["name"] == sample_project.name
            assert payload["slug"] == sample_project.slug
            assert payload["visibility"] == sample_project.visibility
            for summary_key in ("star_count", "description"):
                assert summary_key in payload

    def test_nonexistent_project_returns_404(self, admin_client):
        """An unknown project slug yields 404."""
        response = admin_client.get(_api_url("nonexistent-slug", "project"))
        assert response.status_code == 404
558
+
559
+
560
+# ===========================================================================
561
+# Timeline Endpoint
562
+# ===========================================================================
563
+
564
+
565
@pytest.mark.django_db
class TestAPITimeline:
    """Timeline endpoint: shape, pagination, branch filter, bad params."""

    def test_timeline_returns_checkins(self, admin_client, sample_project, fossil_repo_obj):
        """Response carries pagination metadata and fully-shaped checkins."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "timeline"))
            assert response.status_code == 200
            data = response.json()
            assert "checkins" in data
            assert "total" in data
            assert "page" in data
            assert "per_page" in data
            assert "total_pages" in data
            # The mock reader supplies exactly two timeline entries.
            assert len(data["checkins"]) == 2
            checkin = data["checkins"][0]
            assert "uuid" in checkin
            assert "timestamp" in checkin
            assert "user" in checkin
            assert "comment" in checkin
            assert "branch" in checkin

    def test_timeline_pagination(self, admin_client, sample_project, fossil_repo_obj):
        """page/per_page query params are echoed back in the response."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "timeline") + "?page=2&per_page=10")
            assert response.status_code == 200
            data = response.json()
            assert data["page"] == 2
            assert data["per_page"] == 10

    def test_timeline_branch_filter(self, admin_client, sample_project, fossil_repo_obj):
        """branch= filter returns only matching checkins.

        NOTE(review): the mock fixture only contains trunk entries, so this
        asserts the filter does not corrupt output rather than that it
        actually excludes other branches.
        """
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "timeline") + "?branch=trunk")
            assert response.status_code == 200
            data = response.json()
            # All returned checkins should be on "trunk" branch
            for checkin in data["checkins"]:
                assert checkin["branch"] == "trunk"

    def test_timeline_invalid_page_defaults(self, admin_client, sample_project, fossil_repo_obj):
        """Non-numeric page/per_page fall back to defaults instead of erroring."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "timeline") + "?page=abc&per_page=xyz")
            assert response.status_code == 200
            data = response.json()
            assert data["page"] == 1
            assert data["per_page"] == 25  # default
613
+
614
+
615
+# ===========================================================================
616
+# Tickets Endpoint
617
+# ===========================================================================
618
+
619
+
620
@pytest.mark.django_db
class TestAPITickets:
    """Tickets list endpoint: shape, status filter pass-through, pagination."""

    def test_tickets_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        """Response carries pagination metadata and fully-shaped tickets."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tickets"))
            assert response.status_code == 200
            data = response.json()
            assert "tickets" in data
            assert "total" in data
            assert "page" in data
            assert "per_page" in data
            assert "total_pages" in data
            # The mock reader supplies exactly two tickets.
            assert len(data["tickets"]) == 2
            ticket = data["tickets"][0]
            assert "uuid" in ticket
            assert "title" in ticket
            assert "status" in ticket
            assert "type" in ticket
            assert "created" in ticket

    def test_tickets_status_filter(self, admin_client, sample_project, fossil_repo_obj):
        """?status= is forwarded to the reader (filtering happens there)."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tickets") + "?status=Open")
            assert response.status_code == 200
            # Verify the reader was called with the status filter
            reader.get_tickets.assert_called_once_with(status="Open", limit=1000)

    def test_tickets_pagination(self, admin_client, sample_project, fossil_repo_obj):
        """per_page=1 over two tickets yields two pages of one ticket each."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tickets") + "?page=1&per_page=1")
            assert response.status_code == 200
            data = response.json()
            assert data["per_page"] == 1
            assert len(data["tickets"]) == 1
            assert data["total"] == 2
            assert data["total_pages"] == 2
659
+
660
+
661
+# ===========================================================================
662
+# Ticket Detail Endpoint
663
+# ===========================================================================
664
+
665
+
666
@pytest.mark.django_db
class TestAPITicketDetail:
    """Ticket detail endpoint: full ticket body plus comments, and 404."""

    def test_ticket_detail_returns_ticket(self, admin_client, sample_project, fossil_repo_obj):
        """Detail response merges ticket fields with its comment thread."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tickets/tkt-001-uuid"))
            assert response.status_code == 200
            data = response.json()
            assert data["uuid"] == "tkt-001-uuid"
            assert data["title"] == "Fix login bug"
            assert data["status"] == "Open"
            assert data["body"] == "Login fails when session expires."
            assert "comments" in data
            assert len(data["comments"]) == 1
            comment = data["comments"][0]
            assert comment["user"] == "bob"
            assert comment["comment"] == "I can reproduce this."

    def test_ticket_detail_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """A reader miss (None) maps to a 404 with a specific error string."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        # Override the stub BEFORE entering the patch context: the view gets
        # this same reader instance, so it sees the None return value.
        reader.get_ticket_detail.return_value = None
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tickets/nonexistent-uuid"))
            assert response.status_code == 404
            assert response.json()["error"] == "Ticket not found"
691
+
692
+
693
+# ===========================================================================
694
+# Wiki List Endpoint
695
+# ===========================================================================
696
+
697
+
698
@pytest.mark.django_db
class TestAPIWikiList:
    """Wiki list endpoint: page summaries and the empty case."""

    def test_wiki_list_returns_pages(self, admin_client, sample_project, fossil_repo_obj):
        """Each listed page exposes name, last_modified, and user."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "wiki"))
            assert response.status_code == 200
            data = response.json()
            assert "pages" in data
            assert len(data["pages"]) == 2
            page = data["pages"][0]
            assert "name" in page
            assert "last_modified" in page
            assert "user" in page

    def test_wiki_list_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No wiki pages yields an empty list, not an error."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        reader.get_wiki_pages.return_value = []
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "wiki"))
            assert response.status_code == 200
            data = response.json()
            assert data["pages"] == []
721
+
722
+
723
+# ===========================================================================
724
+# Wiki Page Endpoint
725
+# ===========================================================================
726
+
727
+
728
@pytest.mark.django_db
class TestAPIWikiPage:
    """Wiki page endpoint: raw + rendered content, and 404."""

    def test_wiki_page_returns_content(self, admin_client, sample_project, fossil_repo_obj):
        """Page detail returns raw content plus a rendered content_html field."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        # The HTML renderer lives in fossil.views; stub it so the test does
        # not depend on the markup pipeline.
        with disk_patch, reader_patch, patch("fossil.views._render_fossil_content", return_value="<h1>Welcome</h1>"):
            response = admin_client.get(_api_url(sample_project.slug, "wiki/Home"))
            assert response.status_code == 200
            data = response.json()
            assert data["name"] == "Home"
            assert data["content"] == "# Welcome\nThis is the home page."
            assert "content_html" in data
            assert "last_modified" in data
            assert data["user"] == "alice"

    def test_wiki_page_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """A reader miss (None) maps to a 404 with a specific error string."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        reader.get_wiki_page.return_value = None
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "wiki/Nonexistent"))
            assert response.status_code == 404
            assert response.json()["error"] == "Wiki page not found"
749
+
750
+
751
+# ===========================================================================
752
+# Branches Endpoint
753
+# ===========================================================================
754
+
755
+
756
@pytest.mark.django_db
class TestAPIBranches:
    """Branches endpoint: per-branch summary fields and the empty case."""

    def test_branches_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        """Each branch exposes name, last checkin info, and checkin count."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "branches"))
            assert response.status_code == 200
            data = response.json()
            assert "branches" in data
            assert len(data["branches"]) == 2
            branch = data["branches"][0]
            assert "name" in branch
            assert "last_checkin" in branch
            assert "last_user" in branch
            assert "checkin_count" in branch
            assert "last_uuid" in branch

    def test_branches_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No branches yields an empty list, not an error."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        reader.get_branches.return_value = []
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "branches"))
            assert response.status_code == 200
            assert response.json()["branches"] == []
780
+
781
+
782
+# ===========================================================================
783
+# Tags Endpoint
784
+# ===========================================================================
785
+
786
+
787
@pytest.mark.django_db
class TestAPITags:
    """Tags endpoint: tag fields and the empty case."""

    def test_tags_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        """Each tag exposes name, timestamp, user, and the tagged uuid."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tags"))
            assert response.status_code == 200
            data = response.json()
            assert "tags" in data
            assert len(data["tags"]) == 1
            tag = data["tags"][0]
            assert tag["name"] == "v1.0.0"
            assert "timestamp" in tag
            assert "user" in tag
            assert "uuid" in tag

    def test_tags_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No tags yields an empty list, not an error."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        reader.get_tags.return_value = []
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "tags"))
            assert response.status_code == 200
            assert response.json()["tags"] == []
810
+
811
+
812
+# ===========================================================================
813
+# Releases Endpoint
814
+# ===========================================================================
815
+
816
+
817
@pytest.mark.django_db
class TestAPIReleases:
    """Releases endpoint: listing, draft visibility by role, assets, empty.

    Releases come from the Django DB (no FossilReader involved), so these
    tests do not need the fossil patch helpers.
    """

    def test_releases_returns_list(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """A published release is returned with all its metadata fields."""
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v1.0.0",
            name="Version 1.0.0",
            body="Initial release.",
            is_prerelease=False,
            is_draft=False,
            published_at=timezone.now(),
            checkin_uuid="abc123",
            # FIX: previously this was
            #   admin_client.session.get("_auth_user_id") and User.objects.first()
            # a fragile boolean-`and` that would silently set created_by to a
            # falsy session value if the key were absent. Use the admin_user
            # fixture directly, consistent with the rest of this class.
            created_by=admin_user,
        )
        response = admin_client.get(_api_url(sample_project.slug, "releases"))
        assert response.status_code == 200
        data = response.json()
        assert "releases" in data
        assert len(data["releases"]) == 1
        rel = data["releases"][0]
        assert rel["tag_name"] == "v1.0.0"
        assert rel["name"] == "Version 1.0.0"
        assert rel["body"] == "Initial release."
        assert "published_at" in rel
        assert "assets" in rel

    def test_releases_hides_drafts_from_readers(self, client, sample_project, fossil_repo_obj, admin_user):
        """Draft releases are hidden from users without write access."""
        # Create a draft release and a published release
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v0.9.0",
            name="Draft Release",
            is_draft=True,
            created_by=admin_user,
        )
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v1.0.0",
            name="Published Release",
            is_draft=False,
            published_at=timezone.now(),
            created_by=admin_user,
        )

        # Create a read-only user with a PAT (the admin pat_token fixture was
        # previously requested here but never used).
        reader_user = User.objects.create_user(username="api_reader", password="testpass123")
        team = Team.objects.create(name="API Readers", organization=sample_project.organization, created_by=admin_user)
        team.members.add(reader_user)
        ProjectTeam.objects.create(project=sample_project, team=team, role="read", created_by=admin_user)

        raw, token_hash, prefix = PersonalAccessToken.generate()
        PersonalAccessToken.objects.create(
            user=reader_user,
            name="Reader PAT",
            token_hash=token_hash,
            token_prefix=prefix,
            scopes="read",
        )

        response = client.get(_api_url(sample_project.slug, "releases"), **_bearer_header(raw))
        assert response.status_code == 200
        data = response.json()
        # Reader should only see the published release, not the draft
        assert len(data["releases"]) == 1
        assert data["releases"][0]["tag_name"] == "v1.0.0"

    def test_releases_shows_drafts_to_writers(self, client, sample_project, fossil_repo_obj, pat_token, admin_user):
        """Draft releases are visible to users with write access."""
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v0.9.0",
            name="Draft Release",
            is_draft=True,
            created_by=admin_user,
        )
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v1.0.0",
            name="Published Release",
            is_draft=False,
            published_at=timezone.now(),
            created_by=admin_user,
        )

        # admin_user has write access via sample_team -> sample_project
        _, raw = pat_token  # PAT for admin_user
        response = client.get(_api_url(sample_project.slug, "releases"), **_bearer_header(raw))
        assert response.status_code == 200
        data = response.json()
        assert len(data["releases"]) == 2

    def test_releases_includes_assets(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Release assets are inlined with name, size, and download count."""
        release = Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v2.0.0",
            name="Version 2.0.0",
            is_draft=False,
            published_at=timezone.now(),
            created_by=admin_user,
        )
        ReleaseAsset.objects.create(
            release=release,
            name="app-v2.0.0.tar.gz",
            file_size_bytes=1024000,
            content_type="application/gzip",
            download_count=5,
            created_by=admin_user,
        )
        response = admin_client.get(_api_url(sample_project.slug, "releases"))
        assert response.status_code == 200
        data = response.json()
        assert len(data["releases"]) == 1
        assets = data["releases"][0]["assets"]
        assert len(assets) == 1
        assert assets[0]["name"] == "app-v2.0.0.tar.gz"
        assert assets[0]["file_size_bytes"] == 1024000
        assert assets[0]["download_count"] == 5

    def test_releases_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No releases yields an empty list, not an error."""
        response = admin_client.get(_api_url(sample_project.slug, "releases"))
        assert response.status_code == 200
        assert response.json()["releases"] == []
940
+
941
+
942
+# ===========================================================================
943
+# Search Endpoint
944
+# ===========================================================================
945
+
946
+
947
@pytest.mark.django_db
class TestAPISearch:
    """Search endpoint: result buckets, required query param, pass-through."""

    def test_search_returns_results(self, admin_client, sample_project, fossil_repo_obj):
        """Results are bucketed into checkins / tickets / wiki."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "search") + "?q=login")
            assert response.status_code == 200
            data = response.json()
            assert "checkins" in data
            assert "tickets" in data
            assert "wiki" in data
            # The mock reader returns one hit per bucket.
            assert len(data["checkins"]) == 1
            assert len(data["tickets"]) == 1
            assert len(data["wiki"]) == 1

    def test_search_missing_query_returns_400(self, admin_client, sample_project, fossil_repo_obj):
        """Omitting ?q entirely is a 400 with a specific error string."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "search"))
            assert response.status_code == 400
            assert response.json()["error"] == "Query parameter 'q' is required"

    def test_search_empty_query_returns_400(self, admin_client, sample_project, fossil_repo_obj):
        """An empty ?q= value is treated the same as a missing one."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.get(_api_url(sample_project.slug, "search") + "?q=")
            assert response.status_code == 400

    def test_search_passes_query_to_reader(self, admin_client, sample_project, fossil_repo_obj):
        """The decoded query string ('+' -> space) reaches the reader verbatim."""
        disk_patch, reader_patch, reader = _patch_api_fossil()
        with disk_patch, reader_patch:
            admin_client.get(_api_url(sample_project.slug, "search") + "?q=test+query")
            reader.search.assert_called_once_with("test query", limit=50)
980
+
981
+
982
+# ===========================================================================
983
+# HTTP Method Restrictions
984
+# ===========================================================================
985
+
986
+
987
@pytest.mark.django_db
class TestAPIMethodRestrictions:
    """All endpoints should only accept GET requests."""

    def _assert_post_rejected(self, http_client, project, endpoint):
        # Shared driver: a POST to any read-only endpoint must yield 405.
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = http_client.post(_api_url(project.slug, endpoint))
            assert response.status_code == 405

    def test_post_to_project_returns_405(self, admin_client, sample_project, fossil_repo_obj):
        self._assert_post_rejected(admin_client, sample_project, "project")

    def test_post_to_timeline_returns_405(self, admin_client, sample_project, fossil_repo_obj):
        self._assert_post_rejected(admin_client, sample_project, "timeline")

    def test_post_to_tickets_returns_405(self, admin_client, sample_project, fossil_repo_obj):
        self._assert_post_rejected(admin_client, sample_project, "tickets")

    def test_post_to_search_returns_405(self, admin_client, sample_project, fossil_repo_obj):
        self._assert_post_rejected(admin_client, sample_project, "search")
1014
+
1015
+
1016
+# ===========================================================================
1017
+# Cross-endpoint auth consistency
1018
+# ===========================================================================
1019
+
1020
+
1021
@pytest.mark.django_db
class TestAPIAllEndpointsRequireAuth:
    """Every endpoint should return 401 for unauthenticated requests to private projects."""

    # One case per read endpoint; the search entry carries its required ?q=
    # so that 401 (auth) is tested rather than 400 (missing query).
    @pytest.mark.parametrize(
        "endpoint",
        [
            "project",
            "timeline",
            "tickets",
            "tickets/some-uuid",
            "wiki",
            "wiki/Home",
            "branches",
            "tags",
            "releases",
            "search?q=test",
        ],
    )
    def test_endpoint_requires_auth(self, anon_client, sample_project, fossil_repo_obj, endpoint):
        """Anonymous access to any private-project endpoint is a 401."""
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = anon_client.get(_api_url(sample_project.slug, endpoint))
            assert response.status_code == 401
--- a/tests/test_json_api.py
+++ b/tests/test_json_api.py
@@ -0,0 +1,1044 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_json_api.py
+++ b/tests/test_json_api.py
@@ -0,0 +1,1044 @@
1 """Tests for JSON API endpoints at /projects/<slug>/fossil/api/.
2
3 Covers:
4 - Authentication: Bearer tokens (APIToken, PersonalAccessToken), session fallback,
5 invalid/expired tokens
6 - Each endpoint: basic response shape, pagination, filtering
7 - Access control: public vs private projects, anonymous vs authenticated
8 """
9
10 from datetime import UTC, datetime, timedelta
11 from unittest.mock import MagicMock, PropertyMock, patch
12
13 import pytest
14 from django.contrib.auth.models import User
15 from django.test import Client
16 from django.utils import timezone
17
18 from accounts.models import PersonalAccessToken
19 from fossil.api_tokens import APIToken
20 from fossil.models import FossilRepository
21 from fossil.reader import TicketEntry, TimelineEntry, WikiPage
22 from fossil.releases import Release, ReleaseAsset
23 from organization.models import Team
24 from projects.models import Project, ProjectTeam
25
26 # ---------------------------------------------------------------------------
27 # Fixtures
28 # ---------------------------------------------------------------------------
29
30
@pytest.fixture
def fossil_repo_obj(sample_project):
    """Live (non-deleted) FossilRepository auto-created for sample_project."""
    repo = FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True)
    return repo
35
36
@pytest.fixture
def api_token(fossil_repo_obj, admin_user):
    """Project-scoped APIToken with full permissions; yields (token, raw_secret)."""
    raw_secret, digest, prefix = APIToken.generate()
    record = APIToken.objects.create(
        repository=fossil_repo_obj,
        name="Test API Token",
        token_hash=digest,
        token_prefix=prefix,
        permissions="*",
        created_by=admin_user,
    )
    return record, raw_secret
50
51
@pytest.fixture
def expired_api_token(fossil_repo_obj, admin_user):
    """Project-scoped APIToken whose expiry is already in the past."""
    raw_secret, digest, prefix = APIToken.generate()
    record = APIToken.objects.create(
        repository=fossil_repo_obj,
        name="Expired Token",
        token_hash=digest,
        token_prefix=prefix,
        permissions="*",
        expires_at=timezone.now() - timedelta(days=1),
        created_by=admin_user,
    )
    return record, raw_secret
66
67
@pytest.fixture
def pat_token(admin_user):
    """User-scoped PersonalAccessToken with read/write scopes; yields (pat, raw_secret)."""
    raw_secret, digest, prefix = PersonalAccessToken.generate()
    record = PersonalAccessToken.objects.create(
        user=admin_user,
        name="Test PAT",
        token_hash=digest,
        token_prefix=prefix,
        scopes="read,write",
    )
    return record, raw_secret
80
81
@pytest.fixture
def expired_pat(admin_user):
    """PersonalAccessToken whose expiry is already in the past."""
    raw_secret, digest, prefix = PersonalAccessToken.generate()
    record = PersonalAccessToken.objects.create(
        user=admin_user,
        name="Expired PAT",
        token_hash=digest,
        token_prefix=prefix,
        scopes="read",
        expires_at=timezone.now() - timedelta(days=1),
    )
    return record, raw_secret
95
96
@pytest.fixture
def revoked_pat(admin_user):
    """PersonalAccessToken that was revoked an hour ago."""
    raw_secret, digest, prefix = PersonalAccessToken.generate()
    record = PersonalAccessToken.objects.create(
        user=admin_user,
        name="Revoked PAT",
        token_hash=digest,
        token_prefix=prefix,
        scopes="read",
        revoked_at=timezone.now() - timedelta(hours=1),
    )
    return record, raw_secret
110
111
@pytest.fixture
def public_project(db, org, admin_user, sample_team):
    """Project with public visibility, writable by sample_team."""
    proj = Project.objects.create(
        name="Public API Project",
        organization=org,
        visibility="public",
        created_by=admin_user,
    )
    ProjectTeam.objects.create(project=proj, team=sample_team, role="write", created_by=admin_user)
    return proj
123
124
@pytest.fixture
def public_fossil_repo(public_project):
    """Live (non-deleted) FossilRepository auto-created for the public project."""
    repo = FossilRepository.objects.get(project=public_project, deleted_at__isnull=True)
    return repo
129
130
@pytest.fixture
def no_access_user(db, org, admin_user):
    """User belonging to no team, hence without access to any project."""
    user = User.objects.create_user(username="noaccess_api", password="testpass123")
    return user
135
136
@pytest.fixture
def no_access_pat(no_access_user):
    """PAT belonging to a user with no project access; yields (pat, raw_secret)."""
    raw_secret, digest, prefix = PersonalAccessToken.generate()
    record = PersonalAccessToken.objects.create(
        user=no_access_user,
        name="No Access PAT",
        token_hash=digest,
        token_prefix=prefix,
        scopes="read",
    )
    return record, raw_secret
149
150
@pytest.fixture
def anon_client():
    """Django test client carrying no session or credentials."""
    return Client()
155
156
157 # ---------------------------------------------------------------------------
158 # Mock helpers
159 # ---------------------------------------------------------------------------
160
161
def _mock_fossil_reader():
    """Return a context-manager mock that satisfies FossilReader usage in api_views.

    The mock is self-entering (``__enter__`` returns itself) so it works directly
    as a ``with`` target.  Each reader method is stubbed with a small fixed
    dataset; tests that need different data override the relevant
    ``return_value`` on the reader object returned by ``_patch_api_fossil``.
    """
    reader = MagicMock()
    # Make the mock usable as `with FossilReader(...) as reader:`; returning
    # False from __exit__ lets exceptions propagate normally.
    reader.__enter__ = MagicMock(return_value=reader)
    reader.__exit__ = MagicMock(return_value=False)

    # Timeline: two checkins on trunk, newest first.
    reader.get_timeline.return_value = [
        TimelineEntry(
            rid=1,
            uuid="abc123def456",
            event_type="ci",
            timestamp=datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
            user="alice",
            comment="Initial commit",
            branch="trunk",
        ),
        TimelineEntry(
            rid=2,
            uuid="def456abc789",
            event_type="ci",
            timestamp=datetime(2025, 1, 14, 9, 0, 0, tzinfo=UTC),
            user="bob",
            comment="Add readme",
            branch="trunk",
        ),
    ]
    reader.get_checkin_count.return_value = 42

    # Tickets: one defect and one feature request, both open.
    reader.get_tickets.return_value = [
        TicketEntry(
            uuid="tkt-001-uuid",
            title="Fix login bug",
            status="Open",
            type="Code_Defect",
            created=datetime(2025, 1, 10, 8, 0, 0, tzinfo=UTC),
            owner="alice",
            subsystem="auth",
            priority="Immediate",
            severity="Critical",
        ),
        TicketEntry(
            uuid="tkt-002-uuid",
            title="Add dark mode",
            status="Open",
            type="Feature_Request",
            created=datetime(2025, 1, 11, 12, 0, 0, tzinfo=UTC),
            owner="bob",
            subsystem="ui",
            priority="Medium",
            severity="Minor",
        ),
    ]
    # Detail view of the first ticket, including its body text.
    reader.get_ticket_detail.return_value = TicketEntry(
        uuid="tkt-001-uuid",
        title="Fix login bug",
        status="Open",
        type="Code_Defect",
        created=datetime(2025, 1, 10, 8, 0, 0, tzinfo=UTC),
        owner="alice",
        subsystem="auth",
        priority="Immediate",
        severity="Critical",
        resolution="",
        body="Login fails when session expires.",
    )
    reader.get_ticket_comments.return_value = [
        {
            "timestamp": datetime(2025, 1, 11, 9, 0, 0, tzinfo=UTC),
            "user": "bob",
            "comment": "I can reproduce this.",
            "mimetype": "text/plain",
        },
    ]

    # Wiki: two pages; get_wiki_page serves a fuller "Home" page.
    reader.get_wiki_pages.return_value = [
        WikiPage(
            name="Home",
            content="# Welcome",
            last_modified=datetime(2025, 1, 12, 15, 0, 0, tzinfo=UTC),
            user="alice",
        ),
        WikiPage(
            name="FAQ",
            content="# FAQ\nQ: ...",
            last_modified=datetime(2025, 1, 13, 10, 0, 0, tzinfo=UTC),
            user="bob",
        ),
    ]
    reader.get_wiki_page.return_value = WikiPage(
        name="Home",
        content="# Welcome\nThis is the home page.",
        last_modified=datetime(2025, 1, 12, 15, 0, 0, tzinfo=UTC),
        user="alice",
    )

    # Branches: trunk plus one feature branch.
    reader.get_branches.return_value = [
        {
            "name": "trunk",
            "last_checkin": datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
            "last_user": "alice",
            "checkin_count": 30,
            "last_uuid": "abc123def456",
        },
        {
            "name": "feature-x",
            "last_checkin": datetime(2025, 1, 14, 9, 0, 0, tzinfo=UTC),
            "last_user": "bob",
            "checkin_count": 5,
            "last_uuid": "def456abc789",
        },
    ]

    # Tags: a single release tag.
    reader.get_tags.return_value = [
        {
            "name": "v1.0.0",
            "timestamp": datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
            "user": "alice",
            "uuid": "tag-uuid-100",
        },
    ]

    # Search: one hit per category (checkins, tickets, wiki).
    reader.search.return_value = {
        "checkins": [
            {
                "uuid": "abc123def456",
                "timestamp": datetime(2025, 1, 15, 10, 30, 0, tzinfo=UTC),
                "user": "alice",
                "comment": "Initial commit",
            }
        ],
        "tickets": [
            {
                "uuid": "tkt-001-uuid",
                "title": "Fix login bug",
                "status": "Open",
                "created": datetime(2025, 1, 10, 8, 0, 0, tzinfo=UTC),
            }
        ],
        "wiki": [{"name": "Home"}],
    }

    return reader
310
311
def _patch_api_fossil():
    """Build patches so api_views sees a repo on disk and a canned FossilReader.

    Returns ``(exists_patch, reader_patch, mock_reader)``; the two patches are
    un-started context managers for the caller to enter, and ``mock_reader`` is
    the underlying mock so individual tests can tweak its return values.
    """
    mock_reader = _mock_fossil_reader()
    exists_patch = patch.object(
        FossilRepository, "exists_on_disk", new_callable=PropertyMock, return_value=True
    )
    reader_patch = patch("fossil.api_views.FossilReader", return_value=mock_reader)
    return exists_patch, reader_patch, mock_reader
320
321
322 def _api_url(slug, endpoint):
323 """Build API URL for a given project slug and endpoint."""
324 return f"/projects/{slug}/fossil/api/{endpoint}"
325
326
327 def _bearer_header(raw_token):
328 """Build HTTP_AUTHORIZATION header for Bearer token."""
329 return {"HTTP_AUTHORIZATION": f"Bearer {raw_token}"}
330
331
332 # ===========================================================================
333 # Authentication Tests
334 # ===========================================================================
335
336
@pytest.mark.django_db
class TestAPIAuthentication:
    """Auth helper behavior: Bearer tokens, session fallback, error responses."""

    def test_valid_api_token(self, client, sample_project, fossil_repo_obj, api_token):
        """A project-scoped APIToken grants access."""
        _, raw = api_token
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 200
        assert resp.json()["slug"] == sample_project.slug

    def test_valid_personal_access_token(self, client, sample_project, fossil_repo_obj, pat_token):
        """A user-scoped PersonalAccessToken grants access."""
        _, raw = pat_token
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 200
        assert resp.json()["slug"] == sample_project.slug

    def test_session_auth_fallback(self, admin_client, sample_project, fossil_repo_obj):
        """Session auth is honored when no Bearer token is supplied."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "project"))
        assert resp.status_code == 200
        assert resp.json()["slug"] == sample_project.slug

    def test_no_auth_returns_401(self, anon_client, sample_project, fossil_repo_obj):
        """Anonymous request to a private project is rejected with 401."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = anon_client.get(_api_url(sample_project.slug, "project"))
        assert resp.status_code == 401
        assert resp.json()["error"] == "Authentication required"

    def test_invalid_token_returns_401(self, client, sample_project, fossil_repo_obj):
        """A garbage Bearer token is rejected with 401."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(
                _api_url(sample_project.slug, "project"),
                **_bearer_header("frp_invalid_garbage_token"),
            )
        assert resp.status_code == 401
        assert resp.json()["error"] == "Invalid token"

    def test_expired_api_token_returns_401(self, client, sample_project, fossil_repo_obj, expired_api_token):
        """An expired project-scoped token is rejected with 401."""
        _, raw = expired_api_token
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 401
        assert resp.json()["error"] == "Token expired"

    def test_expired_pat_returns_401(self, client, sample_project, fossil_repo_obj, expired_pat):
        """An expired PersonalAccessToken is rejected with 401."""
        _, raw = expired_pat
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 401
        assert resp.json()["error"] == "Token expired"

    def test_revoked_pat_returns_401(self, client, sample_project, fossil_repo_obj, revoked_pat):
        """A revoked PersonalAccessToken is treated like an invalid one."""
        _, raw = revoked_pat
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 401
        assert resp.json()["error"] == "Invalid token"

    def test_api_token_updates_last_used_at(self, client, sample_project, fossil_repo_obj, api_token):
        """Authenticating with an API token stamps its last_used_at."""
        token, raw = api_token
        assert token.last_used_at is None
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        token.refresh_from_db()
        assert token.last_used_at is not None

    def test_pat_updates_last_used_at(self, client, sample_project, fossil_repo_obj, pat_token):
        """Authenticating with a PAT stamps its last_used_at."""
        pat, raw = pat_token
        assert pat.last_used_at is None
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        pat.refresh_from_db()
        assert pat.last_used_at is not None

    def test_deleted_api_token_returns_401(self, client, sample_project, fossil_repo_obj, api_token, admin_user):
        """A soft-deleted API token can no longer authenticate."""
        token, raw = api_token
        token.soft_delete(user=admin_user)
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 401
446
447
448 # ===========================================================================
449 # Access Control Tests
450 # ===========================================================================
451
452
@pytest.mark.django_db
class TestAPIAccessControl:
    """Read access control: public vs private projects and user roles."""

    def test_public_project_allows_anonymous(self, anon_client, public_project, public_fossil_repo):
        """Anonymous hit on a public project still gets 401: the auth helper
        rejects unauthenticated requests before the visibility check runs."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = anon_client.get(_api_url(public_project.slug, "project"))
        assert resp.status_code == 401

    def test_public_project_allows_api_token(self, client, public_project, public_fossil_repo, admin_user):
        """An API token scoped to the public project's repo grants access."""
        raw, digest, prefix = APIToken.generate()
        APIToken.objects.create(
            repository=public_fossil_repo,
            name="Public Token",
            token_hash=digest,
            token_prefix=prefix,
            permissions="*",
            created_by=admin_user,
        )
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(public_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 200
        assert resp.json()["slug"] == public_project.slug

    def test_private_project_denies_no_access_user(self, client, sample_project, fossil_repo_obj, no_access_pat):
        """A PAT for a user without team access to a private project gets 403."""
        _, raw = no_access_pat
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 403
        assert resp.json()["error"] == "Access denied"

    def test_api_token_for_wrong_repo_returns_401(self, client, sample_project, fossil_repo_obj, public_fossil_repo, admin_user):
        """A token scoped to one repo cannot authenticate against another."""
        raw, digest, prefix = APIToken.generate()
        APIToken.objects.create(
            repository=public_fossil_repo,
            name="Wrong Repo Token",
            token_hash=digest,
            token_prefix=prefix,
            permissions="*",
            created_by=admin_user,
        )
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            # Token is scoped to public_fossil_repo and no PAT matches -> 401.
            resp = client.get(_api_url(sample_project.slug, "project"), **_bearer_header(raw))
        assert resp.status_code == 401
510
511
512 # ===========================================================================
513 # API Docs Endpoint
514 # ===========================================================================
515
516
@pytest.mark.django_db
class TestAPIDocs:
    def test_api_docs_returns_endpoint_list(self, admin_client, sample_project, fossil_repo_obj):
        """The API root documents auth plus every endpoint path."""
        resp = admin_client.get(_api_url(sample_project.slug, ""))
        assert resp.status_code == 200
        body = resp.json()
        assert "endpoints" in body
        assert "auth" in body
        paths = [entry["path"] for entry in body["endpoints"]]
        expected = ("/project", "/timeline", "/tickets", "/wiki", "/branches", "/tags", "/releases", "/search")
        for fragment in expected:
            assert any(fragment in p for p in paths)
534
535
536 # ===========================================================================
537 # Project Metadata Endpoint
538 # ===========================================================================
539
540
@pytest.mark.django_db
class TestAPIProject:
    def test_project_metadata(self, admin_client, sample_project, fossil_repo_obj):
        """Project endpoint echoes name/slug/visibility plus extra metadata keys."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "project"))
        assert resp.status_code == 200
        body = resp.json()
        assert body["name"] == sample_project.name
        assert body["slug"] == sample_project.slug
        assert body["visibility"] == sample_project.visibility
        assert "star_count" in body
        assert "description" in body

    def test_nonexistent_project_returns_404(self, admin_client):
        """Unknown project slug yields 404."""
        resp = admin_client.get(_api_url("nonexistent-slug", "project"))
        assert resp.status_code == 404
558
559
560 # ===========================================================================
561 # Timeline Endpoint
562 # ===========================================================================
563
564
@pytest.mark.django_db
class TestAPITimeline:
    def test_timeline_returns_checkins(self, admin_client, sample_project, fossil_repo_obj):
        """Timeline response carries pagination metadata and per-checkin fields."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "timeline"))
        assert resp.status_code == 200
        body = resp.json()
        for key in ("checkins", "total", "page", "per_page", "total_pages"):
            assert key in body
        assert len(body["checkins"]) == 2
        first = body["checkins"][0]
        for key in ("uuid", "timestamp", "user", "comment", "branch"):
            assert key in first

    def test_timeline_pagination(self, admin_client, sample_project, fossil_repo_obj):
        """page/per_page query params are echoed back in the response."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "timeline") + "?page=2&per_page=10")
        assert resp.status_code == 200
        body = resp.json()
        assert body["page"] == 2
        assert body["per_page"] == 10

    def test_timeline_branch_filter(self, admin_client, sample_project, fossil_repo_obj):
        """branch= filter restricts results to checkins on that branch."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "timeline") + "?branch=trunk")
        assert resp.status_code == 200
        assert all(entry["branch"] == "trunk" for entry in resp.json()["checkins"])

    def test_timeline_invalid_page_defaults(self, admin_client, sample_project, fossil_repo_obj):
        """Non-numeric page/per_page fall back to defaults (page 1, 25 per page)."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "timeline") + "?page=abc&per_page=xyz")
        assert resp.status_code == 200
        body = resp.json()
        assert body["page"] == 1
        assert body["per_page"] == 25  # default page size
613
614
615 # ===========================================================================
616 # Tickets Endpoint
617 # ===========================================================================
618
619
@pytest.mark.django_db
class TestAPITickets:
    def test_tickets_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        """Ticket listing carries pagination metadata and core ticket fields."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tickets"))
        assert resp.status_code == 200
        body = resp.json()
        for key in ("tickets", "total", "page", "per_page", "total_pages"):
            assert key in body
        assert len(body["tickets"]) == 2
        first = body["tickets"][0]
        for key in ("uuid", "title", "status", "type", "created"):
            assert key in first

    def test_tickets_status_filter(self, admin_client, sample_project, fossil_repo_obj):
        """status= query param is forwarded to FossilReader.get_tickets."""
        exists_patch, reader_patch, mock_reader = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tickets") + "?status=Open")
        assert resp.status_code == 200
        mock_reader.get_tickets.assert_called_once_with(status="Open", limit=1000)

    def test_tickets_pagination(self, admin_client, sample_project, fossil_repo_obj):
        """per_page=1 slices the two mock tickets into two pages."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tickets") + "?page=1&per_page=1")
        assert resp.status_code == 200
        body = resp.json()
        assert body["per_page"] == 1
        assert len(body["tickets"]) == 1
        assert body["total"] == 2
        assert body["total_pages"] == 2
659
660
661 # ===========================================================================
662 # Ticket Detail Endpoint
663 # ===========================================================================
664
665
@pytest.mark.django_db
class TestAPITicketDetail:
    def test_ticket_detail_returns_ticket(self, admin_client, sample_project, fossil_repo_obj):
        """Detail view includes full ticket fields plus its comment thread."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tickets/tkt-001-uuid"))
        assert resp.status_code == 200
        body = resp.json()
        assert body["uuid"] == "tkt-001-uuid"
        assert body["title"] == "Fix login bug"
        assert body["status"] == "Open"
        assert body["body"] == "Login fails when session expires."
        assert "comments" in body
        assert len(body["comments"]) == 1
        first_comment = body["comments"][0]
        assert first_comment["user"] == "bob"
        assert first_comment["comment"] == "I can reproduce this."

    def test_ticket_detail_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Unknown ticket uuid yields 404 with an error message."""
        exists_patch, reader_patch, mock_reader = _patch_api_fossil()
        mock_reader.get_ticket_detail.return_value = None
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tickets/nonexistent-uuid"))
        assert resp.status_code == 404
        assert resp.json()["error"] == "Ticket not found"
691
692
693 # ===========================================================================
694 # Wiki List Endpoint
695 # ===========================================================================
696
697
@pytest.mark.django_db
class TestAPIWikiList:
    def test_wiki_list_returns_pages(self, admin_client, sample_project, fossil_repo_obj):
        """Listing exposes name, last_modified and user for each wiki page."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "wiki"))
        assert resp.status_code == 200
        body = resp.json()
        assert "pages" in body
        assert len(body["pages"]) == 2
        first = body["pages"][0]
        for key in ("name", "last_modified", "user"):
            assert key in first

    def test_wiki_list_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No wiki pages -> empty list, still 200."""
        exists_patch, reader_patch, mock_reader = _patch_api_fossil()
        mock_reader.get_wiki_pages.return_value = []
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "wiki"))
        assert resp.status_code == 200
        assert resp.json()["pages"] == []
721
722
723 # ===========================================================================
724 # Wiki Page Endpoint
725 # ===========================================================================
726
727
@pytest.mark.django_db
class TestAPIWikiPage:
    def test_wiki_page_returns_content(self, admin_client, sample_project, fossil_repo_obj):
        """Page view returns raw content plus rendered content_html."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        render_patch = patch("fossil.views._render_fossil_content", return_value="<h1>Welcome</h1>")
        with exists_patch, reader_patch, render_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "wiki/Home"))
        assert resp.status_code == 200
        body = resp.json()
        assert body["name"] == "Home"
        assert body["content"] == "# Welcome\nThis is the home page."
        assert "content_html" in body
        assert "last_modified" in body
        assert body["user"] == "alice"

    def test_wiki_page_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Unknown page name yields 404 with an error message."""
        exists_patch, reader_patch, mock_reader = _patch_api_fossil()
        mock_reader.get_wiki_page.return_value = None
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "wiki/Nonexistent"))
        assert resp.status_code == 404
        assert resp.json()["error"] == "Wiki page not found"
749
750
751 # ===========================================================================
752 # Branches Endpoint
753 # ===========================================================================
754
755
@pytest.mark.django_db
class TestAPIBranches:
    def test_branches_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        """Branch listing exposes name, last activity and checkin stats."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "branches"))
        assert resp.status_code == 200
        body = resp.json()
        assert "branches" in body
        assert len(body["branches"]) == 2
        first = body["branches"][0]
        for key in ("name", "last_checkin", "last_user", "checkin_count", "last_uuid"):
            assert key in first

    def test_branches_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No branches -> empty list, still 200."""
        exists_patch, reader_patch, mock_reader = _patch_api_fossil()
        mock_reader.get_branches.return_value = []
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "branches"))
        assert resp.status_code == 200
        assert resp.json()["branches"] == []
780
781
782 # ===========================================================================
783 # Tags Endpoint
784 # ===========================================================================
785
786
@pytest.mark.django_db
class TestAPITags:
    def test_tags_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        """Tag listing exposes name, timestamp, user and uuid."""
        exists_patch, reader_patch, _ = _patch_api_fossil()
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tags"))
        assert resp.status_code == 200
        body = resp.json()
        assert "tags" in body
        assert len(body["tags"]) == 1
        first = body["tags"][0]
        assert first["name"] == "v1.0.0"
        for key in ("timestamp", "user", "uuid"):
            assert key in first

    def test_tags_empty(self, admin_client, sample_project, fossil_repo_obj):
        """No tags -> empty list, still 200."""
        exists_patch, reader_patch, mock_reader = _patch_api_fossil()
        mock_reader.get_tags.return_value = []
        with exists_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "tags"))
        assert resp.status_code == 200
        assert resp.json()["tags"] == []
810
811
812 # ===========================================================================
813 # Releases Endpoint
814 # ===========================================================================
815
816
@pytest.mark.django_db
class TestAPIReleases:
    """Exercises the /releases endpoint: listing, draft visibility and assets.

    Fix: the first test previously set
    ``created_by=admin_client.session.get("_auth_user_id") and User.objects.first()``
    — an obscure boolean-``and`` that only works because the admin client is
    logged in, and would silently yield a falsy non-user value otherwise.
    The user is now fetched directly.
    """

    def test_releases_returns_list(self, admin_client, sample_project, fossil_repo_obj):
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v1.0.0",
            name="Version 1.0.0",
            body="Initial release.",
            is_prerelease=False,
            is_draft=False,
            published_at=timezone.now(),
            checkin_uuid="abc123",
            created_by=User.objects.first(),
        )
        response = admin_client.get(_api_url(sample_project.slug, "releases"))
        assert response.status_code == 200
        data = response.json()
        assert "releases" in data
        assert len(data["releases"]) == 1
        rel = data["releases"][0]
        assert rel["tag_name"] == "v1.0.0"
        assert rel["name"] == "Version 1.0.0"
        assert rel["body"] == "Initial release."
        assert "published_at" in rel
        assert "assets" in rel

    def test_releases_hides_drafts_from_readers(self, client, sample_project, fossil_repo_obj, pat_token, admin_user):
        """Draft releases are hidden from users without write access."""
        # Create a draft release and a published release
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v0.9.0",
            name="Draft Release",
            is_draft=True,
            created_by=admin_user,
        )
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v1.0.0",
            name="Published Release",
            is_draft=False,
            published_at=timezone.now(),
            created_by=admin_user,
        )

        # Create a read-only user with a PAT
        reader_user = User.objects.create_user(username="api_reader", password="testpass123")
        team = Team.objects.create(name="API Readers", organization=sample_project.organization, created_by=admin_user)
        team.members.add(reader_user)
        ProjectTeam.objects.create(project=sample_project, team=team, role="read", created_by=admin_user)

        raw, token_hash, prefix = PersonalAccessToken.generate()
        PersonalAccessToken.objects.create(
            user=reader_user,
            name="Reader PAT",
            token_hash=token_hash,
            token_prefix=prefix,
            scopes="read",
        )

        response = client.get(_api_url(sample_project.slug, "releases"), **_bearer_header(raw))
        assert response.status_code == 200
        data = response.json()
        # Reader should only see the published release, not the draft
        assert len(data["releases"]) == 1
        assert data["releases"][0]["tag_name"] == "v1.0.0"

    def test_releases_shows_drafts_to_writers(self, client, sample_project, fossil_repo_obj, pat_token, admin_user):
        """Draft releases are visible to users with write access."""
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v0.9.0",
            name="Draft Release",
            is_draft=True,
            created_by=admin_user,
        )
        Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v1.0.0",
            name="Published Release",
            is_draft=False,
            published_at=timezone.now(),
            created_by=admin_user,
        )

        # admin_user has write access via sample_team -> sample_project
        _, raw = pat_token  # PAT for admin_user
        response = client.get(_api_url(sample_project.slug, "releases"), **_bearer_header(raw))
        assert response.status_code == 200
        data = response.json()
        assert len(data["releases"]) == 2

    def test_releases_includes_assets(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Asset metadata (name, size, download count) is nested per release."""
        release = Release.objects.create(
            repository=fossil_repo_obj,
            tag_name="v2.0.0",
            name="Version 2.0.0",
            is_draft=False,
            published_at=timezone.now(),
            created_by=admin_user,
        )
        ReleaseAsset.objects.create(
            release=release,
            name="app-v2.0.0.tar.gz",
            file_size_bytes=1024000,
            content_type="application/gzip",
            download_count=5,
            created_by=admin_user,
        )
        response = admin_client.get(_api_url(sample_project.slug, "releases"))
        assert response.status_code == 200
        data = response.json()
        assert len(data["releases"]) == 1
        assets = data["releases"][0]["assets"]
        assert len(assets) == 1
        assert assets[0]["name"] == "app-v2.0.0.tar.gz"
        assert assets[0]["file_size_bytes"] == 1024000
        assert assets[0]["download_count"] == 5

    def test_releases_empty(self, admin_client, sample_project, fossil_repo_obj):
        response = admin_client.get(_api_url(sample_project.slug, "releases"))
        assert response.status_code == 200
        assert response.json()["releases"] == []
940
941
942 # ===========================================================================
943 # Search Endpoint
944 # ===========================================================================
945
946
@pytest.mark.django_db
class TestAPISearch:
    """Exercises the /search endpoint and its `q` query-parameter handling."""

    def test_search_returns_results(self, admin_client, sample_project, fossil_repo_obj):
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "search") + "?q=login")
        assert resp.status_code == 200
        payload = resp.json()
        # One hit expected in each of the three result sections.
        for section in ("checkins", "tickets", "wiki"):
            assert section in payload
            assert len(payload[section]) == 1

    def test_search_missing_query_returns_400(self, admin_client, sample_project, fossil_repo_obj):
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "search"))
        assert resp.status_code == 400
        assert resp.json()["error"] == "Query parameter 'q' is required"

    def test_search_empty_query_returns_400(self, admin_client, sample_project, fossil_repo_obj):
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            resp = admin_client.get(_api_url(sample_project.slug, "search") + "?q=")
        assert resp.status_code == 400

    def test_search_passes_query_to_reader(self, admin_client, sample_project, fossil_repo_obj):
        disk_patch, reader_patch, reader = _patch_api_fossil()
        with disk_patch, reader_patch:
            admin_client.get(_api_url(sample_project.slug, "search") + "?q=test+query")
        # URL-encoded "+" must be decoded to a space before reaching the reader.
        reader.search.assert_called_once_with("test query", limit=50)
980
981
982 # ===========================================================================
983 # HTTP Method Restrictions
984 # ===========================================================================
985
986
@pytest.mark.django_db
class TestAPIMethodRestrictions:
    """All endpoints should only accept GET requests.

    Fix: the original four tests were byte-for-byte identical apart from the
    endpoint name; they are collapsed into one parametrized test with the
    same coverage.
    """

    @pytest.mark.parametrize("endpoint", ["project", "timeline", "tickets", "search"])
    def test_post_returns_405(self, admin_client, sample_project, fossil_repo_obj, endpoint):
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            response = admin_client.post(_api_url(sample_project.slug, endpoint))
        assert response.status_code == 405
1014
1015
1016 # ===========================================================================
1017 # Cross-endpoint auth consistency
1018 # ===========================================================================
1019
1020
@pytest.mark.django_db
class TestAPIAllEndpointsRequireAuth:
    """Every endpoint should return 401 for unauthenticated requests to private projects."""

    # One entry per read endpoint; search needs a query string to get past
    # parameter validation so the auth check is what's exercised.
    _ENDPOINTS = (
        "project",
        "timeline",
        "tickets",
        "tickets/some-uuid",
        "wiki",
        "wiki/Home",
        "branches",
        "tags",
        "releases",
        "search?q=test",
    )

    @pytest.mark.parametrize("endpoint", _ENDPOINTS)
    def test_endpoint_requires_auth(self, anon_client, sample_project, fossil_repo_obj, endpoint):
        disk_patch, reader_patch, _ = _patch_api_fossil()
        with disk_patch, reader_patch:
            resp = anon_client.get(_api_url(sample_project.slug, endpoint))
        assert resp.status_code == 401
--- a/tests/test_mcp_server.py
+++ b/tests/test_mcp_server.py
@@ -0,0 +1,710 @@
1
+"""Tests for MCP server tool definitions and handlers.
2
+
3
+Covers:
4
+- Tool registry: all 17 tools registered with correct schemas
5
+- Tool dispatch: execute_tool routes to correct handler
6
+- Read handlers: list_projects, get_project, browse_code, read_file,
7
+ get_timeline, get_checkin, search_code, list_tickets, get_ticket,
8
+ list_wiki_pages, get_wiki_page, list_branches, get_file_blame,
9
+ get_file_history, sql_query
10
+- Write handlers: create_ticket, update_ticket
11
+- Error handling: unknown tool, missing project, exceptions
12
+"""
13
+
14
+from datetime import UTC, datetime
15
+from unittest.mock import MagicMock, patch
16
+
17
+import pytest
18
+
19
+from fossil.models import FossilRepository
20
+from fossil.reader import (
21
+ CheckinDetail,
22
+ FileEntry,
23
+ RepoMetadata,
24
+ TicketEntry,
25
+ TimelineEntry,
26
+ WikiPage,
27
+)
28
+from mcp_server.tools import TOOLS, execute_tool
29
+
30
+# Patch targets -- tools.py does deferred imports inside handler functions,
31
+# so we patch at the source module rather than at the consumer.
32
+_READER = "fossil.reader.FossilReader"
33
+_CLI = "fossil.cli.FossilCLI"
34
+
35
+# ---------------------------------------------------------------------------
36
+# Fixtures
37
+# ---------------------------------------------------------------------------
38
+
39
+
40
@pytest.fixture
def fossil_repo_obj(sample_project):
    """Return the auto-created FossilRepository for sample_project."""
    # Soft-deleted rows are excluded so the fixture always yields the live repo.
    return FossilRepository.objects.get(
        deleted_at__isnull=True,
        project=sample_project,
    )
44
+
45
+
46
+def _mock_reader():
47
+ """Return a context-manager mock for FossilReader."""
48
+ reader = MagicMock()
49
+ reader.__enter__ = MagicMock(return_value=reader)
50
+ reader.__exit__ = MagicMock(return_value=False)
51
+ return reader
52
+
53
+
54
+# ---------------------------------------------------------------------------
55
+# Tool registry tests
56
+# ---------------------------------------------------------------------------
57
+
58
+
59
class TestToolRegistry:
    """Static checks over the TOOLS registry (no DB needed)."""

    def test_all_17_tools_registered(self):
        assert len(TOOLS) == 17

    def test_tool_names_are_unique(self):
        seen = [tool.name for tool in TOOLS]
        assert len(set(seen)) == len(seen)

    def test_every_tool_has_input_schema(self):
        for tool in TOOLS:
            schema = tool.inputSchema
            assert schema is not None
            assert schema.get("type") == "object"

    def test_every_tool_has_description(self):
        for tool in TOOLS:
            assert tool.description
            assert len(tool.description) > 10

    def test_expected_tools_present(self):
        expected = {
            "list_projects",
            "get_project",
            "browse_code",
            "read_file",
            "get_timeline",
            "get_checkin",
            "search_code",
            "list_tickets",
            "get_ticket",
            "create_ticket",
            "update_ticket",
            "list_wiki_pages",
            "get_wiki_page",
            "list_branches",
            "get_file_blame",
            "get_file_history",
            "sql_query",
        }
        assert {tool.name for tool in TOOLS} == expected

    def test_slug_required_for_project_scoped_tools(self):
        """All tools except list_projects require a slug parameter."""
        for tool in TOOLS:
            required = tool.inputSchema.get("required", [])
            if tool.name == "list_projects":
                assert "slug" not in required
            else:
                assert "slug" in required, f"{tool.name} should require slug"
107
+
108
+
109
+# ---------------------------------------------------------------------------
110
+# Dispatch tests
111
+# ---------------------------------------------------------------------------
112
+
113
+
114
class TestDispatch:
    """execute_tool routing and error propagation."""

    def test_unknown_tool_returns_error(self):
        out = execute_tool("nonexistent_tool", {})
        assert "error" in out
        assert "Unknown tool" in out["error"]

    @pytest.mark.django_db
    def test_missing_project_returns_error(self):
        assert "error" in execute_tool("get_project", {"slug": "does-not-exist"})

    @pytest.mark.django_db
    def test_exception_in_handler_returns_error(self, sample_project):
        # Any exception escaping a handler must be converted to an error dict.
        with patch("mcp_server.tools._get_repo", side_effect=RuntimeError("boom")):
            out = execute_tool("get_project", {"slug": sample_project.slug})
        assert "error" in out
        assert "boom" in out["error"]
131
+
132
+
133
+# ---------------------------------------------------------------------------
134
+# list_projects
135
+# ---------------------------------------------------------------------------
136
+
137
+
138
@pytest.mark.django_db
class TestListProjects:
    """list_projects enumerates active (non-deleted) projects."""

    def test_returns_all_active_projects(self, sample_project):
        out = execute_tool("list_projects", {})
        assert "projects" in out
        assert sample_project.slug in [entry["slug"] for entry in out["projects"]]

    def test_excludes_deleted_projects(self, sample_project, admin_user):
        sample_project.soft_delete(user=admin_user)
        out = execute_tool("list_projects", {})
        assert sample_project.slug not in [entry["slug"] for entry in out["projects"]]
151
+
152
+
153
+# ---------------------------------------------------------------------------
154
+# get_project
155
+# ---------------------------------------------------------------------------
156
+
157
+
158
@pytest.mark.django_db
class TestGetProject:
    """get_project returns project metadata merged with repo statistics."""

    @patch(_READER)
    def test_returns_project_details(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_metadata.return_value = RepoMetadata(
            project_name="Test",
            checkin_count=10,
            ticket_count=3,
            wiki_page_count=2,
        )
        mock_reader_cls.return_value = mock

        # Pretend the repository file exists so the handler reads metadata.
        with patch.object(type(fossil_repo_obj), "exists_on_disk", new_callable=lambda: property(lambda s: True)):
            out = execute_tool("get_project", {"slug": sample_project.slug})

        assert out["name"] == sample_project.name
        assert out["slug"] == sample_project.slug
        assert out["visibility"] == sample_project.visibility

    def test_nonexistent_slug_returns_error(self):
        assert "error" in execute_tool("get_project", {"slug": "no-such-project"})
181
+
182
+
183
+# ---------------------------------------------------------------------------
184
+# browse_code
185
+# ---------------------------------------------------------------------------
186
+
187
+
188
@pytest.mark.django_db
class TestBrowseCode:
    """browse_code lists files at the latest check-in, optionally path-filtered."""

    @patch(_READER)
    def test_lists_files_at_root(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_latest_checkin_uuid.return_value = "abc123"
        mock.get_files_at_checkin.return_value = [
            FileEntry(name="README.md", uuid="f1", size=100),
            FileEntry(name="src/main.py", uuid="f2", size=200),
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("browse_code", {"slug": sample_project.slug})
        assert len(out["files"]) == 2
        assert out["checkin"] == "abc123"

    @patch(_READER)
    def test_filters_by_path(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_latest_checkin_uuid.return_value = "abc123"
        mock.get_files_at_checkin.return_value = [
            FileEntry(name="README.md", uuid="f1", size=100),
            FileEntry(name="src/main.py", uuid="f2", size=200),
            FileEntry(name="src/utils.py", uuid="f3", size=150),
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("browse_code", {"slug": sample_project.slug, "path": "src"})
        assert len(out["files"]) == 2
        # Only entries under the requested directory survive the filter.
        assert all(entry["name"].startswith("src/") for entry in out["files"])

    @patch(_READER)
    def test_empty_repo_returns_error(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_latest_checkin_uuid.return_value = None
        mock_reader_cls.return_value = mock

        assert "error" in execute_tool("browse_code", {"slug": sample_project.slug})
227
+
228
+
229
+# ---------------------------------------------------------------------------
230
+# read_file
231
+# ---------------------------------------------------------------------------
232
+
233
+
234
@pytest.mark.django_db
class TestReadFile:
    """read_file returns text content, or metadata only for binary blobs."""

    @patch(_READER)
    def test_reads_text_file(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_latest_checkin_uuid.return_value = "abc123"
        mock.get_files_at_checkin.return_value = [
            FileEntry(name="README.md", uuid="f1", size=100),
        ]
        mock.get_file_content.return_value = b"# Hello World"
        mock_reader_cls.return_value = mock

        out = execute_tool("read_file", {"slug": sample_project.slug, "filepath": "README.md"})
        assert out["filepath"] == "README.md"
        assert out["content"] == "# Hello World"

    @patch(_READER)
    def test_binary_file_returns_metadata(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_latest_checkin_uuid.return_value = "abc123"
        mock.get_files_at_checkin.return_value = [
            FileEntry(name="image.png", uuid="f1", size=5000),
        ]
        # A PNG header: undecodable as text, so the handler must flag it binary.
        mock.get_file_content.return_value = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00"
        mock_reader_cls.return_value = mock

        out = execute_tool("read_file", {"slug": sample_project.slug, "filepath": "image.png"})
        assert out["binary"] is True
        assert out["size"] > 0

    @patch(_READER)
    def test_file_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_latest_checkin_uuid.return_value = "abc123"
        mock.get_files_at_checkin.return_value = []
        mock_reader_cls.return_value = mock

        out = execute_tool("read_file", {"slug": sample_project.slug, "filepath": "nope.txt"})
        assert "error" in out
        assert "not found" in out["error"].lower()
274
+
275
+
276
+# ---------------------------------------------------------------------------
277
+# get_timeline
278
+# ---------------------------------------------------------------------------
279
+
280
+
281
@pytest.mark.django_db
class TestGetTimeline:
    """get_timeline returns serialized check-ins, optionally branch-filtered."""

    @patch(_READER)
    def test_returns_checkins(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_timeline.return_value = [
            TimelineEntry(
                rid=1,
                uuid="abc123",
                event_type="ci",
                timestamp=datetime(2025, 1, 15, 10, 0, 0, tzinfo=UTC),
                user="alice",
                comment="Initial commit",
                branch="trunk",
            ),
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("get_timeline", {"slug": sample_project.slug})
        assert len(out["checkins"]) == 1
        first = out["checkins"][0]
        assert first["uuid"] == "abc123"
        assert first["user"] == "alice"

    @patch(_READER)
    def test_branch_filter(self, mock_reader_cls, sample_project, fossil_repo_obj):
        entries = [
            TimelineEntry(
                rid=1,
                uuid="a1",
                event_type="ci",
                timestamp=datetime(2025, 1, 15, tzinfo=UTC),
                user="alice",
                comment="on trunk",
                branch="trunk",
            ),
            TimelineEntry(
                rid=2,
                uuid="b2",
                event_type="ci",
                timestamp=datetime(2025, 1, 14, tzinfo=UTC),
                user="bob",
                comment="on feature",
                branch="feature-x",
            ),
        ]
        mock = _mock_reader()
        mock.get_timeline.return_value = entries
        mock_reader_cls.return_value = mock

        out = execute_tool("get_timeline", {"slug": sample_project.slug, "branch": "trunk"})
        # Only the trunk entry should survive the branch filter.
        assert len(out["checkins"]) == 1
        assert out["checkins"][0]["branch"] == "trunk"
332
+
333
+
334
+# ---------------------------------------------------------------------------
335
+# get_checkin
336
+# ---------------------------------------------------------------------------
337
+
338
+
339
@pytest.mark.django_db
class TestGetCheckin:
    """get_checkin resolves a (possibly abbreviated) uuid to full detail."""

    @patch(_READER)
    def test_returns_checkin_detail(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_checkin_detail.return_value = CheckinDetail(
            uuid="abc123full",
            timestamp=datetime(2025, 1, 15, 10, 0, 0, tzinfo=UTC),
            user="alice",
            comment="Initial commit",
            branch="trunk",
            parent_uuid="parent000",
            files_changed=[{"name": "README.md", "change_type": "added", "uuid": "f1", "prev_uuid": ""}],
        )
        mock_reader_cls.return_value = mock

        out = execute_tool("get_checkin", {"slug": sample_project.slug, "uuid": "abc123"})
        # The handler reports the full uuid returned by the reader.
        assert out["uuid"] == "abc123full"
        assert len(out["files_changed"]) == 1

    @patch(_READER)
    def test_checkin_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_checkin_detail.return_value = None
        mock_reader_cls.return_value = mock

        assert "error" in execute_tool("get_checkin", {"slug": sample_project.slug, "uuid": "nonexistent"})
367
+
368
+
369
+# ---------------------------------------------------------------------------
370
+# search_code
371
+# ---------------------------------------------------------------------------
372
+
373
+
374
@pytest.mark.django_db
class TestSearchCode:
    """search_code proxies FossilReader.search and serializes datetimes."""

    @patch(_READER)
    def test_returns_search_results(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.search.return_value = {
            "checkins": [{"uuid": "abc", "timestamp": datetime(2025, 1, 15, tzinfo=UTC), "user": "alice", "comment": "fix bug"}],
            "tickets": [{"uuid": "tkt1", "title": "Bug report", "status": "Open", "created": datetime(2025, 1, 10, tzinfo=UTC)}],
            "wiki": [{"name": "Debugging"}],
        }
        mock_reader_cls.return_value = mock

        out = execute_tool("search_code", {"slug": sample_project.slug, "query": "bug"})
        for section in ("checkins", "tickets", "wiki"):
            assert len(out[section]) == 1
        # Timestamps should be serialized to strings
        assert isinstance(out["checkins"][0]["timestamp"], str)
392
+
393
+
394
+# ---------------------------------------------------------------------------
395
+# list_tickets / get_ticket
396
+# ---------------------------------------------------------------------------
397
+
398
+
399
@pytest.mark.django_db
class TestTickets:
    """list_tickets and get_ticket read handlers."""

    @patch(_READER)
    def test_list_tickets(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_tickets.return_value = [
            TicketEntry(
                uuid="tkt-001",
                title="Fix bug",
                status="Open",
                type="Code_Defect",
                created=datetime(2025, 1, 10, tzinfo=UTC),
                owner="alice",
                priority="High",
            ),
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("list_tickets", {"slug": sample_project.slug})
        assert len(out["tickets"]) == 1
        assert out["tickets"][0]["uuid"] == "tkt-001"

    @patch(_READER)
    def test_get_ticket_detail(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_ticket_detail.return_value = TicketEntry(
            uuid="tkt-001",
            title="Fix bug",
            status="Open",
            type="Code_Defect",
            created=datetime(2025, 1, 10, tzinfo=UTC),
            owner="alice",
            body="Detailed description",
            priority="High",
            severity="Critical",
        )
        mock.get_ticket_comments.return_value = [
            {"timestamp": datetime(2025, 1, 11, tzinfo=UTC), "user": "bob", "comment": "Reproduced", "mimetype": "text/plain"},
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("get_ticket", {"slug": sample_project.slug, "uuid": "tkt-001"})
        assert out["title"] == "Fix bug"
        assert out["body"] == "Detailed description"
        assert len(out["comments"]) == 1

    @patch(_READER)
    def test_ticket_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_ticket_detail.return_value = None
        mock_reader_cls.return_value = mock

        assert "error" in execute_tool("get_ticket", {"slug": sample_project.slug, "uuid": "nonexistent"})
453
+
454
+
455
+# ---------------------------------------------------------------------------
456
+# create_ticket / update_ticket
457
+# ---------------------------------------------------------------------------
458
+
459
+
460
@pytest.mark.django_db
class TestWriteTickets:
    """create_ticket / update_ticket write handlers (FossilCLI mocked)."""

    @patch(_CLI)
    def test_create_ticket(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_add.return_value = True
        mock_cli_cls.return_value = cli

        args = {
            "slug": sample_project.slug,
            "title": "New bug",
            "body": "Something is broken",
        }
        out = execute_tool("create_ticket", args)

        assert out["success"] is True
        assert out["title"] == "New bug"
        cli.ticket_add.assert_called_once()
        # Second positional arg to ticket_add is the field dict.
        fields = cli.ticket_add.call_args[0][1]
        assert fields["title"] == "New bug"
        assert fields["comment"] == "Something is broken"
        assert fields["status"] == "Open"

    @patch(_CLI)
    def test_create_ticket_failure(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_add.return_value = False
        mock_cli_cls.return_value = cli

        out = execute_tool(
            "create_ticket",
            {
                "slug": sample_project.slug,
                "title": "Failing",
                "body": "Will fail",
            },
        )
        assert "error" in out

    @patch(_CLI)
    def test_update_ticket_status(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_change.return_value = True
        mock_cli_cls.return_value = cli

        out = execute_tool(
            "update_ticket",
            {
                "slug": sample_project.slug,
                "uuid": "tkt-001",
                "status": "Closed",
            },
        )
        assert out["success"] is True
        positional = cli.ticket_change.call_args[0]
        assert positional[1] == "tkt-001"
        assert positional[2]["status"] == "Closed"

    @patch(_CLI)
    def test_update_ticket_comment(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_change.return_value = True
        mock_cli_cls.return_value = cli

        out = execute_tool(
            "update_ticket",
            {
                "slug": sample_project.slug,
                "uuid": "tkt-001",
                "comment": "Fixed in latest push",
            },
        )
        assert out["success"] is True
        # Fossil stores new comments under the "icomment" field.
        assert cli.ticket_change.call_args[0][2]["icomment"] == "Fixed in latest push"

    @patch(_CLI)
    def test_update_ticket_no_fields(self, mock_cli_cls, sample_project, fossil_repo_obj):
        mock_cli_cls.return_value = MagicMock()

        out = execute_tool(
            "update_ticket",
            {
                "slug": sample_project.slug,
                "uuid": "tkt-001",
            },
        )
        assert "error" in out
        assert "No fields" in out["error"]
552
+
553
+
554
+# ---------------------------------------------------------------------------
555
+# wiki handlers
556
+# ---------------------------------------------------------------------------
557
+
558
+
559
@pytest.mark.django_db
class TestWiki:
    """list_wiki_pages / get_wiki_page read handlers."""

    @patch(_READER)
    def test_list_wiki_pages(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_wiki_pages.return_value = [
            WikiPage(name="Home", content="", last_modified=datetime(2025, 1, 12, tzinfo=UTC), user="alice"),
            WikiPage(name="FAQ", content="", last_modified=datetime(2025, 1, 13, tzinfo=UTC), user="bob"),
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("list_wiki_pages", {"slug": sample_project.slug})
        assert len(out["pages"]) == 2
        assert out["pages"][0]["name"] == "Home"

    @patch(_READER)
    def test_get_wiki_page(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_wiki_page.return_value = WikiPage(
            name="Home",
            content="# Welcome\nThis is home.",
            last_modified=datetime(2025, 1, 12, tzinfo=UTC),
            user="alice",
        )
        mock_reader_cls.return_value = mock

        out = execute_tool("get_wiki_page", {"slug": sample_project.slug, "page_name": "Home"})
        assert out["name"] == "Home"
        assert "Welcome" in out["content"]

    @patch(_READER)
    def test_wiki_page_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_wiki_page.return_value = None
        mock_reader_cls.return_value = mock

        assert "error" in execute_tool("get_wiki_page", {"slug": sample_project.slug, "page_name": "Missing"})
597
+
598
+
599
+# ---------------------------------------------------------------------------
600
+# branches, blame, file history
601
+# ---------------------------------------------------------------------------
602
+
603
+
604
@pytest.mark.django_db
class TestBranchesAndHistory:
    """list_branches, get_file_blame and get_file_history handlers."""

    @patch(_READER)
    def test_list_branches(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_branches.return_value = [
            {
                "name": "trunk",
                "last_checkin": datetime(2025, 1, 15, tzinfo=UTC),
                "last_user": "alice",
                "checkin_count": 30,
                "last_uuid": "abc123",
            },
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("list_branches", {"slug": sample_project.slug})
        assert len(out["branches"]) == 1
        assert out["branches"][0]["name"] == "trunk"

    @patch(_CLI)
    def test_get_file_blame(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.blame.return_value = [
            {"uuid": "aaa", "date": "2025-01-15", "user": "alice", "text": "line 1"},
            {"uuid": "bbb", "date": "2025-01-14", "user": "bob", "text": "line 2"},
        ]
        mock_cli_cls.return_value = cli

        out = execute_tool("get_file_blame", {"slug": sample_project.slug, "filepath": "main.py"})
        assert out["filepath"] == "main.py"
        assert len(out["lines"]) == 2
        assert out["total"] == 2

    @patch(_READER)
    def test_get_file_history(self, mock_reader_cls, sample_project, fossil_repo_obj):
        mock = _mock_reader()
        mock.get_file_history.return_value = [
            {"uuid": "c1", "timestamp": datetime(2025, 1, 15, tzinfo=UTC), "user": "alice", "comment": "Update"},
            {"uuid": "c2", "timestamp": datetime(2025, 1, 14, tzinfo=UTC), "user": "bob", "comment": "Create"},
        ]
        mock_reader_cls.return_value = mock

        out = execute_tool("get_file_history", {"slug": sample_project.slug, "filepath": "main.py"})
        assert out["filepath"] == "main.py"
        assert len(out["history"]) == 2
        # Timestamps should be serialized
        assert isinstance(out["history"][0]["timestamp"], str)
652
+
653
+
654
+# ---------------------------------------------------------------------------
655
+# sql_query
656
+# ---------------------------------------------------------------------------
657
+
658
+
659
@pytest.mark.django_db
class TestSqlQuery:
    """sql_query permits a single read-only SELECT statement."""

    def test_rejects_non_select(self, sample_project, fossil_repo_obj):
        out = execute_tool("sql_query", {"slug": sample_project.slug, "sql": "DELETE FROM ticket"})
        assert "error" in out
        assert "SELECT" in out["error"]

    def test_rejects_empty_query(self, sample_project, fossil_repo_obj):
        assert "error" in execute_tool("sql_query", {"slug": sample_project.slug, "sql": ""})

    def test_rejects_drop(self, sample_project, fossil_repo_obj):
        # Statement stacking must be refused even when it starts with SELECT.
        out = execute_tool("sql_query", {"slug": sample_project.slug, "sql": "SELECT 1; DROP TABLE ticket"})
        assert "error" in out

    @patch(_READER)
    def test_valid_select(self, mock_reader_cls, sample_project, fossil_repo_obj):
        cursor = MagicMock()
        cursor.description = [("tkt_uuid",), ("title",)]
        cursor.fetchmany.return_value = [("uuid-1", "Bug one"), ("uuid-2", "Bug two")]
        mock = _mock_reader()
        mock.conn.cursor.return_value = cursor
        mock_reader_cls.return_value = mock

        out = execute_tool("sql_query", {"slug": sample_project.slug, "sql": "SELECT tkt_uuid, title FROM ticket"})
        assert out["columns"] == ["tkt_uuid", "title"]
        assert len(out["rows"]) == 2
        assert out["count"] == 2
687
+
688
+
689
+# ---------------------------------------------------------------------------
690
+# Server module smoke test
691
+# ---------------------------------------------------------------------------
692
+
693
+
694
class TestServerModule:
    """Smoke tests: the server module imports cleanly and exposes its entry points."""

    def test_server_instance_exists(self):
        from mcp_server.server import server

        assert server.name == "fossilrepo"

    def test_main_is_coroutine(self):
        # main() must be awaitable since it is driven by asyncio.run.
        import inspect

        from mcp_server.server import main

        assert inspect.iscoroutinefunction(main)

    def test_entry_point_function_exists(self):
        from mcp_server.__main__ import run

        assert callable(run)
--- a/tests/test_mcp_server.py
+++ b/tests/test_mcp_server.py
@@ -0,0 +1,710 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_mcp_server.py
+++ b/tests/test_mcp_server.py
@@ -0,0 +1,710 @@
1 """Tests for MCP server tool definitions and handlers.
2
3 Covers:
4 - Tool registry: all 17 tools registered with correct schemas
5 - Tool dispatch: execute_tool routes to correct handler
6 - Read handlers: list_projects, get_project, browse_code, read_file,
7 get_timeline, get_checkin, search_code, list_tickets, get_ticket,
8 list_wiki_pages, get_wiki_page, list_branches, get_file_blame,
9 get_file_history, sql_query
10 - Write handlers: create_ticket, update_ticket
11 - Error handling: unknown tool, missing project, exceptions
12 """
13
14 from datetime import UTC, datetime
15 from unittest.mock import MagicMock, patch
16
17 import pytest
18
19 from fossil.models import FossilRepository
20 from fossil.reader import (
21 CheckinDetail,
22 FileEntry,
23 RepoMetadata,
24 TicketEntry,
25 TimelineEntry,
26 WikiPage,
27 )
28 from mcp_server.tools import TOOLS, execute_tool
29
30 # Patch targets -- tools.py does deferred imports inside handler functions,
31 # so we patch at the source module rather than at the consumer.
32 _READER = "fossil.reader.FossilReader"
33 _CLI = "fossil.cli.FossilCLI"
34
35 # ---------------------------------------------------------------------------
36 # Fixtures
37 # ---------------------------------------------------------------------------
38
39
@pytest.fixture
def fossil_repo_obj(sample_project):
    """Return the auto-created FossilRepository for sample_project."""
    # Only the live (non-soft-deleted) repository row qualifies.
    lookup = {"project": sample_project, "deleted_at__isnull": True}
    return FossilRepository.objects.get(**lookup)
44
45
46 def _mock_reader():
47 """Return a context-manager mock for FossilReader."""
48 reader = MagicMock()
49 reader.__enter__ = MagicMock(return_value=reader)
50 reader.__exit__ = MagicMock(return_value=False)
51 return reader
52
53
54 # ---------------------------------------------------------------------------
55 # Tool registry tests
56 # ---------------------------------------------------------------------------
57
58
class TestToolRegistry:
    """Schema-level checks on the static TOOLS registry (no DB access needed)."""

    def test_all_17_tools_registered(self):
        assert len(TOOLS) == 17

    def test_tool_names_are_unique(self):
        names = [t.name for t in TOOLS]
        assert len(names) == len(set(names))

    def test_every_tool_has_input_schema(self):
        # Every tool must declare a JSON-Schema object for its arguments.
        for tool in TOOLS:
            assert tool.inputSchema is not None
            assert tool.inputSchema.get("type") == "object"

    def test_every_tool_has_description(self):
        # A non-trivial description is what the agent uses to pick tools.
        for tool in TOOLS:
            assert tool.description
            assert len(tool.description) > 10

    def test_expected_tools_present(self):
        names = {t.name for t in TOOLS}
        expected = {
            "list_projects",
            "get_project",
            "browse_code",
            "read_file",
            "get_timeline",
            "get_checkin",
            "search_code",
            "list_tickets",
            "get_ticket",
            "create_ticket",
            "update_ticket",
            "list_wiki_pages",
            "get_wiki_page",
            "list_branches",
            "get_file_blame",
            "get_file_history",
            "sql_query",
        }
        assert names == expected

    def test_slug_required_for_project_scoped_tools(self):
        """All tools except list_projects require a slug parameter."""
        for tool in TOOLS:
            if tool.name == "list_projects":
                assert "slug" not in tool.inputSchema.get("required", [])
            else:
                assert "slug" in tool.inputSchema.get("required", []), f"{tool.name} should require slug"
107
108
109 # ---------------------------------------------------------------------------
110 # Dispatch tests
111 # ---------------------------------------------------------------------------
112
113
class TestDispatch:
    """execute_tool routing and top-level error handling."""

    def test_unknown_tool_returns_error(self):
        result = execute_tool("nonexistent_tool", {})
        assert "error" in result
        assert "Unknown tool" in result["error"]

    @pytest.mark.django_db
    def test_missing_project_returns_error(self):
        result = execute_tool("get_project", {"slug": "does-not-exist"})
        assert "error" in result

    @pytest.mark.django_db
    def test_exception_in_handler_returns_error(self, sample_project):
        # Unexpected exceptions must be converted into an {"error": ...}
        # payload rather than crashing the MCP server loop.
        with patch("mcp_server.tools._get_repo", side_effect=RuntimeError("boom")):
            result = execute_tool("get_project", {"slug": sample_project.slug})
        assert "error" in result
        assert "boom" in result["error"]
131
132
133 # ---------------------------------------------------------------------------
134 # list_projects
135 # ---------------------------------------------------------------------------
136
137
@pytest.mark.django_db
class TestListProjects:
    """list_projects returns active projects and hides soft-deleted ones."""

    def test_returns_all_active_projects(self, sample_project):
        result = execute_tool("list_projects", {})
        assert "projects" in result
        slugs = [p["slug"] for p in result["projects"]]
        assert sample_project.slug in slugs

    def test_excludes_deleted_projects(self, sample_project, admin_user):
        sample_project.soft_delete(user=admin_user)
        result = execute_tool("list_projects", {})
        slugs = [p["slug"] for p in result["projects"]]
        assert sample_project.slug not in slugs
151
152
153 # ---------------------------------------------------------------------------
154 # get_project
155 # ---------------------------------------------------------------------------
156
157
@pytest.mark.django_db
class TestGetProject:
    """get_project merges Django project fields with Fossil repo metadata."""

    @patch(_READER)
    def test_returns_project_details(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_metadata.return_value = RepoMetadata(
            project_name="Test",
            checkin_count=10,
            ticket_count=3,
            wiki_page_count=2,
        )
        mock_reader_cls.return_value = reader

        # Force exists_on_disk -> True so the handler consults the (mocked)
        # reader instead of short-circuiting on a missing repository file.
        with patch.object(type(fossil_repo_obj), "exists_on_disk", new_callable=lambda: property(lambda s: True)):
            result = execute_tool("get_project", {"slug": sample_project.slug})

        assert result["name"] == sample_project.name
        assert result["slug"] == sample_project.slug
        assert result["visibility"] == sample_project.visibility

    def test_nonexistent_slug_returns_error(self):
        result = execute_tool("get_project", {"slug": "no-such-project"})
        assert "error" in result
181
182
183 # ---------------------------------------------------------------------------
184 # browse_code
185 # ---------------------------------------------------------------------------
186
187
@pytest.mark.django_db
class TestBrowseCode:
    """browse_code lists files at the latest checkin, optionally path-filtered."""

    @patch(_READER)
    def test_lists_files_at_root(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_latest_checkin_uuid.return_value = "abc123"
        reader.get_files_at_checkin.return_value = [
            FileEntry(name="README.md", uuid="f1", size=100),
            FileEntry(name="src/main.py", uuid="f2", size=200),
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("browse_code", {"slug": sample_project.slug})
        assert len(result["files"]) == 2
        assert result["checkin"] == "abc123"

    @patch(_READER)
    def test_filters_by_path(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_latest_checkin_uuid.return_value = "abc123"
        reader.get_files_at_checkin.return_value = [
            FileEntry(name="README.md", uuid="f1", size=100),
            FileEntry(name="src/main.py", uuid="f2", size=200),
            FileEntry(name="src/utils.py", uuid="f3", size=150),
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("browse_code", {"slug": sample_project.slug, "path": "src"})
        assert len(result["files"]) == 2
        assert all(f["name"].startswith("src/") for f in result["files"])

    @patch(_READER)
    def test_empty_repo_returns_error(self, mock_reader_cls, sample_project, fossil_repo_obj):
        # No checkins at all -> the handler cannot resolve a tip to browse.
        reader = _mock_reader()
        reader.get_latest_checkin_uuid.return_value = None
        mock_reader_cls.return_value = reader

        result = execute_tool("browse_code", {"slug": sample_project.slug})
        assert "error" in result
227
228
229 # ---------------------------------------------------------------------------
230 # read_file
231 # ---------------------------------------------------------------------------
232
233
@pytest.mark.django_db
class TestReadFile:
    """read_file returns text content, or metadata only for binary files."""

    @patch(_READER)
    def test_reads_text_file(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_latest_checkin_uuid.return_value = "abc123"
        reader.get_files_at_checkin.return_value = [
            FileEntry(name="README.md", uuid="f1", size=100),
        ]
        reader.get_file_content.return_value = b"# Hello World"
        mock_reader_cls.return_value = reader

        result = execute_tool("read_file", {"slug": sample_project.slug, "filepath": "README.md"})
        assert result["filepath"] == "README.md"
        assert result["content"] == "# Hello World"

    @patch(_READER)
    def test_binary_file_returns_metadata(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_latest_checkin_uuid.return_value = "abc123"
        reader.get_files_at_checkin.return_value = [
            FileEntry(name="image.png", uuid="f1", size=5000),
        ]
        # PNG magic header with embedded NUL bytes -- must be classified as
        # binary so raw bytes are never returned to the agent.
        reader.get_file_content.return_value = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00"
        mock_reader_cls.return_value = reader

        result = execute_tool("read_file", {"slug": sample_project.slug, "filepath": "image.png"})
        assert result["binary"] is True
        assert result["size"] > 0

    @patch(_READER)
    def test_file_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_latest_checkin_uuid.return_value = "abc123"
        reader.get_files_at_checkin.return_value = []
        mock_reader_cls.return_value = reader

        result = execute_tool("read_file", {"slug": sample_project.slug, "filepath": "nope.txt"})
        assert "error" in result
        assert "not found" in result["error"].lower()
274
275
276 # ---------------------------------------------------------------------------
277 # get_timeline
278 # ---------------------------------------------------------------------------
279
280
@pytest.mark.django_db
class TestGetTimeline:
    """get_timeline serializes checkin entries and honors the branch filter."""

    @patch(_READER)
    def test_returns_checkins(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_timeline.return_value = [
            TimelineEntry(
                rid=1,
                uuid="abc123",
                event_type="ci",
                timestamp=datetime(2025, 1, 15, 10, 0, 0, tzinfo=UTC),
                user="alice",
                comment="Initial commit",
                branch="trunk",
            ),
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("get_timeline", {"slug": sample_project.slug})
        assert len(result["checkins"]) == 1
        assert result["checkins"][0]["uuid"] == "abc123"
        assert result["checkins"][0]["user"] == "alice"

    @patch(_READER)
    def test_branch_filter(self, mock_reader_cls, sample_project, fossil_repo_obj):
        # Two entries on different branches; only the requested branch
        # should survive the filter in the tool handler.
        reader = _mock_reader()
        reader.get_timeline.return_value = [
            TimelineEntry(
                rid=1,
                uuid="a1",
                event_type="ci",
                timestamp=datetime(2025, 1, 15, tzinfo=UTC),
                user="alice",
                comment="on trunk",
                branch="trunk",
            ),
            TimelineEntry(
                rid=2,
                uuid="b2",
                event_type="ci",
                timestamp=datetime(2025, 1, 14, tzinfo=UTC),
                user="bob",
                comment="on feature",
                branch="feature-x",
            ),
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("get_timeline", {"slug": sample_project.slug, "branch": "trunk"})
        assert len(result["checkins"]) == 1
        assert result["checkins"][0]["branch"] == "trunk"
332
333
334 # ---------------------------------------------------------------------------
335 # get_checkin
336 # ---------------------------------------------------------------------------
337
338
@pytest.mark.django_db
class TestGetCheckin:
    """get_checkin resolves a (possibly abbreviated) uuid to full detail."""

    @patch(_READER)
    def test_returns_checkin_detail(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_checkin_detail.return_value = CheckinDetail(
            uuid="abc123full",
            timestamp=datetime(2025, 1, 15, 10, 0, 0, tzinfo=UTC),
            user="alice",
            comment="Initial commit",
            branch="trunk",
            parent_uuid="parent000",
            files_changed=[{"name": "README.md", "change_type": "added", "uuid": "f1", "prev_uuid": ""}],
        )
        mock_reader_cls.return_value = reader

        # Query with the short prefix; the response carries the full uuid.
        result = execute_tool("get_checkin", {"slug": sample_project.slug, "uuid": "abc123"})
        assert result["uuid"] == "abc123full"
        assert len(result["files_changed"]) == 1

    @patch(_READER)
    def test_checkin_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_checkin_detail.return_value = None
        mock_reader_cls.return_value = reader

        result = execute_tool("get_checkin", {"slug": sample_project.slug, "uuid": "nonexistent"})
        assert "error" in result
367
368
369 # ---------------------------------------------------------------------------
370 # search_code
371 # ---------------------------------------------------------------------------
372
373
@pytest.mark.django_db
class TestSearchCode:
    """search_code fans out to checkins/tickets/wiki and serializes dates."""

    @patch(_READER)
    def test_returns_search_results(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.search.return_value = {
            "checkins": [{"uuid": "abc", "timestamp": datetime(2025, 1, 15, tzinfo=UTC), "user": "alice", "comment": "fix bug"}],
            "tickets": [{"uuid": "tkt1", "title": "Bug report", "status": "Open", "created": datetime(2025, 1, 10, tzinfo=UTC)}],
            "wiki": [{"name": "Debugging"}],
        }
        mock_reader_cls.return_value = reader

        result = execute_tool("search_code", {"slug": sample_project.slug, "query": "bug"})
        assert len(result["checkins"]) == 1
        assert len(result["tickets"]) == 1
        assert len(result["wiki"]) == 1
        # Timestamps should be serialized to strings (JSON-safe for MCP).
        assert isinstance(result["checkins"][0]["timestamp"], str)
392
393
394 # ---------------------------------------------------------------------------
395 # list_tickets / get_ticket
396 # ---------------------------------------------------------------------------
397
398
@pytest.mark.django_db
class TestTickets:
    """Read-only ticket tools: list_tickets and get_ticket."""

    @patch(_READER)
    def test_list_tickets(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_tickets.return_value = [
            TicketEntry(
                uuid="tkt-001",
                title="Fix bug",
                status="Open",
                type="Code_Defect",
                created=datetime(2025, 1, 10, tzinfo=UTC),
                owner="alice",
                priority="High",
            ),
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("list_tickets", {"slug": sample_project.slug})
        assert len(result["tickets"]) == 1
        assert result["tickets"][0]["uuid"] == "tkt-001"

    @patch(_READER)
    def test_get_ticket_detail(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_ticket_detail.return_value = TicketEntry(
            uuid="tkt-001",
            title="Fix bug",
            status="Open",
            type="Code_Defect",
            created=datetime(2025, 1, 10, tzinfo=UTC),
            owner="alice",
            body="Detailed description",
            priority="High",
            severity="Critical",
        )
        # get_ticket also merges in the ticket's comment thread.
        reader.get_ticket_comments.return_value = [
            {"timestamp": datetime(2025, 1, 11, tzinfo=UTC), "user": "bob", "comment": "Reproduced", "mimetype": "text/plain"},
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("get_ticket", {"slug": sample_project.slug, "uuid": "tkt-001"})
        assert result["title"] == "Fix bug"
        assert result["body"] == "Detailed description"
        assert len(result["comments"]) == 1

    @patch(_READER)
    def test_ticket_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_ticket_detail.return_value = None
        mock_reader_cls.return_value = reader

        result = execute_tool("get_ticket", {"slug": sample_project.slug, "uuid": "nonexistent"})
        assert "error" in result
453
454
455 # ---------------------------------------------------------------------------
456 # create_ticket / update_ticket
457 # ---------------------------------------------------------------------------
458
459
@pytest.mark.django_db
class TestWriteTickets:
    """Write-path ticket tools (create_ticket / update_ticket) via FossilCLI."""

    @patch(_CLI)
    def test_create_ticket(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_add.return_value = True
        mock_cli_cls.return_value = cli

        result = execute_tool(
            "create_ticket",
            {
                "slug": sample_project.slug,
                "title": "New bug",
                "body": "Something is broken",
            },
        )
        assert result["success"] is True
        assert result["title"] == "New bug"
        cli.ticket_add.assert_called_once()
        # ticket_add is called positionally: (repo, fields); inspect the
        # fields dict to verify the mapping from tool args.
        call_args = cli.ticket_add.call_args
        fields = call_args[0][1]
        assert fields["title"] == "New bug"
        assert fields["comment"] == "Something is broken"
        assert fields["status"] == "Open"

    @patch(_CLI)
    def test_create_ticket_failure(self, mock_cli_cls, sample_project, fossil_repo_obj):
        # CLI-level failure (ticket_add -> False) surfaces as an error payload.
        cli = MagicMock()
        cli.ticket_add.return_value = False
        mock_cli_cls.return_value = cli

        result = execute_tool(
            "create_ticket",
            {
                "slug": sample_project.slug,
                "title": "Failing",
                "body": "Will fail",
            },
        )
        assert "error" in result

    @patch(_CLI)
    def test_update_ticket_status(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_change.return_value = True
        mock_cli_cls.return_value = cli

        result = execute_tool(
            "update_ticket",
            {
                "slug": sample_project.slug,
                "uuid": "tkt-001",
                "status": "Closed",
            },
        )
        assert result["success"] is True
        # ticket_change positional args: (repo, uuid, changes).
        call_args = cli.ticket_change.call_args
        assert call_args[0][1] == "tkt-001"
        assert call_args[0][2]["status"] == "Closed"

    @patch(_CLI)
    def test_update_ticket_comment(self, mock_cli_cls, sample_project, fossil_repo_obj):
        cli = MagicMock()
        cli.ticket_change.return_value = True
        mock_cli_cls.return_value = cli

        result = execute_tool(
            "update_ticket",
            {
                "slug": sample_project.slug,
                "uuid": "tkt-001",
                "comment": "Fixed in latest push",
            },
        )
        assert result["success"] is True
        # Fossil's append-comment field is named "icomment".
        call_args = cli.ticket_change.call_args
        assert call_args[0][2]["icomment"] == "Fixed in latest push"

    @patch(_CLI)
    def test_update_ticket_no_fields(self, mock_cli_cls, sample_project, fossil_repo_obj):
        # With nothing to change, the handler rejects before touching the CLI.
        cli = MagicMock()
        mock_cli_cls.return_value = cli

        result = execute_tool(
            "update_ticket",
            {
                "slug": sample_project.slug,
                "uuid": "tkt-001",
            },
        )
        assert "error" in result
        assert "No fields" in result["error"]
552
553
554 # ---------------------------------------------------------------------------
555 # wiki handlers
556 # ---------------------------------------------------------------------------
557
558
@pytest.mark.django_db
class TestWiki:
    """Wiki tools: list_wiki_pages and get_wiki_page."""

    @patch(_READER)
    def test_list_wiki_pages(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_wiki_pages.return_value = [
            WikiPage(name="Home", content="", last_modified=datetime(2025, 1, 12, tzinfo=UTC), user="alice"),
            WikiPage(name="FAQ", content="", last_modified=datetime(2025, 1, 13, tzinfo=UTC), user="bob"),
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("list_wiki_pages", {"slug": sample_project.slug})
        assert len(result["pages"]) == 2
        assert result["pages"][0]["name"] == "Home"

    @patch(_READER)
    def test_get_wiki_page(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_wiki_page.return_value = WikiPage(
            name="Home",
            content="# Welcome\nThis is home.",
            last_modified=datetime(2025, 1, 12, tzinfo=UTC),
            user="alice",
        )
        mock_reader_cls.return_value = reader

        result = execute_tool("get_wiki_page", {"slug": sample_project.slug, "page_name": "Home"})
        assert result["name"] == "Home"
        assert "Welcome" in result["content"]

    @patch(_READER)
    def test_wiki_page_not_found(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_wiki_page.return_value = None
        mock_reader_cls.return_value = reader

        result = execute_tool("get_wiki_page", {"slug": sample_project.slug, "page_name": "Missing"})
        assert "error" in result
597
598
599 # ---------------------------------------------------------------------------
600 # branches, blame, file history
601 # ---------------------------------------------------------------------------
602
603
@pytest.mark.django_db
class TestBranchesAndHistory:
    """list_branches, get_file_blame (CLI-backed), get_file_history."""

    @patch(_READER)
    def test_list_branches(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_branches.return_value = [
            {
                "name": "trunk",
                "last_checkin": datetime(2025, 1, 15, tzinfo=UTC),
                "last_user": "alice",
                "checkin_count": 30,
                "last_uuid": "abc123",
            },
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("list_branches", {"slug": sample_project.slug})
        assert len(result["branches"]) == 1
        assert result["branches"][0]["name"] == "trunk"

    @patch(_CLI)
    def test_get_file_blame(self, mock_cli_cls, sample_project, fossil_repo_obj):
        # Blame goes through the fossil CLI, not the SQLite reader.
        cli = MagicMock()
        cli.blame.return_value = [
            {"uuid": "aaa", "date": "2025-01-15", "user": "alice", "text": "line 1"},
            {"uuid": "bbb", "date": "2025-01-14", "user": "bob", "text": "line 2"},
        ]
        mock_cli_cls.return_value = cli

        result = execute_tool("get_file_blame", {"slug": sample_project.slug, "filepath": "main.py"})
        assert result["filepath"] == "main.py"
        assert len(result["lines"]) == 2
        assert result["total"] == 2

    @patch(_READER)
    def test_get_file_history(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        reader.get_file_history.return_value = [
            {"uuid": "c1", "timestamp": datetime(2025, 1, 15, tzinfo=UTC), "user": "alice", "comment": "Update"},
            {"uuid": "c2", "timestamp": datetime(2025, 1, 14, tzinfo=UTC), "user": "bob", "comment": "Create"},
        ]
        mock_reader_cls.return_value = reader

        result = execute_tool("get_file_history", {"slug": sample_project.slug, "filepath": "main.py"})
        assert result["filepath"] == "main.py"
        assert len(result["history"]) == 2
        # Timestamps should be serialized (JSON-safe strings).
        assert isinstance(result["history"][0]["timestamp"], str)
652
653
654 # ---------------------------------------------------------------------------
655 # sql_query
656 # ---------------------------------------------------------------------------
657
658
@pytest.mark.django_db
class TestSqlQuery:
    """sql_query guard rails: read-only SELECTs only, no stacked statements."""

    def test_rejects_non_select(self, sample_project, fossil_repo_obj):
        result = execute_tool("sql_query", {"slug": sample_project.slug, "sql": "DELETE FROM ticket"})
        assert "error" in result
        assert "SELECT" in result["error"]

    def test_rejects_empty_query(self, sample_project, fossil_repo_obj):
        result = execute_tool("sql_query", {"slug": sample_project.slug, "sql": ""})
        assert "error" in result

    def test_rejects_drop(self, sample_project, fossil_repo_obj):
        # Statement stacking via ';' must not smuggle writes past the
        # SELECT-only check.
        result = execute_tool("sql_query", {"slug": sample_project.slug, "sql": "SELECT 1; DROP TABLE ticket"})
        assert "error" in result

    @patch(_READER)
    def test_valid_select(self, mock_reader_cls, sample_project, fossil_repo_obj):
        reader = _mock_reader()
        mock_cursor = MagicMock()
        # cursor.description yields column names; fetchmany caps row count.
        mock_cursor.description = [("tkt_uuid",), ("title",)]
        mock_cursor.fetchmany.return_value = [("uuid-1", "Bug one"), ("uuid-2", "Bug two")]
        reader.conn.cursor.return_value = mock_cursor
        mock_reader_cls.return_value = reader

        result = execute_tool("sql_query", {"slug": sample_project.slug, "sql": "SELECT tkt_uuid, title FROM ticket"})
        assert result["columns"] == ["tkt_uuid", "title"]
        assert len(result["rows"]) == 2
        assert result["count"] == 2
687
688
689 # ---------------------------------------------------------------------------
690 # Server module smoke test
691 # ---------------------------------------------------------------------------
692
693
class TestServerModule:
    """Smoke tests: the server module exposes the expected entry points."""

    def test_server_instance_exists(self):
        from mcp_server.server import server

        assert server.name == "fossilrepo"

    def test_main_is_coroutine(self):
        # main() is run via asyncio from the stdio entry point, so it must
        # be an async function.
        import inspect

        from mcp_server.server import main

        assert inspect.iscoroutinefunction(main)

    def test_entry_point_function_exists(self):
        from mcp_server.__main__ import run

        assert callable(run)

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button