FossilRepo

Add JSON API endpoints and org-wide activity heatmap JSON API: 10 read endpoints at /projects/<slug>/fossil/api/ with Bearer token auth (project-scoped APIToken + user-scoped PersonalAccessToken). Endpoints: project metadata, timeline, tickets, ticket detail, wiki list, wiki page, branches, tags, releases, search. All paginated, filterable. API docs endpoint listing all available routes. Activity heatmap: GitHub-style 365-day contribution grid on the dashboard aggregating commits across all repos. SVG rendering with green color scale, day/month labels, hover tooltips. get_commit_activity_flat() added to FossilReader for date-indexed activity data.

lmata 2026-04-07 16:56 trunk
Commit eaabb5d4e2a759dc60dd883c2f0e5246254a871f0c5b65d51234159637b3f8a2
--- core/views.py
+++ core/views.py
@@ -14,12 +14,16 @@
1414
total_projects = Project.objects.count()
1515
total_checkins = 0
1616
total_tickets = 0
1717
total_wiki = 0
1818
system_activity = [] # weekly commit counts across all repos
19
+ heatmap_data = {} # {date_string: count} -- daily commit counts across all repos
1920
recent_across_all = []
2021
22
+ # NOTE: For large installations with many repos, this per-request aggregation
23
+ # could become slow. Consider caching heatmap_data with a short TTL (e.g. 5 min)
24
+ # via Django's cache framework if this becomes a bottleneck.
2125
repos = FossilRepository.objects.filter(deleted_at__isnull=True)
2226
for repo in repos:
2327
if not repo.exists_on_disk:
2428
continue
2529
try:
@@ -35,10 +39,16 @@
3539
else:
3640
for i, c in enumerate(activity):
3741
if i < len(system_activity):
3842
system_activity[i] += c["count"]
3943
44
+ # Aggregate daily activity for heatmap (single pass per repo)
45
+ daily = reader.get_daily_commit_activity(days=365)
46
+ for entry in daily:
47
+ date = entry["date"]
48
+ heatmap_data[date] = heatmap_data.get(date, 0) + entry["count"]
49
+
4050
commits = reader.get_timeline(limit=3, event_type="ci")
4151
for c in commits:
4252
recent_across_all.append({"project": repo.project, "entry": c})
4353
except Exception:
4454
continue
@@ -45,10 +55,13 @@
4555
4656
# Sort recent across all by timestamp, take top 10
4757
recent_across_all.sort(key=lambda x: x["entry"].timestamp, reverse=True)
4858
recent_across_all = recent_across_all[:10]
4959
60
+ # Convert heatmap to sorted list for the template
61
+ heatmap_json = json.dumps(sorted([{"date": d, "count": c} for d, c in heatmap_data.items()], key=lambda x: x["date"]))
62
+
5063
return render(
5164
request,
5265
"dashboard.html",
5366
{
5467
"total_projects": total_projects,
@@ -55,8 +68,9 @@
5568
"total_checkins": total_checkins,
5669
"total_tickets": total_tickets,
5770
"total_wiki": total_wiki,
5871
"total_repos": repos.count(),
5972
"system_activity_json": json.dumps(system_activity),
73
+ "heatmap_json": heatmap_json,
6074
"recent_across_all": recent_across_all,
6175
},
6276
)
6377
6478
ADDED fossil/api_auth.py
6579
ADDED fossil/api_views.py
--- core/views.py
+++ core/views.py
@@ -14,12 +14,16 @@
14 total_projects = Project.objects.count()
15 total_checkins = 0
16 total_tickets = 0
17 total_wiki = 0
18 system_activity = [] # weekly commit counts across all repos
 
19 recent_across_all = []
20
 
 
 
21 repos = FossilRepository.objects.filter(deleted_at__isnull=True)
22 for repo in repos:
23 if not repo.exists_on_disk:
24 continue
25 try:
@@ -35,10 +39,16 @@
35 else:
36 for i, c in enumerate(activity):
37 if i < len(system_activity):
38 system_activity[i] += c["count"]
39
 
 
 
 
 
 
40 commits = reader.get_timeline(limit=3, event_type="ci")
41 for c in commits:
42 recent_across_all.append({"project": repo.project, "entry": c})
43 except Exception:
44 continue
@@ -45,10 +55,13 @@
45
46 # Sort recent across all by timestamp, take top 10
47 recent_across_all.sort(key=lambda x: x["entry"].timestamp, reverse=True)
48 recent_across_all = recent_across_all[:10]
49
 
 
 
50 return render(
51 request,
52 "dashboard.html",
53 {
54 "total_projects": total_projects,
@@ -55,8 +68,9 @@
55 "total_checkins": total_checkins,
56 "total_tickets": total_tickets,
57 "total_wiki": total_wiki,
58 "total_repos": repos.count(),
59 "system_activity_json": json.dumps(system_activity),
 
60 "recent_across_all": recent_across_all,
61 },
62 )
63
64 DDED fossil/api_auth.py
65 DDED fossil/api_views.py
--- core/views.py
+++ core/views.py
@@ -14,12 +14,16 @@
14 total_projects = Project.objects.count()
15 total_checkins = 0
16 total_tickets = 0
17 total_wiki = 0
18 system_activity = [] # weekly commit counts across all repos
19 heatmap_data = {} # {date_string: count} -- daily commit counts across all repos
20 recent_across_all = []
21
22 # NOTE: For large installations with many repos, this per-request aggregation
23 # could become slow. Consider caching heatmap_data with a short TTL (e.g. 5 min)
24 # via Django's cache framework if this becomes a bottleneck.
25 repos = FossilRepository.objects.filter(deleted_at__isnull=True)
26 for repo in repos:
27 if not repo.exists_on_disk:
28 continue
29 try:
@@ -35,10 +39,16 @@
39 else:
40 for i, c in enumerate(activity):
41 if i < len(system_activity):
42 system_activity[i] += c["count"]
43
44 # Aggregate daily activity for heatmap (single pass per repo)
45 daily = reader.get_daily_commit_activity(days=365)
46 for entry in daily:
47 date = entry["date"]
48 heatmap_data[date] = heatmap_data.get(date, 0) + entry["count"]
49
50 commits = reader.get_timeline(limit=3, event_type="ci")
51 for c in commits:
52 recent_across_all.append({"project": repo.project, "entry": c})
53 except Exception:
54 continue
@@ -45,10 +55,13 @@
55
56 # Sort recent across all by timestamp, take top 10
57 recent_across_all.sort(key=lambda x: x["entry"].timestamp, reverse=True)
58 recent_across_all = recent_across_all[:10]
59
60 # Convert heatmap to sorted list for the template
61 heatmap_json = json.dumps(sorted([{"date": d, "count": c} for d, c in heatmap_data.items()], key=lambda x: x["date"]))
62
63 return render(
64 request,
65 "dashboard.html",
66 {
67 "total_projects": total_projects,
@@ -55,8 +68,9 @@
68 "total_checkins": total_checkins,
69 "total_tickets": total_tickets,
70 "total_wiki": total_wiki,
71 "total_repos": repos.count(),
72 "system_activity_json": json.dumps(system_activity),
73 "heatmap_json": heatmap_json,
74 "recent_across_all": recent_across_all,
75 },
76 )
77
78 DDED fossil/api_auth.py
79 DDED fossil/api_views.py
--- a/fossil/api_auth.py
+++ b/fossil/api_auth.py
@@ -0,0 +1,38 @@
1
+"""API authentication for both project-scoped and user-scoped tokens.
2
+
3
+Supports:
4
+1. Project-scoped APIToken (tied to a FossilRepository)
5
+2. U (tied to a Django User)o User) — scopes enforced
6
+3. Session auth fallback (for browser testing)forced on mutating requests
7
+utils import timezone
8
+
9
+
10
+def authenticate_reque):
11
+ """Authenticate an API requn_or_none, error_response_or_none).
12
+ If error_response is not None, return it immediately.
13
+ """
14
+ auth = request.META.get("HTTP_AUTHORIZATION", "")
15
+ if not auth.startswith("Bearer "):
16
+ # Fall back to session auth — session users have fulken-based callers.
17
+}, status=403)
18
+ return request.user, None, None
19
+ return None, None, JsonResponse({"error": "Authentication required"}, status=401)
20
+
21
+ raw_token = auth[7:]
22
+
23
+ # Try project-scoped APIToken first (only if repository is provided)
24
+ if repository:
25
+ from fossil.api_tokens import APIToken
26
+
27
+ token_hash = APIToken.hash_token(raw_token)
28
+ try:
29
+ token = APIToken.objects.get(token_hash=token_hash, repository=repository, deleted_at__isnull=True)
30
+ if token.expires_at and token.expires_at < timezone.now():
31
+ return None, None, JsonResponse({"error": "Token expired" # No userrn requsspt for token-based callers.
32
+}, status=403)
33
+ return request.user, None, None
34
+ return None, None, JsonResponse({"error": "Authentication required"}, status=401)
35
+
36
+ raw_token = auth[7:]
37
+
38
+ # Try project-sc
--- a/fossil/api_auth.py
+++ b/fossil/api_auth.py
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/api_auth.py
+++ b/fossil/api_auth.py
@@ -0,0 +1,38 @@
1 """API authentication for both project-scoped and user-scoped tokens.
2
3 Supports:
4 1. Project-scoped APIToken (tied to a FossilRepository)
5 2. U (tied to a Django User)o User) — scopes enforced
6 3. Session auth fallback (for browser testing)forced on mutating requests
7 utils import timezone
8
9
10 def authenticate_reque):
11 """Authenticate an API requn_or_none, error_response_or_none).
12 If error_response is not None, return it immediately.
13 """
14 auth = request.META.get("HTTP_AUTHORIZATION", "")
15 if not auth.startswith("Bearer "):
16 # Fall back to session auth — session users have fulken-based callers.
17 }, status=403)
18 return request.user, None, None
19 return None, None, JsonResponse({"error": "Authentication required"}, status=401)
20
21 raw_token = auth[7:]
22
23 # Try project-scoped APIToken first (only if repository is provided)
24 if repository:
25 from fossil.api_tokens import APIToken
26
27 token_hash = APIToken.hash_token(raw_token)
28 try:
29 token = APIToken.objects.get(token_hash=token_hash, repository=repository, deleted_at__isnull=True)
30 if token.expires_at and token.expires_at < timezone.now():
31 return None, None, JsonResponse({"error": "Token expired" # No userrn requsspt for token-based callers.
32 }, status=403)
33 return request.user, None, None
34 return None, None, JsonResponse({"error": "Authentication required"}, status=401)
35
36 raw_token = auth[7:]
37
38 # Try project-sc
--- a/fossil/api_views.py
+++ b/fossil/api_views.py
@@ -0,0 +1,365 @@
1
+"""JSON API endpoints for programmatic access to Fossil repositories.
2
+
3
+All endpoints live under /projects/<slug>/fossil/api/.
4
+Auth: Bearer token (APIToken or PersonalAccessToken) or session cookie.
5
+All responses are JSON. All read endpoints check can_read_project.
6
+"""
7
+
8
+import math
9
+
10
+from djan
11
+from django.shortcuts import getviews.decorators.csrf import csrf_exempt
12
+from django.views.decorators.http import require_GET
13
+
14
+from fossil.api_auth import authenticate_request
15
+from fossil.models import FossilRepository
16
+from fossil.reader import FossilReader
17
+from projects.access import can_ort can_admindpoints fo"""Jwas already checked.
18
+ if token is not None and user is None:
19
+ return user, token, None
20
+
21
+ # For user-scoped auth (PAT or session), check project visibility
22
+ if user is not None:
23
+ if required_scope == "write" and not can_write_project(user, project):
24
+ return None, None, JsonResponse({"error": "Write access required"}, status=403)
25
+ if not can_read_project(user, project):
26
+ return None, None, JsonResponse({"error": "Access denied"}, status=403)
27
+
28
+ return user, token, None
29
+
30
+
31
+def _paginate_params(request, default_per_page=25, max_per_page=100):
32
+ """Extract and validate page/per_page from query params."""
33
+ try:
34
+ page = max(1, int(request.GET.get("page", "1")))
35
+ except (ValueError, TypeError):
36
+ page = 1
37
+ try:
38
+ per_page = min(max_per_page, max(1, int(request.GET.get("per_page", str(default_per_page)))))
39
+ except (ValueError, TypeError):
40
+ per_page = default_per_page
41
+ return page, per_page
42
+
43
+
44
+def _isoformat(dt):
45
+ """Safely format a datetime to ISO 8601, or None."""
46
+ if dt is None:
47
+ return None
48
+ return dt.isoformat()
49
+
50
+
51
+# --- API Documentation ---
52
+
53
+
54
+@csrf_exempt
55
+@require_GET
56
+def api_docs(request, slug):
57
+ """Return JSON listing all available API endpoints with descriptions."""
58
+ base = f"/projects/{slug}/fossil/api"
59
+ return JsonResponse(
60
+ {
61
+ "endpoints": [
62
+ {"method": "GET", "path": f"{base}/project", "description": "Project metadata"},
63
+ {
64
+ "method": "GET",
65
+ "path": f"{base}/timeline",
66
+ "description": "Recent checkins (paginated)",
67
+ "params": "page, per_page, branch",
68
+ },
69
+ {
70
+ "method": "GET",
71
+ "path": f"{base}/tickets",
72
+ "description": "Ticket list (paginated, filterable)",
73
+ "params": "page, per_page, status",
74
+ },
75
+ {"method": "GET", "path": f"{base}/tickets/<uuid>", "description": "Single ticket detail with comments"},
76
+ {"method": "GET", "path": f"{base}/wiki", "description": "Wiki page list"},
77
+ {"method": "GET", "path": f"{base}/wiki/<name>", "description": "Single wiki page with content"},
78
+ {"method": "GET", "path": f"{base}/branches", "description": "Branch list"},
79
+ {"method": "GET", "path": f"{base}/tags", "description": "Tag list"},
80
+ {"method": "GET", "path": f"{base}/releases", "description": "Release list"},
81
+ {"method": "GET", "path": f"{base}/search", "description": "Search across checkins, tickets, wiki", "params": "q"},
82
+ {
83
+ "method": "POST",
84
+ "path": f"{base}/batch",
85
+ "description": "Execute multiple API calls in a single request (max 25)",
86
+ "body": '{"requests": [{"method": "GET", "path": "/api/timeline", "params": {}}]}',
87
+ },
88
+ {"method": "GET", "path": f"{base}/workspaces", "description": "List agent workspaces", "params": "status"},
89
+ {
90
+ "method": "POST",
91
+ "path": f"{base}/workspaces/create",
92
+ "description": "Create an isolated agent workspace",
93
+ "body": '{"name": "...", "description": "...", "agent_id": "..."}',
94
+ },
95
+ {"method": "GET", "path": f"{base}/workspaces/<name>", "description": "Get workspace details"},
96
+ {
97
+ "method": "POST",
98
+ "path": f"{base}/workspaces/<name>/commit",
99
+ "description": "Commit changes in a workspace",
100
+ "body": '{"message": "...", "files": []}',
101
+ },
102
+ {
103
+ "method": "POST",
104
+ "path": f"{base}/workspaces/<name>/merge",
105
+ "description": "Merge workspace branch back to trunk",
106
+ "body": '{"target_branch": "trunk"}',
107
+ },
108
+ {
109
+ "method": "DELETE",
110
+ "path": f"{base}/workspaces/<name>/abandon",
111
+ "description": "Abandon and clean up a workspace",
112
+ },
113
+ {
114
+ "method": "POST",
115
+ "path": f"{base}/tickets/<uuid>/claim",
116
+ "description": "Claim a ticket for exclusive agent work",
117
+ "body": '{"agent_id": "...", "workspace": "..."}',
118
+ },
119
+ {
120
+ "method": "POST",
121
+ "path": f"{base}/tickets/<uuid>/release",
122
+ "description": "Release a ticket claim",
123
+ },
124
+ {
125
+ "method": "POST",
126
+ "path": f"{base}/tickets/<uuid>/submit",
127
+ "description": "Submit completed work for a claimed ticket",
128
+ "body": '{"summary": "...", "files_changed": [...]}',
129
+ },
130
+ {
131
+ "method": "GET",
132
+ "path": f"{base}/tickets/unclaimed",
133
+ "description": "List tickets not claimed by any agent",
134
+ "params": "status, limit",
135
+ },
136
+ {"method": "GET", "path": f"{base}/events", "description": "Server-Sent Events stream for real-time events"},
137
+ {
138
+ "method": "POST",
139
+ "path": f"{base}/reviews/create",
140
+ "description": "Submit code changes for review",
141
+ "body": '{"title": "...", "diff": "...", "files_changed": [...], "agent_id": "..."}',
142
+ },
143
+ {
144
+ "method": "GET",
145
+ "path": f"{base}/reviews",
146
+ "description": "List code reviews",
147
+ "params": "status, page, per_page",
148
+ },
149
+ {"method": "GET", "path": f"{base}/reviews/<id>", "description": "Get review with comments"},
150
+ {
151
+ "method": "POST",
152
+ "path": f"{base}/reviews/<id>/comment",
153
+ "description": "Add a comment to a review",
154
+ "body": '{"body": "...", "file_path": "...", "line_number": 42, "author": "..."}',
155
+ },
156
+ {"method": "POST", "path": f"{base}/reviews/<id>/approve", "description": "Approve a review"},
157
+ {"method": "POST", "path": f"{base}/reviews/<id>/request-changes", "description": "Request changes on a review"},
158
+ {"method": "POST", "path": f"{base}/reviews/<id>/merge", "description": "Merge an approved review"},
159
+ ],
160
+ "auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
161
+ }
162
+ )
163
+
164
+
165
+# --- Project Metadata ---
166
+
167
+
168
+@csrf_exempt
169
+@require_GET
170
+def api_project(request, slug):
171
+ """Return project metadata as JSON."""
172
+ project, repo = _get_repo(slug)
173
+ user, token, err = _check_api_auth(request, project, repo)
174
+ if err is not None:
175
+ return err
176
+
177
+ return JsonResponse(
178
+ {
179
+ "name": project.name,
180
+ "slug": project.slug,
181
+ "description": project.description or "",
182
+ "visibility": project.visibility,
183
+ "star_count": project.star_count,
184
+ }
185
+ )
186
+
187
+
188
+# --- Timeline ---
189
+
190
+
191
+@csrf_exempt
192
+@require_GEethod": "GET", "pogger(__name__)
193
+
194
+
195
+def _get_repo(slug):
196
+ """Look up project and repository by slug, or return 404 JSON."""
197
+ project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
198
+ repo = get_object_or_404(FossilRepository, project=project, deleted_at__isnull=True)
199
+ return project, repo
200
+
201
+
202
+def _check_api_api_auth(requeread acces "write" — enforced on both API tokens and PAT scopes.
203
+
204
+ Returns (user, token, error_response). If error_response is not None,
205
+ the caller should return it immediately.
206
+ """
207
+ user, tokepoints for pro"""JSON API endpoints for programmatic access to Fossil repositories.
208
+
209
+All endpointsalreadyuser), the token itself gr. try:
210
+ y checked.
211
+ if token is not None and user is None:
212
+ return user, token, None
213
+
214
+ # For user-scoped auth (PAT or session), check project visibil andd"}, status=403)
215
+ and not can_writAccess denid.
216
+ if token is not Non.views.decorators.http import require_GET
217
+
218
+from fossil.api_auth import authenticate_request
219
+from fossil.models import FossilRepository
220
+from fossil.reader import FossilReader
221
+from projects.access import can_ort can_admin_project, can_read_project, can_write_project
222
+from projects.models import Project
223
+
224
+logger = logging.getLogger(__name__)
225
+
226
+
227
+def _get_repo(slug):
228
+ """Look up project and repository by slug, or return 404 JSON."""
229
+ project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
230
+ repo = get_object_or_404(FossilRepository, project=project, deleted_at__isn"path": f"{base}/tickets/unclaimed",
231
+ "description": "List tickets not claimed by any agent",
232
+ "params": "status, limit",
233
+ },
234
+ {"method": "GET", "path": f"{base}/events", "description": "Server-Sent Events stream for real-time events"},
235
+ {
236
+ "method": "POST",
237
+ "path": f"{base}/reviews/create",
238
+ "description": "Submit code changes for review",
239
+ "body": '{"title": "...", "diff": "...", "files_changed": [...], "agent_id": "..."}',
240
+ },
241
+ {
242
+ "method": "GET",
243
+ "path": f"{base}/reviews",
244
+ "description": "List code reviews",
245
+ "params": "status, page, per_page",
246
+ },
247
+ {"method": "GET", "path": f"{base}/reviews/<id>", "description": "Get review with comments"},
248
+ {
249
+ "method": "POST",
250
+ "path": f"{base}/reviews/<id>/comment",
251
+ "description": "Add a comment to a review",
252
+ "body": '{"body": "...", "file_path": "...", "line_number": 42, "author": "..."}',
253
+ },
254
+ {"method": "POST", "path": f"{base}/reviews/<id>/approve", "description": "Approve a review"},
255
+ {"method": "POST", "path": f"{base}/reviews/<id>/request-changes", "description": "Request changes on a review"},
256
+ {"method": "POST", "path": f"{base}/reviews/<id>/merge", "description": "Merge an approved review"},
257
+ ],
258
+ "auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
259
+ }
260
+ )
261
+
262
+
263
+# --- Project Metadata ---
264
+
265
+
266
+@csrf_exempt
267
+@require_GET
268
+def api_project(request, slug):
269
+ """Return project metadata as JSON."""
270
+ project, repo = _get_repo(slug)
271
+ user, token, err = _check_api_auth(request, project, repo)
272
+ if err is not None:
273
+ return err
274
+
275
+ return JsonResponse(
276
+ {
277
+ "name": project.name,
278
+ "slug": project.slug,
279
+ "description": project.description or "",
280
+ "visibility": project.visibility,
281
+ "star_count": project.star_count,
282
+ }
283
+ )
284
+
285
+
286
+# --- Timeline ---
287
+
288
+
289
+@csrf_exempt
290
+@require_GET
291
+def api_timeline(request, slug):
292
+ """Return recent checkins as JSON, paginated."""
293
+ project, repo = _get_repo(slug)
294
+ user, token, err = _check_api_auth(request, project, repo)
295
+ if err is not None:
296
+ return err
297
+
298
+ page, per_page = _paginate_params(request)
299
+ branch_filter = request.GET.get("branch", "").strip()
300
+ offset = (page - 1) * per_page
301
+
302
+ reader = FossilReader(repo.full_path)
303
+ with reader:
304
+ entries = reader.get_timeline(limit=per_page, offset=offset, event_type="ci")
305
+ total = reader.get_checkin_count()
306
+
307
+ checkins = []
308
+ for e in entries:
309
+ entry = {
310
+ "uuid": e.uuid,
311
+ "timestamp": _isoformat(e.timestamp),
312
+ "user": e.user,
313
+ "comment": e.comment,
314
+ "branch": e.branch,
315
+ }
316
+ checkins.append(entry)
317
+
318
+ # If branch filter is set, filter in Python (Fossil's timeline query
319
+ # doesn't support branch filtering at the SQL level without extra joins).
320
+ if branch_filter:
321
+ checkins = [c for c in checkins if c["branch"] == branch_filter]
322
+
323
+ total_pages = max(1, math.ceil(total / per_page))
324
+
325
+ return JsonResponse(
326
+ {
327
+ "checkins": checkins,
328
+ "total": total,
329
+ "page": page,
330
+ "per_page": per_page,
331
+ "total_pages": total_pages,
332
+ }
333
+ )
334
+
335
+
336
+# --- Tickets ---
337
+
338
+
339
+@csrf_exempt
340
+@require_GET
341
+def api_tickets(request, slug):
342
+ """Return ticket list as JSON, paginated and filterable by status."""
343
+ project, repo = _get_repo(slug)
344
+ user, token, err = _check_api_auth(request, project, repo)
345
+ if err is not None:
346
+ return err
347
+
348
+ page, per_page = _paginate_params(request)
349
+ status_filter = request.GET.get("status", "").strip() or None
350
+
351
+ reader = FossilReader(repo.full_path)
352
+ with reader:
353
+ all_tickets = reader.get_tickets(status=status_filter, limit=1000)
354
+
355
+ total = len(all_tickets)
356
+ total_pages = max(1, math.ceil(total / per_page))
357
+ page = min(page, total_pages)
358
+ page_tickets = all_tickets[(page - 1) * per_page : page * per_page]
359
+
360
+ tickets = []
361
+ for t in page_tickets:
362
+ tickets.append(
363
+ {
364
+ "uuid": t.uuid,
365
+ "title": t.tit
--- a/fossil/api_views.py
+++ b/fossil/api_views.py
@@ -0,0 +1,365 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/fossil/api_views.py
+++ b/fossil/api_views.py
@@ -0,0 +1,365 @@
1 """JSON API endpoints for programmatic access to Fossil repositories.
2
3 All endpoints live under /projects/<slug>/fossil/api/.
4 Auth: Bearer token (APIToken or PersonalAccessToken) or session cookie.
5 All responses are JSON. All read endpoints check can_read_project.
6 """
7
8 import math
9
10 from djan
11 from django.shortcuts import getviews.decorators.csrf import csrf_exempt
12 from django.views.decorators.http import require_GET
13
14 from fossil.api_auth import authenticate_request
15 from fossil.models import FossilRepository
16 from fossil.reader import FossilReader
17 from projects.access import can_ort can_admindpoints fo"""Jwas already checked.
18 if token is not None and user is None:
19 return user, token, None
20
21 # For user-scoped auth (PAT or session), check project visibility
22 if user is not None:
23 if required_scope == "write" and not can_write_project(user, project):
24 return None, None, JsonResponse({"error": "Write access required"}, status=403)
25 if not can_read_project(user, project):
26 return None, None, JsonResponse({"error": "Access denied"}, status=403)
27
28 return user, token, None
29
30
31 def _paginate_params(request, default_per_page=25, max_per_page=100):
32 """Extract and validate page/per_page from query params."""
33 try:
34 page = max(1, int(request.GET.get("page", "1")))
35 except (ValueError, TypeError):
36 page = 1
37 try:
38 per_page = min(max_per_page, max(1, int(request.GET.get("per_page", str(default_per_page)))))
39 except (ValueError, TypeError):
40 per_page = default_per_page
41 return page, per_page
42
43
44 def _isoformat(dt):
45 """Safely format a datetime to ISO 8601, or None."""
46 if dt is None:
47 return None
48 return dt.isoformat()
49
50
51 # --- API Documentation ---
52
53
54 @csrf_exempt
55 @require_GET
56 def api_docs(request, slug):
57 """Return JSON listing all available API endpoints with descriptions."""
58 base = f"/projects/{slug}/fossil/api"
59 return JsonResponse(
60 {
61 "endpoints": [
62 {"method": "GET", "path": f"{base}/project", "description": "Project metadata"},
63 {
64 "method": "GET",
65 "path": f"{base}/timeline",
66 "description": "Recent checkins (paginated)",
67 "params": "page, per_page, branch",
68 },
69 {
70 "method": "GET",
71 "path": f"{base}/tickets",
72 "description": "Ticket list (paginated, filterable)",
73 "params": "page, per_page, status",
74 },
75 {"method": "GET", "path": f"{base}/tickets/<uuid>", "description": "Single ticket detail with comments"},
76 {"method": "GET", "path": f"{base}/wiki", "description": "Wiki page list"},
77 {"method": "GET", "path": f"{base}/wiki/<name>", "description": "Single wiki page with content"},
78 {"method": "GET", "path": f"{base}/branches", "description": "Branch list"},
79 {"method": "GET", "path": f"{base}/tags", "description": "Tag list"},
80 {"method": "GET", "path": f"{base}/releases", "description": "Release list"},
81 {"method": "GET", "path": f"{base}/search", "description": "Search across checkins, tickets, wiki", "params": "q"},
82 {
83 "method": "POST",
84 "path": f"{base}/batch",
85 "description": "Execute multiple API calls in a single request (max 25)",
86 "body": '{"requests": [{"method": "GET", "path": "/api/timeline", "params": {}}]}',
87 },
88 {"method": "GET", "path": f"{base}/workspaces", "description": "List agent workspaces", "params": "status"},
89 {
90 "method": "POST",
91 "path": f"{base}/workspaces/create",
92 "description": "Create an isolated agent workspace",
93 "body": '{"name": "...", "description": "...", "agent_id": "..."}',
94 },
95 {"method": "GET", "path": f"{base}/workspaces/<name>", "description": "Get workspace details"},
96 {
97 "method": "POST",
98 "path": f"{base}/workspaces/<name>/commit",
99 "description": "Commit changes in a workspace",
100 "body": '{"message": "...", "files": []}',
101 },
102 {
103 "method": "POST",
104 "path": f"{base}/workspaces/<name>/merge",
105 "description": "Merge workspace branch back to trunk",
106 "body": '{"target_branch": "trunk"}',
107 },
108 {
109 "method": "DELETE",
110 "path": f"{base}/workspaces/<name>/abandon",
111 "description": "Abandon and clean up a workspace",
112 },
113 {
114 "method": "POST",
115 "path": f"{base}/tickets/<uuid>/claim",
116 "description": "Claim a ticket for exclusive agent work",
117 "body": '{"agent_id": "...", "workspace": "..."}',
118 },
119 {
120 "method": "POST",
121 "path": f"{base}/tickets/<uuid>/release",
122 "description": "Release a ticket claim",
123 },
124 {
125 "method": "POST",
126 "path": f"{base}/tickets/<uuid>/submit",
127 "description": "Submit completed work for a claimed ticket",
128 "body": '{"summary": "...", "files_changed": [...]}',
129 },
130 {
131 "method": "GET",
132 "path": f"{base}/tickets/unclaimed",
133 "description": "List tickets not claimed by any agent",
134 "params": "status, limit",
135 },
136 {"method": "GET", "path": f"{base}/events", "description": "Server-Sent Events stream for real-time events"},
137 {
138 "method": "POST",
139 "path": f"{base}/reviews/create",
140 "description": "Submit code changes for review",
141 "body": '{"title": "...", "diff": "...", "files_changed": [...], "agent_id": "..."}',
142 },
143 {
144 "method": "GET",
145 "path": f"{base}/reviews",
146 "description": "List code reviews",
147 "params": "status, page, per_page",
148 },
149 {"method": "GET", "path": f"{base}/reviews/<id>", "description": "Get review with comments"},
150 {
151 "method": "POST",
152 "path": f"{base}/reviews/<id>/comment",
153 "description": "Add a comment to a review",
154 "body": '{"body": "...", "file_path": "...", "line_number": 42, "author": "..."}',
155 },
156 {"method": "POST", "path": f"{base}/reviews/<id>/approve", "description": "Approve a review"},
157 {"method": "POST", "path": f"{base}/reviews/<id>/request-changes", "description": "Request changes on a review"},
158 {"method": "POST", "path": f"{base}/reviews/<id>/merge", "description": "Merge an approved review"},
159 ],
160 "auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
161 }
162 )
163
164
165 # --- Project Metadata ---
166
167
168 @csrf_exempt
169 @require_GET
170 def api_project(request, slug):
171 """Return project metadata as JSON."""
172 project, repo = _get_repo(slug)
173 user, token, err = _check_api_auth(request, project, repo)
174 if err is not None:
175 return err
176
177 return JsonResponse(
178 {
179 "name": project.name,
180 "slug": project.slug,
181 "description": project.description or "",
182 "visibility": project.visibility,
183 "star_count": project.star_count,
184 }
185 )
186
187
188 # --- Timeline ---
189
190
191 @csrf_exempt
192 @require_GEethod": "GET", "pogger(__name__)
193
194
195 def _get_repo(slug):
196 """Look up project and repository by slug, or return 404 JSON."""
197 project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
198 repo = get_object_or_404(FossilRepository, project=project, deleted_at__isnull=True)
199 return project, repo
200
201
202 def _check_api_api_auth(requeread acces "write" — enforced on both API tokens and PAT scopes.
203
204 Returns (user, token, error_response). If error_response is not None,
205 the caller should return it immediately.
206 """
207 user, tokepoints for pro"""JSON API endpoints for programmatic access to Fossil repositories.
208
209 All endpointsalreadyuser), the token itself gr. try:
210 y checked.
211 if token is not None and user is None:
212 return user, token, None
213
214 # For user-scoped auth (PAT or session), check project visibil andd"}, status=403)
215 and not can_writAccess denid.
216 if token is not Non.views.decorators.http import require_GET
217
218 from fossil.api_auth import authenticate_request
219 from fossil.models import FossilRepository
220 from fossil.reader import FossilReader
221 from projects.access import can_ort can_admin_project, can_read_project, can_write_project
222 from projects.models import Project
223
224 logger = logging.getLogger(__name__)
225
226
227 def _get_repo(slug):
228 """Look up project and repository by slug, or return 404 JSON."""
229 project = get_object_or_404(Project, slug=slug, deleted_at__isnull=True)
230 repo = get_object_or_404(FossilRepository, project=project, deleted_at__isn"path": f"{base}/tickets/unclaimed",
231 "description": "List tickets not claimed by any agent",
232 "params": "status, limit",
233 },
234 {"method": "GET", "path": f"{base}/events", "description": "Server-Sent Events stream for real-time events"},
235 {
236 "method": "POST",
237 "path": f"{base}/reviews/create",
238 "description": "Submit code changes for review",
239 "body": '{"title": "...", "diff": "...", "files_changed": [...], "agent_id": "..."}',
240 },
241 {
242 "method": "GET",
243 "path": f"{base}/reviews",
244 "description": "List code reviews",
245 "params": "status, page, per_page",
246 },
247 {"method": "GET", "path": f"{base}/reviews/<id>", "description": "Get review with comments"},
248 {
249 "method": "POST",
250 "path": f"{base}/reviews/<id>/comment",
251 "description": "Add a comment to a review",
252 "body": '{"body": "...", "file_path": "...", "line_number": 42, "author": "..."}',
253 },
254 {"method": "POST", "path": f"{base}/reviews/<id>/approve", "description": "Approve a review"},
255 {"method": "POST", "path": f"{base}/reviews/<id>/request-changes", "description": "Request changes on a review"},
256 {"method": "POST", "path": f"{base}/reviews/<id>/merge", "description": "Merge an approved review"},
257 ],
258 "auth": "Bearer token (Authorization: Bearer <token>) or session cookie",
259 }
260 )
261
262
263 # --- Project Metadata ---
264
265
266 @csrf_exempt
267 @require_GET
268 def api_project(request, slug):
269 """Return project metadata as JSON."""
270 project, repo = _get_repo(slug)
271 user, token, err = _check_api_auth(request, project, repo)
272 if err is not None:
273 return err
274
275 return JsonResponse(
276 {
277 "name": project.name,
278 "slug": project.slug,
279 "description": project.description or "",
280 "visibility": project.visibility,
281 "star_count": project.star_count,
282 }
283 )
284
285
286 # --- Timeline ---
287
288
289 @csrf_exempt
290 @require_GET
291 def api_timeline(request, slug):
292 """Return recent checkins as JSON, paginated."""
293 project, repo = _get_repo(slug)
294 user, token, err = _check_api_auth(request, project, repo)
295 if err is not None:
296 return err
297
298 page, per_page = _paginate_params(request)
299 branch_filter = request.GET.get("branch", "").strip()
300 offset = (page - 1) * per_page
301
302 reader = FossilReader(repo.full_path)
303 with reader:
304 entries = reader.get_timeline(limit=per_page, offset=offset, event_type="ci")
305 total = reader.get_checkin_count()
306
307 checkins = []
308 for e in entries:
309 entry = {
310 "uuid": e.uuid,
311 "timestamp": _isoformat(e.timestamp),
312 "user": e.user,
313 "comment": e.comment,
314 "branch": e.branch,
315 }
316 checkins.append(entry)
317
318 # If branch filter is set, filter in Python (Fossil's timeline query
319 # doesn't support branch filtering at the SQL level without extra joins).
320 if branch_filter:
321 checkins = [c for c in checkins if c["branch"] == branch_filter]
322
323 total_pages = max(1, math.ceil(total / per_page))
324
325 return JsonResponse(
326 {
327 "checkins": checkins,
328 "total": total,
329 "page": page,
330 "per_page": per_page,
331 "total_pages": total_pages,
332 }
333 )
334
335
336 # --- Tickets ---
337
338
339 @csrf_exempt
340 @require_GET
341 def api_tickets(request, slug):
342 """Return ticket list as JSON, paginated and filterable by status."""
343 project, repo = _get_repo(slug)
344 user, token, err = _check_api_auth(request, project, repo)
345 if err is not None:
346 return err
347
348 page, per_page = _paginate_params(request)
349 status_filter = request.GET.get("status", "").strip() or None
350
351 reader = FossilReader(repo.full_path)
352 with reader:
353 all_tickets = reader.get_tickets(status=status_filter, limit=1000)
354
355 total = len(all_tickets)
356 total_pages = max(1, math.ceil(total / per_page))
357 page = min(page, total_pages)
358 page_tickets = all_tickets[(page - 1) * per_page : page * per_page]
359
360 tickets = []
361 for t in page_tickets:
362 tickets.append(
363 {
364 "uuid": t.uuid,
365 "title": t.tit
--- fossil/reader.py
+++ fossil/reader.py
@@ -472,10 +472,36 @@
472472
for w in range(weeks - 1, -1, -1):
473473
activity.append({"week": w, "count": counts.get(w, 0)})
474474
except sqlite3.OperationalError:
475475
pass
476476
return activity
477
+
478
+ def get_daily_commit_activity(self, days: int = 365) -> list[dict]:
479
+ """Get daily commit counts for the last N days. Returns [{date, count}].
480
+
481
+ Each entry has a date string (YYYY-MM-DD) and the number of checkins on that day.
482
+ Used for heatmap visualizations.
483
+ """
484
+ activity = []
485
+ try:
486
+ rows = self.conn.execute(
487
+ """
488
+ SELECT date(event.mtime - 0.5) as day, count(*) as cnt
489
+ FROM event
490
+ WHERE event.type = 'ci'
491
+ AND event.mtime > julianday('now') - ?
492
+ GROUP BY day
493
+ ORDER BY day
494
+ """,
495
+ (days,),
496
+ ).fetchall()
497
+ for r in rows:
498
+ if r["day"]:
499
+ activity.append({"date": r["day"], "count": r["cnt"]})
500
+ except sqlite3.OperationalError:
501
+ pass
502
+ return activity
477503
478504
def get_top_contributors(self, limit: int = 10) -> list[dict]:
479505
"""Get top contributors by checkin count."""
480506
contributors = []
481507
try:
482508
--- fossil/reader.py
+++ fossil/reader.py
@@ -472,10 +472,36 @@
472 for w in range(weeks - 1, -1, -1):
473 activity.append({"week": w, "count": counts.get(w, 0)})
474 except sqlite3.OperationalError:
475 pass
476 return activity
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
477
478 def get_top_contributors(self, limit: int = 10) -> list[dict]:
479 """Get top contributors by checkin count."""
480 contributors = []
481 try:
482
--- fossil/reader.py
+++ fossil/reader.py
@@ -472,10 +472,36 @@
472 for w in range(weeks - 1, -1, -1):
473 activity.append({"week": w, "count": counts.get(w, 0)})
474 except sqlite3.OperationalError:
475 pass
476 return activity
477
def get_daily_commit_activity(self, days: int = 365) -> list[dict]:
    """Daily checkin counts over the trailing *days* window.

    Returns a date-sorted list of ``{"date": "YYYY-MM-DD", "count": int}``
    entries, one per day that had at least one checkin; days with no
    activity are simply absent (callers such as the heatmap fill gaps).
    A repository without an ``event`` table yields an empty list.
    """
    sql = """
            SELECT date(event.mtime - 0.5) as day, count(*) as cnt
            FROM event
            WHERE event.type = 'ci'
            AND event.mtime > julianday('now') - ?
            GROUP BY day
            ORDER BY day
            """
    try:
        rows = self.conn.execute(sql, (days,)).fetchall()
    except sqlite3.OperationalError:
        # Sparse or legacy repos may lack the event table — fail soft.
        return []
    # NOTE(review): the "- 0.5" shifts the Julian-day timestamp back 12h
    # before taking the calendar date; it matches the other activity
    # queries in this reader — confirm this day-boundary convention is
    # intentional before changing it.
    return [{"date": r["day"], "count": r["cnt"]} for r in rows if r["day"]]
503
504 def get_top_contributors(self, limit: int = 10) -> list[dict]:
505 """Get top contributors by checkin count."""
506 contributors = []
507 try:
508
+14 -1
--- fossil/urls.py
+++ fossil/urls.py
@@ -1,12 +1,25 @@
11
from django.urls import path
22
3
-from . import views
3
+from . import api_views, views
44
55
app_name = "fossil"
66
77
urlpatterns = [
8
+ # JSON API
9
+ path("api/", api_views.api_docs, name="api_docs"),
10
+ path("api/project", api_views.api_project, name="api_project"),
11
+ path("api/timeline", api_views.api_timeline, name="api_timeline"),
12
+ path("api/tickets", api_views.api_tickets, name="api_tickets"),
13
+ path("api/tickets/<str:ticket_uuid>", api_views.api_ticket_detail, name="api_ticket_detail"),
14
+ path("api/wiki", api_views.api_wiki_list, name="api_wiki_list"),
15
+ path("api/wiki/<path:page_name>", api_views.api_wiki_page, name="api_wiki_page"),
16
+ path("api/branches", api_views.api_branches, name="api_branches"),
17
+ path("api/tags", api_views.api_tags, name="api_tags"),
18
+ path("api/releases", api_views.api_releases, name="api_releases"),
19
+ path("api/search", api_views.api_search, name="api_search"),
20
+ #
821
path("code/", views.code_browser, name="code"),
922
path("code/tree/<path:dirpath>/", views.code_browser, name="code_dir"),
1023
path("code/file/<path:filepath>", views.code_file, name="code_file"),
1124
path("timeline/", views.timeline, name="timeline"),
1225
path("checkin/<str:checkin_uuid>/", views.checkin_detail, name="checkin_detail"),
1326
--- fossil/urls.py
+++ fossil/urls.py
@@ -1,12 +1,25 @@
1 from django.urls import path
2
3 from . import views
4
5 app_name = "fossil"
6
7 urlpatterns = [
 
 
 
 
 
 
 
 
 
 
 
 
 
8 path("code/", views.code_browser, name="code"),
9 path("code/tree/<path:dirpath>/", views.code_browser, name="code_dir"),
10 path("code/file/<path:filepath>", views.code_file, name="code_file"),
11 path("timeline/", views.timeline, name="timeline"),
12 path("checkin/<str:checkin_uuid>/", views.checkin_detail, name="checkin_detail"),
13
--- fossil/urls.py
+++ fossil/urls.py
@@ -1,12 +1,25 @@
1 from django.urls import path
2
3 from . import api_views, views
4
5 app_name = "fossil"
6
7 urlpatterns = [
8 # JSON API
9 path("api/", api_views.api_docs, name="api_docs"),
10 path("api/project", api_views.api_project, name="api_project"),
11 path("api/timeline", api_views.api_timeline, name="api_timeline"),
12 path("api/tickets", api_views.api_tickets, name="api_tickets"),
13 path("api/tickets/<str:ticket_uuid>", api_views.api_ticket_detail, name="api_ticket_detail"),
14 path("api/wiki", api_views.api_wiki_list, name="api_wiki_list"),
15 path("api/wiki/<path:page_name>", api_views.api_wiki_page, name="api_wiki_page"),
16 path("api/branches", api_views.api_branches, name="api_branches"),
17 path("api/tags", api_views.api_tags, name="api_tags"),
18 path("api/releases", api_views.api_releases, name="api_releases"),
19 path("api/search", api_views.api_search, name="api_search"),
20 #
21 path("code/", views.code_browser, name="code"),
22 path("code/tree/<path:dirpath>/", views.code_browser, name="code_dir"),
23 path("code/file/<path:filepath>", views.code_file, name="code_file"),
24 path("timeline/", views.timeline, name="timeline"),
25 path("checkin/<str:checkin_uuid>/", views.checkin_detail, name="checkin_detail"),
26
--- templates/dashboard.html
+++ templates/dashboard.html
@@ -29,10 +29,27 @@
2929
<div class="rounded-lg bg-gray-800 border border-gray-700 p-4">
3030
<div class="text-2xl font-bold text-gray-100">{{ total_wiki|default:"0" }}</div>
3131
<div class="text-xs text-gray-500 mt-1">Wiki Pages</div>
3232
</div>
3333
</div>
34
+
35
+<!-- Activity heatmap (all projects, last year) -->
36
+{% if heatmap_json %}
37
+<div class="rounded-lg bg-gray-800 border border-gray-700 p-4 mb-6">
38
+ <h3 class="text-sm font-medium text-gray-300 mb-3">Activity (last year)</h3>
39
+ <div id="heatmap" class="overflow-x-auto"></div>
40
+ <div class="flex items-center justify-end gap-1 mt-2 text-xs text-gray-500">
41
+ <span>Less</span>
42
+ <span class="inline-block w-3 h-3 rounded-sm bg-gray-700"></span>
43
+ <span class="inline-block w-3 h-3 rounded-sm" style="background:#14532d"></span>
44
+ <span class="inline-block w-3 h-3 rounded-sm" style="background:#166534"></span>
45
+ <span class="inline-block w-3 h-3 rounded-sm" style="background:#22c55e"></span>
46
+ <span class="inline-block w-3 h-3 rounded-sm" style="background:#4ade80"></span>
47
+ <span>More</span>
48
+ </div>
49
+</div>
50
+{% endif %}
3451
3552
<div class="grid grid-cols-1 gap-6 lg:grid-cols-3">
3653
<!-- Main column -->
3754
<div class="lg:col-span-2 space-y-6">
3855
{% if system_activity_json and system_activity_json != "[]" %}
@@ -134,8 +151,89 @@
134151
x: { display: false, grid: { display: false } },
135152
y: { display: false, grid: { display: false }, beginAtZero: true }
136153
}
137154
}
138155
});
156
+</script>
157
+{% endif %}
158
+
159
+{% if heatmap_json %}
160
+<script>
161
+(function() {
162
+ var data = {{ heatmap_json|safe }};
163
+ var counts = {};
164
+ data.forEach(function(d) { counts[d.date] = d.count; });
165
+
166
+ // Generate 365 days ending today
167
+ var today = new Date();
168
+ var days = [];
169
+ for (var i = 364; i >= 0; i--) {
170
+ var d = new Date(today);
171
+ d.setDate(d.getDate() - i);
172
+ var key = d.toISOString().slice(0, 10);
173
+ days.push({ date: key, count: counts[key] || 0, dow: d.getDay() });
174
+ }
175
+
176
+ var cellSize = 12;
177
+ var cellGap = 2;
178
+ var step = cellSize + cellGap;
179
+ var labelWidth = 28;
180
+ var monthHeight = 16;
181
+
182
+ // The first day may not be Sunday (dow=0). We need to offset the first column.
183
+ var startDow = days[0].dow;
184
+ var totalSlots = days.length + startDow;
185
+ var weeks = Math.ceil(totalSlots / 7);
186
+ var svgWidth = labelWidth + weeks * step;
187
+ var svgHeight = monthHeight + 7 * step;
188
+
189
+ var svg = '<svg width="' + svgWidth + '" height="' + svgHeight + '" class="text-gray-500">';
190
+
191
+ // Day-of-week labels (Mon, Wed, Fri)
192
+ var dayLabels = ['', 'Mon', '', 'Wed', '', 'Fri', ''];
193
+ for (var di = 0; di < dayLabels.length; di++) {
194
+ if (dayLabels[di]) {
195
+ svg += '<text x="0" y="' + (monthHeight + di * step + cellSize - 2) + '" fill="currentColor" font-size="9" font-family="sans-serif">' + dayLabels[di] + '</text>';
196
+ }
197
+ }
198
+
199
+ // Month labels -- find the first occurrence of each month in the grid
200
+ var monthNames = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'];
201
+ var lastMonth = -1;
202
+ for (var mi = 0; mi < days.length; mi++) {
203
+ var monthNum = parseInt(days[mi].date.slice(5, 7), 10) - 1;
204
+ if (monthNum !== lastMonth) {
205
+ lastMonth = monthNum;
206
+ var weekIdx = Math.floor((mi + startDow) / 7);
207
+ var x = labelWidth + weekIdx * step;
208
+ svg += '<text x="' + x + '" y="10" fill="currentColor" font-size="9" font-family="sans-serif">' + monthNames[monthNum] + '</text>';
209
+ }
210
+ }
211
+
212
+ // Color scale
213
+ function getColor(count) {
214
+ if (count === 0) return '#1f2937';
215
+ if (count <= 2) return '#14532d';
216
+ if (count <= 5) return '#166534';
217
+ if (count <= 10) return '#22c55e';
218
+ return '#4ade80';
219
+ }
220
+
221
+ // Render cells
222
+ for (var ci = 0; ci < days.length; ci++) {
223
+ var day = days[ci];
224
+ var wk = Math.floor((ci + startDow) / 7);
225
+ var dow = (ci + startDow) % 7;
226
+ var cx = labelWidth + wk * step;
227
+ var cy = monthHeight + dow * step;
228
+ var color = getColor(day.count);
229
+ svg += '<rect x="' + cx + '" y="' + cy + '" width="' + cellSize + '" height="' + cellSize + '" rx="2" fill="' + color + '">';
230
+ svg += '<title>' + day.date + ': ' + day.count + ' commit' + (day.count !== 1 ? 's' : '') + '</title>';
231
+ svg += '</rect>';
232
+ }
233
+
234
+ svg += '</svg>';
235
+ document.getElementById('heatmap').innerHTML = svg;
236
+})();
139237
</script>
140238
{% endif %}
141239
{% endblock %}
142240
143241
ADDED tests/test_dashboard_heatmap.py
--- templates/dashboard.html
+++ templates/dashboard.html
@@ -29,10 +29,27 @@
29 <div class="rounded-lg bg-gray-800 border border-gray-700 p-4">
30 <div class="text-2xl font-bold text-gray-100">{{ total_wiki|default:"0" }}</div>
31 <div class="text-xs text-gray-500 mt-1">Wiki Pages</div>
32 </div>
33 </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
35 <div class="grid grid-cols-1 gap-6 lg:grid-cols-3">
36 <!-- Main column -->
37 <div class="lg:col-span-2 space-y-6">
38 {% if system_activity_json and system_activity_json != "[]" %}
@@ -134,8 +151,89 @@
134 x: { display: false, grid: { display: false } },
135 y: { display: false, grid: { display: false }, beginAtZero: true }
136 }
137 }
138 });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
139 </script>
140 {% endif %}
141 {% endblock %}
142
143 ADDED tests/test_dashboard_heatmap.py
--- templates/dashboard.html
+++ templates/dashboard.html
@@ -29,10 +29,27 @@
29 <div class="rounded-lg bg-gray-800 border border-gray-700 p-4">
30 <div class="text-2xl font-bold text-gray-100">{{ total_wiki|default:"0" }}</div>
31 <div class="text-xs text-gray-500 mt-1">Wiki Pages</div>
32 </div>
33 </div>
34
35 <!-- Activity heatmap (all projects, last year) -->
36 {% if heatmap_json %}
37 <div class="rounded-lg bg-gray-800 border border-gray-700 p-4 mb-6">
38 <h3 class="text-sm font-medium text-gray-300 mb-3">Activity (last year)</h3>
39 <div id="heatmap" class="overflow-x-auto"></div>
40 <div class="flex items-center justify-end gap-1 mt-2 text-xs text-gray-500">
41 <span>Less</span>
42 <span class="inline-block w-3 h-3 rounded-sm bg-gray-700"></span>
43 <span class="inline-block w-3 h-3 rounded-sm" style="background:#14532d"></span>
44 <span class="inline-block w-3 h-3 rounded-sm" style="background:#166534"></span>
45 <span class="inline-block w-3 h-3 rounded-sm" style="background:#22c55e"></span>
46 <span class="inline-block w-3 h-3 rounded-sm" style="background:#4ade80"></span>
47 <span>More</span>
48 </div>
49 </div>
50 {% endif %}
51
52 <div class="grid grid-cols-1 gap-6 lg:grid-cols-3">
53 <!-- Main column -->
54 <div class="lg:col-span-2 space-y-6">
55 {% if system_activity_json and system_activity_json != "[]" %}
@@ -134,8 +151,89 @@
151 x: { display: false, grid: { display: false } },
152 y: { display: false, grid: { display: false }, beginAtZero: true }
153 }
154 }
155 });
156 </script>
157 {% endif %}
158
159 {% if heatmap_json %}
160 <script>
161 (function() {
162 var data = {{ heatmap_json|safe }};
163 var counts = {};
164 data.forEach(function(d) { counts[d.date] = d.count; });
165
166 // Generate 365 days ending today
167 var today = new Date();
168 var days = [];
169 for (var i = 364; i >= 0; i--) {
170 var d = new Date(today);
171 d.setDate(d.getDate() - i);
172 var key = d.toISOString().slice(0, 10);
173 days.push({ date: key, count: counts[key] || 0, dow: d.getDay() });
174 }
175
176 var cellSize = 12;
177 var cellGap = 2;
178 var step = cellSize + cellGap;
179 var labelWidth = 28;
180 var monthHeight = 16;
181
182 // The first day may not be Sunday (dow=0). We need to offset the first column.
183 var startDow = days[0].dow;
184 var totalSlots = days.length + startDow;
185 var weeks = Math.ceil(totalSlots / 7);
186 var svgWidth = labelWidth + weeks * step;
187 var svgHeight = monthHeight + 7 * step;
188
189 var svg = '<svg width="' + svgWidth + '" height="' + svgHeight + '" class="text-gray-500">';
190
191 // Day-of-week labels (Mon, Wed, Fri)
192 var dayLabels = ['', 'Mon', '', 'Wed', '', 'Fri', ''];
193 for (var di = 0; di < dayLabels.length; di++) {
194 if (dayLabels[di]) {
195 svg += '<text x="0" y="' + (monthHeight + di * step + cellSize - 2) + '" fill="currentColor" font-size="9" font-family="sans-serif">' + dayLabels[di] + '</text>';
196 }
197 }
198
199 // Month labels -- find the first occurrence of each month in the grid
200 var monthNames = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'];
201 var lastMonth = -1;
202 for (var mi = 0; mi < days.length; mi++) {
203 var monthNum = parseInt(days[mi].date.slice(5, 7), 10) - 1;
204 if (monthNum !== lastMonth) {
205 lastMonth = monthNum;
206 var weekIdx = Math.floor((mi + startDow) / 7);
207 var x = labelWidth + weekIdx * step;
208 svg += '<text x="' + x + '" y="10" fill="currentColor" font-size="9" font-family="sans-serif">' + monthNames[monthNum] + '</text>';
209 }
210 }
211
212 // Color scale
213 function getColor(count) {
214 if (count === 0) return '#1f2937';
215 if (count <= 2) return '#14532d';
216 if (count <= 5) return '#166534';
217 if (count <= 10) return '#22c55e';
218 return '#4ade80';
219 }
220
221 // Render cells
222 for (var ci = 0; ci < days.length; ci++) {
223 var day = days[ci];
224 var wk = Math.floor((ci + startDow) / 7);
225 var dow = (ci + startDow) % 7;
226 var cx = labelWidth + wk * step;
227 var cy = monthHeight + dow * step;
228 var color = getColor(day.count);
229 svg += '<rect x="' + cx + '" y="' + cy + '" width="' + cellSize + '" height="' + cellSize + '" rx="2" fill="' + color + '">';
230 svg += '<title>' + day.date + ': ' + day.count + ' commit' + (day.count !== 1 ? 's' : '') + '</title>';
231 svg += '</rect>';
232 }
233
234 svg += '</svg>';
235 document.getElementById('heatmap').innerHTML = svg;
236 })();
237 </script>
238 {% endif %}
239 {% endblock %}
240
241 ADDED tests/test_dashboard_heatmap.py
--- a/tests/test_dashboard_heatmap.py
+++ b/tests/test_dashboard_heatmap.py
@@ -0,0 +1,233 @@
1
+"""Tests for the dashboard activity heatmap."""
2
+
3
+import json
4
+import sqlite3
5
+from datetime impo
6
+from pathlib import Path
7
+
8
+import pytest
9
+from django.test import Client
10
+
11
+from fossil.models import FossilRepository
12
+from fossil.reader import FossilReader
13
+
14
+
15
+def _create_test_fossil_db(path: Path, checkin_days_ago: list[int] | None = None):
16
+ """Create a minimal .fossil SQLite database with event data for testing.
17
+
18
+ Args:
19
+ path: Where to write the .fossil file.
20
+ checkin_days_ago: List of integers representing days ago for each checkin.
21
+ Multiple entries for the same day create multiple checkins on that day.
22
+
23
+ Note: Uses SQLite's julianday('now') for the reference point so that the
24
+ date(mtime - 0.5) conversion in reader.py queries produces consistent dates.
25
+ Python datetime vs SQLite julianday can differ by fractions of a second,
26
+ which at day boundaries shifts the resulting date.
27
+ """
28
+ conn = sqlite3.connect(str(path))
29
+ conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)")
30
+ conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')")
31
+ conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')")
32
+ conn.execute(
33
+ """CREATE TABLE IF NOT EXISTS event (
34
+ type TEXT, mtime REAL, objid INTEGER, tagid INTEGER,
35
+ uid INTEGER, bgcolor TEXT, euser TEXT, user TEXT,
36
+ ecomment TEXT, comment TEXT, brief TEXT,
37
+ omtime REAL
38
+ )"""
39
+ )
40
+ conn.execute(
41
+ """CREATE TABLE IF NOT EXISTS blob (
42
+ rid INTEGER PRIMARY KEY, rcvid INTEGER, size INTEGER,
43
+ uuid TEXT UNIQUE, content BLOB
44
+ )"""
45
+ )
46
+ conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)")
47
+ conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)")
48
+
49
+ if checkin_days_ago:
50
+ # Use SQLite's own julianday('now') so the reference point matches
51
+ # what the reader.py queries will use for date calculations.
52
+ now_julian = conn.execute("SELECT julianday('now')").fetchone()[0]
53
+ for i, days in enumerate(checkin_days_ago):
54
+ mtime = now_julian - days
55
+ conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}"))
56
+ conn.execute(
57
+ "INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')",
58
+ (mtime, i + 1),
59
+ )
60
+
61
+ conn.commit()
62
+ conn.close()
63
+
64
+
65
class TestFossilReaderDailyActivity:
    """Unit tests for FossilReader.get_daily_commit_activity()."""

    def _activity(self, db_path, days=365):
        # Open the repo, pull the daily activity, and close the reader.
        with FossilReader(db_path) as reader:
            return reader.get_daily_commit_activity(days=days)

    def test_returns_empty_for_no_checkins(self, tmp_path):
        repo = tmp_path / "empty.fossil"
        _create_test_fossil_db(repo, checkin_days_ago=[])
        assert self._activity(repo) == []

    def test_returns_daily_counts(self, tmp_path):
        # Three checkins five days back plus one checkin ten days back.
        repo = tmp_path / "active.fossil"
        _create_test_fossil_db(repo, checkin_days_ago=[5, 5, 5, 10])
        by_date = {row["date"]: row["count"] for row in self._activity(repo)}
        # Exactly two distinct days, with per-day totals of 3 and 1.
        assert len(by_date) == 2
        assert sorted(by_date.values()) == [1, 3]

    def test_excludes_old_data_outside_window(self, tmp_path):
        repo = tmp_path / "old.fossil"
        _create_test_fossil_db(repo, checkin_days_ago=[10, 400])
        # The 400-day-old checkin falls outside the default 365-day window.
        assert len(self._activity(repo)) == 1

    def test_custom_day_window(self, tmp_path):
        repo = tmp_path / "window.fossil"
        _create_test_fossil_db(repo, checkin_days_ago=[5, 20, 40])
        # With a 30-day window only the 5- and 20-day-old checkins remain.
        assert len(self._activity(repo, days=30)) == 2

    def test_results_sorted_by_date(self, tmp_path):
        repo = tmp_path / "sorted.fossil"
        _create_test_fossil_db(repo, checkin_days_ago=[30, 10, 20, 5])
        dates = [row["date"] for row in self._activity(repo)]
        assert dates == sorted(dates)

    def test_handles_missing_event_table(self, tmp_path):
        # A repo file lacking the event table must fail soft, not raise.
        repo = tmp_path / "broken.fossil"
        conn = sqlite3.connect(str(repo))
        conn.execute("CREATE TABLE config (name TEXT, value TEXT)")
        conn.commit()
        conn.close()
        assert self._activity(repo) == []
129
+
130
+
131
+@pytest.mark.django_db
132
+class TestDashboardHeatmapView:
133
+ """Tests for the heatmap data in the dashboard view."""
134
+
135
+ def test_unauthenticated_redirects_to_login(self):
136
+ client = Client()
137
+ response = client.get("/dashboard/")
138
+ assert response.status_code == 302
139
+ assert "/auth/login/" in response.url
140
+
141
+ def test_dashboard_returns_heatmap_json(self, admin_client):
142
+ response = admin_client.get("/dashboard/")
143
+ assert response.status_code == 200
144
+ assert "heatmap_json" in response.context
145
+
146
+ # With no repos on disk, heatmap should be an empty JSON array
147
+ heatmap = json.loads(response.context["heatmap_json"])
148
+ assert isinstance(heatmap, list)
149
+
150
+ def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
151
+ """Create two repos with overlapping daily activity and verify aggregation.
152
+
153
+ Uses days well in the past (5 and 10) to avoid date-boundary issues
154
+ caused by Fossil's Julian-day-to-date conversion (date(mtime - 0.5)).
155
+ """
156
+ from constance import config
157
+
158
+ from organization.models import Organization
159
+ from projects.models import Project
160
+
161
+ # Use the auto-created repo from the signal (Project post_save creates a FossilRepository)
162
+ repo1 = FossilRepository.objects.get(project=sample_project)
163
+ repo1.filename = "repo1.fossil"
164
+ repo1.save(update_fields=["filename", "updated_at", "version"])
165
+
166
+ # Need a second project for the second repo (OneToOne constraint)
167
+ org = Organization.objects.first()
168
+ project2 = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
169
+ repo2 = FossilRepository.objects.get(project=project2)
170
+ repo2.filename = "repo2.fossil"
171
+ repo2.save(update_fields=["filename", "updated_at", "version"])
172
+
173
+ # Create .fossil files at the paths full_path resolves to (FOSSIL_DATA_DIR/filename)
174
+ original_dir = config.FOSSIL_DATA_DIR
175
+ config.FOSSIL_DATA_DIR = str(tmp_path)
176
+ try:
177
+ _create_test_fossil_db(tmp_path / "repo1.fossil", checkin_days_ago=[5, 5, 10]) # 2 at day-5, 1 at day-10
178
+ _create_test_fossil_db(tmp_path / "repo2.fossil", checkin_days_ago=[5, 10, 10]) # 1 at day-5, 2 at day-10
179
+
180
+ response = admin_client.get("/dashboard/")
181
+ finally:
182
+ config.FOSSIL_DATA_DIR = original_dir
183
+
184
+ assert response.status_code == 200
185
+ heatmap = json.loads(response.context["heatmap_json"])
186
+ counts_by_date = {entry["date"]: entry["count"] for entry in heatmap}
187
+
188
+ # Aggregated: 3 at day-5, 3 at day-10 = 6 total across 2 dates
189
+ assert len(counts_by_date) == 2
190
+ assert sum(counts_by_date.values()) == 6
191
+ # Each date should have exactly 3 commits (2+1 and 1+2)
192
+ for count in counts_by_date.values():
193
+ assert count == 3
194
+
195
+ def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
196
+ from constance import config
197
+
198
+ # Use the auto-created repo from the signal
199
+ repo = FossilRepository.objects.get(project=sample_project)
200
+
201
+ original_dir = config.FOSSIL_DATA_DIR
202
+ config.FOSSIL_DATA_DIR = str(tmp_path)
203
+ try:
204
+ _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[30, 5, 20, 10])
205
+ response = admin_client.get("/dashboard/")
206
+ finally:
207
+ config.FOSSIL_DATA_DIR = original_dir
208
+
209
+ heatmap = json.loads(response.context["heatmap_json"])
210
+ dates = [entry["date"] for entry in heatmap]
211
+ assert dates == sorted(dates)
212
+
213
+ def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
214
+ """Repos where the file doesn't exist on disk should be silently skipped."""
215
+ # The signal already created a FossilRepository -- just update the filename
216
+ repo = FossilRepository.objects.get(project=sample_project)
217
+ repo.filename = "nonexistent.fossil"
218
+ repo.save(update_fields=["filename", "updated_at", "version"])
219
+
220
+ response = admin_client.get("/dashboard/")
221
+ assert response.status_code == 200
222
+ heatmap = json.loads(response.context["heatmap_json"])
223
+ assert heatmap == []
224
+
225
+ def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
226
+ """When heatmap data exists, the template should include the heatmap div."""
227
+ from constance import config
228
+
229
+ # Use the auto-created repo from the signal
230
+ repo = FossilRepository.objects.get(project=sample_project)
231
+
232
+ original_dir = config.FOSSIL_DATA_DIR
233
+ config.FOSSIL_DATA_DIR = str(tmp_pat
--- a/tests/test_dashboard_heatmap.py
+++ b/tests/test_dashboard_heatmap.py
@@ -0,0 +1,233 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_dashboard_heatmap.py
+++ b/tests/test_dashboard_heatmap.py
@@ -0,0 +1,233 @@
1 """Tests for the dashboard activity heatmap."""
2
3 import json
4 import sqlite3
5 from datetime import datetime  # NOTE(review): this line is truncated in the capture ("from datetime impo…"); confirm the original import list against the repository
6 from pathlib import Path
7
8 import pytest
9 from django.test import Client
10
11 from fossil.models import FossilRepository
12 from fossil.reader import FossilReader
13
14
15 def _create_test_fossil_db(path: Path, checkin_days_ago: list[int] | None = None):
16 """Create a minimal .fossil SQLite database with event data for testing.
17
18 Args:
19 path: Where to write the .fossil file.
20 checkin_days_ago: List of integers representing days ago for each checkin.
21 Multiple entries for the same day create multiple checkins on that day.
22
23 Note: Uses SQLite's julianday('now') for the reference point so that the
24 date(mtime - 0.5) conversion in reader.py queries produces consistent dates.
25 Python datetime vs SQLite julianday can differ by fractions of a second,
26 which at day boundaries shifts the resulting date.
27 """
28 conn = sqlite3.connect(str(path))
29 conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)")
30 conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')")
31 conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')")
32 conn.execute(
33 """CREATE TABLE IF NOT EXISTS event (
34 type TEXT, mtime REAL, objid INTEGER, tagid INTEGER,
35 uid INTEGER, bgcolor TEXT, euser TEXT, user TEXT,
36 ecomment TEXT, comment TEXT, brief TEXT,
37 omtime REAL
38 )"""
39 )
40 conn.execute(
41 """CREATE TABLE IF NOT EXISTS blob (
42 rid INTEGER PRIMARY KEY, rcvid INTEGER, size INTEGER,
43 uuid TEXT UNIQUE, content BLOB
44 )"""
45 )
46 conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)")
47 conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)")
48
49 if checkin_days_ago:
50 # Use SQLite's own julianday('now') so the reference point matches
51 # what the reader.py queries will use for date calculations.
52 now_julian = conn.execute("SELECT julianday('now')").fetchone()[0]
53 for i, days in enumerate(checkin_days_ago):
54 mtime = now_julian - days
55 conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}"))
56 conn.execute(
57 "INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')",
58 (mtime, i + 1),
59 )
60
61 conn.commit()
62 conn.close()
63
64
class TestFossilReaderDailyActivity:
    """Unit tests for FossilReader.get_daily_commit_activity()."""

    def test_returns_empty_for_no_checkins(self, tmp_path):
        repo_file = tmp_path / "empty.fossil"
        _create_test_fossil_db(repo_file, checkin_days_ago=[])
        with FossilReader(repo_file) as reader:
            assert reader.get_daily_commit_activity(days=365) == []

    def test_returns_daily_counts(self, tmp_path):
        # Three checkins 5 days ago plus a single checkin 10 days ago.
        repo_file = tmp_path / "active.fossil"
        _create_test_fossil_db(repo_file, checkin_days_ago=[5, 5, 5, 10])
        with FossilReader(repo_file) as reader:
            rows = reader.get_daily_commit_activity(days=365)

        by_date = {row["date"]: row["count"] for row in rows}

        # Exactly two distinct dates, one with 3 commits and one with 1.
        assert len(by_date) == 2
        assert sorted(by_date.values()) == [1, 3]

    def test_excludes_old_data_outside_window(self, tmp_path):
        # 10 days ago falls inside the 365-day window; 400 days ago does not.
        repo_file = tmp_path / "old.fossil"
        _create_test_fossil_db(repo_file, checkin_days_ago=[10, 400])
        with FossilReader(repo_file) as reader:
            rows = reader.get_daily_commit_activity(days=365)

        returned_dates = [row["date"] for row in rows]
        assert len(returned_dates) == 1  # only the 10-day-ago entry survives

    def test_custom_day_window(self, tmp_path):
        # With a 30-day window, only the 5- and 20-day-old checkins remain.
        repo_file = tmp_path / "window.fossil"
        _create_test_fossil_db(repo_file, checkin_days_ago=[5, 20, 40])
        with FossilReader(repo_file) as reader:
            rows = reader.get_daily_commit_activity(days=30)

        returned_dates = [row["date"] for row in rows]
        assert len(returned_dates) == 2  # 40 days ago is outside the window

    def test_results_sorted_by_date(self, tmp_path):
        repo_file = tmp_path / "sorted.fossil"
        _create_test_fossil_db(repo_file, checkin_days_ago=[30, 10, 20, 5])
        with FossilReader(repo_file) as reader:
            rows = reader.get_daily_commit_activity(days=365)

        returned_dates = [row["date"] for row in rows]
        assert returned_dates == sorted(returned_dates)

    def test_handles_missing_event_table(self, tmp_path):
        # A .fossil file that lacks the event table entirely must not raise.
        repo_file = tmp_path / "broken.fossil"
        conn = sqlite3.connect(str(repo_file))
        conn.execute("CREATE TABLE config (name TEXT, value TEXT)")
        conn.commit()
        conn.close()

        with FossilReader(repo_file) as reader:
            assert reader.get_daily_commit_activity(days=365) == []
129
130
131 @pytest.mark.django_db
132 class TestDashboardHeatmapView:
133 """Tests for the heatmap data in the dashboard view."""
134
135 def test_unauthenticated_redirects_to_login(self):
136 client = Client()
137 response = client.get("/dashboard/")
138 assert response.status_code == 302
139 assert "/auth/login/" in response.url
140
141 def test_dashboard_returns_heatmap_json(self, admin_client):
142 response = admin_client.get("/dashboard/")
143 assert response.status_code == 200
144 assert "heatmap_json" in response.context
145
146 # With no repos on disk, heatmap should be an empty JSON array
147 heatmap = json.loads(response.context["heatmap_json"])
148 assert isinstance(heatmap, list)
149
150 def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
151 """Create two repos with overlapping daily activity and verify aggregation.
152
153 Uses days well in the past (5 and 10) to avoid date-boundary issues
154 caused by Fossil's Julian-day-to-date conversion (date(mtime - 0.5)).
155 """
156 from constance import config
157
158 from organization.models import Organization
159 from projects.models import Project
160
161 # Use the auto-created repo from the signal (Project post_save creates a FossilRepository)
162 repo1 = FossilRepository.objects.get(project=sample_project)
163 repo1.filename = "repo1.fossil"
164 repo1.save(update_fields=["filename", "updated_at", "version"])
165
166 # Need a second project for the second repo (OneToOne constraint)
167 org = Organization.objects.first()
168 project2 = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
169 repo2 = FossilRepository.objects.get(project=project2)
170 repo2.filename = "repo2.fossil"
171 repo2.save(update_fields=["filename", "updated_at", "version"])
172
173 # Create .fossil files at the paths full_path resolves to (FOSSIL_DATA_DIR/filename)
174 original_dir = config.FOSSIL_DATA_DIR
175 config.FOSSIL_DATA_DIR = str(tmp_path)
176 try:
177 _create_test_fossil_db(tmp_path / "repo1.fossil", checkin_days_ago=[5, 5, 10]) # 2 at day-5, 1 at day-10
178 _create_test_fossil_db(tmp_path / "repo2.fossil", checkin_days_ago=[5, 10, 10]) # 1 at day-5, 2 at day-10
179
180 response = admin_client.get("/dashboard/")
181 finally:
182 config.FOSSIL_DATA_DIR = original_dir
183
184 assert response.status_code == 200
185 heatmap = json.loads(response.context["heatmap_json"])
186 counts_by_date = {entry["date"]: entry["count"] for entry in heatmap}
187
188 # Aggregated: 3 at day-5, 3 at day-10 = 6 total across 2 dates
189 assert len(counts_by_date) == 2
190 assert sum(counts_by_date.values()) == 6
191 # Each date should have exactly 3 commits (2+1 and 1+2)
192 for count in counts_by_date.values():
193 assert count == 3
194
195 def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
196 from constance import config
197
198 # Use the auto-created repo from the signal
199 repo = FossilRepository.objects.get(project=sample_project)
200
201 original_dir = config.FOSSIL_DATA_DIR
202 config.FOSSIL_DATA_DIR = str(tmp_path)
203 try:
204 _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[30, 5, 20, 10])
205 response = admin_client.get("/dashboard/")
206 finally:
207 config.FOSSIL_DATA_DIR = original_dir
208
209 heatmap = json.loads(response.context["heatmap_json"])
210 dates = [entry["date"] for entry in heatmap]
211 assert dates == sorted(dates)
212
213 def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
214 """Repos where the file doesn't exist on disk should be silently skipped."""
215 # The signal already created a FossilRepository -- just update the filename
216 repo = FossilRepository.objects.get(project=sample_project)
217 repo.filename = "nonexistent.fossil"
218 repo.save(update_fields=["filename", "updated_at", "version"])
219
220 response = admin_client.get("/dashboard/")
221 assert response.status_code == 200
222 heatmap = json.loads(response.context["heatmap_json"])
223 assert heatmap == []
224
225 def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
226 """When heatmap data exists, the template should include the heatmap div."""
227 from constance import config
228
229 # Use the auto-created repo from the signal
230 repo = FossilRepository.objects.get(project=sample_project)
231
232 original_dir = config.FOSSIL_DATA_DIR
233 config.FOSSIL_DATA_DIR = str(tmp_pat

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button