FossilRepo

fossilrepo / tests / test_dashboard_heatmap.py
Blame History Raw 244 lines
1
"""Tests for the dashboard activity heatmap."""
2
3
import json
4
import sqlite3
5
from pathlib import Path
6
7
import pytest
8
from django.test import Client
9
10
from fossil.models import FossilRepository
11
from fossil.reader import FossilReader
12
13
14
def _create_test_fossil_db(path: Path, checkin_days_ago: list[int] | None = None):
15
"""Create a minimal .fossil SQLite database with event data for testing.
16
17
Args:
18
path: Where to write the .fossil file.
19
checkin_days_ago: List of integers representing days ago for each checkin.
20
Multiple entries for the same day create multiple checkins on that day.
21
22
Note: Uses SQLite's julianday('now') for the reference point so that the
23
date(mtime - 0.5) conversion in reader.py queries produces consistent dates.
24
Python datetime vs SQLite julianday can differ by fractions of a second,
25
which at day boundaries shifts the resulting date.
26
"""
27
conn = sqlite3.connect(str(path))
28
conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)")
29
conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')")
30
conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')")
31
conn.execute(
32
"""CREATE TABLE IF NOT EXISTS event (
33
type TEXT, mtime REAL, objid INTEGER, tagid INTEGER,
34
uid INTEGER, bgcolor TEXT, euser TEXT, user TEXT,
35
ecomment TEXT, comment TEXT, brief TEXT,
36
omtime REAL
37
)"""
38
)
39
conn.execute(
40
"""CREATE TABLE IF NOT EXISTS blob (
41
rid INTEGER PRIMARY KEY, rcvid INTEGER, size INTEGER,
42
uuid TEXT UNIQUE, content BLOB
43
)"""
44
)
45
conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)")
46
conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)")
47
48
if checkin_days_ago:
49
# Use SQLite's own julianday('now') so the reference point matches
50
# what the reader.py queries will use for date calculations.
51
now_julian = conn.execute("SELECT julianday('now')").fetchone()[0]
52
for i, days in enumerate(checkin_days_ago):
53
mtime = now_julian - days
54
conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}"))
55
conn.execute(
56
"INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')",
57
(mtime, i + 1),
58
)
59
60
conn.commit()
61
conn.close()
62
63
64
class TestFossilReaderDailyActivity:
    """Exercise FossilReader.get_daily_commit_activity() against fixture databases."""

    def test_returns_empty_for_no_checkins(self, tmp_path):
        fossil_path = tmp_path / "empty.fossil"
        _create_test_fossil_db(fossil_path, checkin_days_ago=[])
        with FossilReader(fossil_path) as reader:
            result = reader.get_daily_commit_activity(days=365)
        assert result == []

    def test_returns_daily_counts(self, tmp_path):
        # Three checkins 5 days ago plus a single one 10 days ago.
        fossil_path = tmp_path / "active.fossil"
        _create_test_fossil_db(fossil_path, checkin_days_ago=[5, 5, 5, 10])
        with FossilReader(fossil_path) as reader:
            result = reader.get_daily_commit_activity(days=365)

        per_day = {row["date"]: row["count"] for row in result}

        # Two distinct dates, with counts 3 and 1.
        assert len(per_day) == 2
        assert sorted(per_day.values()) == [1, 3]

    def test_excludes_old_data_outside_window(self, tmp_path):
        # One checkin inside the 365-day window (10 days ago), one outside (400).
        fossil_path = tmp_path / "old.fossil"
        _create_test_fossil_db(fossil_path, checkin_days_ago=[10, 400])
        with FossilReader(fossil_path) as reader:
            result = reader.get_daily_commit_activity(days=365)

        # Only the 10-day-ago entry survives the window filter.
        assert len([row["date"] for row in result]) == 1

    def test_custom_day_window(self, tmp_path):
        # Checkins at 5, 20, and 40 days ago, queried with a 30-day window.
        fossil_path = tmp_path / "window.fossil"
        _create_test_fossil_db(fossil_path, checkin_days_ago=[5, 20, 40])
        with FossilReader(fossil_path) as reader:
            result = reader.get_daily_commit_activity(days=30)

        # Day-5 and day-20 fall inside the window; day-40 does not.
        assert len([row["date"] for row in result]) == 2

    def test_results_sorted_by_date(self, tmp_path):
        fossil_path = tmp_path / "sorted.fossil"
        _create_test_fossil_db(fossil_path, checkin_days_ago=[30, 10, 20, 5])
        with FossilReader(fossil_path) as reader:
            result = reader.get_daily_commit_activity(days=365)

        dates = [row["date"] for row in result]
        assert dates == sorted(dates)

    def test_handles_missing_event_table(self, tmp_path):
        # Simulate a corrupt/partial .fossil file: config table only, no event table.
        fossil_path = tmp_path / "broken.fossil"
        conn = sqlite3.connect(str(fossil_path))
        conn.execute("CREATE TABLE config (name TEXT, value TEXT)")
        conn.commit()
        conn.close()

        with FossilReader(fossil_path) as reader:
            result = reader.get_daily_commit_activity(days=365)
        assert result == []
128
129
130
@pytest.mark.django_db
class TestDashboardHeatmapView:
    """Verify the heatmap payload and rendering of the dashboard view."""

    def test_unauthenticated_redirects_to_login(self):
        anonymous = Client()
        response = anonymous.get("/dashboard/")
        assert response.status_code == 302
        assert "/auth/login/" in response.url

    def test_dashboard_returns_heatmap_json(self, admin_client):
        response = admin_client.get("/dashboard/")
        assert response.status_code == 200
        assert "heatmap_json" in response.context

        # No repository files exist on disk, so the payload is an empty JSON array.
        payload = json.loads(response.context["heatmap_json"])
        assert isinstance(payload, list)

    def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
        """Create two repos with overlapping daily activity and verify aggregation.

        Uses days well in the past (5 and 10) to avoid date-boundary issues
        caused by Fossil's Julian-day-to-date conversion (date(mtime - 0.5)).
        """
        from constance import config

        from organization.models import Organization
        from projects.models import Project

        # Reuse the repo the Project post_save signal created for sample_project.
        first_repo = FossilRepository.objects.get(project=sample_project)
        first_repo.filename = "repo1.fossil"
        first_repo.save(update_fields=["filename", "updated_at", "version"])

        # The OneToOne constraint means a second repo needs its own project.
        org = Organization.objects.first()
        second_project = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
        second_repo = FossilRepository.objects.get(project=second_project)
        second_repo.filename = "repo2.fossil"
        second_repo.save(update_fields=["filename", "updated_at", "version"])

        # Point FOSSIL_DATA_DIR at tmp_path so full_path resolves to our fixtures.
        saved_data_dir = config.FOSSIL_DATA_DIR
        config.FOSSIL_DATA_DIR = str(tmp_path)
        try:
            _create_test_fossil_db(tmp_path / "repo1.fossil", checkin_days_ago=[5, 5, 10])  # 2 at day-5, 1 at day-10
            _create_test_fossil_db(tmp_path / "repo2.fossil", checkin_days_ago=[5, 10, 10])  # 1 at day-5, 2 at day-10

            response = admin_client.get("/dashboard/")
        finally:
            config.FOSSIL_DATA_DIR = saved_data_dir

        assert response.status_code == 200
        payload = json.loads(response.context["heatmap_json"])
        per_day = {row["date"]: row["count"] for row in payload}

        # Aggregated across both repos: 6 commits spread over exactly 2 dates.
        assert len(per_day) == 2
        assert sum(per_day.values()) == 6
        # Each date totals 3 commits (2+1 on day-5, 1+2 on day-10).
        assert all(count == 3 for count in per_day.values())

    def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
        from constance import config

        # Reuse the repo the signal auto-created for sample_project.
        repo = FossilRepository.objects.get(project=sample_project)

        saved_data_dir = config.FOSSIL_DATA_DIR
        config.FOSSIL_DATA_DIR = str(tmp_path)
        try:
            _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[30, 5, 20, 10])
            response = admin_client.get("/dashboard/")
        finally:
            config.FOSSIL_DATA_DIR = saved_data_dir

        payload = json.loads(response.context["heatmap_json"])
        dates = [row["date"] for row in payload]
        assert dates == sorted(dates)

    def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
        """Repos where the file doesn't exist on disk should be silently skipped."""
        # The signal already created a FossilRepository; just point it at a
        # filename that does not exist on disk.
        repo = FossilRepository.objects.get(project=sample_project)
        repo.filename = "nonexistent.fossil"
        repo.save(update_fields=["filename", "updated_at", "version"])

        response = admin_client.get("/dashboard/")
        assert response.status_code == 200
        assert json.loads(response.context["heatmap_json"]) == []

    def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
        """When heatmap data exists, the template should include the heatmap div."""
        from constance import config

        # Reuse the repo the signal auto-created for sample_project.
        repo = FossilRepository.objects.get(project=sample_project)

        saved_data_dir = config.FOSSIL_DATA_DIR
        config.FOSSIL_DATA_DIR = str(tmp_path)
        try:
            _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[5, 10, 15])
            response = admin_client.get("/dashboard/")
        finally:
            config.FOSSIL_DATA_DIR = saved_data_dir

        content = response.content.decode()
        assert 'id="heatmap"' in content
        assert "Activity (last year)" in content
        assert "Less" in content
        assert "More" in content
244

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button