|
c588255…
|
ragelink
|
1 |
"""Tests for the dashboard activity heatmap.""" |
|
c588255…
|
ragelink
|
2 |
|
|
c588255…
|
ragelink
|
3 |
import json |
|
c588255…
|
ragelink
|
4 |
import sqlite3 |
|
c588255…
|
ragelink
|
5 |
from pathlib import Path |
|
c588255…
|
ragelink
|
6 |
|
|
c588255…
|
ragelink
|
7 |
import pytest |
|
c588255…
|
ragelink
|
8 |
from django.test import Client |
|
c588255…
|
ragelink
|
9 |
|
|
c588255…
|
ragelink
|
10 |
from fossil.models import FossilRepository |
|
c588255…
|
ragelink
|
11 |
from fossil.reader import FossilReader |
|
c588255…
|
ragelink
|
12 |
|
|
c588255…
|
ragelink
|
13 |
|
|
c588255…
|
ragelink
|
14 |
def _create_test_fossil_db(path: Path, checkin_days_ago: list[int] | None = None): |
|
c588255…
|
ragelink
|
15 |
"""Create a minimal .fossil SQLite database with event data for testing. |
|
c588255…
|
ragelink
|
16 |
|
|
c588255…
|
ragelink
|
17 |
Args: |
|
c588255…
|
ragelink
|
18 |
path: Where to write the .fossil file. |
|
c588255…
|
ragelink
|
19 |
checkin_days_ago: List of integers representing days ago for each checkin. |
|
c588255…
|
ragelink
|
20 |
Multiple entries for the same day create multiple checkins on that day. |
|
c588255…
|
ragelink
|
21 |
|
|
c588255…
|
ragelink
|
22 |
Note: Uses SQLite's julianday('now') for the reference point so that the |
|
c588255…
|
ragelink
|
23 |
date(mtime - 0.5) conversion in reader.py queries produces consistent dates. |
|
c588255…
|
ragelink
|
24 |
Python datetime vs SQLite julianday can differ by fractions of a second, |
|
c588255…
|
ragelink
|
25 |
which at day boundaries shifts the resulting date. |
|
c588255…
|
ragelink
|
26 |
""" |
|
c588255…
|
ragelink
|
27 |
conn = sqlite3.connect(str(path)) |
|
c588255…
|
ragelink
|
28 |
conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)") |
|
c588255…
|
ragelink
|
29 |
conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')") |
|
c588255…
|
ragelink
|
30 |
conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')") |
|
c588255…
|
ragelink
|
31 |
conn.execute( |
|
c588255…
|
ragelink
|
32 |
"""CREATE TABLE IF NOT EXISTS event ( |
|
c588255…
|
ragelink
|
33 |
type TEXT, mtime REAL, objid INTEGER, tagid INTEGER, |
|
c588255…
|
ragelink
|
34 |
uid INTEGER, bgcolor TEXT, euser TEXT, user TEXT, |
|
c588255…
|
ragelink
|
35 |
ecomment TEXT, comment TEXT, brief TEXT, |
|
c588255…
|
ragelink
|
36 |
omtime REAL |
|
c588255…
|
ragelink
|
37 |
)""" |
|
c588255…
|
ragelink
|
38 |
) |
|
c588255…
|
ragelink
|
39 |
conn.execute( |
|
c588255…
|
ragelink
|
40 |
"""CREATE TABLE IF NOT EXISTS blob ( |
|
c588255…
|
ragelink
|
41 |
rid INTEGER PRIMARY KEY, rcvid INTEGER, size INTEGER, |
|
c588255…
|
ragelink
|
42 |
uuid TEXT UNIQUE, content BLOB |
|
c588255…
|
ragelink
|
43 |
)""" |
|
c588255…
|
ragelink
|
44 |
) |
|
c588255…
|
ragelink
|
45 |
conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)") |
|
c588255…
|
ragelink
|
46 |
conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)") |
|
c588255…
|
ragelink
|
47 |
|
|
c588255…
|
ragelink
|
48 |
if checkin_days_ago: |
|
c588255…
|
ragelink
|
49 |
# Use SQLite's own julianday('now') so the reference point matches |
|
c588255…
|
ragelink
|
50 |
# what the reader.py queries will use for date calculations. |
|
c588255…
|
ragelink
|
51 |
now_julian = conn.execute("SELECT julianday('now')").fetchone()[0] |
|
c588255…
|
ragelink
|
52 |
for i, days in enumerate(checkin_days_ago): |
|
c588255…
|
ragelink
|
53 |
mtime = now_julian - days |
|
c588255…
|
ragelink
|
54 |
conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}")) |
|
c588255…
|
ragelink
|
55 |
conn.execute( |
|
c588255…
|
ragelink
|
56 |
"INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')", |
|
c588255…
|
ragelink
|
57 |
(mtime, i + 1), |
|
c588255…
|
ragelink
|
58 |
) |
|
c588255…
|
ragelink
|
59 |
|
|
c588255…
|
ragelink
|
60 |
conn.commit() |
|
c588255…
|
ragelink
|
61 |
conn.close() |
|
c588255…
|
ragelink
|
62 |
|
|
c588255…
|
ragelink
|
63 |
|
|
c588255…
|
ragelink
|
64 |
class TestFossilReaderDailyActivity:
    """Tests for FossilReader.get_daily_commit_activity()."""

    def test_returns_empty_for_no_checkins(self, tmp_path):
        fossil_file = tmp_path / "empty.fossil"
        _create_test_fossil_db(fossil_file, checkin_days_ago=[])
        with FossilReader(fossil_file) as reader:
            activity = reader.get_daily_commit_activity(days=365)
        assert activity == []

    def test_returns_daily_counts(self, tmp_path):
        # Three checkins 5 days ago plus a single one 10 days ago.
        fossil_file = tmp_path / "active.fossil"
        _create_test_fossil_db(fossil_file, checkin_days_ago=[5, 5, 5, 10])
        with FossilReader(fossil_file) as reader:
            activity = reader.get_daily_commit_activity(days=365)

        per_day = {}
        for row in activity:
            per_day[row["date"]] = row["count"]

        # Expect exactly two distinct dates, carrying counts of 3 and 1.
        assert len(per_day) == 2
        assert sorted(per_day.values()) == [1, 3]

    def test_excludes_old_data_outside_window(self, tmp_path):
        # One checkin 10 days ago and one far outside the window (400 days).
        fossil_file = tmp_path / "old.fossil"
        _create_test_fossil_db(fossil_file, checkin_days_ago=[10, 400])
        with FossilReader(fossil_file) as reader:
            activity = reader.get_daily_commit_activity(days=365)

        # Only the 10-day-old entry survives the 365-day cutoff.
        day_labels = [row["date"] for row in activity]
        assert len(day_labels) == 1

    def test_custom_day_window(self, tmp_path):
        # Checkins at 5, 20, and 40 days ago, filtered through a 30-day window.
        fossil_file = tmp_path / "window.fossil"
        _create_test_fossil_db(fossil_file, checkin_days_ago=[5, 20, 40])
        with FossilReader(fossil_file) as reader:
            activity = reader.get_daily_commit_activity(days=30)

        # Day 5 and day 20 are inside the window; day 40 falls outside.
        day_labels = [row["date"] for row in activity]
        assert len(day_labels) == 2

    def test_results_sorted_by_date(self, tmp_path):
        fossil_file = tmp_path / "sorted.fossil"
        _create_test_fossil_db(fossil_file, checkin_days_ago=[30, 10, 20, 5])
        with FossilReader(fossil_file) as reader:
            activity = reader.get_daily_commit_activity(days=365)

        day_labels = [row["date"] for row in activity]
        assert day_labels == sorted(day_labels)

    def test_handles_missing_event_table(self, tmp_path):
        # A .fossil file that lacks the event table entirely.
        fossil_file = tmp_path / "broken.fossil"
        conn = sqlite3.connect(str(fossil_file))
        conn.execute("CREATE TABLE config (name TEXT, value TEXT)")
        conn.commit()
        conn.close()

        with FossilReader(fossil_file) as reader:
            activity = reader.get_daily_commit_activity(days=365)
        assert activity == []
|
c588255…
|
ragelink
|
128 |
|
|
c588255…
|
ragelink
|
129 |
|
|
c588255…
|
ragelink
|
130 |
@pytest.mark.django_db
class TestDashboardHeatmapView:
    """Tests for the heatmap data in the dashboard view."""

    def test_unauthenticated_redirects_to_login(self):
        anonymous = Client()
        resp = anonymous.get("/dashboard/")
        assert resp.status_code == 302
        assert "/auth/login/" in resp.url

    def test_dashboard_returns_heatmap_json(self, admin_client):
        resp = admin_client.get("/dashboard/")
        assert resp.status_code == 200
        assert "heatmap_json" in resp.context

        # No repo files exist on disk, so the payload decodes to a JSON list.
        payload = json.loads(resp.context["heatmap_json"])
        assert isinstance(payload, list)

    def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
        """Create two repos with overlapping daily activity and verify aggregation.

        Uses days well in the past (5 and 10) to avoid date-boundary issues
        caused by Fossil's Julian-day-to-date conversion (date(mtime - 0.5)).
        """
        from constance import config

        from organization.models import Organization
        from projects.models import Project

        # The Project post_save signal auto-creates a FossilRepository; reuse it.
        first_repo = FossilRepository.objects.get(project=sample_project)
        first_repo.filename = "repo1.fossil"
        first_repo.save(update_fields=["filename", "updated_at", "version"])

        # The OneToOne constraint means the second repo needs its own project.
        org = Organization.objects.first()
        second_project = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
        second_repo = FossilRepository.objects.get(project=second_project)
        second_repo.filename = "repo2.fossil"
        second_repo.save(update_fields=["filename", "updated_at", "version"])

        # Point FOSSIL_DATA_DIR at tmp_path so full_path resolves to our files.
        saved_dir = config.FOSSIL_DATA_DIR
        config.FOSSIL_DATA_DIR = str(tmp_path)
        try:
            _create_test_fossil_db(tmp_path / "repo1.fossil", checkin_days_ago=[5, 5, 10])  # 2 at day-5, 1 at day-10
            _create_test_fossil_db(tmp_path / "repo2.fossil", checkin_days_ago=[5, 10, 10])  # 1 at day-5, 2 at day-10

            resp = admin_client.get("/dashboard/")
        finally:
            config.FOSSIL_DATA_DIR = saved_dir

        assert resp.status_code == 200
        payload = json.loads(resp.context["heatmap_json"])
        per_day = {row["date"]: row["count"] for row in payload}

        # Aggregated: 3 commits at day-5 and 3 at day-10 -- 6 total, 2 dates,
        # and each date sums to exactly 3 (2+1 and 1+2).
        assert len(per_day) == 2
        assert sum(per_day.values()) == 6
        assert all(count == 3 for count in per_day.values())

    def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
        from constance import config

        # Use the auto-created repo from the signal.
        repo = FossilRepository.objects.get(project=sample_project)

        saved_dir = config.FOSSIL_DATA_DIR
        config.FOSSIL_DATA_DIR = str(tmp_path)
        try:
            _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[30, 5, 20, 10])
            resp = admin_client.get("/dashboard/")
        finally:
            config.FOSSIL_DATA_DIR = saved_dir

        payload = json.loads(resp.context["heatmap_json"])
        day_labels = [row["date"] for row in payload]
        assert day_labels == sorted(day_labels)

    def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
        """Repos where the file doesn't exist on disk should be silently skipped."""
        # The signal already created a FossilRepository; only the name changes.
        repo = FossilRepository.objects.get(project=sample_project)
        repo.filename = "nonexistent.fossil"
        repo.save(update_fields=["filename", "updated_at", "version"])

        resp = admin_client.get("/dashboard/")
        assert resp.status_code == 200
        payload = json.loads(resp.context["heatmap_json"])
        assert payload == []

    def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
        """When heatmap data exists, the template should include the heatmap div."""
        from constance import config

        # Use the auto-created repo from the signal.
        repo = FossilRepository.objects.get(project=sample_project)

        saved_dir = config.FOSSIL_DATA_DIR
        config.FOSSIL_DATA_DIR = str(tmp_path)
        try:
            _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[5, 10, 15])
            resp = admin_client.get("/dashboard/")
        finally:
            config.FOSSIL_DATA_DIR = saved_dir

        page = resp.content.decode()
        assert 'id="heatmap"' in page
        assert "Activity (last year)" in page
        assert "Less" in page
        assert "More" in page