FossilRepo

Fix release archive: ignore tar file-changed warning

lmata 2026-04-07 17:04 trunk
Commit 1910b8d4247e6882304c5f5d932d5cd392120970481c2234331119a3030950c9
--- .github/workflows/publish.yaml
+++ .github/workflows/publish.yaml
@@ -63,19 +63,21 @@
 63  63  d = tomllib.load(f)
 64  64  print('\n'.join(d['project'].get('dependencies', [])))
 65  65  ") -d vendor/
 66  66
 67  67  # Create archives with source + install.sh + vendored deps
     68 +# Use --warning=no-file-changed to avoid exit code 1 on concurrent writes
 68  69  tar czf "fossilrepo-${VERSION}.tar.gz" \
     70 +--warning=no-file-changed \
 69  71  --transform "s,^,fossilrepo-${VERSION}/," \
 70  72  --exclude='.git' \
 71  73  --exclude='__pycache__' \
 72  74  --exclude='*.pyc' \
 73  75  --exclude='.ruff_cache' \
 74  76  --exclude='node_modules' \
 75  77  --exclude='assets' \
 76     -.
     78 +. || [[ $? -eq 1 ]]
 77  79
 78  80  # Zip version
 79  81  mkdir -p "/tmp/fossilrepo-${VERSION}"
 80  82  rsync -a --exclude='.git' --exclude='__pycache__' --exclude='*.pyc' \
 81  83  --exclude='.ruff_cache' --exclude='node_modules' --exclude='assets' \
 82  84
--- .github/workflows/publish.yaml
+++ .github/workflows/publish.yaml
@@ -63,19 +63,21 @@
63 d = tomllib.load(f)
64 print('\n'.join(d['project'].get('dependencies', [])))
65 ") -d vendor/
66
67 # Create archives with source + install.sh + vendored deps
 
68 tar czf "fossilrepo-${VERSION}.tar.gz" \
 
69 --transform "s,^,fossilrepo-${VERSION}/," \
70 --exclude='.git' \
71 --exclude='__pycache__' \
72 --exclude='*.pyc' \
73 --exclude='.ruff_cache' \
74 --exclude='node_modules' \
75 --exclude='assets' \
76 .
77
78 # Zip version
79 mkdir -p "/tmp/fossilrepo-${VERSION}"
80 rsync -a --exclude='.git' --exclude='__pycache__' --exclude='*.pyc' \
81 --exclude='.ruff_cache' --exclude='node_modules' --exclude='assets' \
82
--- .github/workflows/publish.yaml
+++ .github/workflows/publish.yaml
@@ -63,19 +63,21 @@
63 d = tomllib.load(f)
64 print('\n'.join(d['project'].get('dependencies', [])))
65 ") -d vendor/
66
67 # Create archives with source + install.sh + vendored deps
68 # Use --warning=no-file-changed to avoid exit code 1 on concurrent writes
69 tar czf "fossilrepo-${VERSION}.tar.gz" \
70 --warning=no-file-changed \
71 --transform "s,^,fossilrepo-${VERSION}/," \
72 --exclude='.git' \
73 --exclude='__pycache__' \
74 --exclude='*.pyc' \
75 --exclude='.ruff_cache' \
76 --exclude='node_modules' \
77 --exclude='assets' \
78 . || [[ $? -eq 1 ]]
79
80 # Zip version
81 mkdir -p "/tmp/fossilrepo-${VERSION}"
82 rsync -a --exclude='.git' --exclude='__pycache__' --exclude='*.pyc' \
83 --exclude='.ruff_cache' --exclude='node_modules' --exclude='assets' \
84
--- tests/test_dashboard_heatmap.py
+++ tests/test_dashboard_heatmap.py
@@ -1,12 +1,10 @@
  1   1  """Tests for the dashboard activity heatmap."""
  2   2
  3   3  import json
  4   4  import sqlite3
  5     -from datetime import UTC, datetime, timedelta
  6   5  from pathlib import Path
  7     -from unittest.mock import PropertyMock, patch
  8   6
  9   7  import pytest
 10   8  from django.test import Client
 11   9
 12  10  from fossil.models import FossilRepository
@@ -18,10 +16,15 @@
 18  16
 19  17  Args:
 20  18  path: Where to write the .fossil file.
 21  19  checkin_days_ago: List of integers representing days ago for each checkin.
 22  20  Multiple entries for the same day create multiple checkins on that day.
     21 +
     22 +Note: Uses SQLite's julianday('now') for the reference point so that the
     23 +date(mtime - 0.5) conversion in reader.py queries produces consistent dates.
     24 +Python datetime vs SQLite julianday can differ by fractions of a second,
     25 +which at day boundaries shifts the resulting date.
 23  26  """
 24  27  conn = sqlite3.connect(str(path))
 25  28  conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)")
 26  29  conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')")
 27  30  conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')")
@@ -41,11 +44,13 @@
 41  44  )
 42  45  conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)")
 43  46  conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)")
 44  47
 45  48  if checkin_days_ago:
 46     -now_julian = _datetime_to_julian(datetime.now(UTC))
     49 +# Use SQLite's own julianday('now') so the reference point matches
     50 +# what the reader.py queries will use for date calculations.
     51 +now_julian = conn.execute("SELECT julianday('now')").fetchone()[0]
 47  52  for i, days in enumerate(checkin_days_ago):
 48  53  mtime = now_julian - days
 49  54  conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}"))
 50  55  conn.execute(
 51  56  "INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')",
@@ -53,16 +58,10 @@
 53  58  )
 54  59
 55  60  conn.commit()
 56  61  conn.close()
 57  62
 58     -
 59     -def _datetime_to_julian(dt: datetime) -> float:
 60     -"""Convert a Python datetime to Julian day number."""
 61     -unix_ts = dt.timestamp()
 62     -return unix_ts / 86400.0 + 2440587.5
 63     -
 64  63
 65  64  class TestFossilReaderDailyActivity:
 66  65  """Tests for FossilReader.get_daily_commit_activity()."""
 67  66
 68  67  def test_returns_empty_for_no_checkins(self, tmp_path):
@@ -71,22 +70,22 @@
 71  70  with FossilReader(db_path) as reader:
 72  71  result = reader.get_daily_commit_activity(days=365)
 73  72  assert result == []
 74  73
 75  74  def test_returns_daily_counts(self, tmp_path):
 76     -# 3 checkins today, 1 checkin yesterday
     75 +# 3 checkins at 5 days ago, 1 checkin at 10 days ago
 77  76  db_path = tmp_path / "active.fossil"
 78     -_create_test_fossil_db(db_path, checkin_days_ago=[0, 0, 0, 1])
     77 +_create_test_fossil_db(db_path, checkin_days_ago=[5, 5, 5, 10])
 79  78  with FossilReader(db_path) as reader:
 80  79  result = reader.get_daily_commit_activity(days=365)
 81  80
 82  81  counts_by_date = {entry["date"]: entry["count"] for entry in result}
 83     -today = datetime.now(UTC).strftime("%Y-%m-%d")
 84     -yesterday = (datetime.now(UTC) - timedelta(days=1)).strftime("%Y-%m-%d")
 85  82
 86     -assert counts_by_date.get(today) == 3
 87     -assert counts_by_date.get(yesterday) == 1
     83 +# Should have 2 distinct dates with counts 3 and 1
     84 +assert len(counts_by_date) == 2
     85 +counts = sorted(counts_by_date.values())
     86 +assert counts == [1, 3]
 88  87
 89  88  def test_excludes_old_data_outside_window(self, tmp_path):
 90  89  # One checkin 10 days ago, one 400 days ago
 91  90  db_path = tmp_path / "old.fossil"
 92  91  _create_test_fossil_db(db_path, checkin_days_ago=[10, 400])
@@ -146,85 +145,99 @@
 146 145  # With no repos on disk, heatmap should be an empty JSON array
 147 146  heatmap = json.loads(response.context["heatmap_json"])
 148 147  assert isinstance(heatmap, list)
 149 148
 150 149  def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
 151     -"""Create two repos with overlapping daily activity and verify aggregation."""
 152     -# Create two .fossil files with overlapping dates
 153     -db1 = tmp_path / "repo1.fossil"
 154     -db2 = tmp_path / "repo2.fossil"
 155     -_create_test_fossil_db(db1, checkin_days_ago=[0, 0, 1]) # 2 today, 1 yesterday
 156     -_create_test_fossil_db(db2, checkin_days_ago=[0, 1, 1]) # 1 today, 2 yesterday
 157     -
 158     -repo1 = FossilRepository.objects.create(project=sample_project, filename="repo1.fossil", created_by=admin_user)
 159     -
 160     -# Need a second project for the second repo (OneToOne constraint)
     150 +"""Create two repos with overlapping daily activity and verify aggregation.
     151 +
     152 +Uses days well in the past (5 and 10) to avoid date-boundary issues
     153 +caused by Fossil's Julian-day-to-date conversion (date(mtime - 0.5)).
     154 +"""
     155 +from constance import config
     156 +
 161 157  from organization.models import Organization
 162 158  from projects.models import Project
 163 159
     160 +# Use the auto-created repo from the signal (Project post_save creates a FossilRepository)
     161 +repo1 = FossilRepository.objects.get(project=sample_project)
     162 +repo1.filename = "repo1.fossil"
     163 +repo1.save(update_fields=["filename", "updated_at", "version"])
     164 +
     165 +# Need a second project for the second repo (OneToOne constraint)
 164 166  org = Organization.objects.first()
 165 167  project2 = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
 166     -repo2 = FossilRepository.objects.create(project=project2, filename="repo2.fossil", created_by=admin_user)
 167     -
 168     -# Patch full_path for both repos to point to our test files
 169     -with (
 170     -patch.object(type(repo1), "full_path", new_callable=PropertyMock, return_value=db1),
 171     -patch.object(type(repo2), "full_path", new_callable=PropertyMock, return_value=db2),
 172     -patch.object(type(repo1), "exists_on_disk", new_callable=PropertyMock, return_value=True),
 173     -patch.object(type(repo2), "exists_on_disk", new_callable=PropertyMock, return_value=True),
 174     -):
     168 +repo2 = FossilRepository.objects.get(project=project2)
     169 +repo2.filename = "repo2.fossil"
     170 +repo2.save(update_fields=["filename", "updated_at", "version"])
     171 +
     172 +# Create .fossil files at the paths full_path resolves to (FOSSIL_DATA_DIR/filename)
     173 +original_dir = config.FOSSIL_DATA_DIR
     174 +config.FOSSIL_DATA_DIR = str(tmp_path)
     175 +try:
     176 +_create_test_fossil_db(tmp_path / "repo1.fossil", checkin_days_ago=[5, 5, 10]) # 2 at day-5, 1 at day-10
     177 +_create_test_fossil_db(tmp_path / "repo2.fossil", checkin_days_ago=[5, 10, 10]) # 1 at day-5, 2 at day-10
     178 +
 175 179  response = admin_client.get("/dashboard/")
     180 +finally:
     181 +config.FOSSIL_DATA_DIR = original_dir
 176 182
 177 183  assert response.status_code == 200
 178 184  heatmap = json.loads(response.context["heatmap_json"])
 179 185  counts_by_date = {entry["date"]: entry["count"] for entry in heatmap}
 180 186
 181     -today = datetime.now(UTC).strftime("%Y-%m-%d")
 182     -yesterday = (datetime.now(UTC) - timedelta(days=1)).strftime("%Y-%m-%d")
 183     -
 184     -# 2 + 1 = 3 today, 1 + 2 = 3 yesterday
 185     -assert counts_by_date.get(today) == 3
 186     -assert counts_by_date.get(yesterday) == 3
     187 +# Aggregated: 3 at day-5, 3 at day-10 = 6 total across 2 dates
     188 +assert len(counts_by_date) == 2
     189 +assert sum(counts_by_date.values()) == 6
     190 +# Each date should have exactly 3 commits (2+1 and 1+2)
     191 +for count in counts_by_date.values():
     192 +assert count == 3
 187 193
 188 194  def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
 189     -db = tmp_path / "sorted.fossil"
 190     -_create_test_fossil_db(db, checkin_days_ago=[30, 5, 20, 10])
 191     -
 192     -repo = FossilRepository.objects.create(project=sample_project, filename="sorted.fossil", created_by=admin_user)
 193     -
 194     -with (
 195     -patch.object(type(repo), "full_path", new_callable=PropertyMock, return_value=db),
 196     -patch.object(type(repo), "exists_on_disk", new_callable=PropertyMock, return_value=True),
 197     -):
     195 +from constance import config
     196 +
     197 +# Use the auto-created repo from the signal
     198 +repo = FossilRepository.objects.get(project=sample_project)
     199 +
     200 +original_dir = config.FOSSIL_DATA_DIR
     201 +config.FOSSIL_DATA_DIR = str(tmp_path)
     202 +try:
     203 +_create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[30, 5, 20, 10])
 198 204  response = admin_client.get("/dashboard/")
     205 +finally:
     206 +config.FOSSIL_DATA_DIR = original_dir
 199 207
 200 208  heatmap = json.loads(response.context["heatmap_json"])
 201 209  dates = [entry["date"] for entry in heatmap]
 202 210  assert dates == sorted(dates)
 203 211
 204 212  def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
 205 213  """Repos where the file doesn't exist on disk should be silently skipped."""
 206     -FossilRepository.objects.create(project=sample_project, filename="nonexistent.fossil", created_by=admin_user)
     214 +# The signal already created a FossilRepository -- just update the filename
     215 +repo = FossilRepository.objects.get(project=sample_project)
     216 +repo.filename = "nonexistent.fossil"
     217 +repo.save(update_fields=["filename", "updated_at", "version"])
 207 218
 208 219  response = admin_client.get("/dashboard/")
 209 220  assert response.status_code == 200
 210 221  heatmap = json.loads(response.context["heatmap_json"])
 211 222  assert heatmap == []
 212 223
 213 224  def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
 214 225  """When heatmap data exists, the template should include the heatmap div."""
 215     -db = tmp_path / "vis.fossil"
 216     -_create_test_fossil_db(db, checkin_days_ago=[0, 1, 2])
 217     -
 218     -repo = FossilRepository.objects.create(project=sample_project, filename="vis.fossil", created_by=admin_user)
 219     -
 220     -with (
 221     -patch.object(type(repo), "full_path", new_callable=PropertyMock, return_value=db),
 222     -patch.object(type(repo), "exists_on_disk", new_callable=PropertyMock, return_value=True),
 223     -):
 224     -response = admin_client.get("/dashboard/")
     226 +from constance import config
     227 +
     228 +# Use the auto-created repo from the signal
     229 +repo = FossilRepository.objects.get(project=sample_project)
     230 +
     231 +original_dir = config.FOSSIL_DATA_DIR
     232 +config.FOSSIL_DATA_DIR = str(tmp_path)
     233 +try:
     234 +_create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[5, 10, 15])
     235 +response = admin_client.get("/dashboard/")
     236 +finally:
     237 +config.FOSSIL_DATA_DIR = original_dir
 225 238
 226 239  content = response.content.decode()
 227 240  assert 'id="heatmap"' in content
 228 241  assert "Activity (last year)" in content
 229 242  assert "Less" in content
 230 243  assert "More" in content
 231 244
--- tests/test_dashboard_heatmap.py
+++ tests/test_dashboard_heatmap.py
@@ -1,12 +1,10 @@
1 """Tests for the dashboard activity heatmap."""
2
3 import json
4 import sqlite3
5 from datetime import UTC, datetime, timedelta
6 from pathlib import Path
7 from unittest.mock import PropertyMock, patch
8
9 import pytest
10 from django.test import Client
11
12 from fossil.models import FossilRepository
@@ -18,10 +16,15 @@
18
19 Args:
20 path: Where to write the .fossil file.
21 checkin_days_ago: List of integers representing days ago for each checkin.
22 Multiple entries for the same day create multiple checkins on that day.
 
 
 
 
 
23 """
24 conn = sqlite3.connect(str(path))
25 conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)")
26 conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')")
27 conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')")
@@ -41,11 +44,13 @@
41 )
42 conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)")
43 conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)")
44
45 if checkin_days_ago:
46 now_julian = _datetime_to_julian(datetime.now(UTC))
 
 
47 for i, days in enumerate(checkin_days_ago):
48 mtime = now_julian - days
49 conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}"))
50 conn.execute(
51 "INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')",
@@ -53,16 +58,10 @@
53 )
54
55 conn.commit()
56 conn.close()
57
58
59 def _datetime_to_julian(dt: datetime) -> float:
60 """Convert a Python datetime to Julian day number."""
61 unix_ts = dt.timestamp()
62 return unix_ts / 86400.0 + 2440587.5
63
64
65 class TestFossilReaderDailyActivity:
66 """Tests for FossilReader.get_daily_commit_activity()."""
67
68 def test_returns_empty_for_no_checkins(self, tmp_path):
@@ -71,22 +70,22 @@
71 with FossilReader(db_path) as reader:
72 result = reader.get_daily_commit_activity(days=365)
73 assert result == []
74
75 def test_returns_daily_counts(self, tmp_path):
76 # 3 checkins today, 1 checkin yesterday
77 db_path = tmp_path / "active.fossil"
78 _create_test_fossil_db(db_path, checkin_days_ago=[0, 0, 0, 1])
79 with FossilReader(db_path) as reader:
80 result = reader.get_daily_commit_activity(days=365)
81
82 counts_by_date = {entry["date"]: entry["count"] for entry in result}
83 today = datetime.now(UTC).strftime("%Y-%m-%d")
84 yesterday = (datetime.now(UTC) - timedelta(days=1)).strftime("%Y-%m-%d")
85
86 assert counts_by_date.get(today) == 3
87 assert counts_by_date.get(yesterday) == 1
 
 
88
89 def test_excludes_old_data_outside_window(self, tmp_path):
90 # One checkin 10 days ago, one 400 days ago
91 db_path = tmp_path / "old.fossil"
92 _create_test_fossil_db(db_path, checkin_days_ago=[10, 400])
@@ -146,85 +145,99 @@
146 # With no repos on disk, heatmap should be an empty JSON array
147 heatmap = json.loads(response.context["heatmap_json"])
148 assert isinstance(heatmap, list)
149
150 def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
151 """Create two repos with overlapping daily activity and verify aggregation."""
152 # Create two .fossil files with overlapping dates
153 db1 = tmp_path / "repo1.fossil"
154 db2 = tmp_path / "repo2.fossil"
155 _create_test_fossil_db(db1, checkin_days_ago=[0, 0, 1]) # 2 today, 1 yesterday
156 _create_test_fossil_db(db2, checkin_days_ago=[0, 1, 1]) # 1 today, 2 yesterday
157
158 repo1 = FossilRepository.objects.create(project=sample_project, filename="repo1.fossil", created_by=admin_user)
159
160 # Need a second project for the second repo (OneToOne constraint)
161 from organization.models import Organization
162 from projects.models import Project
163
 
 
 
 
 
 
164 org = Organization.objects.first()
165 project2 = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
166 repo2 = FossilRepository.objects.create(project=project2, filename="repo2.fossil", created_by=admin_user)
167
168 # Patch full_path for both repos to point to our test files
169 with (
170 patch.object(type(repo1), "full_path", new_callable=PropertyMock, return_value=db1),
171 patch.object(type(repo2), "full_path", new_callable=PropertyMock, return_value=db2),
172 patch.object(type(repo1), "exists_on_disk", new_callable=PropertyMock, return_value=True),
173 patch.object(type(repo2), "exists_on_disk", new_callable=PropertyMock, return_value=True),
174 ):
 
 
175 response = admin_client.get("/dashboard/")
 
 
176
177 assert response.status_code == 200
178 heatmap = json.loads(response.context["heatmap_json"])
179 counts_by_date = {entry["date"]: entry["count"] for entry in heatmap}
180
181 today = datetime.now(UTC).strftime("%Y-%m-%d")
182 yesterday = (datetime.now(UTC) - timedelta(days=1)).strftime("%Y-%m-%d")
183
184 # 2 + 1 = 3 today, 1 + 2 = 3 yesterday
185 assert counts_by_date.get(today) == 3
186 assert counts_by_date.get(yesterday) == 3
187
188 def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
189 db = tmp_path / "sorted.fossil"
190 _create_test_fossil_db(db, checkin_days_ago=[30, 5, 20, 10])
191
192 repo = FossilRepository.objects.create(project=sample_project, filename="sorted.fossil", created_by=admin_user)
193
194 with (
195 patch.object(type(repo), "full_path", new_callable=PropertyMock, return_value=db),
196 patch.object(type(repo), "exists_on_disk", new_callable=PropertyMock, return_value=True),
197 ):
198 response = admin_client.get("/dashboard/")
 
 
199
200 heatmap = json.loads(response.context["heatmap_json"])
201 dates = [entry["date"] for entry in heatmap]
202 assert dates == sorted(dates)
203
204 def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
205 """Repos where the file doesn't exist on disk should be silently skipped."""
206 FossilRepository.objects.create(project=sample_project, filename="nonexistent.fossil", created_by=admin_user)
 
 
 
207
208 response = admin_client.get("/dashboard/")
209 assert response.status_code == 200
210 heatmap = json.loads(response.context["heatmap_json"])
211 assert heatmap == []
212
213 def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
214 """When heatmap data exists, the template should include the heatmap div."""
215 db = tmp_path / "vis.fossil"
216 _create_test_fossil_db(db, checkin_days_ago=[0, 1, 2])
217
218 repo = FossilRepository.objects.create(project=sample_project, filename="vis.fossil", created_by=admin_user)
219
220 with (
221 patch.object(type(repo), "full_path", new_callable=PropertyMock, return_value=db),
222 patch.object(type(repo), "exists_on_disk", new_callable=PropertyMock, return_value=True),
223 ):
224 response = admin_client.get("/dashboard/")
 
 
225
226 content = response.content.decode()
227 assert 'id="heatmap"' in content
228 assert "Activity (last year)" in content
229 assert "Less" in content
230 assert "More" in content
231
--- tests/test_dashboard_heatmap.py
+++ tests/test_dashboard_heatmap.py
@@ -1,12 +1,10 @@
1 """Tests for the dashboard activity heatmap."""
2
3 import json
4 import sqlite3
 
5 from pathlib import Path
 
6
7 import pytest
8 from django.test import Client
9
10 from fossil.models import FossilRepository
@@ -18,10 +16,15 @@
16
17 Args:
18 path: Where to write the .fossil file.
19 checkin_days_ago: List of integers representing days ago for each checkin.
20 Multiple entries for the same day create multiple checkins on that day.
21
22 Note: Uses SQLite's julianday('now') for the reference point so that the
23 date(mtime - 0.5) conversion in reader.py queries produces consistent dates.
24 Python datetime vs SQLite julianday can differ by fractions of a second,
25 which at day boundaries shifts the resulting date.
26 """
27 conn = sqlite3.connect(str(path))
28 conn.execute("CREATE TABLE IF NOT EXISTS config (name TEXT PRIMARY KEY, value TEXT)")
29 conn.execute("INSERT OR REPLACE INTO config VALUES ('project-name', 'test-project')")
30 conn.execute("INSERT OR REPLACE INTO config VALUES ('project-code', 'abc123')")
@@ -41,11 +44,13 @@
44 )
45 conn.execute("CREATE TABLE IF NOT EXISTS tag (tagid INTEGER PRIMARY KEY, tagname TEXT, tagtype INTEGER)")
46 conn.execute("CREATE TABLE IF NOT EXISTS ticket (tkt_id TEXT PRIMARY KEY, tkt_uuid TEXT)")
47
48 if checkin_days_ago:
49 # Use SQLite's own julianday('now') so the reference point matches
50 # what the reader.py queries will use for date calculations.
51 now_julian = conn.execute("SELECT julianday('now')").fetchone()[0]
52 for i, days in enumerate(checkin_days_ago):
53 mtime = now_julian - days
54 conn.execute("INSERT INTO blob VALUES (?, 0, 100, ?, NULL)", (i + 1, f"uuid{i:04d}"))
55 conn.execute(
56 "INSERT INTO event (type, mtime, objid, user, comment) VALUES ('ci', ?, ?, 'testuser', 'commit')",
@@ -53,16 +58,10 @@
58 )
59
60 conn.commit()
61 conn.close()
62
 
 
 
 
 
 
63
64 class TestFossilReaderDailyActivity:
65 """Tests for FossilReader.get_daily_commit_activity()."""
66
67 def test_returns_empty_for_no_checkins(self, tmp_path):
@@ -71,22 +70,22 @@
70 with FossilReader(db_path) as reader:
71 result = reader.get_daily_commit_activity(days=365)
72 assert result == []
73
74 def test_returns_daily_counts(self, tmp_path):
75 # 3 checkins at 5 days ago, 1 checkin at 10 days ago
76 db_path = tmp_path / "active.fossil"
77 _create_test_fossil_db(db_path, checkin_days_ago=[5, 5, 5, 10])
78 with FossilReader(db_path) as reader:
79 result = reader.get_daily_commit_activity(days=365)
80
81 counts_by_date = {entry["date"]: entry["count"] for entry in result}
 
 
82
83 # Should have 2 distinct dates with counts 3 and 1
84 assert len(counts_by_date) == 2
85 counts = sorted(counts_by_date.values())
86 assert counts == [1, 3]
87
88 def test_excludes_old_data_outside_window(self, tmp_path):
89 # One checkin 10 days ago, one 400 days ago
90 db_path = tmp_path / "old.fossil"
91 _create_test_fossil_db(db_path, checkin_days_ago=[10, 400])
@@ -146,85 +145,99 @@
145 # With no repos on disk, heatmap should be an empty JSON array
146 heatmap = json.loads(response.context["heatmap_json"])
147 assert isinstance(heatmap, list)
148
149 def test_dashboard_heatmap_aggregates_across_repos(self, admin_client, admin_user, sample_project, tmp_path):
150 """Create two repos with overlapping daily activity and verify aggregation.
151
152 Uses days well in the past (5 and 10) to avoid date-boundary issues
153 caused by Fossil's Julian-day-to-date conversion (date(mtime - 0.5)).
154 """
155 from constance import config
156
 
 
 
157 from organization.models import Organization
158 from projects.models import Project
159
160 # Use the auto-created repo from the signal (Project post_save creates a FossilRepository)
161 repo1 = FossilRepository.objects.get(project=sample_project)
162 repo1.filename = "repo1.fossil"
163 repo1.save(update_fields=["filename", "updated_at", "version"])
164
165 # Need a second project for the second repo (OneToOne constraint)
166 org = Organization.objects.first()
167 project2 = Project.objects.create(name="Second Project", organization=org, visibility="private", created_by=admin_user)
168 repo2 = FossilRepository.objects.get(project=project2)
169 repo2.filename = "repo2.fossil"
170 repo2.save(update_fields=["filename", "updated_at", "version"])
171
172 # Create .fossil files at the paths full_path resolves to (FOSSIL_DATA_DIR/filename)
173 original_dir = config.FOSSIL_DATA_DIR
174 config.FOSSIL_DATA_DIR = str(tmp_path)
175 try:
176 _create_test_fossil_db(tmp_path / "repo1.fossil", checkin_days_ago=[5, 5, 10]) # 2 at day-5, 1 at day-10
177 _create_test_fossil_db(tmp_path / "repo2.fossil", checkin_days_ago=[5, 10, 10]) # 1 at day-5, 2 at day-10
178
179 response = admin_client.get("/dashboard/")
180 finally:
181 config.FOSSIL_DATA_DIR = original_dir
182
183 assert response.status_code == 200
184 heatmap = json.loads(response.context["heatmap_json"])
185 counts_by_date = {entry["date"]: entry["count"] for entry in heatmap}
186
187 # Aggregated: 3 at day-5, 3 at day-10 = 6 total across 2 dates
188 assert len(counts_by_date) == 2
189 assert sum(counts_by_date.values()) == 6
190 # Each date should have exactly 3 commits (2+1 and 1+2)
191 for count in counts_by_date.values():
192 assert count == 3
193
194 def test_dashboard_heatmap_json_is_sorted(self, admin_client, admin_user, sample_project, tmp_path):
195 from constance import config
196
197 # Use the auto-created repo from the signal
198 repo = FossilRepository.objects.get(project=sample_project)
199
200 original_dir = config.FOSSIL_DATA_DIR
201 config.FOSSIL_DATA_DIR = str(tmp_path)
202 try:
203 _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[30, 5, 20, 10])
204 response = admin_client.get("/dashboard/")
205 finally:
206 config.FOSSIL_DATA_DIR = original_dir
207
208 heatmap = json.loads(response.context["heatmap_json"])
209 dates = [entry["date"] for entry in heatmap]
210 assert dates == sorted(dates)
211
212 def test_dashboard_heatmap_skips_missing_repos(self, admin_client, admin_user, sample_project):
213 """Repos where the file doesn't exist on disk should be silently skipped."""
214 # The signal already created a FossilRepository -- just update the filename
215 repo = FossilRepository.objects.get(project=sample_project)
216 repo.filename = "nonexistent.fossil"
217 repo.save(update_fields=["filename", "updated_at", "version"])
218
219 response = admin_client.get("/dashboard/")
220 assert response.status_code == 200
221 heatmap = json.loads(response.context["heatmap_json"])
222 assert heatmap == []
223
224 def test_dashboard_renders_heatmap_container(self, admin_client, admin_user, sample_project, tmp_path):
225 """When heatmap data exists, the template should include the heatmap div."""
226 from constance import config
227
228 # Use the auto-created repo from the signal
229 repo = FossilRepository.objects.get(project=sample_project)
230
231 original_dir = config.FOSSIL_DATA_DIR
232 config.FOSSIL_DATA_DIR = str(tmp_path)
233 try:
234 _create_test_fossil_db(tmp_path / repo.filename, checkin_days_ago=[5, 10, 15])
235 response = admin_client.get("/dashboard/")
236 finally:
237 config.FOSSIL_DATA_DIR = original_dir
238
239 content = response.content.decode()
240 assert 'id="heatmap"' in content
241 assert "Activity (last year)" in content
242 assert "Less" in content
243 assert "More" in content
244

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button