FossilRepo
Clean up merge conflict markers in templates
Commit
254b46777e916db1064d86437315afdfe653ec9a6f89fbcefa8865ef5793b5f1
Parent
7d099f36f18de78…
10 files changed
+13
-2
+76
-25
+1497
+1207
+1059
+1591
+1499
+6
-3
~
core/__pycache__/sanitize.cpython-314.pyc
~
core/sanitize.py
~
templates/fossil/docs_index.html
~
tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc
+
tests/test_api_coverage.py
+
tests/test_cli.py
+
tests/test_integrations.py
+
tests/test_tasks_and_accounts.py
+
tests/test_views_coverage.py
~
tests/test_webhooks.py
| --- core/__pycache__/sanitize.cpython-314.pyc | ||
| +++ core/__pycache__/sanitize.cpython-314.pyc | ||
| cannot compute difference between binary files | ||
| 1 | 1 |
| --- core/__pycache__/sanitize.cpython-314.pyc | |
| +++ core/__pycache__/sanitize.cpython-314.pyc | |
| 0 | cannot compute difference between binary files |
| 1 |
| --- core/__pycache__/sanitize.cpython-314.pyc | |
| +++ core/__pycache__/sanitize.cpython-314.pyc | |
| 0 | cannot compute difference between binary files |
| 1 |
+13
-2
| --- core/sanitize.py | ||
| +++ core/sanitize.py | ||
| @@ -145,15 +145,24 @@ | ||
| 145 | 145 | def __init__(self): |
| 146 | 146 | super().__init__(convert_charrefs=False) |
| 147 | 147 | self.out = StringIO() |
| 148 | 148 | self._skip_depth = 0 # Track depth inside dangerous tags to skip content |
| 149 | 149 | |
| 150 | + # Void elements that are dangerous but never have content/closing tags | |
| 151 | + _DANGEROUS_VOID = frozenset({"base", "meta", "link"}) | |
| 152 | + # Dangerous container tags — skip both the tag and all content inside | |
| 153 | + _DANGEROUS_CONTAINER = frozenset({"script", "style", "iframe", "object", "embed", "form"}) | |
| 154 | + | |
| 150 | 155 | def handle_starttag(self, tag, attrs): |
| 151 | 156 | tag_lower = tag.lower() |
| 157 | + | |
| 158 | + # Dangerous void tags — just drop the tag (no content to skip) | |
| 159 | + if tag_lower in self._DANGEROUS_VOID: | |
| 160 | + return | |
| 152 | 161 | |
| 153 | 162 | # Dangerous content tags — skip tag AND all content inside |
| 154 | - if tag_lower in ("script", "style", "iframe", "object", "embed", "form", "base", "meta", "link"): | |
| 163 | + if tag_lower in self._DANGEROUS_CONTAINER: | |
| 155 | 164 | self._skip_depth += 1 |
| 156 | 165 | return |
| 157 | 166 | |
| 158 | 167 | if self._skip_depth > 0: |
| 159 | 168 | return |
| @@ -187,11 +196,13 @@ | ||
| 187 | 196 | |
| 188 | 197 | self.out.write(f"<{tag}{attr_str}>") |
| 189 | 198 | |
| 190 | 199 | def handle_endtag(self, tag): |
| 191 | 200 | tag_lower = tag.lower() |
| 192 | - if tag_lower in ("script", "style", "iframe", "object", "embed", "form", "base", "meta", "link"): | |
| 201 | + if tag_lower in self._DANGEROUS_VOID: | |
| 202 | + return | |
| 203 | + if tag_lower in self._DANGEROUS_CONTAINER: | |
| 193 | 204 | self._skip_depth = max(0, self._skip_depth - 1) |
| 194 | 205 | return |
| 195 | 206 | if self._skip_depth > 0: |
| 196 | 207 | return |
| 197 | 208 | if tag_lower in ALLOWED_TAGS: |
| 198 | 209 |
| --- core/sanitize.py | |
| +++ core/sanitize.py | |
| @@ -145,15 +145,24 @@ | |
| 145 | def __init__(self): |
| 146 | super().__init__(convert_charrefs=False) |
| 147 | self.out = StringIO() |
| 148 | self._skip_depth = 0 # Track depth inside dangerous tags to skip content |
| 149 | |
| 150 | def handle_starttag(self, tag, attrs): |
| 151 | tag_lower = tag.lower() |
| 152 | |
| 153 | # Dangerous content tags — skip tag AND all content inside |
| 154 | if tag_lower in ("script", "style", "iframe", "object", "embed", "form", "base", "meta", "link"): |
| 155 | self._skip_depth += 1 |
| 156 | return |
| 157 | |
| 158 | if self._skip_depth > 0: |
| 159 | return |
| @@ -187,11 +196,13 @@ | |
| 187 | |
| 188 | self.out.write(f"<{tag}{attr_str}>") |
| 189 | |
| 190 | def handle_endtag(self, tag): |
| 191 | tag_lower = tag.lower() |
| 192 | if tag_lower in ("script", "style", "iframe", "object", "embed", "form", "base", "meta", "link"): |
| 193 | self._skip_depth = max(0, self._skip_depth - 1) |
| 194 | return |
| 195 | if self._skip_depth > 0: |
| 196 | return |
| 197 | if tag_lower in ALLOWED_TAGS: |
| 198 |
| --- core/sanitize.py | |
| +++ core/sanitize.py | |
| @@ -145,15 +145,24 @@ | |
| 145 | def __init__(self): |
| 146 | super().__init__(convert_charrefs=False) |
| 147 | self.out = StringIO() |
| 148 | self._skip_depth = 0 # Track depth inside dangerous tags to skip content |
| 149 | |
| 150 | # Void elements that are dangerous but never have content/closing tags |
| 151 | _DANGEROUS_VOID = frozenset({"base", "meta", "link"}) |
| 152 | # Dangerous container tags — skip both the tag and all content inside |
| 153 | _DANGEROUS_CONTAINER = frozenset({"script", "style", "iframe", "object", "embed", "form"}) |
| 154 | |
| 155 | def handle_starttag(self, tag, attrs): |
| 156 | tag_lower = tag.lower() |
| 157 | |
| 158 | # Dangerous void tags — just drop the tag (no content to skip) |
| 159 | if tag_lower in self._DANGEROUS_VOID: |
| 160 | return |
| 161 | |
| 162 | # Dangerous content tags — skip tag AND all content inside |
| 163 | if tag_lower in self._DANGEROUS_CONTAINER: |
| 164 | self._skip_depth += 1 |
| 165 | return |
| 166 | |
| 167 | if self._skip_depth > 0: |
| 168 | return |
| @@ -187,11 +196,13 @@ | |
| 196 | |
| 197 | self.out.write(f"<{tag}{attr_str}>") |
| 198 | |
| 199 | def handle_endtag(self, tag): |
| 200 | tag_lower = tag.lower() |
| 201 | if tag_lower in self._DANGEROUS_VOID: |
| 202 | return |
| 203 | if tag_lower in self._DANGEROUS_CONTAINER: |
| 204 | self._skip_depth = max(0, self._skip_depth - 1) |
| 205 | return |
| 206 | if self._skip_depth > 0: |
| 207 | return |
| 208 | if tag_lower in ALLOWED_TAGS: |
| 209 |
+76
-25
| --- templates/fossil/docs_index.html | ||
| +++ templates/fossil/docs_index.html | ||
| @@ -2,65 +2,116 @@ | ||
| 2 | 2 | {% block title %}FossilSCM Guide — Fossilrepo{% endblock %} |
| 3 | 3 | |
| 4 | 4 | {% block content %} |
| 5 | 5 | <div class="max-w-4xl"> |
| 6 | 6 | <h1 class="text-2xl font-bold text-gray-100 mb-2">FossilSCM Guide</h1> |
| 7 | - <p class="text-sm text-gray-400 mb-6">Reference documentation for Fossil SCM, bundled with your Fossilrepo installation.</p> | |
| 7 | + <p class="text-sm text-gray-400 mb-4">Reference documentation for Fossil SCM, bundled with your FossilRepo installation.</p> | |
| 8 | + | |
| 9 | + <div class="flex items-center gap-4 text-xs text-gray-500 mb-6"> | |
| 10 | + <span class="flex items-center gap-1.5"><span class="inline-block w-2 h-2 rounded-full bg-green-500"></span> Supported in FossilRepo</span> | |
| 11 | + <span class="flex items-center gap-1.5"><span class="inline-block w-2 h-2 rounded-full bg-yellow-500"></span> Partial support</span> | |
| 12 | + <span class="flex items-center gap-1.5"><span class="inline-block w-2 h-2 rounded-full bg-gray-600"></span> Not yet supported</span> | |
| 13 | + </div> | |
| 8 | 14 | |
| 9 | 15 | <div class="grid grid-cols-1 gap-4 sm:grid-cols-2"> |
| 10 | 16 | |
| 11 | 17 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 12 | 18 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Getting Started</h3> |
| 13 | 19 | <div class="space-y-2"> |
| 14 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/quickstart.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Quick Start Guide</a> | |
| 15 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/build.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Building from Source</a> | |
| 16 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/concepts.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Core Concepts</a> | |
| 17 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/faq.wiki' %}" class="block text-sm text-brand-light hover:text-brand">FAQ</a> | |
| 20 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/quickstart.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 21 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Quick Start Guide</a> | |
| 22 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/build.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 23 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Building from Source</a> | |
| 24 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/concepts.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 25 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Core Concepts</a> | |
| 26 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/faq.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 27 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> FAQ</a> | |
| 18 | 28 | </div> |
| 19 | 29 | </div> |
| 20 | 30 | |
| 21 | 31 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 22 | 32 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Version Control</h3> |
| 23 | 33 | <div class="space-y-2"> |
| 24 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/checkin.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Check-in Overview</a> | |
| 25 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/branching.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Branching</a> | |
| 26 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/merge.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Merging</a> | |
| 27 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/delta_format.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Delta Format</a> | |
| 34 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/checkin.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 35 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Check-in Overview</a> | |
| 36 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/branching.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 37 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Branching</a> | |
| 38 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/merge.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 39 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Merging</a> | |
| 40 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/delta_format.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 41 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Delta Format</a> | |
| 28 | 42 | </div> |
| 29 | 43 | </div> |
| 30 | 44 | |
| 31 | 45 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 32 | 46 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Built-in Features</h3> |
| 33 | 47 | <div class="space-y-2"> |
| 34 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/wikitheory.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Wiki</a> | |
| 35 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/tickets.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Ticket System</a> | |
| 36 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/forum.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Forum</a> | |
| 37 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/alerts.md' %}" class="block text-sm text-brand-light hover:text-brand">Email Alerts</a> | |
| 38 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/chat.md' %}" class="block text-sm text-brand-light hover:text-brand">Chat</a> | |
| 48 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/wikitheory.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 49 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Wiki</a> | |
| 50 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/tickets.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 51 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Ticket System</a> | |
| 52 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/forum.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 53 | + <span class="w-2 h-2 rounded-full bg-yellow-500 flex-shrink-0"></span> Forum | |
| 54 | + <span class="text-xs text-gray-500">(read from Fossil + Django write)</span></a> | |
| 55 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/alerts.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 56 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Email Alerts</a> | |
| 57 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/chat.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 58 | + <span class="w-2 h-2 rounded-full bg-gray-600 flex-shrink-0"></span> Chat | |
| 59 | + <span class="text-xs text-gray-500">(not yet — multiple approaches under evaluation)</span></a> | |
| 39 | 60 | </div> |
| 40 | 61 | </div> |
| 41 | 62 | |
| 42 | 63 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 43 | 64 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Administration</h3> |
| 44 | 65 | <div class="space-y-2"> |
| 45 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/server/index.md' %}" class="block text-sm text-brand-light hover:text-brand">Server Setup</a> | |
| 46 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/sync.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Sync Protocol</a> | |
| 47 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/backup.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Backups</a> | |
| 48 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fileformat.wiki' %}" class="block text-sm text-brand-light hover:text-brand">File Format</a> | |
| 66 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/server/index.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 67 | + <span class="w-2 h-2 rounded-full bg-yellow-500 flex-shrink-0"></span> Server Setup | |
| 68 | + <span class="text-xs text-gray-500">(FossilRepo replaces native server)</span></a> | |
| 69 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/sync.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 70 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Sync Protocol</a> | |
| 71 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/backup.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 72 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Backups</a> | |
| 73 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fileformat.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 74 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> File Format</a> | |
| 49 | 75 | </div> |
| 50 | 76 | </div> |
| 51 | 77 | |
| 52 | 78 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5 sm:col-span-2"> |
| 53 | 79 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Reference</h3> |
| 54 | - <div class="grid grid-cols-2 gap-2"> | |
| 55 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/changes.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Changelog</a> | |
| 56 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/permutedindex.html' %}" class="block text-sm text-brand-light hover:text-brand">Command Reference</a> | |
| 57 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/th1.md' %}" class="block text-sm text-brand-light hover:text-brand">TH1 Scripting</a> | |
| 58 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fossil-v-git.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Fossil vs Git</a> | |
| 59 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/hashpolicy.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Hash Policy</a> | |
| 60 | - <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/embeddeddoc.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Embedded Docs</a> | |
| 80 | + <div class="grid grid-cols-1 sm:grid-cols-2 gap-2"> | |
| 81 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/changes.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 82 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Changelog</a> | |
| 83 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/permutedindex.html' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 84 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Command Reference</a> | |
| 85 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/th1.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 86 | + <span class="w-2 h-2 rounded-full bg-gray-600 flex-shrink-0"></span> TH1 Scripting | |
| 87 | + <span class="text-xs text-gray-500">(native Fossil only)</span></a> | |
| 88 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fossil-v-git.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 89 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Fossil vs Git</a> | |
| 90 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/hashpolicy.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 91 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Hash Policy</a> | |
| 92 | + <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/embeddeddoc.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> | |
| 93 | + <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Embedded Docs</a> | |
| 61 | 94 | </div> |
| 62 | 95 | </div> |
| 63 | 96 | |
| 64 | 97 | </div> |
| 98 | + | |
| 99 | + <!-- FossilRepo-specific additions --> | |
| 100 | + <div class="mt-6 rounded-lg bg-gray-800/50 border border-gray-700 p-5"> | |
| 101 | + <h3 class="text-sm font-semibold text-gray-200 mb-2">FossilRepo Additions</h3> | |
| 102 | + <p class="text-xs text-gray-400 mb-3">Features added by FossilRepo beyond native Fossil:</p> | |
| 103 | + <div class="grid grid-cols-1 sm:grid-cols-2 gap-2 text-sm text-gray-400"> | |
| 104 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Git mirror sync (GitHub/GitLab)</span> | |
| 105 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> MCP server for AI tools</span> | |
| 106 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> JSON API + batch operations</span> | |
| 107 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Agent workspaces + task claiming</span> | |
| 108 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> CI status checks + SVG badges</span> | |
| 109 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Webhooks with HMAC signing</span> | |
| 110 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Org roles + project-level RBAC</span> | |
| 111 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Release management with archives</span> | |
| 112 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> SQLite schema explorer</span> | |
| 113 | + <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Custom ticket fields + SQL reports</span> | |
| 114 | + </div> | |
| 115 | + </div> | |
| 65 | 116 | </div> |
| 66 | 117 | {% endblock %} |
| 67 | 118 |
| --- templates/fossil/docs_index.html | |
| +++ templates/fossil/docs_index.html | |
| @@ -2,65 +2,116 @@ | |
| 2 | {% block title %}FossilSCM Guide — Fossilrepo{% endblock %} |
| 3 | |
| 4 | {% block content %} |
| 5 | <div class="max-w-4xl"> |
| 6 | <h1 class="text-2xl font-bold text-gray-100 mb-2">FossilSCM Guide</h1> |
| 7 | <p class="text-sm text-gray-400 mb-6">Reference documentation for Fossil SCM, bundled with your Fossilrepo installation.</p> |
| 8 | |
| 9 | <div class="grid grid-cols-1 gap-4 sm:grid-cols-2"> |
| 10 | |
| 11 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 12 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Getting Started</h3> |
| 13 | <div class="space-y-2"> |
| 14 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/quickstart.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Quick Start Guide</a> |
| 15 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/build.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Building from Source</a> |
| 16 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/concepts.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Core Concepts</a> |
| 17 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/faq.wiki' %}" class="block text-sm text-brand-light hover:text-brand">FAQ</a> |
| 18 | </div> |
| 19 | </div> |
| 20 | |
| 21 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 22 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Version Control</h3> |
| 23 | <div class="space-y-2"> |
| 24 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/checkin.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Check-in Overview</a> |
| 25 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/branching.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Branching</a> |
| 26 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/merge.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Merging</a> |
| 27 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/delta_format.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Delta Format</a> |
| 28 | </div> |
| 29 | </div> |
| 30 | |
| 31 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 32 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Built-in Features</h3> |
| 33 | <div class="space-y-2"> |
| 34 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/wikitheory.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Wiki</a> |
| 35 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/tickets.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Ticket System</a> |
| 36 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/forum.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Forum</a> |
| 37 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/alerts.md' %}" class="block text-sm text-brand-light hover:text-brand">Email Alerts</a> |
| 38 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/chat.md' %}" class="block text-sm text-brand-light hover:text-brand">Chat</a> |
| 39 | </div> |
| 40 | </div> |
| 41 | |
| 42 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 43 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Administration</h3> |
| 44 | <div class="space-y-2"> |
| 45 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/server/index.md' %}" class="block text-sm text-brand-light hover:text-brand">Server Setup</a> |
| 46 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/sync.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Sync Protocol</a> |
| 47 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/backup.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Backups</a> |
| 48 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fileformat.wiki' %}" class="block text-sm text-brand-light hover:text-brand">File Format</a> |
| 49 | </div> |
| 50 | </div> |
| 51 | |
| 52 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5 sm:col-span-2"> |
| 53 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Reference</h3> |
| 54 | <div class="grid grid-cols-2 gap-2"> |
| 55 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/changes.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Changelog</a> |
| 56 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/permutedindex.html' %}" class="block text-sm text-brand-light hover:text-brand">Command Reference</a> |
| 57 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/th1.md' %}" class="block text-sm text-brand-light hover:text-brand">TH1 Scripting</a> |
| 58 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fossil-v-git.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Fossil vs Git</a> |
| 59 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/hashpolicy.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Hash Policy</a> |
| 60 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/embeddeddoc.wiki' %}" class="block text-sm text-brand-light hover:text-brand">Embedded Docs</a> |
| 61 | </div> |
| 62 | </div> |
| 63 | |
| 64 | </div> |
| 65 | </div> |
| 66 | {% endblock %} |
| 67 |
| --- templates/fossil/docs_index.html | |
| +++ templates/fossil/docs_index.html | |
| @@ -2,65 +2,116 @@ | |
| 2 | {% block title %}FossilSCM Guide — Fossilrepo{% endblock %} |
| 3 | |
| 4 | {% block content %} |
| 5 | <div class="max-w-4xl"> |
| 6 | <h1 class="text-2xl font-bold text-gray-100 mb-2">FossilSCM Guide</h1> |
| 7 | <p class="text-sm text-gray-400 mb-4">Reference documentation for Fossil SCM, bundled with your FossilRepo installation.</p> |
| 8 | |
| 9 | <div class="flex items-center gap-4 text-xs text-gray-500 mb-6"> |
| 10 | <span class="flex items-center gap-1.5"><span class="inline-block w-2 h-2 rounded-full bg-green-500"></span> Supported in FossilRepo</span> |
| 11 | <span class="flex items-center gap-1.5"><span class="inline-block w-2 h-2 rounded-full bg-yellow-500"></span> Partial support</span> |
| 12 | <span class="flex items-center gap-1.5"><span class="inline-block w-2 h-2 rounded-full bg-gray-600"></span> Not yet supported</span> |
| 13 | </div> |
| 14 | |
| 15 | <div class="grid grid-cols-1 gap-4 sm:grid-cols-2"> |
| 16 | |
| 17 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 18 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Getting Started</h3> |
| 19 | <div class="space-y-2"> |
| 20 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/quickstart.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 21 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Quick Start Guide</a> |
| 22 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/build.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 23 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Building from Source</a> |
| 24 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/concepts.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 25 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Core Concepts</a> |
| 26 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/faq.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 27 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> FAQ</a> |
| 28 | </div> |
| 29 | </div> |
| 30 | |
| 31 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 32 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Version Control</h3> |
| 33 | <div class="space-y-2"> |
| 34 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/checkin.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 35 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Check-in Overview</a> |
| 36 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/branching.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 37 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Branching</a> |
| 38 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/merge.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 39 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Merging</a> |
| 40 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/delta_format.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 41 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Delta Format</a> |
| 42 | </div> |
| 43 | </div> |
| 44 | |
| 45 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 46 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Built-in Features</h3> |
| 47 | <div class="space-y-2"> |
| 48 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/wikitheory.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 49 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Wiki</a> |
| 50 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/tickets.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 51 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Ticket System</a> |
| 52 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/forum.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 53 | <span class="w-2 h-2 rounded-full bg-yellow-500 flex-shrink-0"></span> Forum |
| 54 | <span class="text-xs text-gray-500">(read from Fossil + Django write)</span></a> |
| 55 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/alerts.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 56 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Email Alerts</a> |
| 57 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/chat.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 58 | <span class="w-2 h-2 rounded-full bg-gray-600 flex-shrink-0"></span> Chat |
| 59 | <span class="text-xs text-gray-500">(not yet — multiple approaches under evaluation)</span></a> |
| 60 | </div> |
| 61 | </div> |
| 62 | |
| 63 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5"> |
| 64 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Administration</h3> |
| 65 | <div class="space-y-2"> |
| 66 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/server/index.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 67 | <span class="w-2 h-2 rounded-full bg-yellow-500 flex-shrink-0"></span> Server Setup |
| 68 | <span class="text-xs text-gray-500">(FossilRepo replaces native server)</span></a> |
| 69 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/sync.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 70 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Sync Protocol</a> |
| 71 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/backup.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 72 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Backups</a> |
| 73 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fileformat.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 74 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> File Format</a> |
| 75 | </div> |
| 76 | </div> |
| 77 | |
| 78 | <div class="rounded-lg bg-gray-800 border border-gray-700 p-5 sm:col-span-2"> |
| 79 | <h3 class="text-sm font-semibold text-gray-200 mb-3 uppercase tracking-wider">Reference</h3> |
| 80 | <div class="grid grid-cols-1 sm:grid-cols-2 gap-2"> |
| 81 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/changes.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 82 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Changelog</a> |
| 83 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/permutedindex.html' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 84 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Command Reference</a> |
| 85 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/th1.md' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 86 | <span class="w-2 h-2 rounded-full bg-gray-600 flex-shrink-0"></span> TH1 Scripting |
| 87 | <span class="text-xs text-gray-500">(native Fossil only)</span></a> |
| 88 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/fossil-v-git.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 89 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Fossil vs Git</a> |
| 90 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/hashpolicy.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 91 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Hash Policy</a> |
| 92 | <a href="{% url 'fossil:doc_page' slug=fossil_scm_slug doc_path='www/embeddeddoc.wiki' %}" class="flex items-center gap-2 text-sm text-brand-light hover:text-brand"> |
| 93 | <span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Embedded Docs</a> |
| 94 | </div> |
| 95 | </div> |
| 96 | |
| 97 | </div> |
| 98 | |
| 99 | <!-- FossilRepo-specific additions --> |
| 100 | <div class="mt-6 rounded-lg bg-gray-800/50 border border-gray-700 p-5"> |
| 101 | <h3 class="text-sm font-semibold text-gray-200 mb-2">FossilRepo Additions</h3> |
| 102 | <p class="text-xs text-gray-400 mb-3">Features added by FossilRepo beyond native Fossil:</p> |
| 103 | <div class="grid grid-cols-1 sm:grid-cols-2 gap-2 text-sm text-gray-400"> |
| 104 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Git mirror sync (GitHub/GitLab)</span> |
| 105 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> MCP server for AI tools</span> |
| 106 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> JSON API + batch operations</span> |
| 107 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Agent workspaces + task claiming</span> |
| 108 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> CI status checks + SVG badges</span> |
| 109 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Webhooks with HMAC signing</span> |
| 110 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Org roles + project-level RBAC</span> |
| 111 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Release management with archives</span> |
| 112 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> SQLite schema explorer</span> |
| 113 | <span class="flex items-center gap-2"><span class="w-2 h-2 rounded-full bg-green-500 flex-shrink-0"></span> Custom ticket fields + SQL reports</span> |
| 114 | </div> |
| 115 | </div> |
| 116 | </div> |
| 117 | {% endblock %} |
| 118 |
| --- tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc | ||
| +++ tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc | ||
| cannot compute difference between binary files | ||
| 1 | 1 | |
| 2 | 2 | ADDED tests/test_api_coverage.py |
| 3 | 3 | ADDED tests/test_cli.py |
| 4 | 4 | ADDED tests/test_integrations.py |
| 5 | 5 | ADDED tests/test_tasks_and_accounts.py |
| 6 | 6 | ADDED tests/test_views_coverage.py |
| --- tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc | |
| +++ tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc | |
| 0 | cannot compute difference between binary files |
| 1 | |
| 2 | ADDED tests/test_api_coverage.py |
| 3 | ADDED tests/test_cli.py |
| 4 | ADDED tests/test_integrations.py |
| 5 | ADDED tests/test_tasks_and_accounts.py |
| 6 | ADDED tests/test_views_coverage.py |
| --- tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc | |
| +++ tests/__pycache__/test_webhooks.cpython-314-pytest-9.0.2.pyc | |
| 0 | cannot compute difference between binary files |
| 1 | |
| 2 | ADDED tests/test_api_coverage.py |
| 3 | ADDED tests/test_cli.py |
| 4 | ADDED tests/test_integrations.py |
| 5 | ADDED tests/test_tasks_and_accounts.py |
| 6 | ADDED tests/test_views_coverage.py |
+1497
| --- a/tests/test_api_coverage.py | ||
| +++ b/tests/test_api_coverage.py | ||
| @@ -0,0 +1,1497 @@ | ||
| 1 | +"""Tests covering uncovered code paths in fossil/api_views.py. | |
| 2 | + | |
| 3 | +Targets: batch API, workspace CRUD (list/create/detail/commit/merge/abandon), | |
| 4 | +workspace ownership checks, SSE event stream internals, and _resolve_batch_route. | |
| 5 | +Existing test_agent_coordination.py covers ticket claim/release/submit and review | |
| 6 | +CRUD -- this file does NOT duplicate those. | |
| 7 | +""" | |
| 8 | + | |
| 9 | +import json | |
| 10 | +from unittest.mock import MagicMock, patch | |
| 11 | + | |
| 12 | +import pytest | |
| 13 | +from django.contrib.auth.models import User | |
| 14 | +from django.test import Client, RequestFactory | |
| 15 | + | |
| 16 | +from fossil.agent_claims import TicketClaim | |
| 17 | +from fossil.branch_protection import BranchProtection | |
| 18 | +from fossil.ci import StatusCheck | |
| 19 | +from fossil.code_reviews import CodeReview | |
| 20 | +from fossil.models import FossilRepository | |
| 21 | +from fossil.workspaces import AgentWorkspace | |
| 22 | +from organization.models import Team | |
| 23 | +from projects.models import ProjectTeam | |
| 24 | + | |
| 25 | +# ---- Fixtures ---- | |
| 26 | + | |
| 27 | + | |
@pytest.fixture
def fossil_repo_obj(sample_project):
    """Look up the FossilRepository that was auto-created for sample_project."""
    repo = FossilRepository.objects.get(
        project=sample_project,
        deleted_at__isnull=True,
    )
    return repo
| 32 | + | |
| 33 | + | |
@pytest.fixture
def writer_user(db, admin_user, sample_project):
    """Create a non-admin user who gets write access through a team grant."""
    user = User.objects.create_user(username="writer_cov", password="testpass123")
    write_team = Team.objects.create(
        name="Cov Writers",
        organization=sample_project.organization,
        created_by=admin_user,
    )
    write_team.members.add(user)
    # Attach the team to the project with the "write" role.
    ProjectTeam.objects.create(
        project=sample_project,
        team=write_team,
        role="write",
        created_by=admin_user,
    )
    return user
| 42 | + | |
| 43 | + | |
@pytest.fixture
def writer_client(writer_user):
    """Authenticated test client logged in as the write-access user."""
    client = Client()
    client.login(username="writer_cov", password="testpass123")
    return client
| 49 | + | |
| 50 | + | |
@pytest.fixture
def reader_user(db, admin_user, sample_project):
    """Create a user whose only project access is read-only, via a team grant."""
    user = User.objects.create_user(username="reader_cov", password="testpass123")
    read_team = Team.objects.create(
        name="Cov Readers",
        organization=sample_project.organization,
        created_by=admin_user,
    )
    read_team.members.add(user)
    # Attach the team to the project with the "read" role.
    ProjectTeam.objects.create(
        project=sample_project,
        team=read_team,
        role="read",
        created_by=admin_user,
    )
    return user
| 59 | + | |
| 60 | + | |
@pytest.fixture
def reader_client(reader_user):
    """Authenticated test client logged in as the read-only user."""
    client = Client()
    client.login(username="reader_cov", password="testpass123")
    return client
| 66 | + | |
| 67 | + | |
@pytest.fixture
def workspace(fossil_repo_obj, admin_user):
    """Build an active AgentWorkspace that already has a checkout path."""
    ws = AgentWorkspace.objects.create(
        repository=fossil_repo_obj,
        created_by=admin_user,
        name="ws-test-1",
        branch="workspace/ws-test-1",
        agent_id="claude-test",
        status="active",
        checkout_path="/tmp/fake-checkout",
    )
    return ws
| 80 | + | |
| 81 | + | |
| 82 | +def _api_url(slug, path): | |
| 83 | + return f"/projects/{slug}/fossil/{path}" | |
| 84 | + | |
| 85 | + | |
| 86 | +# ---- Helper to build a mock subprocess.run result ---- | |
| 87 | + | |
| 88 | + | |
| 89 | +def _make_proc(returncode=0, stdout="", stderr=""): | |
| 90 | + result = MagicMock() | |
| 91 | + result.returncode = returncode | |
| 92 | + result.stdout = stdout | |
| 93 | + result.stderr = stderr | |
| 94 | + return result | |
| 95 | + | |
| 96 | + | |
| 97 | +class _SSEBreakError(Exception): | |
| 98 | + """Raised from mocked time.sleep to break the SSE infinite loop.""" | |
| 99 | + | |
| 100 | + | |
def _drain_sse_one_iteration(response):
    """Collect the chunks yielded by one poll cycle of the SSE stream.

    The SSE event_stream is an infinite while-True generator with time.sleep(5)
    at the end of each iteration.  Delegating to ``_drain_sse_n_iterations``
    with ``n=1`` makes the mocked sleep raise ``_SSEBreakError`` on its very
    first call — exactly what the previous standalone implementation did with
    ``side_effect=_SSEBreakError`` — so the stream stops after the first poll
    cycle.  Kept as a named wrapper because many tests read better with the
    "one iteration" phrasing.
    """
    # n=1: the counting sleep mock raises on call #1, i.e. immediately,
    # reproducing the old behavior while removing the duplicated drain loop.
    return _drain_sse_n_iterations(response, n=1)
| 120 | + | |
| 121 | + | |
| 122 | +def _drain_sse_n_iterations(response, n=3): | |
| 123 | + """Read n iterations of the SSE generator.""" | |
| 124 | + call_count = 0 | |
| 125 | + | |
| 126 | + def _count_and_break(_seconds): | |
| 127 | + nonlocal call_count | |
| 128 | + call_count += 1 | |
| 129 | + if call_count >= n: | |
| 130 | + raise _SSEBreakError | |
| 131 | + | |
| 132 | + events = [] | |
| 133 | + with patch("fossil.api_views.time.sleep", side_effect=_count_and_break): | |
| 134 | + try: | |
| 135 | + for chunk in response.streaming_content: | |
| 136 | + if isinstance(chunk, bytes): | |
| 137 | + chunk = chunk.decode("utf-8", errors="replace") | |
| 138 | + events.append(chunk) | |
| 139 | + except (_SSEBreakError, RuntimeError): | |
| 140 | + pass | |
| 141 | + return events | |
| 142 | + | |
| 143 | + | |
| 144 | +# ================================================================ | |
| 145 | +# Batch API | |
| 146 | +# ================================================================ | |
| 147 | + | |
| 148 | + | |
@pytest.mark.django_db
class TestBatchAPI:
    """Tests for POST /projects/<slug>/fossil/api/batch (lines 636-706)."""

    def test_batch_success_with_multiple_sub_requests(self, admin_client, sample_project, fossil_repo_obj):
        """Batch call dispatches multiple GET sub-requests and returns combined results."""
        with patch("fossil.api_views.FossilReader") as mock_reader_cls:
            # The view uses FossilReader as a context manager, so the mock
            # must support the `with` protocol and hand back itself.
            reader = mock_reader_cls.return_value
            reader.__enter__ = MagicMock(return_value=reader)
            reader.__exit__ = MagicMock(return_value=False)
            reader.get_timeline.return_value = []
            reader.get_checkin_count.return_value = 0
            reader.get_tickets.return_value = []

            response = admin_client.post(
                _api_url(sample_project.slug, "api/batch"),
                data=json.dumps(
                    {
                        "requests": [
                            {"method": "GET", "path": "/api/timeline"},
                            {"method": "GET", "path": "/api/tickets"},
                        ]
                    }
                ),
                content_type="application/json",
            )

        assert response.status_code == 200
        data = response.json()
        # One sub-response per sub-request, in order.
        assert len(data["responses"]) == 2
        assert data["responses"][0]["status"] == 200
        assert "checkins" in data["responses"][0]["body"]
        assert data["responses"][1]["status"] == 200
        assert "tickets" in data["responses"][1]["body"]

    def test_batch_wrong_method(self, admin_client, sample_project, fossil_repo_obj):
        """GET to batch endpoint returns 405."""
        response = admin_client.get(_api_url(sample_project.slug, "api/batch"))
        assert response.status_code == 405

    def test_batch_invalid_json(self, admin_client, sample_project, fossil_repo_obj):
        """Non-JSON body returns 400."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data="not json",
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "Invalid JSON" in response.json()["error"]

    def test_batch_requests_not_list(self, admin_client, sample_project, fossil_repo_obj):
        """'requests' must be a list."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": "not-a-list"}),
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "'requests' must be a list" in response.json()["error"]

    def test_batch_exceeds_max_requests(self, admin_client, sample_project, fossil_repo_obj):
        """More than 25 sub-requests returns 400."""
        # 26 copies of a valid sub-request: one over the documented cap.
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": [{"method": "GET", "path": "/api/project"}] * 26}),
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "Maximum 25" in response.json()["error"]

    def test_batch_empty_requests(self, admin_client, sample_project, fossil_repo_obj):
        """Empty requests list returns empty responses."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": []}),
            content_type="application/json",
        )
        assert response.status_code == 200
        assert response.json()["responses"] == []

    def test_batch_non_get_rejected(self, admin_client, sample_project, fossil_repo_obj):
        """Non-GET sub-requests are rejected with 405."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": [{"method": "POST", "path": "/api/project"}]}),
            content_type="application/json",
        )
        # The batch envelope itself succeeds; the rejection is reported
        # per sub-response, not as the HTTP status of the whole request.
        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 405
        assert "Only GET" in sub["body"]["error"]

    def test_batch_unknown_path(self, admin_client, sample_project, fossil_repo_obj):
        """Unknown API path in batch returns 404 sub-response."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": [{"method": "GET", "path": "/api/nonexistent"}]}),
            content_type="application/json",
        )
        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 404
        assert "Unknown API path" in sub["body"]["error"]

    def test_batch_missing_path(self, admin_client, sample_project, fossil_repo_obj):
        """Sub-request without 'path' returns 400 sub-response."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": [{"method": "GET"}]}),
            content_type="application/json",
        )
        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 400
        assert "Missing 'path'" in sub["body"]["error"]

    def test_batch_non_dict_sub_request(self, admin_client, sample_project, fossil_repo_obj):
        """Non-dict items in requests list return 400 sub-response."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": ["not-a-dict"]}),
            content_type="application/json",
        )
        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 400
        assert "must be an object" in sub["body"]["error"]

    def test_batch_dynamic_route_ticket_detail(self, admin_client, sample_project, fossil_repo_obj):
        """Batch can route to dynamic ticket detail path."""
        with patch("fossil.api_views.FossilReader") as mock_reader_cls:
            reader = mock_reader_cls.return_value
            reader.__enter__ = MagicMock(return_value=reader)
            reader.__exit__ = MagicMock(return_value=False)
            # Minimal ticket stub: every attribute the serializer reads.
            ticket = MagicMock()
            ticket.uuid = "abc123"
            ticket.title = "Test"
            ticket.status = "Open"
            ticket.type = "Bug"
            ticket.subsystem = ""
            ticket.priority = ""
            ticket.severity = ""
            ticket.resolution = ""
            ticket.body = ""
            ticket.created = None
            reader.get_ticket_detail.return_value = ticket
            reader.get_ticket_comments.return_value = []

            response = admin_client.post(
                _api_url(sample_project.slug, "api/batch"),
                data=json.dumps({"requests": [{"method": "GET", "path": "/api/tickets/abc123"}]}),
                content_type="application/json",
            )

        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 200
        assert sub["body"]["uuid"] == "abc123"

    def test_batch_dynamic_route_wiki_page(self, admin_client, sample_project, fossil_repo_obj):
        """Batch can route to dynamic wiki page path."""
        with patch("fossil.api_views.FossilReader") as mock_reader_cls:
            reader = mock_reader_cls.return_value
            reader.__enter__ = MagicMock(return_value=reader)
            reader.__exit__ = MagicMock(return_value=False)
            # Minimal wiki page stub: attributes the serializer reads.
            page = MagicMock()
            page.name = "Home"
            page.content = "# Home"
            page.last_modified = None
            page.user = "admin"
            reader.get_wiki_page.return_value = page

            # Rendering is patched out so the test needs no markup pipeline.
            with patch("fossil.views._render_fossil_content", return_value="<h1>Home</h1>"):
                response = admin_client.post(
                    _api_url(sample_project.slug, "api/batch"),
                    data=json.dumps({"requests": [{"method": "GET", "path": "/api/wiki/Home"}]}),
                    content_type="application/json",
                )

        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 200
        assert sub["body"]["name"] == "Home"

    def test_batch_denied_for_anon(self, client, sample_project, fossil_repo_obj):
        """Anonymous users cannot use the batch API."""
        response = client.post(
            _api_url(sample_project.slug, "api/batch"),
            data=json.dumps({"requests": []}),
            content_type="application/json",
        )
        assert response.status_code == 401

    def test_batch_sub_request_exception_returns_500(self, admin_client, sample_project, fossil_repo_obj):
        """When a sub-request raises an exception, we get a 500 sub-response."""
        with patch("fossil.api_views.FossilReader") as mock_reader_cls:
            # Constructing the reader itself blows up; the batch handler
            # must convert that into a per-sub-request 500, not crash.
            mock_reader_cls.side_effect = RuntimeError("boom")

            response = admin_client.post(
                _api_url(sample_project.slug, "api/batch"),
                data=json.dumps({"requests": [{"method": "GET", "path": "/api/timeline"}]}),
                content_type="application/json",
            )

        assert response.status_code == 200
        sub = response.json()["responses"][0]
        assert sub["status"] == 500
        assert "Internal error" in sub["body"]["error"]
| 357 | + | |
| 358 | + | |
| 359 | +# ================================================================ | |
| 360 | +# Workspace List | |
| 361 | +# ================================================================ | |
| 362 | + | |
| 363 | + | |
@pytest.mark.django_db
class TestWorkspaceList:
    """Exercises GET /projects/<slug>/fossil/api/workspaces (lines 749-786)."""

    def test_list_workspaces_empty(self, admin_client, sample_project, fossil_repo_obj):
        """With no workspaces, the endpoint returns an empty list."""
        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces"))
        assert resp.status_code == 200
        assert resp.json()["workspaces"] == []

    def test_list_workspaces_returns_all(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Every workspace belonging to the repo appears in the listing."""
        for ws_name, agent in (("ws-1", "a1"), ("ws-2", "a2")):
            AgentWorkspace.objects.create(
                repository=fossil_repo_obj,
                name=ws_name,
                branch=f"workspace/{ws_name}",
                agent_id=agent,
                created_by=admin_user,
            )

        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces"))
        assert resp.status_code == 200
        listed = resp.json()["workspaces"]
        assert len(listed) == 2
        assert {entry["name"] for entry in listed} == {"ws-1", "ws-2"}

    def test_list_workspaces_filter_by_status(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """?status= narrows the listing to workspaces in that state."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj, name="ws-active", branch="b/a", status="active", created_by=admin_user
        )
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj, name="ws-merged", branch="b/m", status="merged", created_by=admin_user
        )

        url = _api_url(sample_project.slug, "api/workspaces") + "?status=active"
        resp = admin_client.get(url)
        assert resp.status_code == 200
        matches = resp.json()["workspaces"]
        assert [entry["name"] for entry in matches] == ["ws-active"]

    def test_list_workspaces_wrong_method(self, admin_client, sample_project, fossil_repo_obj):
        """The collection endpoint only supports GET."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces"),
            content_type="application/json",
        )
        assert resp.status_code == 405

    def test_list_workspaces_denied_for_anon(self, client, sample_project, fossil_repo_obj):
        """Unauthenticated requests are rejected with 401."""
        resp = client.get(_api_url(sample_project.slug, "api/workspaces"))
        assert resp.status_code == 401

    def test_list_workspaces_response_shape(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Each listed workspace carries the full set of serialized fields."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-shape",
            branch="workspace/ws-shape",
            agent_id="claude-shape",
            description="test workspace",
            files_changed=3,
            commits_made=2,
            created_by=admin_user,
        )
        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces"))
        entry = resp.json()["workspaces"][0]
        expected = {
            "name": "ws-shape",
            "branch": "workspace/ws-shape",
            "status": "active",
            "agent_id": "claude-shape",
            "description": "test workspace",
            "files_changed": 3,
            "commits_made": 2,
        }
        for field, value in expected.items():
            assert entry[field] == value
        assert entry["created_at"] is not None
| 437 | + | |
| 438 | + | |
| 439 | +# ================================================================ | |
| 440 | +# Workspace Detail | |
| 441 | +# ================================================================ | |
| 442 | + | |
| 443 | + | |
@pytest.mark.django_db
class TestWorkspaceDetail:
    """Exercises GET /projects/<slug>/fossil/api/workspaces/<name> (lines 904-934)."""

    def test_get_workspace_detail(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """The detail endpoint serializes every workspace field."""
        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1"))
        assert resp.status_code == 200
        body = resp.json()
        assert body["name"] == "ws-test-1"
        assert body["branch"] == "workspace/ws-test-1"
        assert body["agent_id"] == "claude-test"
        assert body["status"] == "active"
        assert body["updated_at"] is not None

    def test_get_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Unknown workspace names yield a 404 with an explanatory error."""
        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces/nonexistent"))
        assert resp.status_code == 404
        assert "not found" in resp.json()["error"].lower()

    def test_get_workspace_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Only GET is accepted on the detail endpoint."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1"),
            content_type="application/json",
        )
        assert resp.status_code == 405

    def test_get_workspace_denied_for_anon(self, client, sample_project, fossil_repo_obj, workspace):
        """Unauthenticated requests are rejected with 401."""
        resp = client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1"))
        assert resp.status_code == 401
| 477 | + | |
| 478 | + | |
| 479 | +# ================================================================ | |
| 480 | +# Workspace Create | |
| 481 | +# ================================================================ | |
| 482 | + | |
| 483 | + | |
@pytest.mark.django_db
class TestWorkspaceCreate:
    """Exercise POST /projects/<slug>/fossil/api/workspaces/create (lines 789-901)."""

    def test_create_workspace_success(self, admin_client, sample_project, fossil_repo_obj):
        """A valid request opens a Fossil checkout and persists a workspace row."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            # open / branch new / update are all expected to succeed.
            mock_run.return_value = _make_proc(stdout="checkout opened")

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "agent-fix-99", "description": "Fix bug #99", "agent_id": "claude-99"}),
                content_type="application/json",
            )

            assert resp.status_code == 201
            payload = resp.json()
            assert payload["name"] == "agent-fix-99"
            assert payload["branch"] == "workspace/agent-fix-99"
            assert payload["status"] == "active"
            assert payload["agent_id"] == "claude-99"

            # The API response must be backed by a matching DB record.
            ws = AgentWorkspace.objects.get(repository=fossil_repo_obj, name="agent-fix-99")
            assert ws.branch == "workspace/agent-fix-99"
            assert ws.description == "Fix bug #99"

    def test_create_workspace_missing_name(self, admin_client, sample_project, fossil_repo_obj):
        """Omitting the required name field is rejected with 400."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"description": "no name"}),
            content_type="application/json",
        )
        assert resp.status_code == 400
        assert "name" in resp.json()["error"].lower()

    def test_create_workspace_invalid_name(self, admin_client, sample_project, fossil_repo_obj):
        """Path-traversal style names (special characters) are rejected with 400."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "../../etc/passwd"}),
            content_type="application/json",
        )
        assert resp.status_code == 400
        assert "Invalid workspace name" in resp.json()["error"]

    def test_create_workspace_name_starts_with_dot(self, admin_client, sample_project, fossil_repo_obj):
        """Leading-dot names fall outside the allowed name pattern and get 400."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": ".hidden"}),
            content_type="application/json",
        )
        assert resp.status_code == 400

    def test_create_workspace_duplicate_name(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Reusing an existing workspace name yields 409."""
        AgentWorkspace.objects.create(repository=fossil_repo_obj, name="dup-ws", branch="workspace/dup-ws", created_by=admin_user)

        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "dup-ws"}),
            content_type="application/json",
        )
        assert resp.status_code == 409
        assert "already exists" in resp.json()["error"]

    def test_create_workspace_invalid_json(self, admin_client, sample_project, fossil_repo_obj):
        """A body that is not JSON yields 400."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data="not json",
            content_type="application/json",
        )
        assert resp.status_code == 400
        assert "Invalid JSON" in resp.json()["error"]

    def test_create_workspace_fossil_open_fails(self, admin_client, sample_project, fossil_repo_obj):
        """A failing `fossil open` surfaces as 500 (checkout dir cleanup is stubbed)."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            mock_run.return_value = _make_proc(returncode=1, stderr="open failed")

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "fail-open"}),
                content_type="application/json",
            )

            assert resp.status_code == 500
            assert "Failed to open" in resp.json()["error"]

    def test_create_workspace_branch_creation_fails(self, admin_client, sample_project, fossil_repo_obj):
        """A failing `fossil branch new` surfaces as 500 and the checkout is closed."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            # open succeeds, branch new fails, cleanup close succeeds.
            mock_run.side_effect = [
                _make_proc(returncode=0),  # open
                _make_proc(returncode=1, stderr="branch error"),  # branch new
                _make_proc(returncode=0),  # close --force (cleanup)
            ]

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "fail-branch"}),
                content_type="application/json",
            )

            assert resp.status_code == 500
            assert "Failed to create branch" in resp.json()["error"]

    def test_create_workspace_update_fails(self, admin_client, sample_project, fossil_repo_obj):
        """A failing `fossil update` onto the new branch surfaces as 500."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # open
                _make_proc(returncode=0),  # branch new
                _make_proc(returncode=1, stderr="update failed"),  # update branch
                _make_proc(returncode=0),  # close --force (cleanup)
            ]

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "fail-update"}),
                content_type="application/json",
            )

            assert resp.status_code == 500
            assert "Failed to switch to branch" in resp.json()["error"]

    def test_create_workspace_wrong_method(self, admin_client, sample_project, fossil_repo_obj):
        """Only POST is accepted; GET yields 405."""
        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces/create"))
        assert resp.status_code == 405

    def test_create_workspace_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj):
        """Read-only users lack write permission and get 403."""
        resp = reader_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "denied-ws"}),
            content_type="application/json",
        )
        assert resp.status_code == 403

    def test_create_workspace_denied_for_anon(self, client, sample_project, fossil_repo_obj):
        """Unauthenticated requests get 401."""
        resp = client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "anon-ws"}),
            content_type="application/json",
        )
        assert resp.status_code == 401
| 645 | + | |
| 646 | + | |
| 647 | +# ================================================================ | |
| 648 | +# Workspace Commit | |
| 649 | +# ================================================================ | |
| 650 | + | |
| 651 | + | |
@pytest.mark.django_db
class TestWorkspaceCommit:
    """Exercise POST /projects/<slug>/fossil/api/workspaces/<name>/commit (lines 937-1034)."""

    def test_commit_success(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """A successful commit bumps commits_made and echoes the message back."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            # The view runs addremove first, then commit.
            mock_run.side_effect = [
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=0, stdout="New_Version: abc123"),  # commit
            ]

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "Fix bug", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert resp.status_code == 200
            body = resp.json()
            assert body["message"] == "Fix bug"
            assert body["commits_made"] == 1

            workspace.refresh_from_db()
            assert workspace.commits_made == 1

    def test_commit_with_specific_files(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """When a file list is given, each file is added individually before commit."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # add file1
                _make_proc(returncode=0),  # add file2
                _make_proc(returncode=0, stdout="New_Version: def456"),  # commit
            ]

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "Add files", "files": ["a.py", "b.py"], "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert resp.status_code == 200

    def test_commit_nothing_to_commit(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Fossil reporting no changes maps to a 409 response."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=1, stderr="nothing has changed"),  # commit
            ]

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "no change", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert resp.status_code == 409
            assert "Nothing to commit" in resp.json()["error"]

    def test_commit_fossil_error(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """A commit failure other than nothing-changed maps to 500."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            cli = mock_cli_cls.return_value
            cli.binary = "/usr/local/bin/fossil"
            cli._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=1, stderr="lock failed"),  # commit
            ]

            resp = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "fail commit", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert resp.status_code == 500
            assert "Commit failed" in resp.json()["error"]

    def test_commit_missing_message(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Omitting the commit message yields 400."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
            data=json.dumps({"agent_id": "claude-test"}),
            content_type="application/json",
        )
        assert resp.status_code == 400
        assert "message" in resp.json()["error"].lower()

    def test_commit_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """An unknown workspace name yields 404."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/nonexistent/commit"),
            data=json.dumps({"message": "fix"}),
            content_type="application/json",
        )
        assert resp.status_code == 404

    def test_commit_workspace_not_active(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Committing to a workspace that was already merged yields 409."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-merged",
            branch="workspace/ws-merged",
            status="merged",
            created_by=admin_user,
        )

        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-merged/commit"),
            data=json.dumps({"message": "too late"}),
            content_type="application/json",
        )
        assert resp.status_code == 409
        assert "merged" in resp.json()["error"]

    def test_commit_invalid_json(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """A body that is not JSON yields 400."""
        resp = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
            data="not json",
            content_type="application/json",
        )
        assert resp.status_code == 400

    def test_commit_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Only POST is accepted; GET yields 405."""
        resp = admin_client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"))
        assert resp.status_code == 405

    def test_commit_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj, workspace):
        """Read-only users cannot commit and get 403."""
        resp = reader_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
            data=json.dumps({"message": "denied"}),
            content_type="application/json",
        )
        assert resp.status_code == 403
| 797 | + | |
| 798 | + | |
| 799 | +# ================================================================ | |
| 800 | +# Workspace Merge | |
| 801 | +# ================================================================ | |
| 802 | + | |
| 803 | + | |
@pytest.mark.django_db
class TestWorkspaceMerge:
    """Tests for POST /projects/<slug>/fossil/api/workspaces/<name>/merge (lines 1037-1185).

    This endpoint is complex: it enforces branch protection, review gates,
    and runs three subprocess calls (update, merge, commit).
    """

    def test_merge_success_admin_bypass(self, admin_client, sample_project, fossil_repo_obj, workspace, admin_user):
        """Admin can merge without an approved review (admin bypass of review gate)."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # update trunk
                _make_proc(returncode=0, stdout="merged ok"),  # merge
                _make_proc(returncode=0, stdout="committed"),  # commit
                _make_proc(returncode=0),  # close --force
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"),
                data=json.dumps({"target_branch": "trunk", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 200
            data = response.json()
            assert data["status"] == "merged"
            assert data["target_branch"] == "trunk"

            # A successful merge closes the checkout and clears its path.
            workspace.refresh_from_db()
            assert workspace.status == "merged"
            assert workspace.checkout_path == ""

    def test_merge_with_approved_review(self, writer_client, sample_project, fossil_repo_obj, admin_user):
        """Non-admin writer can merge if an approved review exists for the workspace."""
        ws = AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-reviewed",
            branch="workspace/ws-reviewed",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )
        CodeReview.objects.create(
            repository=fossil_repo_obj,
            workspace=ws,
            title="Fix",
            diff="d",
            status="approved",
            created_by=admin_user,
        )

        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # update
                _make_proc(returncode=0),  # merge
                _make_proc(returncode=0),  # commit
                _make_proc(returncode=0),  # close
            ]

            response = writer_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-reviewed/merge"),
                data=json.dumps({"target_branch": "trunk"}),
                content_type="application/json",
            )

            assert response.status_code == 200
            assert response.json()["status"] == "merged"

    def test_merge_marks_linked_review_as_merged(self, admin_client, sample_project, fossil_repo_obj, workspace, admin_user):
        """Merging a workspace with an approved review updates the review status to merged."""
        review = CodeReview.objects.create(
            repository=fossil_repo_obj,
            workspace=workspace,
            title="ws review",
            diff="d",
            status="approved",
            created_by=admin_user,
        )

        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.return_value = _make_proc(returncode=0)

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"),
                data=json.dumps({"agent_id": "claude-test"}),
                content_type="application/json",
            )
            # The merge must succeed; asserting here gives a clearer failure
            # than only checking the review transition below.
            assert response.status_code == 200

            review.refresh_from_db()
            assert review.status == "merged"

    def test_merge_blocked_no_review_non_admin(self, writer_client, sample_project, fossil_repo_obj, admin_user):
        """Non-admin cannot merge if no approved review exists for the workspace."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-no-review",
            branch="workspace/ws-no-review",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )

        response = writer_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-no-review/merge"),
            data=json.dumps({}),
            content_type="application/json",
        )
        assert response.status_code == 403
        assert "No approved code review" in response.json()["error"]

    def test_merge_blocked_review_not_approved(self, writer_client, sample_project, fossil_repo_obj, admin_user):
        """Non-admin cannot merge if the linked review is still pending."""
        ws = AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-pending-review",
            branch="workspace/ws-pending-review",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )
        CodeReview.objects.create(
            repository=fossil_repo_obj,
            workspace=ws,
            title="Pending",
            diff="d",
            status="pending",
            created_by=admin_user,
        )

        response = writer_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-pending-review/merge"),
            data=json.dumps({}),
            content_type="application/json",
        )
        assert response.status_code == 403
        assert "must be approved" in response.json()["error"]

    def test_merge_blocked_branch_protection_restrict_push(self, writer_client, sample_project, fossil_repo_obj, admin_user):
        """Branch protection with restrict_push blocks non-admin merges."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-protected",
            branch="workspace/ws-protected",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )
        BranchProtection.objects.create(
            repository=fossil_repo_obj,
            branch_pattern="trunk",
            restrict_push=True,
            created_by=admin_user,
        )

        response = writer_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-protected/merge"),
            data=json.dumps({"target_branch": "trunk"}),
            content_type="application/json",
        )
        assert response.status_code == 403
        assert "protected" in response.json()["error"].lower()

    def test_merge_blocked_required_status_check_not_passed(self, writer_client, sample_project, fossil_repo_obj, admin_user):
        """Branch protection with required status checks blocks merge when check hasn't passed."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-ci-fail",
            branch="workspace/ws-ci-fail",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )
        BranchProtection.objects.create(
            repository=fossil_repo_obj,
            branch_pattern="trunk",
            restrict_push=False,
            require_status_checks=True,
            required_contexts="ci/tests",
            created_by=admin_user,
        )
        # Status check is pending (not success)
        StatusCheck.objects.create(
            repository=fossil_repo_obj,
            checkin_uuid="some-uuid",
            context="ci/tests",
            state="pending",
            created_by=admin_user,
        )

        response = writer_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-ci-fail/merge"),
            data=json.dumps({"target_branch": "trunk"}),
            content_type="application/json",
        )
        assert response.status_code == 403
        assert "status check" in response.json()["error"].lower()

    def test_merge_allowed_with_passing_status_check(self, writer_client, sample_project, fossil_repo_obj, admin_user):
        """Branch protection with passing required status check allows merge."""
        ws = AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-ci-pass",
            branch="workspace/ws-ci-pass",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )
        BranchProtection.objects.create(
            repository=fossil_repo_obj,
            branch_pattern="trunk",
            restrict_push=False,
            require_status_checks=True,
            required_contexts="ci/tests",
            created_by=admin_user,
        )
        StatusCheck.objects.create(
            repository=fossil_repo_obj,
            checkin_uuid="some-uuid",
            context="ci/tests",
            state="success",
            created_by=admin_user,
        )
        # Approved review satisfies the non-admin review gate.
        CodeReview.objects.create(
            repository=fossil_repo_obj,
            workspace=ws,
            title="Fix",
            diff="d",
            status="approved",
            created_by=admin_user,
        )

        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.return_value = _make_proc(returncode=0)

            response = writer_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-ci-pass/merge"),
                data=json.dumps({"target_branch": "trunk"}),
                content_type="application/json",
            )

            assert response.status_code == 200

    def test_merge_fossil_update_fails(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """When fossil update to target branch fails, return 500."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.return_value = _make_proc(returncode=1, stderr="update failed")

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"),
                data=json.dumps({"agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Failed to switch" in response.json()["error"]

    def test_merge_fossil_merge_fails(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """When fossil merge command fails, return 500."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # update
                _make_proc(returncode=1, stderr="merge conflict"),  # merge
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"),
                data=json.dumps({"agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Merge failed" in response.json()["error"]

    def test_merge_commit_fails(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """When the merge commit fails, return 500 and don't close workspace."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # update
                _make_proc(returncode=0),  # merge
                _make_proc(returncode=1, stderr="commit lock"),  # commit
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"),
                data=json.dumps({"agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Merge commit failed" in response.json()["error"]

            # Workspace should still be active (not closed on commit failure)
            workspace.refresh_from_db()
            assert workspace.status == "active"

    def test_merge_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Merging a non-existent workspace returns 404."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/nonexistent/merge"),
            data=json.dumps({}),
            content_type="application/json",
        )
        assert response.status_code == 404

    def test_merge_workspace_not_active(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Merging an already-merged workspace returns 409."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-already-merged",
            branch="workspace/ws-already-merged",
            status="merged",
            created_by=admin_user,
        )

        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-already-merged/merge"),
            data=json.dumps({}),
            content_type="application/json",
        )
        assert response.status_code == 409
        assert "merged" in response.json()["error"]

    def test_merge_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """GET to merge endpoint returns 405."""
        response = admin_client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"))
        assert response.status_code == 405

    def test_merge_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj, workspace):
        """Read-only users cannot merge workspaces."""
        response = reader_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"),
            data=json.dumps({}),
            content_type="application/json",
        )
        assert response.status_code == 403
| 1154 | + | |
| 1155 | + | |
| 1156 | +# ================================================================ | |
| 1157 | +# Workspace Abandon | |
| 1158 | +# ================================================================ | |
| 1159 | + | |
| 1160 | + | |
| 1161 | +@pytest.mark.django_db | |
| 1162 | +class TestWorkspaceAbandon: | |
| 1163 | + """Tests for DELETE /projects/<slug>/fossil/api/workspaces/<name>/abandon (lines 1188-1238).""" | |
| 1164 | + | |
| 1165 | + def test_abandon_success(self, admin_client, sample_project, fossil_repo_obj, workspace): | |
| 1166 | + """Abandoning a workspace closes checkout, cleans up directory, and updates status.""" | |
| 1167 | + with ( | |
| 1168 | + patch("subprocess.run") as mock_run, | |
| 1169 | + patch("fossil.cli.FossilCLI") as mock_cli_cls, | |
| 1170 | + patch("shutil.rmtree") as mock_rmtree, | |
| 1171 | + ): | |
| 1172 | + mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" | |
| 1173 | + mock_cli_cls.return_value._env = {} | |
| 1174 | + mock_run.return_value = _make_proc(returncode=0) | |
| 1175 | + | |
| 1176 | + response = admin_client.delete( | |
| 1177 | + _api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon"), | |
| 1178 | + ) | |
| 1179 | + | |
| 1180 | + assert response.status_code == 200 | |
| 1181 | + data = response.json() | |
| 1182 | + assert data["status"] == "abandoned" | |
| 1183 | + assert data["name"] == "ws-test-1" | |
| 1184 | + | |
| 1185 | + workspace.refresh_from_db() | |
| 1186 | + assert workspace.status == "abandoned" | |
| 1187 | + assert workspace.checkout_path == "" | |
| 1188 | + | |
| 1189 | + # Verify cleanup was called | |
| 1190 | + mock_rmtree.assert_called_once() | |
| 1191 | + | |
| 1192 | + def test_abandon_no_checkout_path(self, admin_client, sample_project, fossil_repo_obj, admin_user): | |
| 1193 | + """Abandoning a workspace with empty checkout path still works (no cleanup needed).""" | |
| 1194 | + ws = AgentWorkspace.objects.create( | |
| 1195 | + repository=fossil_repo_obj, | |
| 1196 | + name="ws-no-path", | |
| 1197 | + branch="workspace/ws-no-path", | |
| 1198 | + status="active", | |
| 1199 | + checkout_path="", | |
| 1200 | + created_by=admin_user, | |
| 1201 | + ) | |
| 1202 | + | |
| 1203 | + with patch("fossil.cli.FossilCLI"): | |
| 1204 | + response = admin_client.delete(_api_url(sample_project.slug, "api/workspaces/ws-no-path/abandon")) | |
| 1205 | + | |
| 1206 | + assert response.status_code == 200 | |
| 1207 | + ws.refresh_from_db() | |
| 1208 | + assert ws.status == "abandoned" | |
| 1209 | + | |
| 1210 | + def test_abandon_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj): | |
| 1211 | + """Abandoning a non-existent workspace returns 404.""" | |
| 1212 | + response = admin_client.delete(_api_url(sample_project.slug, "api/workspaces/nonexistent/abandon")) | |
| 1213 | + assert response.status_code == 404 | |
| 1214 | + | |
| 1215 | + def test_abandon_workspace_already_abandoned(self, admin_client, sample_project, fossil_repo_obj, admin_user): | |
| 1216 | + """Abandoning an already-abandoned workspace returns 409.""" | |
| 1217 | + AgentWorkspace.objects.create( | |
| 1218 | + repository=fossil_repo_obj, | |
| 1219 | + name="ws-gone", | |
| 1220 | + branch="workspace/ws-gone", | |
| 1221 | + status="abandoned", | |
| 1222 | + created_by=admin_user, | |
| 1223 | + ) | |
| 1224 | + | |
| 1225 | + response = admin_client.delete(_api_url(sample_project.slug, "api/workspaces/ws-gone/abandon")) | |
| 1226 | + assert response.status_code == 409 | |
| 1227 | + assert "already abandoned" in response.json()["error"] | |
| 1228 | + | |
| 1229 | + def test_abandon_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace): | |
| 1230 | + """POST to abandon endpoint returns 405 (DELETE required).""" | |
| 1231 | + response = admin_client.post( | |
| 1232 | + _api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon"), | |
| 1233 | + content_type="application/json", | |
| 1234 | + ) | |
| 1235 | + assert response.status_code == 405 | |
| 1236 | + | |
| 1237 | + def test_abandon_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj, workspace): | |
| 1238 | + """Read-only users cannot abandon workspaces.""" | |
| 1239 | + response = reader_client.delete(_api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon")) | |
| 1240 | + assert response.status_code == 403 | |
| 1241 | + | |
| 1242 | + def test_abandon_denied_for_anon(self, client, sample_project, fossil_repo_obj, workspace): | |
| 1243 | + """Anonymous users cannot abandon workspaces.""" | |
| 1244 | + response = client.delete(_api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon")) | |
| 1245 | + assert response.status_code == 401 | |
| 1246 | + | |
| 1247 | + | |
| 1248 | +# ================================================================ | |
| 1249 | +# Workspace Ownership Checks | |
| 1250 | +# ================================================================ | |
| 1251 | + | |
| 1252 | + | |
| 1253 | +@pytest.mark.django_db | |
| 1254 | +class TestWorkspaceOwnership: | |
| 1255 | + """Tests for _check_workspace_ownership (lines 722-747). | |
| 1256 | + | |
| 1257 | + Token-based callers must supply matching agent_id. | |
| 1258 | + Session-auth users (human oversight) are always allowed. | |
| 1259 | + """ | |
| 1260 | + | |
| 1261 | + def test_session_user_always_allowed(self, admin_client, sample_project, fossil_repo_obj, workspace): | |
| 1262 | + """Session-auth users bypass ownership check (human oversight). | |
| 1263 | + Tested through the commit endpoint which calls _check_workspace_ownership. | |
| 1264 | + """ | |
| 1265 | + with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls: | |
| 1266 | + mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" | |
| 1267 | + mock_cli_cls.return_value._env = {} | |
| 1268 | + mock_run.side_effect = [ | |
| 1269 | + _make_proc(returncode=0), # addremove | |
| 1270 | + _make_proc(returncode=0, stdout="committed"), # commit | |
| 1271 | + ] | |
| 1272 | + | |
| 1273 | + # Session user does not provide agent_id -- should still be allowed | |
| 1274 | + response = admin_client.post( | |
| 1275 | + _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"), | |
| 1276 | + data=json.dumps({"message": "Human override"}), | |
| 1277 | + content_type="application/json", | |
| 1278 | + ) | |
| 1279 | + | |
| 1280 | + assert response.status_code == 200 | |
| 1281 | + | |
| 1282 | + def test_workspace_without_agent_id_allows_any_writer(self, admin_client, sample_project, fossil_repo_obj, admin_user): | |
| 1283 | + """Workspace with empty agent_id allows any writer to operate.""" | |
| 1284 | + AgentWorkspace.objects.create( | |
| 1285 | + repository=fossil_repo_obj, | |
| 1286 | + name="ws-no-agent", | |
| 1287 | + branch="workspace/ws-no-agent", | |
| 1288 | + agent_id="", | |
| 1289 | + status="active", | |
| 1290 | + checkout_path="/tmp/fake", | |
| 1291 | + created_by=admin_user, | |
| 1292 | + ) | |
| 1293 | + | |
| 1294 | + with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls: | |
| 1295 | + mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" | |
| 1296 | + mock_cli_cls.return_value._env = {} | |
| 1297 | + mock_run.side_effect = [ | |
| 1298 | + _make_proc(returncode=0), | |
| 1299 | + _make_proc(returncode=0, stdout="committed"), | |
| 1300 | + ] | |
| 1301 | + | |
| 1302 | + response = admin_client.post( | |
| 1303 | + _api_url(sample_project.slug, "api/workspaces/ws-no-agent/commit"), | |
| 1304 | + data=json.dumps({"message": "Anyone can commit"}), | |
| 1305 | + content_type="application/json", | |
| 1306 | + ) | |
| 1307 | + | |
| 1308 | + assert response.status_code == 200 | |
| 1309 | + | |
| 1310 | + | |
| 1311 | +# ================================================================ | |
| 1312 | +# SSE Events - Stream Content | |
| 1313 | +# ================================================================ | |
| 1314 | + | |
| 1315 | + | |
| 1316 | +@pytest.mark.django_db | |
| 1317 | +class TestSSEEventStream: | |
| 1318 | + """Tests for GET /projects/<slug>/fossil/api/events (lines 1521-1653). | |
| 1319 | + | |
| 1320 | + The SSE endpoint returns a StreamingHttpResponse. We verify the response | |
| 1321 | + metadata and test the event generator for various event types. | |
| 1322 | + """ | |
| 1323 | + | |
| 1324 | + def test_sse_response_headers(self, admin_client, sample_project, fossil_repo_obj): | |
| 1325 | + """SSE endpoint sets correct headers for event streaming.""" | |
| 1326 | + with patch("fossil.api_views.FossilReader") as mock_reader_cls: | |
| 1327 | + reader = mock_reader_cls.return_value | |
| 1328 | + reader.__enter__ = MagicMock(return_value=reader) | |
| 1329 | + reader.__exit__ = MagicMock(return_value=False) | |
| 1330 | + reader.get_checkin_count.return_value = 0 | |
| 1331 | + | |
| 1332 | + response = admin_client.get(_api_url(sample_project.slug, "api/events")) | |
| 1333 | + | |
| 1334 | + assert response.status_code == 200 | |
| 1335 | + assert response["Content-Type"] == "text/event-stream" | |
| 1336 | + assert response["Cache-Control"] == "no-cache" | |
| 1337 | + assert response["X-Accel-Buffering"] == "no" | |
| 1338 | + assert response.streaming is True | |
| 1339 | +def fossil_repo_obj(sample_project): | |
| 1340 | + """Return the auto-created FossilRepository for sample_project.""" | |
| 1341 | + return FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True) | |
| 1342 | + | |
| 1343 | + | |
| 1344 | +@pytest.fixture | |
| 1345 | +def writer_user(db, admin_user, sample_project): | |
| 1346 | + """Non-admin user with write access to the project.""" | |
| 1347 | + writer = User.objects.create_user(username="writer_cov", password="testpass123") | |
| 1348 | + team = Team.objects.create(name="Cov Writers", organization=sample_project.organization, created_by=admin_user) | |
| 1349 | + team.members.add(writer) | |
| 1350 | + ProjectTeam.objects.create(project=sample_project, team=team, role="write", created_by=admin_user) | |
| 1351 | + return writer | |
| 1352 | + | |
| 1353 | + | |
| 1354 | +@pytest.fixture | |
| 1355 | +def writer_client(writer_user): | |
| 1356 | + c = Client() | |
| 1357 | + c.login(username="writer_cov", password="testpass123") | |
| 1358 | + return c | |
| 1359 | + | |
| 1360 | + | |
| 1361 | +@pytest.fixture | |
| 1362 | +def reader_user(db, admin_user, sample_project): | |
| 1363 | + """User with read-only access to the project.""" | |
| 1364 | + reader = User.objects.create_user(username="reader_cov", password="testpass123") | |
| 1365 | + team = Team.objects.create(name="Cov Readers", organization=sample_project.organization, created_by=admin_user) | |
| 1366 | + team.members.add(reader) | |
| 1367 | + ProjectTeam.objects.create(project=sample_project, team=team, role="read", created_by=admin_user) | |
| 1368 | + return reader | |
| 1369 | + | |
| 1370 | + | |
| 1371 | +@pytest.fixture | |
| 1372 | +def reader_client(reader_user): | |
| 1373 | + c = Client() | |
| 1374 | + c.login(username="reader_cov", password="testpass123") | |
| 1375 | + return c | |
| 1376 | + | |
| 1377 | + | |
| 1378 | +@pytest.fixture | |
| 1379 | +def workspace(fossil_repo_obj, admin_user): | |
| 1380 | + """An active agent workspace with a checkout path.""" | |
| 1381 | + return AgentWorkspace.objects.create( | |
| 1382 | + repository=fossil_repo_obj, | |
| 1383 | + name="ws-test-1", | |
| 1384 | + branch="workspace/ws-test-1", | |
| 1385 | + agent_id="claude-test", | |
| 1386 | + status="active", | |
| 1387 | + checkout_path="/tmp/fake-checkout", | |
| 1388 | + created_by=admin_user, | |
| 1389 | + ) | |
| 1390 | + | |
| 1391 | + | |
| 1392 | +def _api_url(slug, path): | |
| 1393 | + return f"/projects/{slug}/fossil/{path}" | |
| 1394 | + | |
| 1395 | + | |
| 1396 | +# ---- Helper to build a mock subprocess.run result ---- | |
| 1397 | + | |
| 1398 | + | |
| 1399 | +def _make_proc(returncode=0, stdout="", stderr=""): | |
| 1400 | + result = MagicMock() | |
| 1401 | + result.returncode = returncode | |
| 1402 | + result.stdout = stdout | |
| 1403 | + result.stderr = stderr | |
| 1404 | + return result | |
| 1405 | + | |
| 1406 | + | |
| 1407 | +class _SSEBreakError(Exception): | |
| 1408 | + """Raised from mocked time.sleep to break the SSE infinite loop.""" | |
| 1409 | + | |
| 1410 | + | |
| 1411 | +def _drain_sse_one_iteration(response): | |
| 1412 | + """Read one iteration of the SSE generator, collecting yielded chunks. | |
| 1413 | + | |
| 1414 | + The SSE event_stream is an infinite while-True generator with time.sleep(5) | |
| 1415 | + at the end of each iteration. We mock time.sleep to raise _SSEBreakError after | |
| 1416 | + yielding events from the first poll cycle. | |
| 1417 | + """ | |
| 1418 | + events = [] | |
| 1419 | + with patch("fossil.api_views.time.sleep", side_effect=_SSEBreakError): | |
| 1420 | + try: | |
| 1421 | + for chunk in response.streaming_content: | |
| 1422 | + # StreamingHttpResponse wraps generator output in map() for | |
| 1423 | + # encoding; chunks are bytes. | |
| 1424 | + if isinstance(chunk, bytes): | |
| 1425 | + chunk = chunk.decode("utf-8", errors="replace") | |
| 1426 | + events.append(chunk) | |
| 1427 | + except (_SSEBreakError, RuntimeError): | |
| 1428 | + pass | |
| 1429 | + return events | |
| 1430 | + | |
| 1431 | + | |
| 1432 | +def _drain_sse_n_iterations(response, n=3): | |
| 1433 | + """Read n iterations of the SSE generator.""" | |
| 1434 | + call_count = 0 | |
| 1435 | + | |
| 1436 | + def _count_and_break(_seconds): | |
| 1437 | + nonlocal call_count | |
| 1438 | + call_count += 1 | |
| 1439 | + if call_count >= n: | |
| 1440 | + raise _SSEBreakError | |
| 1441 | + | |
| 1442 | + events = [] | |
| 1443 | + with patch("fossil.api_views.time.sleep", side_effect=_count_and_break): | |
| 1444 | + try: | |
| 1445 | + for chunk in response.streaming_content: | |
| 1446 | + if isinstance(chunk, bytes): | |
| 1447 | + chunk = chunk.decode("utf-8", errors="replace") | |
| 1448 | + events.append(chunk) | |
| 1449 | + except (_SSEBreakError, RuntimeError): | |
| 1450 | + pass | |
| 1451 | + return events | |
| 1452 | + | |
| 1453 | + | |
| 1454 | +# ================================================================ | |
| 1455 | +# Batch API | |
| 1456 | +# ================================================================ | |
| 1457 | + | |
| 1458 | + | |
| 1459 | +@pytest.mark.django_db | |
| 1460 | +class TestBatchAPI: | |
| 1461 | + """Tests for POST /projects/<slug>/fossil/api/batch (lines 636-706).""" | |
| 1462 | + | |
| 1463 | + def test_batch_success_with_multiple_sub_requests(self, admin_client, sample_project, fossil_repo_obj): | |
| 1464 | + """Batch call dispatches multiple GET sub-requests and returns combined results.""" | |
| 1465 | + with patch("fossil.api_views.FossilReader") as mock_reader_cls: | |
| 1466 | + reader = mock_reader_cls.return_value | |
| 1467 | + reader.__enter__ = MagicMock(return_value=reader) | |
| 1468 | + reader.__exit__ = MagicMock(return_value=False) | |
| 1469 | + reader.get_timeline.return_value = [] | |
| 1470 | + reader.get_checkin_count.return_value = 0 | |
| 1471 | + reader.get_tickets.return_value = [] | |
| 1472 | + | |
| 1473 | + response = admin_client.post( | |
| 1474 | + _api_url(sample_project.slug, "api/batch"), | |
| 1475 | + data=json.dumps( | |
| 1476 | + { | |
| 1477 | + "requests": [ | |
| 1478 | + {"method": "GET", "path": "/api/timeline"}, | |
| 1479 | + {"method": "GET", "path": "/api/tickets"}, | |
| 1480 | + ] | |
| 1481 | + } | |
| 1482 | + ), | |
| 1483 | + content_type="application/json", | |
| 1484 | + ) | |
| 1485 | + | |
| 1486 | + assert response.status_code == 200 | |
| 1487 | + data = response.json() | |
| 1488 | + assert len(data["responses"]) == 2 | |
| 1489 | + assert data["responses"][0]["status"] == 200 | |
| 1490 | + assert "checkins" in data["responses"][0]["body"] | |
| 1491 | + assert data["responses"][1]["status"] == 200 | |
| 1492 | + assert "tickets" in data["responses"][1]["body"] | |
| 1493 | + | |
| 1494 | + def test_batch_wrong_method(self, admin_client, sample_project, fossil_repo_obj): | |
| 1495 | + """GET to batch endpoint returns 405.""" | |
| 1496 | + response = admin_client.get(_api_url(sample_project.slug, "api/batch")) | |
| 1497 | + assert response.status_code == 405 |
| --- a/tests/test_api_coverage.py | 
| +++ b/tests/test_api_coverage.py | 
| @@ -0,0 +1,1497 @@ | 
| 1 | """Tests covering uncovered code paths in fossil/api_views.py. |
| 2 | |
| 3 | Targets: batch API, workspace CRUD (list/create/detail/commit/merge/abandon), |
| 4 | workspace ownership checks, SSE event stream internals, and _resolve_batch_route. |
| 5 | Existing test_agent_coordination.py covers ticket claim/release/submit and review |
| 6 | CRUD -- this file does NOT duplicate those. |
| 7 | """ |
| 8 | |
| 9 | import json |
| 10 | from unittest.mock import MagicMock, patch |
| 11 | |
| 12 | import pytest |
| 13 | from django.contrib.auth.models import User |
| 14 | from django.test import Client, RequestFactory |
| 15 | |
| 16 | from fossil.agent_claims import TicketClaim |
| 17 | from fossil.branch_protection import BranchProtection |
| 18 | from fossil.ci import StatusCheck |
| 19 | from fossil.code_reviews import CodeReview |
| 20 | from fossil.models import FossilRepository |
| 21 | from fossil.workspaces import AgentWorkspace |
| 22 | from organization.models import Team |
| 23 | from projects.models import ProjectTeam |
| 24 | |
| 25 | # ---- Fixtures ---- |
| 26 | |
| 27 | |
| 28 | @pytest.fixture |
| 29 | def fossil_repo_obj(sample_project): |
| 30 | """Return the auto-created FossilRepository for sample_project.""" |
| 31 | return FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True) |
| 32 | |
| 33 | |
| 34 | @pytest.fixture |
| 35 | def writer_user(db, admin_user, sample_project): |
| 36 | """Non-admin user with write access to the project.""" |
| 37 | writer = User.objects.create_user(username="writer_cov", password="testpass123") |
| 38 | team = Team.objects.create(name="Cov Writers", organization=sample_project.organization, created_by=admin_user) |
| 39 | team.members.add(writer) |
| 40 | ProjectTeam.objects.create(project=sample_project, team=team, role="write", created_by=admin_user) |
| 41 | return writer |
| 42 | |
| 43 | |
| 44 | @pytest.fixture |
| 45 | def writer_client(writer_user): |
| 46 | c = Client() |
| 47 | c.login(username="writer_cov", password="testpass123") |
| 48 | return c |
| 49 | |
| 50 | |
| 51 | @pytest.fixture |
| 52 | def reader_user(db, admin_user, sample_project): |
| 53 | """User with read-only access to the project.""" |
| 54 | reader = User.objects.create_user(username="reader_cov", password="testpass123") |
| 55 | team = Team.objects.create(name="Cov Readers", organization=sample_project.organization, created_by=admin_user) |
| 56 | team.members.add(reader) |
| 57 | ProjectTeam.objects.create(project=sample_project, team=team, role="read", created_by=admin_user) |
| 58 | return reader |
| 59 | |
| 60 | |
| 61 | @pytest.fixture |
| 62 | def reader_client(reader_user): |
| 63 | c = Client() |
| 64 | c.login(username="reader_cov", password="testpass123") |
| 65 | return c |
| 66 | |
| 67 | |
| 68 | @pytest.fixture |
| 69 | def workspace(fossil_repo_obj, admin_user): |
| 70 | """An active agent workspace with a checkout path.""" |
| 71 | return AgentWorkspace.objects.create( |
| 72 | repository=fossil_repo_obj, |
| 73 | name="ws-test-1", |
| 74 | branch="workspace/ws-test-1", |
| 75 | agent_id="claude-test", |
| 76 | status="active", |
| 77 | checkout_path="/tmp/fake-checkout", |
| 78 | created_by=admin_user, |
| 79 | ) |
| 80 | |
| 81 | |
| 82 | def _api_url(slug, path): |
| 83 | return f"/projects/{slug}/fossil/{path}" |
| 84 | |
| 85 | |
| 86 | # ---- Helper to build a mock subprocess.run result ---- |
| 87 | |
| 88 | |
| 89 | def _make_proc(returncode=0, stdout="", stderr=""): |
| 90 | result = MagicMock() |
| 91 | result.returncode = returncode |
| 92 | result.stdout = stdout |
| 93 | result.stderr = stderr |
| 94 | return result |
| 95 | |
| 96 | |
| 97 | class _SSEBreakError(Exception): |
| 98 | """Raised from mocked time.sleep to break the SSE infinite loop.""" |
| 99 | |
| 100 | |
| 101 | def _drain_sse_one_iteration(response): |
| 102 | """Read one iteration of the SSE generator, collecting yielded chunks. |
| 103 | |
| 104 | The SSE event_stream is an infinite while-True generator with time.sleep(5) |
| 105 | at the end of each iteration. We mock time.sleep to raise _SSEBreakError after |
| 106 | yielding events from the first poll cycle. |
| 107 | """ |
| 108 | events = [] |
| 109 | with patch("fossil.api_views.time.sleep", side_effect=_SSEBreakError): |
| 110 | try: |
| 111 | for chunk in response.streaming_content: |
| 112 | # StreamingHttpResponse wraps generator output in map() for |
| 113 | # encoding; chunks are bytes. |
| 114 | if isinstance(chunk, bytes): |
| 115 | chunk = chunk.decode("utf-8", errors="replace") |
| 116 | events.append(chunk) |
| 117 | except (_SSEBreakError, RuntimeError): |
| 118 | pass |
| 119 | return events |
| 120 | |
| 121 | |
| 122 | def _drain_sse_n_iterations(response, n=3): |
| 123 | """Read n iterations of the SSE generator.""" |
| 124 | call_count = 0 |
| 125 | |
| 126 | def _count_and_break(_seconds): |
| 127 | nonlocal call_count |
| 128 | call_count += 1 |
| 129 | if call_count >= n: |
| 130 | raise _SSEBreakError |
| 131 | |
| 132 | events = [] |
| 133 | with patch("fossil.api_views.time.sleep", side_effect=_count_and_break): |
| 134 | try: |
| 135 | for chunk in response.streaming_content: |
| 136 | if isinstance(chunk, bytes): |
| 137 | chunk = chunk.decode("utf-8", errors="replace") |
| 138 | events.append(chunk) |
| 139 | except (_SSEBreakError, RuntimeError): |
| 140 | pass |
| 141 | return events |
| 142 | |
| 143 | |
| 144 | # ================================================================ |
| 145 | # Batch API |
| 146 | # ================================================================ |
| 147 | |
| 148 | |
| 149 | @pytest.mark.django_db |
| 150 | class TestBatchAPI: |
| 151 | """Tests for POST /projects/<slug>/fossil/api/batch (lines 636-706).""" |
| 152 | |
| 153 | def test_batch_success_with_multiple_sub_requests(self, admin_client, sample_project, fossil_repo_obj): |
| 154 | """Batch call dispatches multiple GET sub-requests and returns combined results.""" |
| 155 | with patch("fossil.api_views.FossilReader") as mock_reader_cls: |
| 156 | reader = mock_reader_cls.return_value |
| 157 | reader.__enter__ = MagicMock(return_value=reader) |
| 158 | reader.__exit__ = MagicMock(return_value=False) |
| 159 | reader.get_timeline.return_value = [] |
| 160 | reader.get_checkin_count.return_value = 0 |
| 161 | reader.get_tickets.return_value = [] |
| 162 | |
| 163 | response = admin_client.post( |
| 164 | _api_url(sample_project.slug, "api/batch"), |
| 165 | data=json.dumps( |
| 166 | { |
| 167 | "requests": [ |
| 168 | {"method": "GET", "path": "/api/timeline"}, |
| 169 | {"method": "GET", "path": "/api/tickets"}, |
| 170 | ] |
| 171 | } |
| 172 | ), |
| 173 | content_type="application/json", |
| 174 | ) |
| 175 | |
| 176 | assert response.status_code == 200 |
| 177 | data = response.json() |
| 178 | assert len(data["responses"]) == 2 |
| 179 | assert data["responses"][0]["status"] == 200 |
| 180 | assert "checkins" in data["responses"][0]["body"] |
| 181 | assert data["responses"][1]["status"] == 200 |
| 182 | assert "tickets" in data["responses"][1]["body"] |
| 183 | |
| 184 | def test_batch_wrong_method(self, admin_client, sample_project, fossil_repo_obj): |
| 185 | """GET to batch endpoint returns 405.""" |
| 186 | response = admin_client.get(_api_url(sample_project.slug, "api/batch")) |
| 187 | assert response.status_code == 405 |
| 188 | |
| 189 | def test_batch_invalid_json(self, admin_client, sample_project, fossil_repo_obj): |
| 190 | """Non-JSON body returns 400.""" |
| 191 | response = admin_client.post( |
| 192 | _api_url(sample_project.slug, "api/batch"), |
| 193 | data="not json", |
| 194 | content_type="application/json", |
| 195 | ) |
| 196 | assert response.status_code == 400 |
| 197 | assert "Invalid JSON" in response.json()["error"] |
| 198 | |
| 199 | def test_batch_requests_not_list(self, admin_client, sample_project, fossil_repo_obj): |
| 200 | """'requests' must be a list.""" |
| 201 | response = admin_client.post( |
| 202 | _api_url(sample_project.slug, "api/batch"), |
| 203 | data=json.dumps({"requests": "not-a-list"}), |
| 204 | content_type="application/json", |
| 205 | ) |
| 206 | assert response.status_code == 400 |
| 207 | assert "'requests' must be a list" in response.json()["error"] |
| 208 | |
| 209 | def test_batch_exceeds_max_requests(self, admin_client, sample_project, fossil_repo_obj): |
| 210 | """More than 25 sub-requests returns 400.""" |
| 211 | response = admin_client.post( |
| 212 | _api_url(sample_project.slug, "api/batch"), |
| 213 | data=json.dumps({"requests": [{"method": "GET", "path": "/api/project"}] * 26}), |
| 214 | content_type="application/json", |
| 215 | ) |
| 216 | assert response.status_code == 400 |
| 217 | assert "Maximum 25" in response.json()["error"] |
| 218 | |
| 219 | def test_batch_empty_requests(self, admin_client, sample_project, fossil_repo_obj): |
| 220 | """Empty requests list returns empty responses.""" |
| 221 | response = admin_client.post( |
| 222 | _api_url(sample_project.slug, "api/batch"), |
| 223 | data=json.dumps({"requests": []}), |
| 224 | content_type="application/json", |
| 225 | ) |
| 226 | assert response.status_code == 200 |
| 227 | assert response.json()["responses"] == [] |
| 228 | |
| 229 | def test_batch_non_get_rejected(self, admin_client, sample_project, fossil_repo_obj): |
| 230 | """Non-GET sub-requests are rejected with 405.""" |
| 231 | response = admin_client.post( |
| 232 | _api_url(sample_project.slug, "api/batch"), |
| 233 | data=json.dumps({"requests": [{"method": "POST", "path": "/api/project"}]}), |
| 234 | content_type="application/json", |
| 235 | ) |
| 236 | assert response.status_code == 200 |
| 237 | sub = response.json()["responses"][0] |
| 238 | assert sub["status"] == 405 |
| 239 | assert "Only GET" in sub["body"]["error"] |
| 240 | |
| 241 | def test_batch_unknown_path(self, admin_client, sample_project, fossil_repo_obj): |
| 242 | """Unknown API path in batch returns 404 sub-response.""" |
| 243 | response = admin_client.post( |
| 244 | _api_url(sample_project.slug, "api/batch"), |
| 245 | data=json.dumps({"requests": [{"method": "GET", "path": "/api/nonexistent"}]}), |
| 246 | content_type="application/json", |
| 247 | ) |
| 248 | assert response.status_code == 200 |
| 249 | sub = response.json()["responses"][0] |
| 250 | assert sub["status"] == 404 |
| 251 | assert "Unknown API path" in sub["body"]["error"] |
| 252 | |
| 253 | def test_batch_missing_path(self, admin_client, sample_project, fossil_repo_obj): |
| 254 | """Sub-request without 'path' returns 400 sub-response.""" |
| 255 | response = admin_client.post( |
| 256 | _api_url(sample_project.slug, "api/batch"), |
| 257 | data=json.dumps({"requests": [{"method": "GET"}]}), |
| 258 | content_type="application/json", |
| 259 | ) |
| 260 | assert response.status_code == 200 |
| 261 | sub = response.json()["responses"][0] |
| 262 | assert sub["status"] == 400 |
| 263 | assert "Missing 'path'" in sub["body"]["error"] |
| 264 | |
| 265 | def test_batch_non_dict_sub_request(self, admin_client, sample_project, fossil_repo_obj): |
| 266 | """Non-dict items in requests list return 400 sub-response.""" |
| 267 | response = admin_client.post( |
| 268 | _api_url(sample_project.slug, "api/batch"), |
| 269 | data=json.dumps({"requests": ["not-a-dict"]}), |
| 270 | content_type="application/json", |
| 271 | ) |
| 272 | assert response.status_code == 200 |
| 273 | sub = response.json()["responses"][0] |
| 274 | assert sub["status"] == 400 |
| 275 | assert "must be an object" in sub["body"]["error"] |
| 276 | |
| 277 | def test_batch_dynamic_route_ticket_detail(self, admin_client, sample_project, fossil_repo_obj): |
| 278 | """Batch can route to dynamic ticket detail path.""" |
| 279 | with patch("fossil.api_views.FossilReader") as mock_reader_cls: |
| 280 | reader = mock_reader_cls.return_value |
| 281 | reader.__enter__ = MagicMock(return_value=reader) |
| 282 | reader.__exit__ = MagicMock(return_value=False) |
| 283 | ticket = MagicMock() |
| 284 | ticket.uuid = "abc123" |
| 285 | ticket.title = "Test" |
| 286 | ticket.status = "Open" |
| 287 | ticket.type = "Bug" |
| 288 | ticket.subsystem = "" |
| 289 | ticket.priority = "" |
| 290 | ticket.severity = "" |
| 291 | ticket.resolution = "" |
| 292 | ticket.body = "" |
| 293 | ticket.created = None |
| 294 | reader.get_ticket_detail.return_value = ticket |
| 295 | reader.get_ticket_comments.return_value = [] |
| 296 | |
| 297 | response = admin_client.post( |
| 298 | _api_url(sample_project.slug, "api/batch"), |
| 299 | data=json.dumps({"requests": [{"method": "GET", "path": "/api/tickets/abc123"}]}), |
| 300 | content_type="application/json", |
| 301 | ) |
| 302 | |
| 303 | assert response.status_code == 200 |
| 304 | sub = response.json()["responses"][0] |
| 305 | assert sub["status"] == 200 |
| 306 | assert sub["body"]["uuid"] == "abc123" |
| 307 | |
| 308 | def test_batch_dynamic_route_wiki_page(self, admin_client, sample_project, fossil_repo_obj): |
| 309 | """Batch can route to dynamic wiki page path.""" |
| 310 | with patch("fossil.api_views.FossilReader") as mock_reader_cls: |
| 311 | reader = mock_reader_cls.return_value |
| 312 | reader.__enter__ = MagicMock(return_value=reader) |
| 313 | reader.__exit__ = MagicMock(return_value=False) |
| 314 | page = MagicMock() |
| 315 | page.name = "Home" |
| 316 | page.content = "# Home" |
| 317 | page.last_modified = None |
| 318 | page.user = "admin" |
| 319 | reader.get_wiki_page.return_value = page |
| 320 | |
| 321 | with patch("fossil.views._render_fossil_content", return_value="<h1>Home</h1>"): |
| 322 | response = admin_client.post( |
| 323 | _api_url(sample_project.slug, "api/batch"), |
| 324 | data=json.dumps({"requests": [{"method": "GET", "path": "/api/wiki/Home"}]}), |
| 325 | content_type="application/json", |
| 326 | ) |
| 327 | |
| 328 | assert response.status_code == 200 |
| 329 | sub = response.json()["responses"][0] |
| 330 | assert sub["status"] == 200 |
| 331 | assert sub["body"]["name"] == "Home" |
| 332 | |
| 333 | def test_batch_denied_for_anon(self, client, sample_project, fossil_repo_obj): |
| 334 | """Anonymous users cannot use the batch API.""" |
| 335 | response = client.post( |
| 336 | _api_url(sample_project.slug, "api/batch"), |
| 337 | data=json.dumps({"requests": []}), |
| 338 | content_type="application/json", |
| 339 | ) |
| 340 | assert response.status_code == 401 |
| 341 | |
| 342 | def test_batch_sub_request_exception_returns_500(self, admin_client, sample_project, fossil_repo_obj): |
| 343 | """When a sub-request raises an exception, we get a 500 sub-response.""" |
| 344 | with patch("fossil.api_views.FossilReader") as mock_reader_cls: |
| 345 | mock_reader_cls.side_effect = RuntimeError("boom") |
| 346 | |
| 347 | response = admin_client.post( |
| 348 | _api_url(sample_project.slug, "api/batch"), |
| 349 | data=json.dumps({"requests": [{"method": "GET", "path": "/api/timeline"}]}), |
| 350 | content_type="application/json", |
| 351 | ) |
| 352 | |
| 353 | assert response.status_code == 200 |
| 354 | sub = response.json()["responses"][0] |
| 355 | assert sub["status"] == 500 |
| 356 | assert "Internal error" in sub["body"]["error"] |
| 357 | |
| 358 | |
| 359 | # ================================================================ |
| 360 | # Workspace List |
| 361 | # ================================================================ |
| 362 | |
| 363 | |
@pytest.mark.django_db
class TestWorkspaceList:
    """Tests for GET /projects/<slug>/fossil/api/workspaces (lines 749-786)."""

    def test_list_workspaces_empty(self, admin_client, sample_project, fossil_repo_obj):
        """With no workspaces on the repo, the endpoint returns an empty list."""
        res = admin_client.get(_api_url(sample_project.slug, "api/workspaces"))
        assert res.status_code == 200
        assert res.json()["workspaces"] == []

    def test_list_workspaces_returns_all(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Every workspace belonging to the repo appears in the listing."""
        for idx in (1, 2):
            AgentWorkspace.objects.create(
                repository=fossil_repo_obj,
                name=f"ws-{idx}",
                branch=f"workspace/ws-{idx}",
                agent_id=f"a{idx}",
                created_by=admin_user,
            )

        res = admin_client.get(_api_url(sample_project.slug, "api/workspaces"))
        assert res.status_code == 200
        listed = res.json()["workspaces"]
        assert len(listed) == 2
        assert {entry["name"] for entry in listed} == {"ws-1", "ws-2"}

    def test_list_workspaces_filter_by_status(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """The ?status= query parameter narrows the listing to matching rows."""
        AgentWorkspace.objects.create(repository=fossil_repo_obj, name="ws-active", branch="b/a", status="active", created_by=admin_user)
        AgentWorkspace.objects.create(repository=fossil_repo_obj, name="ws-merged", branch="b/m", status="merged", created_by=admin_user)

        res = admin_client.get(_api_url(sample_project.slug, "api/workspaces") + "?status=active")
        assert res.status_code == 200
        matches = res.json()["workspaces"]
        assert [entry["name"] for entry in matches] == ["ws-active"]

    def test_list_workspaces_wrong_method(self, admin_client, sample_project, fossil_repo_obj):
        """Only GET is accepted; POST yields 405."""
        res = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces"),
            content_type="application/json",
        )
        assert res.status_code == 405

    def test_list_workspaces_denied_for_anon(self, client, sample_project, fossil_repo_obj):
        """Unauthenticated requests are rejected with 401."""
        res = client.get(_api_url(sample_project.slug, "api/workspaces"))
        assert res.status_code == 401

    def test_list_workspaces_response_shape(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Each listed workspace carries the full set of expected fields."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-shape",
            branch="workspace/ws-shape",
            agent_id="claude-shape",
            description="test workspace",
            files_changed=3,
            commits_made=2,
            created_by=admin_user,
        )
        res = admin_client.get(_api_url(sample_project.slug, "api/workspaces"))
        entry = res.json()["workspaces"][0]
        expected = {
            "name": "ws-shape",
            "branch": "workspace/ws-shape",
            "status": "active",
            "agent_id": "claude-shape",
            "description": "test workspace",
            "files_changed": 3,
            "commits_made": 2,
        }
        for key, value in expected.items():
            assert entry[key] == value
        assert entry["created_at"] is not None
| 437 | |
| 438 | |
| 439 | # ================================================================ |
| 440 | # Workspace Detail |
| 441 | # ================================================================ |
| 442 | |
| 443 | |
@pytest.mark.django_db
class TestWorkspaceDetail:
    """Tests for GET /projects/<slug>/fossil/api/workspaces/<name> (lines 904-934)."""

    def test_get_workspace_detail(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """The detail view exposes the workspace's full record."""
        res = admin_client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1"))
        assert res.status_code == 200
        body = res.json()
        assert body["name"] == "ws-test-1"
        assert body["branch"] == "workspace/ws-test-1"
        assert body["agent_id"] == "claude-test"
        assert body["status"] == "active"
        assert body["updated_at"] is not None

    def test_get_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Looking up an unknown workspace yields 404 with an explanatory error."""
        res = admin_client.get(_api_url(sample_project.slug, "api/workspaces/nonexistent"))
        assert res.status_code == 404
        assert "not found" in res.json()["error"].lower()

    def test_get_workspace_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Only GET is allowed on the detail endpoint; POST yields 405."""
        res = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1"),
            content_type="application/json",
        )
        assert res.status_code == 405

    def test_get_workspace_denied_for_anon(self, client, sample_project, fossil_repo_obj, workspace):
        """Unauthenticated requests to the detail endpoint get 401."""
        res = client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1"))
        assert res.status_code == 401
| 477 | |
| 478 | |
| 479 | # ================================================================ |
| 480 | # Workspace Create |
| 481 | # ================================================================ |
| 482 | |
| 483 | |
@pytest.mark.django_db
class TestWorkspaceCreate:
    """Tests for POST /projects/<slug>/fossil/api/workspaces/create (lines 789-901).

    The create endpoint shells out to fossil (open, branch new, update), so
    subprocess.run and FossilCLI are mocked throughout; the exact order of
    the entries in each ``side_effect`` list mirrors the order of the
    fossil invocations made by the view.
    """

    def test_create_workspace_success(self, admin_client, sample_project, fossil_repo_obj):
        """Creating a workspace opens a Fossil checkout and creates DB record."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            # The view reads .binary and ._env off the CLI wrapper; stub both.
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            # All three subprocess calls succeed: open, branch new, update
            # (return_value — unlike side_effect — answers every call the same way).
            mock_run.return_value = _make_proc(stdout="checkout opened")

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "agent-fix-99", "description": "Fix bug #99", "agent_id": "claude-99"}),
                content_type="application/json",
            )

            assert response.status_code == 201
            data = response.json()
            assert data["name"] == "agent-fix-99"
            assert data["branch"] == "workspace/agent-fix-99"
            assert data["status"] == "active"
            assert data["agent_id"] == "claude-99"

            # Verify DB state
            ws = AgentWorkspace.objects.get(repository=fossil_repo_obj, name="agent-fix-99")
            assert ws.branch == "workspace/agent-fix-99"
            assert ws.description == "Fix bug #99"

    def test_create_workspace_missing_name(self, admin_client, sample_project, fossil_repo_obj):
        """Workspace name is required."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"description": "no name"}),
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "name" in response.json()["error"].lower()

    def test_create_workspace_invalid_name(self, admin_client, sample_project, fossil_repo_obj):
        """Invalid workspace name (special chars) returns 400."""
        # Path-traversal-shaped name must be rejected before any fossil call.
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "../../etc/passwd"}),
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "Invalid workspace name" in response.json()["error"]

    def test_create_workspace_name_starts_with_dot(self, admin_client, sample_project, fossil_repo_obj):
        """Workspace name starting with a dot is rejected by the regex."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": ".hidden"}),
            content_type="application/json",
        )
        assert response.status_code == 400

    def test_create_workspace_duplicate_name(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Duplicate workspace name returns 409."""
        AgentWorkspace.objects.create(repository=fossil_repo_obj, name="dup-ws", branch="workspace/dup-ws", created_by=admin_user)

        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "dup-ws"}),
            content_type="application/json",
        )
        assert response.status_code == 409
        assert "already exists" in response.json()["error"]

    def test_create_workspace_invalid_json(self, admin_client, sample_project, fossil_repo_obj):
        """Invalid JSON body returns 400."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data="not json",
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "Invalid JSON" in response.json()["error"]

    def test_create_workspace_fossil_open_fails(self, admin_client, sample_project, fossil_repo_obj):
        """When fossil open fails, return 500 and clean up."""
        # shutil.rmtree is patched so the cleanup path can't touch the real FS.
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.return_value = _make_proc(returncode=1, stderr="open failed")

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "fail-open"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Failed to open" in response.json()["error"]

    def test_create_workspace_branch_creation_fails(self, admin_client, sample_project, fossil_repo_obj):
        """When branch creation fails, return 500 and clean up checkout."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            # First call (open) succeeds, second (branch new) fails
            mock_run.side_effect = [
                _make_proc(returncode=0),  # open
                _make_proc(returncode=1, stderr="branch error"),  # branch new
                _make_proc(returncode=0),  # close --force (cleanup)
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "fail-branch"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Failed to create branch" in response.json()["error"]

    def test_create_workspace_update_fails(self, admin_client, sample_project, fossil_repo_obj):
        """When switching to the new branch fails, return 500 and clean up."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"):
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # open
                _make_proc(returncode=0),  # branch new
                _make_proc(returncode=1, stderr="update failed"),  # update branch
                _make_proc(returncode=0),  # close --force (cleanup)
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/create"),
                data=json.dumps({"name": "fail-update"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Failed to switch to branch" in response.json()["error"]

    def test_create_workspace_wrong_method(self, admin_client, sample_project, fossil_repo_obj):
        """GET to create endpoint returns 405."""
        response = admin_client.get(_api_url(sample_project.slug, "api/workspaces/create"))
        assert response.status_code == 405

    def test_create_workspace_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj):
        """Read-only users cannot create workspaces."""
        response = reader_client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "denied-ws"}),
            content_type="application/json",
        )
        assert response.status_code == 403

    def test_create_workspace_denied_for_anon(self, client, sample_project, fossil_repo_obj):
        """Anonymous users cannot create workspaces."""
        response = client.post(
            _api_url(sample_project.slug, "api/workspaces/create"),
            data=json.dumps({"name": "anon-ws"}),
            content_type="application/json",
        )
        assert response.status_code == 401
| 645 | |
| 646 | |
| 647 | # ================================================================ |
| 648 | # Workspace Commit |
| 649 | # ================================================================ |
| 650 | |
| 651 | |
@pytest.mark.django_db
class TestWorkspaceCommit:
    """Tests for POST /projects/<slug>/fossil/api/workspaces/<name>/commit (lines 937-1034).

    The commit endpoint runs fossil addremove (or per-file adds) followed by
    fossil commit; subprocess.run is mocked and each ``side_effect`` list
    mirrors that call order exactly.
    """

    def test_commit_success(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Successful commit increments commits_made and returns output."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            # addremove then commit
            mock_run.side_effect = [
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=0, stdout="New_Version: abc123"),  # commit
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "Fix bug", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 200
            data = response.json()
            assert data["message"] == "Fix bug"
            assert data["commits_made"] == 1

            # The counter must also be persisted, not just echoed in the response.
            workspace.refresh_from_db()
            assert workspace.commits_made == 1

    def test_commit_with_specific_files(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Committing specific files adds them individually."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            # One 'fossil add' per listed file, then the commit.
            mock_run.side_effect = [
                _make_proc(returncode=0),  # add file1
                _make_proc(returncode=0),  # add file2
                _make_proc(returncode=0, stdout="New_Version: def456"),  # commit
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "Add files", "files": ["a.py", "b.py"], "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 200

    def test_commit_nothing_to_commit(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """When fossil says nothing changed, return 409."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=1, stderr="nothing has changed"),  # commit
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "no change", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 409
            assert "Nothing to commit" in response.json()["error"]

    def test_commit_fossil_error(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """When fossil commit fails (not nothing-changed), return 500."""
        with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls:
            mock_cli_cls.return_value.binary = "/usr/local/bin/fossil"
            mock_cli_cls.return_value._env = {}
            mock_run.side_effect = [
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=1, stderr="lock failed"),  # commit
            ]

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "fail commit", "agent_id": "claude-test"}),
                content_type="application/json",
            )

            assert response.status_code == 500
            assert "Commit failed" in response.json()["error"]

    def test_commit_missing_message(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Commit without message returns 400."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
            data=json.dumps({"agent_id": "claude-test"}),
            content_type="application/json",
        )
        assert response.status_code == 400
        assert "message" in response.json()["error"].lower()

    def test_commit_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Commit to non-existent workspace returns 404."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/nonexistent/commit"),
            data=json.dumps({"message": "fix"}),
            content_type="application/json",
        )
        assert response.status_code == 404

    def test_commit_workspace_not_active(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Commit to a merged workspace returns 409."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-merged",
            branch="workspace/ws-merged",
            status="merged",
            created_by=admin_user,
        )

        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-merged/commit"),
            data=json.dumps({"message": "too late"}),
            content_type="application/json",
        )
        assert response.status_code == 409
        assert "merged" in response.json()["error"]

    def test_commit_invalid_json(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Invalid JSON body returns 400."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
            data="not json",
            content_type="application/json",
        )
        assert response.status_code == 400

    def test_commit_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """GET to commit endpoint returns 405."""
        response = admin_client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"))
        assert response.status_code == 405

    def test_commit_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj, workspace):
        """Read-only users cannot commit."""
        response = reader_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
            data=json.dumps({"message": "denied"}),
            content_type="application/json",
        )
        assert response.status_code == 403
| 797 | |
| 798 | |
| 799 | # ================================================================ |
| 800 | # Workspace Merge |
| 801 | # ================================================================ |
| 802 | |
| 803 | |
| 804 | @pytest.mark.django_db |
| 805 | class TestWorkspaceMerge: |
| 806 | """Tests for POST /projects/<slug>/fossil/api/workspaces/<name>/merge (lines 1037-1185). |
| 807 | |
| 808 | This endpoint is complex: it enforces branch protection, review gates, |
| 809 | and runs three subprocess calls (update, merge, commit). |
| 810 | """ |
| 811 | |
| 812 | def test_merge_success_admin_bypass(self, admin_client, sample_project, fossil_repo_obj, workspace, admin_user): |
| 813 | """Admin can merge without an approved review (admin bypass of review gate).""" |
| 814 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"): |
| 815 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 816 | mock_cli_cls.return_value._env = {} |
| 817 | mock_run.side_effect = [ |
| 818 | _make_proc(returncode=0), # update trunk |
| 819 | _make_proc(returncode=0, stdout="merged ok"), # merge |
| 820 | _make_proc(returncode=0, stdout="committed"), # commit |
| 821 | _make_proc(returncode=0), # close --force |
| 822 | ] |
| 823 | |
| 824 | response = admin_client.post( |
| 825 | _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"), |
| 826 | data=json.dumps({"target_branch": "trunk", "agent_id": "claude-test"}), |
| 827 | content_type="application/json", |
| 828 | ) |
| 829 | |
| 830 | assert response.status_code == 200 |
| 831 | data = response.json() |
| 832 | assert data["status"] == "merged" |
| 833 | assert data["target_branch"] == "trunk" |
| 834 | |
| 835 | workspace.refresh_from_db() |
| 836 | assert workspace.status == "merged" |
| 837 | assert workspace.checkout_path == "" |
| 838 | |
| 839 | def test_merge_with_approved_review(self, writer_client, sample_project, fossil_repo_obj, admin_user): |
| 840 | """Non-admin writer can merge if an approved review exists for the workspace.""" |
| 841 | ws = AgentWorkspace.objects.create( |
| 842 | repository=fossil_repo_obj, |
| 843 | name="ws-reviewed", |
| 844 | branch="workspace/ws-reviewed", |
| 845 | status="active", |
| 846 | checkout_path="/tmp/fake", |
| 847 | created_by=admin_user, |
| 848 | ) |
| 849 | CodeReview.objects.create( |
| 850 | repository=fossil_repo_obj, |
| 851 | workspace=ws, |
| 852 | title="Fix", |
| 853 | diff="d", |
| 854 | status="approved", |
| 855 | created_by=admin_user, |
| 856 | ) |
| 857 | |
| 858 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"): |
| 859 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 860 | mock_cli_cls.return_value._env = {} |
| 861 | mock_run.side_effect = [ |
| 862 | _make_proc(returncode=0), # update |
| 863 | _make_proc(returncode=0), # merge |
| 864 | _make_proc(returncode=0), # commit |
| 865 | _make_proc(returncode=0), # close |
| 866 | ] |
| 867 | |
| 868 | response = writer_client.post( |
| 869 | _api_url(sample_project.slug, "api/workspaces/ws-reviewed/merge"), |
| 870 | data=json.dumps({"target_branch": "trunk"}), |
| 871 | content_type="application/json", |
| 872 | ) |
| 873 | |
| 874 | assert response.status_code == 200 |
| 875 | assert response.json()["status"] == "merged" |
| 876 | |
| 877 | def test_merge_marks_linked_review_as_merged(self, admin_client, sample_project, fossil_repo_obj, workspace, admin_user): |
| 878 | """Merging a workspace with an approved review updates the review status to merged.""" |
| 879 | review = CodeReview.objects.create( |
| 880 | repository=fossil_repo_obj, |
| 881 | workspace=workspace, |
| 882 | title="ws review", |
| 883 | diff="d", |
| 884 | status="approved", |
| 885 | created_by=admin_user, |
| 886 | ) |
| 887 | |
| 888 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"): |
| 889 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 890 | mock_cli_cls.return_value._env = {} |
| 891 | mock_run.return_value = _make_proc(returncode=0) |
| 892 | |
| 893 | admin_client.post( |
| 894 | _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"), |
| 895 | data=json.dumps({"agent_id": "claude-test"}), |
| 896 | content_type="application/json", |
| 897 | ) |
| 898 | |
| 899 | review.refresh_from_db() |
| 900 | assert review.status == "merged" |
| 901 | |
| 902 | def test_merge_blocked_no_review_non_admin(self, writer_client, sample_project, fossil_repo_obj, admin_user): |
| 903 | """Non-admin cannot merge if no approved review exists for the workspace.""" |
| 904 | AgentWorkspace.objects.create( |
| 905 | repository=fossil_repo_obj, |
| 906 | name="ws-no-review", |
| 907 | branch="workspace/ws-no-review", |
| 908 | status="active", |
| 909 | checkout_path="/tmp/fake", |
| 910 | created_by=admin_user, |
| 911 | ) |
| 912 | |
| 913 | response = writer_client.post( |
| 914 | _api_url(sample_project.slug, "api/workspaces/ws-no-review/merge"), |
| 915 | data=json.dumps({}), |
| 916 | content_type="application/json", |
| 917 | ) |
| 918 | assert response.status_code == 403 |
| 919 | assert "No approved code review" in response.json()["error"] |
| 920 | |
| 921 | def test_merge_blocked_review_not_approved(self, writer_client, sample_project, fossil_repo_obj, admin_user): |
| 922 | """Non-admin cannot merge if the linked review is still pending.""" |
| 923 | ws = AgentWorkspace.objects.create( |
| 924 | repository=fossil_repo_obj, |
| 925 | name="ws-pending-review", |
| 926 | branch="workspace/ws-pending-review", |
| 927 | status="active", |
| 928 | checkout_path="/tmp/fake", |
| 929 | created_by=admin_user, |
| 930 | ) |
| 931 | CodeReview.objects.create( |
| 932 | repository=fossil_repo_obj, |
| 933 | workspace=ws, |
| 934 | title="Pending", |
| 935 | diff="d", |
| 936 | status="pending", |
| 937 | created_by=admin_user, |
| 938 | ) |
| 939 | |
| 940 | response = writer_client.post( |
| 941 | _api_url(sample_project.slug, "api/workspaces/ws-pending-review/merge"), |
| 942 | data=json.dumps({}), |
| 943 | content_type="application/json", |
| 944 | ) |
| 945 | assert response.status_code == 403 |
| 946 | assert "must be approved" in response.json()["error"] |
| 947 | |
| 948 | def test_merge_blocked_branch_protection_restrict_push(self, writer_client, sample_project, fossil_repo_obj, admin_user): |
| 949 | """Branch protection with restrict_push blocks non-admin merges.""" |
| 950 | AgentWorkspace.objects.create( |
| 951 | repository=fossil_repo_obj, |
| 952 | name="ws-protected", |
| 953 | branch="workspace/ws-protected", |
| 954 | status="active", |
| 955 | checkout_path="/tmp/fake", |
| 956 | created_by=admin_user, |
| 957 | ) |
| 958 | BranchProtection.objects.create( |
| 959 | repository=fossil_repo_obj, |
| 960 | branch_pattern="trunk", |
| 961 | restrict_push=True, |
| 962 | created_by=admin_user, |
| 963 | ) |
| 964 | |
| 965 | response = writer_client.post( |
| 966 | _api_url(sample_project.slug, "api/workspaces/ws-protected/merge"), |
| 967 | data=json.dumps({"target_branch": "trunk"}), |
| 968 | content_type="application/json", |
| 969 | ) |
| 970 | assert response.status_code == 403 |
| 971 | assert "protected" in response.json()["error"].lower() |
| 972 | |
| 973 | def test_merge_blocked_required_status_check_not_passed(self, writer_client, sample_project, fossil_repo_obj, admin_user): |
| 974 | """Branch protection with required status checks blocks merge when check hasn't passed.""" |
| 975 | AgentWorkspace.objects.create( |
| 976 | repository=fossil_repo_obj, |
| 977 | name="ws-ci-fail", |
| 978 | branch="workspace/ws-ci-fail", |
| 979 | status="active", |
| 980 | checkout_path="/tmp/fake", |
| 981 | created_by=admin_user, |
| 982 | ) |
| 983 | BranchProtection.objects.create( |
| 984 | repository=fossil_repo_obj, |
| 985 | branch_pattern="trunk", |
| 986 | restrict_push=False, |
| 987 | require_status_checks=True, |
| 988 | required_contexts="ci/tests", |
| 989 | created_by=admin_user, |
| 990 | ) |
| 991 | # Status check is pending (not success) |
| 992 | StatusCheck.objects.create( |
| 993 | repository=fossil_repo_obj, |
| 994 | checkin_uuid="some-uuid", |
| 995 | context="ci/tests", |
| 996 | state="pending", |
| 997 | created_by=admin_user, |
| 998 | ) |
| 999 | |
| 1000 | response = writer_client.post( |
| 1001 | _api_url(sample_project.slug, "api/workspaces/ws-ci-fail/merge"), |
| 1002 | data=json.dumps({"target_branch": "trunk"}), |
| 1003 | content_type="application/json", |
| 1004 | ) |
| 1005 | assert response.status_code == 403 |
| 1006 | assert "status check" in response.json()["error"].lower() |
| 1007 | |
| 1008 | def test_merge_allowed_with_passing_status_check(self, writer_client, sample_project, fossil_repo_obj, admin_user): |
| 1009 | """Branch protection with passing required status check allows merge.""" |
| 1010 | ws = AgentWorkspace.objects.create( |
| 1011 | repository=fossil_repo_obj, |
| 1012 | name="ws-ci-pass", |
| 1013 | branch="workspace/ws-ci-pass", |
| 1014 | status="active", |
| 1015 | checkout_path="/tmp/fake", |
| 1016 | created_by=admin_user, |
| 1017 | ) |
| 1018 | BranchProtection.objects.create( |
| 1019 | repository=fossil_repo_obj, |
| 1020 | branch_pattern="trunk", |
| 1021 | restrict_push=False, |
| 1022 | require_status_checks=True, |
| 1023 | required_contexts="ci/tests", |
| 1024 | created_by=admin_user, |
| 1025 | ) |
| 1026 | StatusCheck.objects.create( |
| 1027 | repository=fossil_repo_obj, |
| 1028 | checkin_uuid="some-uuid", |
| 1029 | context="ci/tests", |
| 1030 | state="success", |
| 1031 | created_by=admin_user, |
| 1032 | ) |
| 1033 | CodeReview.objects.create( |
| 1034 | repository=fossil_repo_obj, |
| 1035 | workspace=ws, |
| 1036 | title="Fix", |
| 1037 | diff="d", |
| 1038 | status="approved", |
| 1039 | created_by=admin_user, |
| 1040 | ) |
| 1041 | |
| 1042 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls, patch("shutil.rmtree"): |
| 1043 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 1044 | mock_cli_cls.return_value._env = {} |
| 1045 | mock_run.return_value = _make_proc(returncode=0) |
| 1046 | |
| 1047 | response = writer_client.post( |
| 1048 | _api_url(sample_project.slug, "api/workspaces/ws-ci-pass/merge"), |
| 1049 | data=json.dumps({"target_branch": "trunk"}), |
| 1050 | content_type="application/json", |
| 1051 | ) |
| 1052 | |
| 1053 | assert response.status_code == 200 |
| 1054 | |
| 1055 | def test_merge_fossil_update_fails(self, admin_client, sample_project, fossil_repo_obj, workspace): |
| 1056 | """When fossil update to target branch fails, return 500.""" |
| 1057 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls: |
| 1058 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 1059 | mock_cli_cls.return_value._env = {} |
| 1060 | mock_run.return_value = _make_proc(returncode=1, stderr="update failed") |
| 1061 | |
| 1062 | response = admin_client.post( |
| 1063 | _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"), |
| 1064 | data=json.dumps({"agent_id": "claude-test"}), |
| 1065 | content_type="application/json", |
| 1066 | ) |
| 1067 | |
| 1068 | assert response.status_code == 500 |
| 1069 | assert "Failed to switch" in response.json()["error"] |
| 1070 | |
| 1071 | def test_merge_fossil_merge_fails(self, admin_client, sample_project, fossil_repo_obj, workspace): |
| 1072 | """When fossil merge command fails, return 500.""" |
| 1073 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls: |
| 1074 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 1075 | mock_cli_cls.return_value._env = {} |
| 1076 | mock_run.side_effect = [ |
| 1077 | _make_proc(returncode=0), # update |
| 1078 | _make_proc(returncode=1, stderr="merge conflict"), # merge |
| 1079 | ] |
| 1080 | |
| 1081 | response = admin_client.post( |
| 1082 | _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"), |
| 1083 | data=json.dumps({"agent_id": "claude-test"}), |
| 1084 | content_type="application/json", |
| 1085 | ) |
| 1086 | |
| 1087 | assert response.status_code == 500 |
| 1088 | assert "Merge failed" in response.json()["error"] |
| 1089 | |
| 1090 | def test_merge_commit_fails(self, admin_client, sample_project, fossil_repo_obj, workspace): |
| 1091 | """When the merge commit fails, return 500 and don't close workspace.""" |
| 1092 | with patch("subprocess.run") as mock_run, patch("fossil.cli.FossilCLI") as mock_cli_cls: |
| 1093 | mock_cli_cls.return_value.binary = "/usr/local/bin/fossil" |
| 1094 | mock_cli_cls.return_value._env = {} |
| 1095 | mock_run.side_effect = [ |
| 1096 | _make_proc(returncode=0), # update |
| 1097 | _make_proc(returncode=0), # merge |
| 1098 | _make_proc(returncode=1, stderr="commit lock"), # commit |
| 1099 | ] |
| 1100 | |
| 1101 | response = admin_client.post( |
| 1102 | _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"), |
| 1103 | data=json.dumps({"agent_id": "claude-test"}), |
| 1104 | content_type="application/json", |
| 1105 | ) |
| 1106 | |
| 1107 | assert response.status_code == 500 |
| 1108 | assert "Merge commit failed" in response.json()["error"] |
| 1109 | |
| 1110 | # Workspace should still be active (not closed on commit failure) |
| 1111 | workspace.refresh_from_db() |
| 1112 | assert workspace.status == "active" |
| 1113 | |
| 1114 | def test_merge_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj): |
| 1115 | """Merging a non-existent workspace returns 404.""" |
| 1116 | response = admin_client.post( |
| 1117 | _api_url(sample_project.slug, "api/workspaces/nonexistent/merge"), |
| 1118 | data=json.dumps({}), |
| 1119 | content_type="application/json", |
| 1120 | ) |
| 1121 | assert response.status_code == 404 |
| 1122 | |
| 1123 | def test_merge_workspace_not_active(self, admin_client, sample_project, fossil_repo_obj, admin_user): |
| 1124 | """Merging an already-merged workspace returns 409.""" |
| 1125 | AgentWorkspace.objects.create( |
| 1126 | repository=fossil_repo_obj, |
| 1127 | name="ws-already-merged", |
| 1128 | branch="workspace/ws-already-merged", |
| 1129 | status="merged", |
| 1130 | created_by=admin_user, |
| 1131 | ) |
| 1132 | |
| 1133 | response = admin_client.post( |
| 1134 | _api_url(sample_project.slug, "api/workspaces/ws-already-merged/merge"), |
| 1135 | data=json.dumps({}), |
| 1136 | content_type="application/json", |
| 1137 | ) |
| 1138 | assert response.status_code == 409 |
| 1139 | assert "merged" in response.json()["error"] |
| 1140 | |
| 1141 | def test_merge_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace): |
| 1142 | """GET to merge endpoint returns 405.""" |
| 1143 | response = admin_client.get(_api_url(sample_project.slug, "api/workspaces/ws-test-1/merge")) |
| 1144 | assert response.status_code == 405 |
| 1145 | |
| 1146 | def test_merge_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj, workspace): |
| 1147 | """Read-only users cannot merge workspaces.""" |
| 1148 | response = reader_client.post( |
| 1149 | _api_url(sample_project.slug, "api/workspaces/ws-test-1/merge"), |
| 1150 | data=json.dumps({}), |
| 1151 | content_type="application/json", |
| 1152 | ) |
| 1153 | assert response.status_code == 403 |
| 1154 | |
| 1155 | |
| 1156 | # ================================================================ |
| 1157 | # Workspace Abandon |
| 1158 | # ================================================================ |
| 1159 | |
| 1160 | |
@pytest.mark.django_db
class TestWorkspaceAbandon:
    """Tests for DELETE /projects/<slug>/fossil/api/workspaces/<name>/abandon (lines 1188-1238)."""

    def test_abandon_success(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Abandoning a workspace closes checkout, cleans up directory, and updates status."""
        with (
            patch("subprocess.run") as run_mock,
            patch("fossil.cli.FossilCLI") as cli_cls,
            patch("shutil.rmtree") as rmtree_mock,
        ):
            cli_cls.return_value.binary = "/usr/local/bin/fossil"
            cli_cls.return_value._env = {}
            run_mock.return_value = _make_proc(returncode=0)

            response = admin_client.delete(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon"),
            )

            assert response.status_code == 200
            payload = response.json()
            assert payload["status"] == "abandoned"
            assert payload["name"] == "ws-test-1"

            workspace.refresh_from_db()
            assert workspace.status == "abandoned"
            assert workspace.checkout_path == ""

            # Verify cleanup was called
            rmtree_mock.assert_called_once()

    def test_abandon_no_checkout_path(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Abandoning a workspace with empty checkout path still works (no cleanup needed)."""
        orphan = AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-no-path",
            branch="workspace/ws-no-path",
            status="active",
            checkout_path="",
            created_by=admin_user,
        )

        with patch("fossil.cli.FossilCLI"):
            response = admin_client.delete(_api_url(sample_project.slug, "api/workspaces/ws-no-path/abandon"))

        assert response.status_code == 200
        orphan.refresh_from_db()
        assert orphan.status == "abandoned"

    def test_abandon_workspace_not_found(self, admin_client, sample_project, fossil_repo_obj):
        """Abandoning a non-existent workspace returns 404."""
        assert admin_client.delete(_api_url(sample_project.slug, "api/workspaces/nonexistent/abandon")).status_code == 404

    def test_abandon_workspace_already_abandoned(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Abandoning an already-abandoned workspace returns 409."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-gone",
            branch="workspace/ws-gone",
            status="abandoned",
            created_by=admin_user,
        )

        response = admin_client.delete(_api_url(sample_project.slug, "api/workspaces/ws-gone/abandon"))
        assert response.status_code == 409
        assert "already abandoned" in response.json()["error"]

    def test_abandon_wrong_method(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """POST to abandon endpoint returns 405 (DELETE required)."""
        response = admin_client.post(
            _api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon"),
            content_type="application/json",
        )
        assert response.status_code == 405

    def test_abandon_denied_for_reader(self, reader_client, sample_project, fossil_repo_obj, workspace):
        """Read-only users cannot abandon workspaces."""
        assert reader_client.delete(_api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon")).status_code == 403

    def test_abandon_denied_for_anon(self, client, sample_project, fossil_repo_obj, workspace):
        """Anonymous users cannot abandon workspaces."""
        assert client.delete(_api_url(sample_project.slug, "api/workspaces/ws-test-1/abandon")).status_code == 401
| 1246 | |
| 1247 | |
| 1248 | # ================================================================ |
| 1249 | # Workspace Ownership Checks |
| 1250 | # ================================================================ |
| 1251 | |
| 1252 | |
@pytest.mark.django_db
class TestWorkspaceOwnership:
    """Tests for _check_workspace_ownership (lines 722-747).

    Token-based callers must supply matching agent_id.
    Session-auth users (human oversight) are always allowed.
    """

    def test_session_user_always_allowed(self, admin_client, sample_project, fossil_repo_obj, workspace):
        """Session-auth users bypass ownership check (human oversight).
        Tested through the commit endpoint which calls _check_workspace_ownership.
        """
        with patch("subprocess.run") as run_mock, patch("fossil.cli.FossilCLI") as cli_cls:
            cli_cls.return_value.binary = "/usr/local/bin/fossil"
            cli_cls.return_value._env = {}
            run_mock.side_effect = (
                _make_proc(returncode=0),  # addremove
                _make_proc(returncode=0, stdout="committed"),  # commit
            )

            # Session user does not provide agent_id -- should still be allowed
            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-test-1/commit"),
                data=json.dumps({"message": "Human override"}),
                content_type="application/json",
            )

            assert response.status_code == 200

    def test_workspace_without_agent_id_allows_any_writer(self, admin_client, sample_project, fossil_repo_obj, admin_user):
        """Workspace with empty agent_id allows any writer to operate."""
        AgentWorkspace.objects.create(
            repository=fossil_repo_obj,
            name="ws-no-agent",
            branch="workspace/ws-no-agent",
            agent_id="",
            status="active",
            checkout_path="/tmp/fake",
            created_by=admin_user,
        )

        with patch("subprocess.run") as run_mock, patch("fossil.cli.FossilCLI") as cli_cls:
            cli_cls.return_value.binary = "/usr/local/bin/fossil"
            cli_cls.return_value._env = {}
            run_mock.side_effect = (
                _make_proc(returncode=0),
                _make_proc(returncode=0, stdout="committed"),
            )

            response = admin_client.post(
                _api_url(sample_project.slug, "api/workspaces/ws-no-agent/commit"),
                data=json.dumps({"message": "Anyone can commit"}),
                content_type="application/json",
            )

            assert response.status_code == 200
| 1309 | |
| 1310 | |
| 1311 | # ================================================================ |
| 1312 | # SSE Events - Stream Content |
| 1313 | # ================================================================ |
| 1314 | |
| 1315 | |
| 1316 | @pytest.mark.django_db |
| 1317 | class TestSSEEventStream: |
| 1318 | """Tests for GET /projects/<slug>/fossil/api/events (lines 1521-1653). |
| 1319 | |
| 1320 | The SSE endpoint returns a StreamingHttpResponse. We verify the response |
| 1321 | metadata and test the event generator for various event types. |
| 1322 | """ |
| 1323 | |
| 1324 | def test_sse_response_headers(self, admin_client, sample_project, fossil_repo_obj): |
| 1325 | """SSE endpoint sets correct headers for event streaming.""" |
| 1326 | with patch("fossil.api_views.FossilReader") as mock_reader_cls: |
| 1327 | reader = mock_reader_cls.return_value |
| 1328 | reader.__enter__ = MagicMock(return_value=reader) |
| 1329 | reader.__exit__ = MagicMock(return_value=False) |
| 1330 | reader.get_checkin_count.return_value = 0 |
| 1331 | |
| 1332 | response = admin_client.get(_api_url(sample_project.slug, "api/events")) |
| 1333 | |
| 1334 | assert response.status_code == 200 |
| 1335 | assert response["Content-Type"] == "text/event-stream" |
| 1336 | assert response["Cache-Control"] == "no-cache" |
| 1337 | assert response["X-Accel-Buffering"] == "no" |
| 1338 | assert response.streaming is True |
@pytest.fixture
def fossil_repo_obj(sample_project):
    """Return the auto-created FossilRepository for sample_project.

    Looks up the live (not soft-deleted) repository row tied to the project.
    """
    # NOTE(review): this span was garbled in the source — the decorator and the
    # start of the def line were lost, leaving only "l_repo_obj(sample_project):".
    # Reconstructed from the fixture name used throughout this module; confirm
    # against the committed file.
    return FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True)
| 1342 | |
| 1343 | |
@pytest.fixture
def writer_user(db, admin_user, sample_project):
    """Non-admin user with write access to the project."""
    user = User.objects.create_user(username="writer_cov", password="testpass123")
    # Grant write access via a team attached to the project.
    team = Team.objects.create(name="Cov Writers", organization=sample_project.organization, created_by=admin_user)
    team.members.add(user)
    ProjectTeam.objects.create(project=sample_project, team=team, role="write", created_by=admin_user)
    return user
| 1352 | |
| 1353 | |
@pytest.fixture
def writer_client(writer_user):
    """Test client logged in as the write-access user."""
    session = Client()
    session.login(username="writer_cov", password="testpass123")
    return session
| 1359 | |
| 1360 | |
@pytest.fixture
def reader_user(db, admin_user, sample_project):
    """User with read-only access to the project."""
    user = User.objects.create_user(username="reader_cov", password="testpass123")
    # Grant read-only access via a team attached to the project.
    team = Team.objects.create(name="Cov Readers", organization=sample_project.organization, created_by=admin_user)
    team.members.add(user)
    ProjectTeam.objects.create(project=sample_project, team=team, role="read", created_by=admin_user)
    return user
| 1369 | |
| 1370 | |
@pytest.fixture
def reader_client(reader_user):
    """Test client logged in as the read-only user."""
    session = Client()
    session.login(username="reader_cov", password="testpass123")
    return session
| 1376 | |
| 1377 | |
@pytest.fixture
def workspace(fossil_repo_obj, admin_user):
    """An active agent workspace with a checkout path."""
    return AgentWorkspace.objects.create(
        repository=fossil_repo_obj,
        created_by=admin_user,
        name="ws-test-1",
        branch="workspace/ws-test-1",
        agent_id="claude-test",
        status="active",
        checkout_path="/tmp/fake-checkout",
    )
| 1390 | |
| 1391 | |
| 1392 | def _api_url(slug, path): |
| 1393 | return f"/projects/{slug}/fossil/{path}" |
| 1394 | |
| 1395 | |
| 1396 | # ---- Helper to build a mock subprocess.run result ---- |
| 1397 | |
| 1398 | |
| 1399 | def _make_proc(returncode=0, stdout="", stderr=""): |
| 1400 | result = MagicMock() |
| 1401 | result.returncode = returncode |
| 1402 | result.stdout = stdout |
| 1403 | result.stderr = stderr |
| 1404 | return result |
| 1405 | |
| 1406 | |
| 1407 | class _SSEBreakError(Exception): |
| 1408 | """Raised from mocked time.sleep to break the SSE infinite loop.""" |
| 1409 | |
| 1410 | |
def _drain_sse_one_iteration(response):
    """Read one iteration of the SSE generator, collecting yielded chunks.

    The SSE event_stream is an infinite while-True generator with time.sleep(5)
    at the end of each iteration, so time.sleep is mocked to raise
    _SSEBreakError once the first poll cycle has yielded its events.
    """
    chunks = []
    with patch("fossil.api_views.time.sleep", side_effect=_SSEBreakError):
        try:
            for piece in response.streaming_content:
                # StreamingHttpResponse wraps generator output in map() for
                # encoding; chunks are bytes.
                if isinstance(piece, bytes):
                    piece = piece.decode("utf-8", errors="replace")
                chunks.append(piece)
        except (_SSEBreakError, RuntimeError):
            pass
    return chunks
| 1430 | |
| 1431 | |
def _drain_sse_n_iterations(response, n=3):
    """Read n iterations of the SSE generator."""
    sleeps_seen = 0

    def _break_after_n(_seconds):
        # Let n-1 sleeps through, then abort the generator.
        nonlocal sleeps_seen
        sleeps_seen += 1
        if sleeps_seen >= n:
            raise _SSEBreakError

    chunks = []
    with patch("fossil.api_views.time.sleep", side_effect=_break_after_n):
        try:
            for piece in response.streaming_content:
                if isinstance(piece, bytes):
                    piece = piece.decode("utf-8", errors="replace")
                chunks.append(piece)
        except (_SSEBreakError, RuntimeError):
            pass
    return chunks
| 1452 | |
| 1453 | |
| 1454 | # ================================================================ |
| 1455 | # Batch API |
| 1456 | # ================================================================ |
| 1457 | |
| 1458 | |
| 1459 | @pytest.mark.django_db |
| 1460 | class TestBatchAPI: |
| 1461 | """Tests for POST /projects/<slug>/fossil/api/batch (lines 636-706).""" |
| 1462 | |
| 1463 | def test_batch_success_with_multiple_sub_requests(self, admin_client, sample_project, fossil_repo_obj): |
| 1464 | """Batch call dispatches multiple GET sub-requests and returns combined results.""" |
| 1465 | with patch("fossil.api_views.FossilReader") as mock_reader_cls: |
| 1466 | reader = mock_reader_cls.return_value |
| 1467 | reader.__enter__ = MagicMock(return_value=reader) |
| 1468 | reader.__exit__ = MagicMock(return_value=False) |
| 1469 | reader.get_timeline.return_value = [] |
| 1470 | reader.get_checkin_count.return_value = 0 |
| 1471 | reader.get_tickets.return_value = [] |
| 1472 | |
| 1473 | response = admin_client.post( |
| 1474 | _api_url(sample_project.slug, "api/batch"), |
| 1475 | data=json.dumps( |
| 1476 | { |
| 1477 | "requests": [ |
| 1478 | {"method": "GET", "path": "/api/timeline"}, |
| 1479 | {"method": "GET", "path": "/api/tickets"}, |
| 1480 | ] |
| 1481 | } |
| 1482 | ), |
| 1483 | content_type="application/json", |
| 1484 | ) |
| 1485 | |
| 1486 | assert response.status_code == 200 |
| 1487 | data = response.json() |
| 1488 | assert len(data["responses"]) == 2 |
| 1489 | assert data["responses"][0]["status"] == 200 |
| 1490 | assert "checkins" in data["responses"][0]["body"] |
| 1491 | assert data["responses"][1]["status"] == 200 |
| 1492 | assert "tickets" in data["responses"][1]["body"] |
| 1493 | |
| 1494 | def test_batch_wrong_method(self, admin_client, sample_project, fossil_repo_obj): |
| 1495 | """GET to batch endpoint returns 405.""" |
| 1496 | response = admin_client.get(_api_url(sample_project.slug, "api/batch")) |
| 1497 | assert response.status_code == 405 |
+1207
| --- a/tests/test_cli.py | ||
| +++ b/tests/test_cli.py | ||
| @@ -0,0 +1,1207 @@ | ||
| 1 | +"""Unit tests for fossil/cli.py -- FossilCLI subprocess wrapper. | |
| 2 | + | |
| 3 | +Tests mock subprocess.run throughout since FossilCLI is a thin wrapper | |
| 4 | +around the fossil binary. We verify that: | |
| 5 | +- Correct commands are assembled for every method | |
| 6 | +- Success/failure return values are propagated correctly | |
| 7 | +- Environment variables are set properly (_env property) | |
| 8 | +- Timeouts and exceptions are handled gracefully | |
| 9 | +- Edge-case inputs (empty strings, special characters) work | |
| 10 | +""" | |
| 11 | + | |
| 12 | +import os | |
| 13 | +import subprocess | |
| 14 | +from pathlib import Path | |
| 15 | +from unittest.mock import MagicMock, patch | |
| 16 | + | |
| 17 | +import pytest | |
| 18 | + | |
| 19 | +from fossil.cli import FossilCLI | |
| 20 | + | |
| 21 | +# --------------------------------------------------------------------------- | |
| 22 | +# Helpers | |
| 23 | +# --------------------------------------------------------------------------- | |
| 24 | + | |
| 25 | + | |
| 26 | +def _ok(stdout="", stderr="", returncode=0): | |
| 27 | + """Build a mock CompletedProcess for a successful command.""" | |
| 28 | + return subprocess.CompletedProcess(args=[], returncode=returncode, stdout=stdout, stderr=stderr) | |
| 29 | + | |
| 30 | + | |
| 31 | +def _fail(stdout="", stderr="error", returncode=1): | |
| 32 | + """Build a mock CompletedProcess for a failed command.""" | |
| 33 | + return subprocess.CompletedProcess(args=[], returncode=returncode, stdout=stdout, stderr=stderr) | |
| 34 | + | |
| 35 | + | |
| 36 | +def _ok_bytes(stdout=b"", stderr=b"", returncode=0): | |
| 37 | + """Build a mock CompletedProcess returning raw bytes (not text).""" | |
| 38 | + return subprocess.CompletedProcess(args=[], returncode=returncode, stdout=stdout, stderr=stderr) | |
| 39 | + | |
| 40 | + | |
| 41 | +# --------------------------------------------------------------------------- | |
| 42 | +# Constructor and _env | |
| 43 | +# --------------------------------------------------------------------------- | |
| 44 | + | |
| 45 | + | |
class TestFossilCLIInit:
    """Constructor: explicit binary path vs constance fallback."""

    def test_explicit_binary(self):
        assert FossilCLI(binary="/usr/local/bin/fossil").binary == "/usr/local/bin/fossil"

    def test_constance_fallback(self):
        # With no explicit binary, the path comes from dynamic settings.
        fake_config = MagicMock()
        fake_config.FOSSIL_BINARY_PATH = "/opt/fossil/bin/fossil"
        with patch("constance.config", fake_config):
            assert FossilCLI().binary == "/opt/fossil/bin/fossil"
| 59 | + | |
| 60 | + | |
class TestEnvProperty:
    """_env injects USER=fossilrepo into the inherited environment."""

    def test_env_sets_user(self):
        assert FossilCLI(binary="/bin/false")._env["USER"] == "fossilrepo"

    def test_env_inherits_system_env(self):
        # PATH should come from os.environ
        assert "PATH" in FossilCLI(binary="/bin/false")._env
| 74 | + | |
| 75 | + | |
| 76 | +# --------------------------------------------------------------------------- | |
| 77 | +# _run helper | |
| 78 | +# --------------------------------------------------------------------------- | |
| 79 | + | |
| 80 | + | |
class TestRunHelper:
    """_run assembles the command and delegates to subprocess.run."""

    def test_run_builds_correct_command(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok("ok")) as run_mock:
            cli._run("version")
        run_mock.assert_called_once()
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "version"]

    def test_run_passes_env(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            cli._run("version")
        assert run_mock.call_args[1]["env"]["USER"] == "fossilrepo"

    def test_run_uses_check_true(self):
        """_run uses check=True so CalledProcessError is raised on failure."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        boom = subprocess.CalledProcessError(1, "fossil")
        with (
            patch("subprocess.run", side_effect=boom),
            pytest.raises(subprocess.CalledProcessError),
        ):
            cli._run("bad-command")

    def test_run_custom_timeout(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            cli._run("clone", "http://example.com", timeout=120)
        assert run_mock.call_args[1]["timeout"] == 120

    def test_run_multiple_args(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            cli._run("push", "-R", "/tmp/repo.fossil")
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "push", "-R", "/tmp/repo.fossil"]
| 120 | + | |
| 121 | + | |
| 122 | +# --------------------------------------------------------------------------- | |
| 123 | +# init | |
| 124 | +# --------------------------------------------------------------------------- | |
| 125 | + | |
| 126 | + | |
class TestInit:
    def test_init_creates_parent_dirs_and_runs_fossil_init(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        target = tmp_path / "sub" / "dir" / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            returned = cli.init(target)
        assert returned == target
        # Parent dirs created
        assert target.parent.exists()
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "init", str(target)]

    def test_init_returns_path(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        target = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()):
            result = cli.init(target)
        assert isinstance(result, Path)
        assert result == target
| 146 | + | |
| 147 | + | |
| 148 | +# --------------------------------------------------------------------------- | |
| 149 | +# version | |
| 150 | +# --------------------------------------------------------------------------- | |
| 151 | + | |
| 152 | + | |
class TestVersion:
    def test_version_returns_stripped_stdout(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        # Surrounding whitespace from the binary's output is stripped.
        with patch("subprocess.run", return_value=_ok(" This is fossil version 2.24\n")):
            assert cli.version() == "This is fossil version 2.24"

    def test_version_propagates_error(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        boom = subprocess.CalledProcessError(1, "fossil")
        with (
            patch("subprocess.run", side_effect=boom),
            pytest.raises(subprocess.CalledProcessError),
        ):
            cli.version()
| 167 | + | |
| 168 | + | |
| 169 | +# --------------------------------------------------------------------------- | |
| 170 | +# is_available | |
| 171 | +# --------------------------------------------------------------------------- | |
| 172 | + | |
| 173 | + | |
class TestIsAvailable:
    def test_available_when_version_works(self):
        with patch("subprocess.run", return_value=_ok("2.24")):
            assert FossilCLI(binary="/usr/bin/fossil").is_available() is True

    def test_not_available_on_file_not_found(self):
        with patch("subprocess.run", side_effect=FileNotFoundError):
            assert FossilCLI(binary="/nonexistent/fossil").is_available() is False

    def test_not_available_on_called_process_error(self):
        with patch("subprocess.run", side_effect=subprocess.CalledProcessError(1, "fossil")):
            assert FossilCLI(binary="/usr/bin/fossil").is_available() is False
| 189 | + | |
| 190 | + | |
| 191 | +# --------------------------------------------------------------------------- | |
| 192 | +# render_pikchr | |
| 193 | +# --------------------------------------------------------------------------- | |
| 194 | + | |
| 195 | + | |
class TestRenderPikchr:
    def test_renders_svg_on_success(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        svg = '<svg viewBox="0 0 100 100"></svg>'
        with patch("subprocess.run", return_value=_ok(stdout=svg)) as run_mock:
            assert cli.render_pikchr("circle") == svg
        # Source is piped to "fossil pikchr -" via stdin.
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "pikchr", "-"]
        assert run_mock.call_args[1]["input"] == "circle"

    def test_returns_empty_on_failure(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_fail(stderr="error")):
            assert cli.render_pikchr("bad") == ""

    def test_returns_empty_on_file_not_found(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", side_effect=FileNotFoundError):
            assert cli.render_pikchr("test") == ""

    def test_returns_empty_on_timeout(self):
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 10)):
            assert cli.render_pikchr("test") == ""
| 223 | + | |
| 224 | + | |
| 225 | +# --------------------------------------------------------------------------- | |
| 226 | +# ensure_default_user | |
| 227 | +# --------------------------------------------------------------------------- | |
| 228 | + | |
| 229 | + | |
class TestEnsureDefaultUser:
    def test_creates_user_when_missing(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        # First call: user list (user not present), second: create, third: default
        with patch("subprocess.run") as run_mock:
            run_mock.side_effect = [
                _ok(stdout="admin\n"),  # user list -- "fossilrepo" not in output
                _ok(),  # user new
                _ok(),  # user default
            ]
            cli.ensure_default_user(repo)
        assert run_mock.call_count == 3
        # Verify the user new call
        create_cmd = run_mock.call_args_list[1][0][0]
        for token in ("user", "new", "fossilrepo"):
            assert token in create_cmd

    def test_skips_create_when_user_exists(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run") as run_mock:
            run_mock.side_effect = [
                _ok(stdout="admin\nfossilrepo\n"),  # user list -- fossilrepo IS present
                _ok(),  # user default
            ]
            cli.ensure_default_user(repo)
        assert run_mock.call_count == 2  # no "new" call

    def test_custom_username(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run") as run_mock:
            run_mock.side_effect = [
                _ok(stdout="admin\n"),  # user list -- custom not present
                _ok(),  # user new
                _ok(),  # user default
            ]
            cli.ensure_default_user(repo, username="custom-bot")
        assert "custom-bot" in run_mock.call_args_list[1][0][0]

    def test_silently_swallows_exceptions(self, tmp_path):
        """ensure_default_user has a bare except -- should not raise."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", side_effect=Exception("kaboom")):
            cli.ensure_default_user(tmp_path / "repo.fossil")  # should not raise
| 279 | + | |
| 280 | + | |
| 281 | +# --------------------------------------------------------------------------- | |
| 282 | +# tarball | |
| 283 | +# --------------------------------------------------------------------------- | |
| 284 | + | |
| 285 | + | |
class TestTarball:
    """tarball: export a check-in as a gzipped tar stream on stdout."""

    def test_returns_bytes_on_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        payload = b"\x1f\x8b\x08\x00" + b"\x00" * 100  # fake gzip header
        with patch("subprocess.run", return_value=_ok_bytes(stdout=payload)) as run_mock:
            assert fossil.tarball(repo, "trunk") == payload
        expected_cmd = ["/usr/bin/fossil", "tarball", "trunk", "-R", str(repo), "/dev/stdout"]
        assert run_mock.call_args[0][0] == expected_cmd

    def test_returns_empty_bytes_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(returncode=1)):
            assert fossil.tarball(repo, "trunk") == b""
| 303 | + | |
| 304 | + | |
| 305 | +# --------------------------------------------------------------------------- | |
| 306 | +# zip_archive | |
| 307 | +# --------------------------------------------------------------------------- | |
| 308 | + | |
| 309 | + | |
class TestZipArchive:
    """zip_archive: export a check-in as a zip, round-tripped through a temp file."""

    def test_returns_bytes_on_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        payload = b"PK\x03\x04" + b"\x00" * 100

        def fake_run(cmd, **kwargs):
            # Emulate fossil writing the archive into the temp path it was handed.
            # Command shape: zip <checkin> <outfile> -R <repo>
            Path(cmd[3]).write_bytes(payload)
            return _ok()

        with patch("subprocess.run", side_effect=fake_run):
            assert fossil.zip_archive(repo, "trunk") == payload

    def test_returns_empty_bytes_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.zip_archive(repo, "trunk") == b""
| 333 | + | |
| 334 | + | |
| 335 | +# --------------------------------------------------------------------------- | |
| 336 | +# blame | |
| 337 | +# --------------------------------------------------------------------------- | |
| 338 | + | |
| 339 | + | |
class TestBlame:
    """blame: parse `fossil blame` output and always clean up the scratch checkout."""

    def test_parses_blame_output(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        blame_output = (
            "abc12345 2026-01-15 ragelink: def hello():\n"
            "abc12345 2026-01-15 ragelink: return 'world'\n"
            "def67890 2026-01-20 contributor: pass\n"
        )
        responses = [
            _ok(),                     # fossil open
            _ok(stdout=blame_output),  # fossil blame
            _ok(),                     # fossil close
        ]
        with patch("subprocess.run", side_effect=responses):
            annotated = fossil.blame(repo, "main.py")
        assert len(annotated) == 3
        first = annotated[0]
        assert first["uuid"] == "abc12345"
        assert first["date"] == "2026-01-15"
        assert first["user"] == "ragelink"
        assert first["text"] == "def hello():"
        assert annotated[2]["user"] == "contributor"

    def test_returns_empty_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        responses = [
            _ok(),    # fossil open
            _fail(),  # fossil blame fails
            _ok(),    # fossil close
        ]
        with patch("subprocess.run", side_effect=responses):
            assert fossil.blame(repo, "nonexistent.py") == []

    def test_returns_empty_on_exception(self, tmp_path):
        """blame catches broad exceptions internally and degrades to an empty list."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=Exception("error")):
            assert fossil.blame(repo, "file.py") == []

    def test_cleans_up_tmpdir(self, tmp_path):
        """The scratch checkout directory is removed even when the run blows up."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"

        seen_dirs = []
        real_mkdtemp = __import__("tempfile").mkdtemp

        def spy_mkdtemp(**kwargs):
            scratch = real_mkdtemp(**kwargs)
            seen_dirs.append(scratch)
            return scratch

        with (
            patch("subprocess.run", side_effect=Exception("fail")),
            patch("tempfile.mkdtemp", side_effect=spy_mkdtemp),
        ):
            fossil.blame(repo, "file.py")

        # The implementation's shutil.rmtree must have removed every scratch dir.
        for scratch in seen_dirs:
            assert not Path(scratch).exists()
| 405 | + | |
| 406 | + | |
| 407 | +# --------------------------------------------------------------------------- | |
| 408 | +# push | |
| 409 | +# --------------------------------------------------------------------------- | |
| 410 | + | |
| 411 | + | |
class TestPush:
    """push: success/failure reporting and artifact-sent counting."""

    def test_push_success_with_artifacts(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="Round-trips: 1 Artifacts sent: 5 sent: 5")):
            outcome = fossil.push(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_sent"] == 5

    def test_push_with_remote_url(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="sent: 3")) as run_mock:
            outcome = fossil.push(repo, remote_url="https://fossil.example.com/repo")
        # The explicit remote must appear in the command line.
        assert "https://fossil.example.com/repo" in run_mock.call_args[0][0]
        assert outcome["artifacts_sent"] == 3

    def test_push_no_artifacts_in_output(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="nothing to push")):
            outcome = fossil.push(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_sent"] == 0

    def test_push_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="connection refused")):
            assert fossil.push(repo)["success"] is False

    def test_push_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 120)):
            outcome = fossil.push(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_sent"] == 0
        assert "timed out" in outcome["message"].lower()

    def test_push_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError("No such file")):
            outcome = fossil.push(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_sent"] == 0
| 461 | + | |
| 462 | + | |
| 463 | +# --------------------------------------------------------------------------- | |
| 464 | +# sync | |
| 465 | +# --------------------------------------------------------------------------- | |
| 466 | + | |
| 467 | + | |
class TestSync:
    """sync: bidirectional sync result reporting."""

    def test_sync_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="sync complete")):
            outcome = fossil.sync(repo)
        assert outcome["success"] is True
        assert outcome["message"] == "sync complete"

    def test_sync_with_remote_url(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="ok")) as run_mock:
            fossil.sync(repo, remote_url="https://fossil.example.com/repo")
        assert "https://fossil.example.com/repo" in run_mock.call_args[0][0]

    def test_sync_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="error")):
            assert fossil.sync(repo)["success"] is False

    def test_sync_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 120)):
            outcome = fossil.sync(repo)
        assert outcome["success"] is False
        assert "timed out" in outcome["message"].lower()

    def test_sync_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError("No such file")):
            assert fossil.sync(repo)["success"] is False
| 506 | + | |
| 507 | + | |
| 508 | +# --------------------------------------------------------------------------- | |
| 509 | +# pull | |
| 510 | +# --------------------------------------------------------------------------- | |
| 511 | + | |
| 512 | + | |
class TestPull:
    """pull: success/failure reporting and artifact-received counting."""

    def test_pull_success_with_artifacts(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="Round-trips: 1 received: 12")):
            outcome = fossil.pull(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_received"] == 12

    def test_pull_no_artifacts(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="nothing new")):
            outcome = fossil.pull(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_received"] == 0

    def test_pull_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="connection refused")):
            assert fossil.pull(repo)["success"] is False

    def test_pull_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 60)):
            outcome = fossil.pull(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_received"] == 0

    def test_pull_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError("No such file")):
            outcome = fossil.pull(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_received"] == 0
| 552 | + | |
| 553 | + | |
| 554 | +# --------------------------------------------------------------------------- | |
| 555 | +# get_remote_url | |
| 556 | +# --------------------------------------------------------------------------- | |
| 557 | + | |
| 558 | + | |
class TestGetRemoteUrl:
    """get_remote_url: report the configured remote, or '' on any failure."""

    def test_returns_url_on_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        proc = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="https://fossil.example.com/repo\n", stderr=""
        )
        with patch("subprocess.run", return_value=proc):
            # Trailing newline from fossil must be stripped.
            assert fossil.get_remote_url(repo) == "https://fossil.example.com/repo"

    def test_returns_empty_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        proc = subprocess.CompletedProcess(args=[], returncode=1, stdout="", stderr="not configured")
        with patch("subprocess.run", return_value=proc):
            assert fossil.get_remote_url(repo) == ""

    def test_returns_empty_on_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError):
            assert fossil.get_remote_url(repo) == ""

    def test_returns_empty_on_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 10)):
            assert fossil.get_remote_url(repo) == ""
| 589 | + | |
| 590 | + | |
| 591 | +# --------------------------------------------------------------------------- | |
| 592 | +# wiki_commit | |
| 593 | +# --------------------------------------------------------------------------- | |
| 594 | + | |
| 595 | + | |
class TestWikiCommit:
    """wiki_commit: update an existing wiki page, body passed on stdin."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.wiki_commit(repo, "Home", "# Welcome") is True
        # Page body goes to stdin, not the argv.
        assert run_mock.call_args[1]["input"] == "# Welcome"
        expected_cmd = ["/usr/bin/fossil", "wiki", "commit", "Home", "-R", str(repo)]
        assert run_mock.call_args[0][0] == expected_cmd

    def test_with_user(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.wiki_commit(repo, "Home", "content", user="admin")
        argv = run_mock.call_args[0][0]
        assert "--technote-user" in argv
        assert "admin" in argv

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.wiki_commit(repo, "Missing", "content") is False
| 622 | + | |
| 623 | + | |
| 624 | +# --------------------------------------------------------------------------- | |
| 625 | +# wiki_create | |
| 626 | +# --------------------------------------------------------------------------- | |
| 627 | + | |
| 628 | + | |
class TestWikiCreate:
    """wiki_create: create a brand-new wiki page, body passed on stdin."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.wiki_create(repo, "NewPage", "# New content") is True
        expected_cmd = ["/usr/bin/fossil", "wiki", "create", "NewPage", "-R", str(repo)]
        assert run_mock.call_args[0][0] == expected_cmd
        assert run_mock.call_args[1]["input"] == "# New content"

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.wiki_create(repo, "Dup", "content") is False
| 646 | + | |
| 647 | + | |
| 648 | +# --------------------------------------------------------------------------- | |
| 649 | +# ticket_add | |
| 650 | +# --------------------------------------------------------------------------- | |
| 651 | + | |
| 652 | + | |
class TestTicketAdd:
    """ticket_add: field dict is flattened into key/value argv pairs."""

    def test_success_with_fields(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        ticket_fields = {"title": "Bug report", "status": "open", "type": "bug"}
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.ticket_add(repo, ticket_fields) is True
        argv = run_mock.call_args[0][0]
        # Expected shape: fossil ticket add -R <path> title "Bug report" status open type bug
        assert argv[:4] == ["/usr/bin/fossil", "ticket", "add", "-R"]
        assert "title" in argv
        assert "Bug report" in argv

    def test_empty_fields(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.ticket_add(repo, {}) is True
        # No fields: the command ends right after the repo flag.
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "ticket", "add", "-R", str(repo)]

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.ticket_add(repo, {"title": "test"}) is False
| 682 | + | |
| 683 | + | |
| 684 | +# --------------------------------------------------------------------------- | |
| 685 | +# ticket_change | |
| 686 | +# --------------------------------------------------------------------------- | |
| 687 | + | |
| 688 | + | |
class TestTicketChange:
    """ticket_change: mutate an existing ticket identified by UUID."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        ticket_uuid = "abc123def456"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.ticket_change(repo, ticket_uuid, {"status": "closed"}) is True
        argv = run_mock.call_args[0][0]
        assert argv[:5] == ["/usr/bin/fossil", "ticket", "change", ticket_uuid, "-R"]
        assert "status" in argv
        assert "closed" in argv

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.ticket_change(repo, "badid", {"status": "open"}) is False
| 708 | + | |
| 709 | + | |
| 710 | +# --------------------------------------------------------------------------- | |
| 711 | +# technote_create | |
| 712 | +# --------------------------------------------------------------------------- | |
| 713 | + | |
| 714 | + | |
class TestTechnoteCreate:
    """technote_create: new technote with an explicit or auto-generated timestamp."""

    def test_with_explicit_timestamp(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            ok = fossil.technote_create(repo, "Release v1.0", "Details here", timestamp="2026-04-07T12:00:00")
        assert ok is True
        argv = run_mock.call_args[0][0]
        assert "--technote" in argv
        assert "2026-04-07T12:00:00" in argv
        assert run_mock.call_args[1]["input"] == "Details here"

    def test_auto_generates_timestamp(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.technote_create(repo, "Note", "body")
        argv = run_mock.call_args[0][0]
        # Omitted timestamp: the value after --technote should look like ISO datetime.
        stamp = argv[argv.index("--technote") + 1]
        assert "T" in stamp  # ISO datetime has T separator

    def test_with_user(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.technote_create(repo, "Note", "body", timestamp="2026-01-01T00:00:00", user="author")
        argv = run_mock.call_args[0][0]
        assert "--technote-user" in argv
        assert "author" in argv

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.technote_create(repo, "Fail", "body", timestamp="2026-01-01T00:00:00") is False
| 752 | + | |
| 753 | + | |
| 754 | +# --------------------------------------------------------------------------- | |
| 755 | +# technote_edit | |
| 756 | +# --------------------------------------------------------------------------- | |
| 757 | + | |
| 758 | + | |
class TestTechnoteEdit:
    """technote_edit: update an existing technote by id, body via stdin."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.technote_edit(repo, "abc123", "Updated body") is True
        argv = run_mock.call_args[0][0]
        assert "--technote" in argv
        assert "abc123" in argv
        assert run_mock.call_args[1]["input"] == "Updated body"

    def test_with_user(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.technote_edit(repo, "abc123", "body", user="editor")
        argv = run_mock.call_args[0][0]
        assert "--technote-user" in argv
        assert "editor" in argv

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.technote_edit(repo, "badid", "body") is False
| 786 | + | |
| 787 | + | |
| 788 | +# --------------------------------------------------------------------------- | |
| 789 | +# uv_add | |
| 790 | +# --------------------------------------------------------------------------- | |
| 791 | + | |
| 792 | + | |
class TestUvAdd:
    """uv_add: store an unversioned file under a given repo-side name."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        local_file = tmp_path / "logo.png"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.uv_add(repo, "logo.png", local_file) is True
        expected_cmd = ["/usr/bin/fossil", "uv", "add", str(local_file), "--as", "logo.png", "-R", str(repo)]
        assert run_mock.call_args[0][0] == expected_cmd

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.uv_add(repo, "file.txt", tmp_path / "file.txt") is False
| 810 | + | |
| 811 | + | |
| 812 | +# --------------------------------------------------------------------------- | |
| 813 | +# uv_cat | |
| 814 | +# --------------------------------------------------------------------------- | |
| 815 | + | |
| 816 | + | |
class TestUvCat:
    """uv_cat: stream an unversioned file's bytes; missing files raise."""

    def test_returns_bytes_on_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        payload = b"\x89PNG\r\n\x1a\n"  # PNG header bytes
        with patch("subprocess.run", return_value=_ok_bytes(stdout=payload)) as run_mock:
            assert fossil.uv_cat(repo, "logo.png") == payload
        expected_cmd = ["/usr/bin/fossil", "uv", "cat", "logo.png", "-R", str(repo)]
        assert run_mock.call_args[0][0] == expected_cmd

    def test_raises_file_not_found_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with (
            patch("subprocess.run", return_value=_ok_bytes(returncode=1)),
            pytest.raises(FileNotFoundError, match="Unversioned file not found"),
        ):
            fossil.uv_cat(repo, "missing.txt")
| 836 | + | |
| 837 | + | |
| 838 | +# --------------------------------------------------------------------------- | |
| 839 | +# git_export (supplements TestGitExportTokenHandling in test_security.py) | |
| 840 | +# --------------------------------------------------------------------------- | |
| 841 | + | |
| 842 | + | |
| 843 | +class TestGitExport: | |
| 844 | + def test_basic_export_no_autopush(self, tmp_path): | |
| 845 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 846 | + repo_path = tmp_path / "repo.fossil" | |
| 847 | + mirror_dir = tmp_path / "mirror" | |
| 848 | + with patch("subprocess.run", return_value=_ok(stdout="exported 5 commits")) as mock_run: | |
| 849 | + result = cli.git_export(repo_path, mirror_dir) | |
| 850 | + assert result["success"] is True | |
| 851 | + assert result["message"] == "exported 5 commits" | |
| 852 | + cmd = mock_run.call_args[0][0] | |
| 853 | + assert cmd == ["/usr/bin/fossil", "git", "export", str(mirror_dir), "-R", str(repo_path)] | |
| 854 | + # mirror_dir should be created | |
| 855 | + assert mirror_dir.exists() | |
| 856 | + | |
| 857 | + def test_with_autopush_url(self, tmp_path): | |
| 858 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 859 | + repo_path = tmp_path / "repo.fossil" | |
| 860 | + mirror_dir = tmp_path / "mirror" | |
| 861 | + with patch("subprocess.run", return_value=_ok(stdout="pushed")) as mock_run: | |
| 862 | + cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/user/repo.git") | |
| 863 | + cmd = mock_run.call_args[0][0] | |
| 864 | + assert "--autopush" in cmd | |
| 865 | + assert "https://github.com/user/repo.git" in cmd | |
| 866 | + | |
| 867 | + def test_timeout_returns_failure(self, tmp_path): | |
| 868 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 869 | + repo_path = tmp_path / "repo.fossil" | |
| 870 | + mirror_dir = tmp_path / "mirror" | |
| 871 | + with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 300)): | |
| 872 | + result = cli.git_export(repo_path, mirror_dir) | |
| 873 | + assert result["success"] is False | |
| 874 | + assert "timed out" in result["message"].lower() | |
| 875 | + | |
| 876 | + def test_failure_returncode(self, tmp_path): | |
| 877 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 878 | + repo_path = tmp_path / "repo.fossil" | |
| 879 | + mirror_dir = tmp_path / "mirror" | |
| 880 | + with patch("subprocess.run", return_value=_ok(stdout="fatal error", returncode=1)): | |
| 881 | + result = cli.git_export(repo_path, mirror_dir) | |
| 882 | + assert result["success"] is False | |
| 883 | + | |
| 884 | + def test_temp_files_cleaned_on_success(self, tmp_path): | |
| 885 | + """Askpass and token temp files are removed after successful export.""" | |
| 886 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 887 | + repo_path = tmp_path / "repo.fossil" | |
| 888 | + mirror_dir = tmp_path / "mirror" | |
| 889 | + | |
| 890 | + created_files = [] | |
| 891 | + | |
| 892 | + original_mkstemp = __import__("tempfile").mkstemp | |
| 893 | + | |
| 894 | + def tracking_mkstemp(**kwargs): | |
| 895 | + fd, path = original_mkstemp(**kwargs) | |
| 896 | + created_files.append(path) | |
| 897 | + return fd, path | |
| 898 | + | |
| 899 | + with ( | |
| 900 | + patch("subprocess.run", return_value=_ok(stdout="ok")), | |
| 901 | + patch("tempfile.mkstemp", side_effect=tracking_mkstemp), | |
| 902 | + ): | |
| 903 | + cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/u/r.git", auth_token="tok123") | |
| 904 | + | |
| 905 | + # Both temp files should be cleaned up | |
| 906 | + assert len(created_files) == 2 | |
| 907 | + for f in created_files: | |
| 908 | + assert not os.path.exists(f) | |
| 909 | + | |
| 910 | + def test_temp_files_cleaned_on_timeout(self, tmp_path): | |
| 911 | + """Askpass and token temp files are removed even when subprocess times out.""" | |
| 912 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 913 | + repo_path = tmp_path / "repo.fossil" | |
| 914 | + mirror_dir = tmp_path / "mirror" | |
| 915 | + | |
| 916 | + created_files = [] | |
| 917 | + original_mkstemp = __import__("tempfile").mkstemp | |
| 918 | + | |
| 919 | + def tracking_mkstemp(**kwargs): | |
| 920 | + fd, path = original_mkstemp(**kwargs) | |
| 921 | + created_files.append(path) | |
| 922 | + return fd, path | |
| 923 | + | |
| 924 | + with ( | |
| 925 | + patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 300)), | |
| 926 | + patch("tempfile.mkstemp", side_effect=tracking_mkstemp), | |
| 927 | + ): | |
| 928 | + cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/u/r.git", auth_token="tok123") | |
| 929 | + | |
| 930 | + for f in created_files: | |
| 931 | + assert not os.path.exists(f) | |
| 932 | + | |
| 933 | + def test_no_redaction_when_no_token(self, tmp_path): | |
| 934 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 935 | + repo_path = tmp_path / "repo.fossil" | |
| 936 | + mirror_dir = tmp_path / "mirror" | |
| 937 | + with patch("subprocess.run", return_value=_ok(stdout="push ok")): | |
| 938 | + result = cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/u/r.git") | |
| 939 | + assert result["message"] == "push ok" | |
| 940 | + assert "[REDACTED]" not in result["message"] | |
| 941 | + | |
| 942 | + def test_combines_stdout_and_stderr(self, tmp_path): | |
| 943 | + cli = FossilCLI(binary="/usr/bin/fossil") | |
| 944 | + repo_path = tmp_path / "repo.fossil" | |
| 945 | + mirror_dir = tmp_path / "mirror" | |
| 946 | + with patch("subprocess.run", return_value=_ok(stdout="out\n", stderr="err")): | |
| 947 | + result = cli.git_export(repo_path, mirror_dir) | |
| 948 | + assert "out" in result["message"] | |
| 949 | + assert "err" in result["message"] | |
| 950 | + | |
| 951 | + | |
| 952 | +# --------------------------------------------------------------------------- | |
| 953 | +# generate_ssh_key | |
| 954 | +# --------------------------------------------------------------------------- | |
| 955 | + | |
| 956 | + | |
class TestGenerateSSHKey:
    """generate_ssh_key(): wrapper around ssh-keygen returning a result dict.

    The dict carries `success`, `public_key`, `fingerprint` and, on an
    exception, `error`. All subprocess calls are mocked; the public-key file
    is faked on disk where the implementation reads it back.
    """

    def test_success(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "keys" / "id_ed25519"
        pub_key_content = "ssh-ed25519 AAAAC3Nza...== fossilrepo"
        fingerprint_output = "256 SHA256:abcdef123456 fossilrepo (ED25519)"

        # Create the public key file that generate_ssh_key will try to read
        key_path.parent.mkdir(parents=True, exist_ok=True)

        with patch("subprocess.run") as mock_run:
            # ssh-keygen creates the key, then we read pubkey, then fingerprint
            def side_effect(cmd, **kwargs):
                if "-t" in cmd:
                    # Write fake pub key file on "creation"
                    key_path.with_suffix(".pub").write_text(pub_key_content)
                    return _ok()
                elif "-lf" in cmd:
                    return _ok(stdout=fingerprint_output)
                return _ok()

            mock_run.side_effect = side_effect
            result = cli.generate_ssh_key(key_path, comment="fossilrepo")

        assert result["success"] is True
        assert result["public_key"] == pub_key_content
        # Fingerprint is the second whitespace-separated field of -lf output.
        assert result["fingerprint"] == "SHA256:abcdef123456"

    def test_creates_parent_dirs(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "deep" / "nested" / "id_ed25519"
        with patch("subprocess.run", return_value=_fail()):
            cli.generate_ssh_key(key_path)
        # Parent dirs should exist even if ssh-keygen fails
        assert key_path.parent.exists()

    def test_failure_returns_error_dict(self, tmp_path):
        # ssh-keygen exiting non-zero yields empty key/fingerprint fields.
        cli = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        with patch("subprocess.run", return_value=_fail()):
            result = cli.generate_ssh_key(key_path)
        assert result["success"] is False
        assert result["public_key"] == ""
        assert result["fingerprint"] == ""

    def test_exception_returns_error_dict(self, tmp_path):
        # Any exception (e.g. missing ssh-keygen binary) is caught and reported.
        cli = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        with patch("subprocess.run", side_effect=Exception("ssh-keygen not found")):
            result = cli.generate_ssh_key(key_path)
        assert result["success"] is False
        assert "ssh-keygen not found" in result["error"]

    def test_keygen_command_uses_ed25519(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        with patch("subprocess.run", return_value=_fail()) as mock_run:
            cli.generate_ssh_key(key_path, comment="test-key")
        cmd = mock_run.call_args[0][0]
        # -N "" requests an empty passphrase; -C sets the key comment.
        assert cmd == ["ssh-keygen", "-t", "ed25519", "-f", str(key_path), "-N", "", "-C", "test-key"]

    def test_fingerprint_empty_on_keygen_lf_failure(self, tmp_path):
        """If ssh-keygen -lf fails, fingerprint should be empty but success still True."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        pub_key_content = "ssh-ed25519 AAAAC3Nza...== test"

        with patch("subprocess.run") as mock_run:

            def side_effect(cmd, **kwargs):
                if "-t" in cmd:
                    key_path.with_suffix(".pub").write_text(pub_key_content)
                    return _ok()
                elif "-lf" in cmd:
                    return _fail()
                return _ok()

            mock_run.side_effect = side_effect
            result = cli.generate_ssh_key(key_path)
        assert result["success"] is True
        assert result["public_key"] == pub_key_content
        assert result["fingerprint"] == ""
| 1039 | + | |
| 1040 | + | |
| 1041 | +# --------------------------------------------------------------------------- | |
| 1042 | +# http_proxy | |
| 1043 | +# --------------------------------------------------------------------------- | |
| 1044 | + | |
| 1045 | + | |
class TestHttpProxy:
    """http_proxy(): tunnel one raw HTTP request through `fossil http`.

    A hand-built HTTP/1.1 request is written to fossil's stdin and the raw
    response is split into a (body_bytes, content_type) pair.
    """

    def test_parses_crlf_response(self, tmp_path):
        """Standard HTTP response with \\r\\n\\r\\n separator."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        raw_response = b"HTTP/1.1 200 OK\r\nContent-Type: application/x-fossil\r\n\r\n\x00\x01\x02\x03"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=raw_response)):
            body, content_type = cli.http_proxy(repo_path, b"request_body")
        assert body == b"\x00\x01\x02\x03"
        assert content_type == "application/x-fossil"

    def test_parses_lf_response(self, tmp_path):
        """Fallback: \\n\\n separator (no \\r)."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        raw_response = b"Content-Type: text/html\n\n<html>body</html>"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=raw_response)):
            body, content_type = cli.http_proxy(repo_path, b"req")
        assert body == b"<html>body</html>"
        assert content_type == "text/html"

    def test_no_separator_returns_entire_body(self, tmp_path):
        """If no header/body separator, treat entire output as body."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        raw_response = b"raw binary data with no headers"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=raw_response)):
            body, content_type = cli.http_proxy(repo_path, b"req")
        assert body == raw_response
        # Content type falls back to fossil's sync-protocol default.
        assert content_type == "application/x-fossil"

    def test_localauth_flag(self, tmp_path):
        # localauth=True must add --localauth to the fossil command line.
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as mock_run:
            cli.http_proxy(repo_path, b"body", localauth=True)
        cmd = mock_run.call_args[0][0]
        assert "--localauth" in cmd

    def test_no_localauth_flag(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as mock_run:
            cli.http_proxy(repo_path, b"body", localauth=False)
        cmd = mock_run.call_args[0][0]
        assert "--localauth" not in cmd

    def test_builds_http_request_on_stdin(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        request_body = b"\x00\x01binary-data"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as mock_run:
            cli.http_proxy(repo_path, request_body, content_type="application/x-fossil")
        http_input = mock_run.call_args[1]["input"]
        # Should contain POST, Host, Content-Type, Content-Length headers + body
        assert b"POST /xfer HTTP/1.1\r\n" in http_input
        assert b"Host: localhost\r\n" in http_input
        assert b"Content-Type: application/x-fossil\r\n" in http_input
        assert f"Content-Length: {len(request_body)}".encode() in http_input
        assert http_input.endswith(request_body)

    def test_default_content_type(self, tmp_path):
        """When no content_type provided, defaults to application/x-fossil."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as mock_run:
            cli.http_proxy(repo_path, b"body")
        http_input = mock_run.call_args[1]["input"]
        assert b"Content-Type: application/x-fossil\r\n" in http_input

    def test_timeout_raises(self, tmp_path):
        # Unlike git_export's failure dict, http_proxy propagates timeouts.
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with (
            patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 120)),
            pytest.raises(subprocess.TimeoutExpired),
        ):
            cli.http_proxy(repo_path, b"body")

    def test_file_not_found_raises(self, tmp_path):
        cli = FossilCLI(binary="/nonexistent/fossil")
        repo_path = tmp_path / "repo.fossil"
        with (
            patch("subprocess.run", side_effect=FileNotFoundError),
            pytest.raises(FileNotFoundError),
        ):
            cli.http_proxy(repo_path, b"body")

    def test_nonzero_returncode_does_not_raise(self, tmp_path):
        """Non-zero exit code logs a warning but does not raise."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        raw_response = b"Content-Type: application/x-fossil\r\n\r\nbody"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=raw_response, returncode=1)):
            body, ct = cli.http_proxy(repo_path, b"req")
        assert body == b"body"

    def test_gateway_interface_stripped(self, tmp_path):
        """GATEWAY_INTERFACE must not be in the env passed to fossil http."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with (
            patch.dict(os.environ, {"GATEWAY_INTERFACE": "CGI/1.1"}),
            patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as mock_run,
        ):
            cli.http_proxy(repo_path, b"body")
        env = mock_run.call_args[1]["env"]
        assert "GATEWAY_INTERFACE" not in env
| 1154 | + | |
| 1155 | + | |
| 1156 | +# --------------------------------------------------------------------------- | |
| 1157 | +# shun / shun_list | |
| 1158 | +# --------------------------------------------------------------------------- | |
| 1159 | + | |
| 1160 | + | |
class TestShun:
    """shun(): wrapper around `fossil shun <uuid>` returning a result dict."""

    def test_shun_success(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok(stdout="Shunned")):
            outcome = wrapper.shun(tmp_path / "repo.fossil", "abc123def456")
        assert outcome["success"] is True
        assert "Shunned" in outcome["message"]

    def test_shun_failure(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_fail(stdout="", stderr="not found")):
            outcome = wrapper.shun(tmp_path / "repo.fossil", "badid")
        assert outcome["success"] is False
        assert "not found" in outcome["message"]

    def test_shun_combines_stdout_stderr(self, tmp_path):
        # The message field must merge both streams.
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok(stdout="out\n", stderr="warning")):
            outcome = wrapper.shun(tmp_path / "repo.fossil", "abc123")
        for fragment in ("out", "warning"):
            assert fragment in outcome["message"]
| 1185 | + | |
| 1186 | + | |
class TestShunList:
    """shun_list(): stdout is parsed into a clean list of UUID strings."""

    def test_returns_uuids(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok(stdout="abc123\ndef456\nghi789\n")):
            listed = wrapper.shun_list(tmp_path / "repo.fossil")
        assert listed == ["abc123", "def456", "ghi789"]

    def test_returns_empty_on_failure(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_fail()):
            listed = wrapper.shun_list(tmp_path / "repo.fossil")
        assert listed == []

    def test_strips_whitespace_and_empty_lines(self, tmp_path):
        # Blank lines and surrounding whitespace must be discarded.
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok(stdout="\n abc123 \n\n def456\n\n")):
            listed = wrapper.shun_list(tmp_path / "repo.fossil")
        assert listed == ["abc123", "def456"]
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -0,0 +1,1207 @@
| 1 | """Unit tests for fossil/cli.py -- FossilCLI subprocess wrapper. |
| 2 | |
| 3 | Tests mock subprocess.run throughout since FossilCLI is a thin wrapper |
| 4 | around the fossil binary. We verify that: |
| 5 | - Correct commands are assembled for every method |
| 6 | - Success/failure return values are propagated correctly |
| 7 | - Environment variables are set properly (_env property) |
| 8 | - Timeouts and exceptions are handled gracefully |
| 9 | - Edge-case inputs (empty strings, special characters) work |
| 10 | """ |
| 11 | |
| 12 | import os |
| 13 | import subprocess |
| 14 | from pathlib import Path |
| 15 | from unittest.mock import MagicMock, patch |
| 16 | |
| 17 | import pytest |
| 18 | |
| 19 | from fossil.cli import FossilCLI |
| 20 | |
| 21 | # --------------------------------------------------------------------------- |
| 22 | # Helpers |
| 23 | # --------------------------------------------------------------------------- |
| 24 | |
| 25 | |
def _ok(stdout="", stderr="", returncode=0):
    """Mock a CompletedProcess for a command that (by default) succeeded."""
    return subprocess.CompletedProcess([], returncode, stdout, stderr)
| 29 | |
| 30 | |
def _fail(stdout="", stderr="error", returncode=1):
    """Mock a CompletedProcess for a command that (by default) failed."""
    return subprocess.CompletedProcess([], returncode, stdout, stderr)
| 34 | |
| 35 | |
def _ok_bytes(stdout=b"", stderr=b"", returncode=0):
    """Mock a CompletedProcess whose streams are raw bytes (not text)."""
    return subprocess.CompletedProcess([], returncode, stdout, stderr)
| 39 | |
| 40 | |
| 41 | # --------------------------------------------------------------------------- |
| 42 | # Constructor and _env |
| 43 | # --------------------------------------------------------------------------- |
| 44 | |
| 45 | |
class TestFossilCLIInit:
    """Constructor: explicit binary path vs constance fallback."""

    def test_explicit_binary(self):
        # An explicit path wins outright.
        wrapper = FossilCLI(binary="/usr/local/bin/fossil")
        assert wrapper.binary == "/usr/local/bin/fossil"

    def test_constance_fallback(self):
        # Without an explicit path, the binary comes from constance config.
        fake_config = MagicMock()
        fake_config.FOSSIL_BINARY_PATH = "/opt/fossil/bin/fossil"
        with patch("constance.config", fake_config):
            assert FossilCLI().binary == "/opt/fossil/bin/fossil"
| 59 | |
| 60 | |
class TestEnvProperty:
    """_env injects USER=fossilrepo into the inherited environment."""

    def test_env_sets_user(self):
        environment = FossilCLI(binary="/bin/false")._env
        assert environment["USER"] == "fossilrepo"

    def test_env_inherits_system_env(self):
        environment = FossilCLI(binary="/bin/false")._env
        # PATH should come from os.environ
        assert "PATH" in environment
| 74 | |
| 75 | |
| 76 | # --------------------------------------------------------------------------- |
| 77 | # _run helper |
| 78 | # --------------------------------------------------------------------------- |
| 79 | |
| 80 | |
class TestRunHelper:
    """_run assembles the command and delegates to subprocess.run."""

    def test_run_builds_correct_command(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok("ok")) as run_mock:
            wrapper._run("version")
        run_mock.assert_called_once()
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "version"]

    def test_run_passes_env(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            wrapper._run("version")
        assert run_mock.call_args[1]["env"]["USER"] == "fossilrepo"

    def test_run_uses_check_true(self):
        """_run uses check=True so CalledProcessError is raised on failure."""
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        failure = subprocess.CalledProcessError(1, "fossil")
        with patch("subprocess.run", side_effect=failure), pytest.raises(subprocess.CalledProcessError):
            wrapper._run("bad-command")

    def test_run_custom_timeout(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            wrapper._run("clone", "http://example.com", timeout=120)
        assert run_mock.call_args[1]["timeout"] == 120

    def test_run_multiple_args(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            wrapper._run("push", "-R", "/tmp/repo.fossil")
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "push", "-R", "/tmp/repo.fossil"]
| 120 | |
| 121 | |
| 122 | # --------------------------------------------------------------------------- |
| 123 | # init |
| 124 | # --------------------------------------------------------------------------- |
| 125 | |
| 126 | |
class TestInit:
    """init(): creates parent directories, shells out to `fossil init`."""

    def test_init_creates_parent_dirs_and_runs_fossil_init(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        target = tmp_path / "sub" / "dir" / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert wrapper.init(target) == target
        # Parent dirs created
        assert target.parent.exists()
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "init", str(target)]

    def test_init_returns_path(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        target = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()):
            created = wrapper.init(target)
        assert isinstance(created, Path)
        assert created == target
| 146 | |
| 147 | |
| 148 | # --------------------------------------------------------------------------- |
| 149 | # version |
| 150 | # --------------------------------------------------------------------------- |
| 151 | |
| 152 | |
class TestVersion:
    """version(): strips whitespace, propagates subprocess failures."""

    def test_version_returns_stripped_stdout(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        raw_output = " This is fossil version 2.24\n"
        with patch("subprocess.run", return_value=_ok(raw_output)):
            assert wrapper.version() == "This is fossil version 2.24"

    def test_version_propagates_error(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        failure = subprocess.CalledProcessError(1, "fossil")
        with patch("subprocess.run", side_effect=failure), pytest.raises(subprocess.CalledProcessError):
            wrapper.version()
| 167 | |
| 168 | |
| 169 | # --------------------------------------------------------------------------- |
| 170 | # is_available |
| 171 | # --------------------------------------------------------------------------- |
| 172 | |
| 173 | |
class TestIsAvailable:
    """is_available(): maps version() success/failure onto a boolean."""

    def test_available_when_version_works(self):
        with patch("subprocess.run", return_value=_ok("2.24")):
            assert FossilCLI(binary="/usr/bin/fossil").is_available() is True

    def test_not_available_on_file_not_found(self):
        with patch("subprocess.run", side_effect=FileNotFoundError):
            assert FossilCLI(binary="/nonexistent/fossil").is_available() is False

    def test_not_available_on_called_process_error(self):
        failure = subprocess.CalledProcessError(1, "fossil")
        with patch("subprocess.run", side_effect=failure):
            assert FossilCLI(binary="/usr/bin/fossil").is_available() is False
| 189 | |
| 190 | |
| 191 | # --------------------------------------------------------------------------- |
| 192 | # render_pikchr |
| 193 | # --------------------------------------------------------------------------- |
| 194 | |
| 195 | |
class TestRenderPikchr:
    """render_pikchr(): pipes source via stdin, degrades to '' on any error."""

    def test_renders_svg_on_success(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        svg = '<svg viewBox="0 0 100 100"></svg>'
        completed = subprocess.CompletedProcess([], 0, stdout=svg, stderr="")
        with patch("subprocess.run", return_value=completed) as run_mock:
            assert wrapper.render_pikchr("circle") == svg
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "pikchr", "-"]
        assert run_mock.call_args[1]["input"] == "circle"

    def test_returns_empty_on_failure(self):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        completed = subprocess.CompletedProcess([], 1, stdout="", stderr="error")
        with patch("subprocess.run", return_value=completed):
            assert wrapper.render_pikchr("bad") == ""

    def test_returns_empty_on_file_not_found(self):
        with patch("subprocess.run", side_effect=FileNotFoundError):
            assert FossilCLI(binary="/usr/bin/fossil").render_pikchr("test") == ""

    def test_returns_empty_on_timeout(self):
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 10)):
            assert FossilCLI(binary="/usr/bin/fossil").render_pikchr("test") == ""
| 224 | |
| 225 | # --------------------------------------------------------------------------- |
| 226 | # ensure_default_user |
| 227 | # --------------------------------------------------------------------------- |
| 228 | |
| 229 | |
class TestEnsureDefaultUser:
    """ensure_default_user(): create the service user if absent, then set it
    as the repository default; all errors are deliberately swallowed.

    The ordered `side_effect` lists mirror the subprocess sequence the
    implementation issues: `user list`, optionally `user new`, `user default`.
    """

    def test_creates_user_when_missing(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        # First call: user list (user not present), second: create, third: default
        with patch("subprocess.run") as mock_run:
            mock_run.side_effect = [
                _ok(stdout="admin\n"),  # user list -- "fossilrepo" not in output
                _ok(),  # user new
                _ok(),  # user default
            ]
            cli.ensure_default_user(repo_path)
        assert mock_run.call_count == 3
        # Verify the user new call
        new_cmd = mock_run.call_args_list[1][0][0]
        assert "user" in new_cmd
        assert "new" in new_cmd
        assert "fossilrepo" in new_cmd

    def test_skips_create_when_user_exists(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with patch("subprocess.run") as mock_run:
            mock_run.side_effect = [
                _ok(stdout="admin\nfossilrepo\n"),  # user list -- fossilrepo IS present
                _ok(),  # user default
            ]
            cli.ensure_default_user(repo_path)
        assert mock_run.call_count == 2  # no "new" call

    def test_custom_username(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with patch("subprocess.run") as mock_run:
            mock_run.side_effect = [
                _ok(stdout="admin\n"),  # user list -- custom not present
                _ok(),  # user new
                _ok(),  # user default
            ]
            cli.ensure_default_user(repo_path, username="custom-bot")
        new_cmd = mock_run.call_args_list[1][0][0]
        assert "custom-bot" in new_cmd

    def test_silently_swallows_exceptions(self, tmp_path):
        """ensure_default_user has a bare except -- should not raise."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=Exception("kaboom")):
            cli.ensure_default_user(repo_path)  # should not raise
| 279 | |
| 280 | |
| 281 | # --------------------------------------------------------------------------- |
| 282 | # tarball |
| 283 | # --------------------------------------------------------------------------- |
| 284 | |
| 285 | |
class TestTarball:
    """tarball(): streams `fossil tarball` output through /dev/stdout."""

    def test_returns_bytes_on_success(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        payload = b"\x1f\x8b\x08\x00" + b"\x00" * 100  # fake gzip header
        with patch("subprocess.run", return_value=_ok_bytes(stdout=payload)) as run_mock:
            assert wrapper.tarball(repo, "trunk") == payload
        expected = ["/usr/bin/fossil", "tarball", "trunk", "-R", str(repo), "/dev/stdout"]
        assert run_mock.call_args[0][0] == expected

    def test_returns_empty_bytes_on_failure(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_ok_bytes(returncode=1)):
            assert wrapper.tarball(tmp_path / "repo.fossil", "trunk") == b""
| 303 | |
| 304 | |
| 305 | # --------------------------------------------------------------------------- |
| 306 | # zip_archive |
| 307 | # --------------------------------------------------------------------------- |
| 308 | |
| 309 | |
class TestZipArchive:
    """zip_archive(): fossil writes a temp file which is read back as bytes."""

    def test_returns_bytes_on_success(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        payload = b"PK\x03\x04" + b"\x00" * 100

        def fake_run(cmd, **kwargs):
            # Simulate fossil writing the archive to the output path, which
            # sits at index 3: <binary> zip <checkin> <outfile> -R <repo>
            Path(cmd[3]).write_bytes(payload)
            return _ok()

        with patch("subprocess.run", side_effect=fake_run):
            assert wrapper.zip_archive(tmp_path / "repo.fossil", "trunk") == payload

    def test_returns_empty_bytes_on_failure(self, tmp_path):
        wrapper = FossilCLI(binary="/usr/bin/fossil")
        with patch("subprocess.run", return_value=_fail()):
            assert wrapper.zip_archive(tmp_path / "repo.fossil", "trunk") == b""
| 333 | |
| 334 | |
| 335 | # --------------------------------------------------------------------------- |
| 336 | # blame |
| 337 | # --------------------------------------------------------------------------- |
| 338 | |
| 339 | |
| 340 | class TestBlame: |
    def test_parses_blame_output(self, tmp_path):
        """Each `uuid date user: text` blame line becomes a field dict."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        blame_output = (
            "abc12345 2026-01-15 ragelink: def hello():\n"
            "abc12345 2026-01-15 ragelink: return 'world'\n"
            "def67890 2026-01-20 contributor: pass\n"
        )
        with patch("subprocess.run") as mock_run:
            # blame() opens a temporary checkout, runs blame, then closes it.
            mock_run.side_effect = [
                _ok(),  # fossil open
                _ok(stdout=blame_output),  # fossil blame
                _ok(),  # fossil close
            ]
            lines = cli.blame(repo_path, "main.py")
        assert len(lines) == 3
        assert lines[0]["uuid"] == "abc12345"
        assert lines[0]["date"] == "2026-01-15"
        assert lines[0]["user"] == "ragelink"
        assert lines[0]["text"] == "def hello():"
        assert lines[2]["user"] == "contributor"
| 362 | |
| 363 | def test_returns_empty_on_failure(self, tmp_path): |
| 364 | cli = FossilCLI(binary="/usr/bin/fossil") |
| 365 | repo_path = tmp_path / "repo.fossil" |
| 366 | with patch("subprocess.run") as mock_run: |
| 367 | mock_run.side_effect = [ |
| 368 | _ok(), # fossil open |
| 369 | _fail(), # fossil blame fails |
| 370 | _ok(), # fossil close |
| 371 | ] |
| 372 | lines = cli.blame(repo_path, "nonexistent.py") |
| 373 | assert lines == [] |
| 374 | |
| 375 | def test_returns_empty_on_exception(self, tmp_path): |
| 376 | """blame has a broad except -- should not raise.""" |
| 377 | cli = FossilCLI(binary="/usr/bin/fossil") |
| 378 | repo_path = tmp_path / "repo.fossil" |
| 379 | with patch("subprocess.run", side_effect=Exception("error")): |
| 380 | lines = cli.blame(repo_path, "file.py") |
| 381 | assert lines == [] |
| 382 | |
| 383 | def test_cleans_up_tmpdir(self, tmp_path): |
| 384 | """Temp directory must be cleaned up even on error.""" |
| 385 | cli = FossilCLI(binary="/usr/bin/fossil") |
| 386 | repo_path = tmp_path / "repo.fossil" |
| 387 | |
| 388 | created_dirs = [] |
| 389 | original_mkdtemp = __import__("tempfile").mkdtemp |
| 390 | |
| 391 | def tracking_mkdtemp(**kwargs): |
| 392 | d = original_mkdtemp(**kwargs) |
| 393 | created_dirs.append(d) |
| 394 | return d |
| 395 | |
| 396 | with ( |
| 397 | patch("subprocess.run", side_effect=Exception("fail")), |
| 398 | patch("tempfile.mkdtemp", side_effect=tracking_mkdtemp), |
| 399 | ): |
| 400 | cli.blame(repo_path, "file.py") |
| 401 | |
| 402 | # The tmpdir should have been cleaned up by shutil.rmtree |
| 403 | for d in created_dirs: |
| 404 | assert not Path(d).exists() |
| 405 | |
| 406 | |
| 407 | # --------------------------------------------------------------------------- |
| 408 | # push |
| 409 | # --------------------------------------------------------------------------- |
| 410 | |
| 411 | |
class TestPush:
    """push: reports success plus how many artifacts were sent."""

    def test_push_success_with_artifacts(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="Round-trips: 1 Artifacts sent: 5 sent: 5")):
            outcome = fossil.push(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_sent"] == 5

    def test_push_with_remote_url(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="sent: 3")) as run_mock:
            outcome = fossil.push(repo, remote_url="https://fossil.example.com/repo")
        assert "https://fossil.example.com/repo" in run_mock.call_args[0][0]
        assert outcome["artifacts_sent"] == 3

    def test_push_no_artifacts_in_output(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="nothing to push")):
            outcome = fossil.push(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_sent"] == 0

    def test_push_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="connection refused")):
            assert fossil.push(repo)["success"] is False

    def test_push_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 120)):
            outcome = fossil.push(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_sent"] == 0
        assert "timed out" in outcome["message"].lower()

    def test_push_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError("No such file")):
            outcome = fossil.push(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_sent"] == 0
| 461 | |
| 462 | |
| 463 | # --------------------------------------------------------------------------- |
| 464 | # sync |
| 465 | # --------------------------------------------------------------------------- |
| 466 | |
| 467 | |
class TestSync:
    """sync: success mirrors the process exit code; errors never raise."""

    def test_sync_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="sync complete")):
            outcome = fossil.sync(repo)
        assert outcome["success"] is True
        assert outcome["message"] == "sync complete"

    def test_sync_with_remote_url(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="ok")) as run_mock:
            fossil.sync(repo, remote_url="https://fossil.example.com/repo")
        assert "https://fossil.example.com/repo" in run_mock.call_args[0][0]

    def test_sync_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="error")):
            assert fossil.sync(repo)["success"] is False

    def test_sync_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 120)):
            outcome = fossil.sync(repo)
        assert outcome["success"] is False
        assert "timed out" in outcome["message"].lower()

    def test_sync_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError("No such file")):
            assert fossil.sync(repo)["success"] is False
| 506 | |
| 507 | |
| 508 | # --------------------------------------------------------------------------- |
| 509 | # pull |
| 510 | # --------------------------------------------------------------------------- |
| 511 | |
| 512 | |
class TestPull:
    """pull: counts artifacts received; failure paths report zero."""

    def test_pull_success_with_artifacts(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="Round-trips: 1 received: 12")):
            outcome = fossil.pull(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_received"] == 12

    def test_pull_no_artifacts(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="nothing new")):
            outcome = fossil.pull(repo)
        assert outcome["success"] is True
        assert outcome["artifacts_received"] == 0

    def test_pull_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="connection refused")):
            assert fossil.pull(repo)["success"] is False

    def test_pull_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 60)):
            outcome = fossil.pull(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_received"] == 0

    def test_pull_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError("No such file")):
            outcome = fossil.pull(repo)
        assert outcome["success"] is False
        assert outcome["artifacts_received"] == 0
| 552 | |
| 553 | |
| 554 | # --------------------------------------------------------------------------- |
| 555 | # get_remote_url |
| 556 | # --------------------------------------------------------------------------- |
| 557 | |
| 558 | |
class TestGetRemoteUrl:
    """get_remote_url: returns the configured URL, or "" on any failure."""

    def test_returns_url_on_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        completed = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="https://fossil.example.com/repo\n", stderr=""
        )
        with patch("subprocess.run", return_value=completed):
            # Trailing newline from fossil is stripped
            assert fossil.get_remote_url(repo) == "https://fossil.example.com/repo"

    def test_returns_empty_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        completed = subprocess.CompletedProcess(
            args=[], returncode=1, stdout="", stderr="not configured"
        )
        with patch("subprocess.run", return_value=completed):
            assert fossil.get_remote_url(repo) == ""

    def test_returns_empty_on_file_not_found(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError):
            assert fossil.get_remote_url(repo) == ""

    def test_returns_empty_on_timeout(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 10)):
            assert fossil.get_remote_url(repo) == ""
| 589 | |
| 590 | |
| 591 | # --------------------------------------------------------------------------- |
| 592 | # wiki_commit |
| 593 | # --------------------------------------------------------------------------- |
| 594 | |
| 595 | |
class TestWikiCommit:
    """wiki commit: page content goes via stdin, page name on the command line."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.wiki_commit(repo, "Home", "# Welcome") is True
        assert run_mock.call_args[1]["input"] == "# Welcome"
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "wiki", "commit", "Home", "-R", str(repo)]

    def test_with_user(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.wiki_commit(repo, "Home", "content", user="admin")
        invoked = run_mock.call_args[0][0]
        assert "--technote-user" in invoked
        assert "admin" in invoked

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.wiki_commit(repo, "Missing", "content") is False
| 622 | |
| 623 | |
| 624 | # --------------------------------------------------------------------------- |
| 625 | # wiki_create |
| 626 | # --------------------------------------------------------------------------- |
| 627 | |
| 628 | |
class TestWikiCreate:
    """wiki create: same stdin contract as commit, different subcommand."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.wiki_create(repo, "NewPage", "# New content") is True
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "wiki", "create", "NewPage", "-R", str(repo)]
        assert run_mock.call_args[1]["input"] == "# New content"

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.wiki_create(repo, "Dup", "content") is False
| 646 | |
| 647 | |
| 648 | # --------------------------------------------------------------------------- |
| 649 | # ticket_add |
| 650 | # --------------------------------------------------------------------------- |
| 651 | |
| 652 | |
class TestTicketAdd:
    """ticket add: field dict is flattened onto the command line."""

    def test_success_with_fields(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        fields = {"title": "Bug report", "status": "open", "type": "bug"}
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.ticket_add(repo, fields) is True
        invoked = run_mock.call_args[0][0]
        # Expected shape: fossil ticket add -R <path> title "Bug report" status open type bug
        assert invoked[:4] == ["/usr/bin/fossil", "ticket", "add", "-R"]
        assert "title" in invoked
        assert "Bug report" in invoked

    def test_empty_fields(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.ticket_add(repo, {}) is True
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "ticket", "add", "-R", str(repo)]

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.ticket_add(repo, {"title": "test"}) is False
| 682 | |
| 683 | |
| 684 | # --------------------------------------------------------------------------- |
| 685 | # ticket_change |
| 686 | # --------------------------------------------------------------------------- |
| 687 | |
| 688 | |
class TestTicketChange:
    """ticket change: takes the ticket uuid plus the fields to update."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        ticket_id = "abc123def456"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.ticket_change(repo, ticket_id, {"status": "closed"}) is True
        invoked = run_mock.call_args[0][0]
        assert invoked[:5] == ["/usr/bin/fossil", "ticket", "change", ticket_id, "-R"]
        assert "status" in invoked
        assert "closed" in invoked

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.ticket_change(repo, "badid", {"status": "open"}) is False
| 708 | |
| 709 | |
| 710 | # --------------------------------------------------------------------------- |
| 711 | # technote_create |
| 712 | # --------------------------------------------------------------------------- |
| 713 | |
| 714 | |
class TestTechnoteCreate:
    """technote_create: body via stdin; timestamp explicit or auto-generated."""

    def test_with_explicit_timestamp(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.technote_create(repo, "Release v1.0", "Details here", timestamp="2026-04-07T12:00:00") is True
        invoked = run_mock.call_args[0][0]
        assert "--technote" in invoked
        assert "2026-04-07T12:00:00" in invoked
        assert run_mock.call_args[1]["input"] == "Details here"

    def test_auto_generates_timestamp(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.technote_create(repo, "Note", "body")
        invoked = run_mock.call_args[0][0]
        # The value after --technote should be an ISO datetime (has a T separator)
        generated_ts = invoked[invoked.index("--technote") + 1]
        assert "T" in generated_ts

    def test_with_user(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.technote_create(repo, "Note", "body", timestamp="2026-01-01T00:00:00", user="author")
        invoked = run_mock.call_args[0][0]
        assert "--technote-user" in invoked
        assert "author" in invoked

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.technote_create(repo, "Fail", "body", timestamp="2026-01-01T00:00:00") is False
| 752 | |
| 753 | |
| 754 | # --------------------------------------------------------------------------- |
| 755 | # technote_edit |
| 756 | # --------------------------------------------------------------------------- |
| 757 | |
| 758 | |
class TestTechnoteEdit:
    """technote_edit: addresses an existing note by id, new body via stdin."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.technote_edit(repo, "abc123", "Updated body") is True
        invoked = run_mock.call_args[0][0]
        assert "--technote" in invoked
        assert "abc123" in invoked
        assert run_mock.call_args[1]["input"] == "Updated body"

    def test_with_user(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            fossil.technote_edit(repo, "abc123", "body", user="editor")
        invoked = run_mock.call_args[0][0]
        assert "--technote-user" in invoked
        assert "editor" in invoked

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.technote_edit(repo, "badid", "body") is False
| 786 | |
| 787 | |
| 788 | # --------------------------------------------------------------------------- |
| 789 | # uv_add |
| 790 | # --------------------------------------------------------------------------- |
| 791 | |
| 792 | |
class TestUvAdd:
    """uv add: stores a local file as an unversioned artifact."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        local_file = tmp_path / "logo.png"
        with patch("subprocess.run", return_value=_ok()) as run_mock:
            assert fossil.uv_add(repo, "logo.png", local_file) is True
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "uv", "add", str(local_file), "--as", "logo.png", "-R", str(repo)]

    def test_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.uv_add(repo, "file.txt", tmp_path / "file.txt") is False
| 810 | |
| 811 | |
| 812 | # --------------------------------------------------------------------------- |
| 813 | # uv_cat |
| 814 | # --------------------------------------------------------------------------- |
| 815 | |
| 816 | |
class TestUvCat:
    """uv cat: streams unversioned content as raw bytes."""

    def test_returns_bytes_on_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        png_header = b"\x89PNG\r\n\x1a\n"  # PNG magic bytes
        with patch("subprocess.run", return_value=_ok_bytes(stdout=png_header)) as run_mock:
            assert fossil.uv_cat(repo, "logo.png") == png_header
        assert run_mock.call_args[0][0] == ["/usr/bin/fossil", "uv", "cat", "logo.png", "-R", str(repo)]

    def test_raises_file_not_found_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with (
            patch("subprocess.run", return_value=_ok_bytes(returncode=1)),
            pytest.raises(FileNotFoundError, match="Unversioned file not found"),
        ):
            fossil.uv_cat(repo, "missing.txt")
| 836 | |
| 837 | |
| 838 | # --------------------------------------------------------------------------- |
| 839 | # git_export (supplements TestGitExportTokenHandling in test_security.py) |
| 840 | # --------------------------------------------------------------------------- |
| 841 | |
| 842 | |
class TestGitExport:
    """Tests for FossilCLI.git_export: command construction, autopush,
    timeout handling, and cleanup of the askpass/token temp files."""

    def test_basic_export_no_autopush(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"
        with patch("subprocess.run", return_value=_ok(stdout="exported 5 commits")) as mock_run:
            result = cli.git_export(repo_path, mirror_dir)
            assert result["success"] is True
            assert result["message"] == "exported 5 commits"
            cmd = mock_run.call_args[0][0]
            assert cmd == ["/usr/bin/fossil", "git", "export", str(mirror_dir), "-R", str(repo_path)]
            # mirror_dir should be created
            assert mirror_dir.exists()

    def test_with_autopush_url(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"
        with patch("subprocess.run", return_value=_ok(stdout="pushed")) as mock_run:
            cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/user/repo.git")
            cmd = mock_run.call_args[0][0]
            assert "--autopush" in cmd
            assert "https://github.com/user/repo.git" in cmd

    def test_timeout_returns_failure(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 300)):
            result = cli.git_export(repo_path, mirror_dir)
            assert result["success"] is False
            assert "timed out" in result["message"].lower()

    def test_failure_returncode(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"
        with patch("subprocess.run", return_value=_ok(stdout="fatal error", returncode=1)):
            result = cli.git_export(repo_path, mirror_dir)
            assert result["success"] is False

    def test_temp_files_cleaned_on_success(self, tmp_path):
        """Askpass and token temp files are removed after successful export."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"

        # Capture the real mkstemp before it gets patched below.
        import tempfile

        created_files = []
        original_mkstemp = tempfile.mkstemp

        def tracking_mkstemp(*args, **kwargs):
            # Accept positional args too: a **kwargs-only shim would raise
            # TypeError if the implementation passes suffix/prefix
            # positionally, masking the cleanup assertion below.
            fd, path = original_mkstemp(*args, **kwargs)
            created_files.append(path)
            return fd, path

        with (
            patch("subprocess.run", return_value=_ok(stdout="ok")),
            patch("tempfile.mkstemp", side_effect=tracking_mkstemp),
        ):
            cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/u/r.git", auth_token="tok123")

        # Both temp files should be cleaned up
        assert len(created_files) == 2
        for f in created_files:
            assert not os.path.exists(f)

    def test_temp_files_cleaned_on_timeout(self, tmp_path):
        """Askpass and token temp files are removed even when subprocess times out."""
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"

        import tempfile

        created_files = []
        original_mkstemp = tempfile.mkstemp

        def tracking_mkstemp(*args, **kwargs):
            fd, path = original_mkstemp(*args, **kwargs)
            created_files.append(path)
            return fd, path

        with (
            patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 300)),
            patch("tempfile.mkstemp", side_effect=tracking_mkstemp),
        ):
            cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/u/r.git", auth_token="tok123")

        for f in created_files:
            assert not os.path.exists(f)

    def test_no_redaction_when_no_token(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"
        with patch("subprocess.run", return_value=_ok(stdout="push ok")):
            result = cli.git_export(repo_path, mirror_dir, autopush_url="https://github.com/u/r.git")
            assert result["message"] == "push ok"
            assert "[REDACTED]" not in result["message"]

    def test_combines_stdout_and_stderr(self, tmp_path):
        cli = FossilCLI(binary="/usr/bin/fossil")
        repo_path = tmp_path / "repo.fossil"
        mirror_dir = tmp_path / "mirror"
        with patch("subprocess.run", return_value=_ok(stdout="out\n", stderr="err")):
            result = cli.git_export(repo_path, mirror_dir)
            assert "out" in result["message"]
            assert "err" in result["message"]
| 950 | |
| 951 | |
| 952 | # --------------------------------------------------------------------------- |
| 953 | # generate_ssh_key |
| 954 | # --------------------------------------------------------------------------- |
| 955 | |
| 956 | |
class TestGenerateSSHKey:
    """generate_ssh_key: wraps ssh-keygen and reports pubkey + fingerprint."""

    def test_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "keys" / "id_ed25519"
        pub_key = "ssh-ed25519 AAAAC3Nza...== fossilrepo"
        fp_line = "256 SHA256:abcdef123456 fossilrepo (ED25519)"

        # generate_ssh_key reads the .pub file after keygen runs
        key_path.parent.mkdir(parents=True, exist_ok=True)

        def fake_run(cmd, **kwargs):
            # First call (-t ...) "creates" the key pair; second (-lf ...)
            # prints the fingerprint line.
            if "-t" in cmd:
                key_path.with_suffix(".pub").write_text(pub_key)
                return _ok()
            if "-lf" in cmd:
                return _ok(stdout=fp_line)
            return _ok()

        with patch("subprocess.run", side_effect=fake_run):
            result = fossil.generate_ssh_key(key_path, comment="fossilrepo")

        assert result["success"] is True
        assert result["public_key"] == pub_key
        assert result["fingerprint"] == "SHA256:abcdef123456"

    def test_creates_parent_dirs(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "deep" / "nested" / "id_ed25519"
        with patch("subprocess.run", return_value=_fail()):
            fossil.generate_ssh_key(key_path)
        # Parent dirs are created up front, even when ssh-keygen fails
        assert key_path.parent.exists()

    def test_failure_returns_error_dict(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        with patch("subprocess.run", return_value=_fail()):
            result = fossil.generate_ssh_key(key_path)
        assert result["success"] is False
        assert result["public_key"] == ""
        assert result["fingerprint"] == ""

    def test_exception_returns_error_dict(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        with patch("subprocess.run", side_effect=Exception("ssh-keygen not found")):
            result = fossil.generate_ssh_key(key_path)
        assert result["success"] is False
        assert "ssh-keygen not found" in result["error"]

    def test_keygen_command_uses_ed25519(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        with patch("subprocess.run", return_value=_fail()) as run_mock:
            fossil.generate_ssh_key(key_path, comment="test-key")
        assert run_mock.call_args[0][0] == ["ssh-keygen", "-t", "ed25519", "-f", str(key_path), "-N", "", "-C", "test-key"]

    def test_fingerprint_empty_on_keygen_lf_failure(self, tmp_path):
        """If ssh-keygen -lf fails, fingerprint should be empty but success still True."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        key_path = tmp_path / "id_ed25519"
        pub_key = "ssh-ed25519 AAAAC3Nza...== test"

        def fake_run(cmd, **kwargs):
            if "-t" in cmd:
                key_path.with_suffix(".pub").write_text(pub_key)
                return _ok()
            if "-lf" in cmd:
                return _fail()
            return _ok()

        with patch("subprocess.run", side_effect=fake_run):
            result = fossil.generate_ssh_key(key_path)
        assert result["success"] is True
        assert result["public_key"] == pub_key
        assert result["fingerprint"] == ""
| 1039 | |
| 1040 | |
| 1041 | # --------------------------------------------------------------------------- |
| 1042 | # http_proxy |
| 1043 | # --------------------------------------------------------------------------- |
| 1044 | |
| 1045 | |
class TestHttpProxy:
    """http_proxy: response parsing, CLI flags, HTTP request framing, errors."""

    def test_parses_crlf_response(self, tmp_path):
        """Standard HTTP response with \\r\\n\\r\\n separator."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        wire = b"HTTP/1.1 200 OK\r\nContent-Type: application/x-fossil\r\n\r\n\x00\x01\x02\x03"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=wire)):
            payload, ctype = fossil.http_proxy(repo, b"request_body")
        assert payload == b"\x00\x01\x02\x03"
        assert ctype == "application/x-fossil"

    def test_parses_lf_response(self, tmp_path):
        """Fallback: \\n\\n separator (no \\r)."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        wire = b"Content-Type: text/html\n\n<html>body</html>"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=wire)):
            payload, ctype = fossil.http_proxy(repo, b"req")
        assert payload == b"<html>body</html>"
        assert ctype == "text/html"

    def test_no_separator_returns_entire_body(self, tmp_path):
        """If no header/body separator, treat entire output as body."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        wire = b"raw binary data with no headers"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=wire)):
            payload, ctype = fossil.http_proxy(repo, b"req")
        assert payload == wire
        assert ctype == "application/x-fossil"

    def test_localauth_flag(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as run_mock:
            fossil.http_proxy(repo, b"body", localauth=True)
        assert "--localauth" in run_mock.call_args[0][0]

    def test_no_localauth_flag(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as run_mock:
            fossil.http_proxy(repo, b"body", localauth=False)
        assert "--localauth" not in run_mock.call_args[0][0]

    def test_builds_http_request_on_stdin(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        body = b"\x00\x01binary-data"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as run_mock:
            fossil.http_proxy(repo, body, content_type="application/x-fossil")
        stdin_bytes = run_mock.call_args[1]["input"]
        # Full HTTP framing: request line, headers, then the raw body at the end.
        assert b"POST /xfer HTTP/1.1\r\n" in stdin_bytes
        assert b"Host: localhost\r\n" in stdin_bytes
        assert b"Content-Type: application/x-fossil\r\n" in stdin_bytes
        assert f"Content-Length: {len(body)}".encode() in stdin_bytes
        assert stdin_bytes.endswith(body)

    def test_default_content_type(self, tmp_path):
        """When no content_type provided, defaults to application/x-fossil."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as run_mock:
            fossil.http_proxy(repo, b"body")
        assert b"Content-Type: application/x-fossil\r\n" in run_mock.call_args[1]["input"]

    def test_timeout_raises(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("cmd", 120)):
            with pytest.raises(subprocess.TimeoutExpired):
                fossil.http_proxy(repo, b"body")

    def test_file_not_found_raises(self, tmp_path):
        fossil = FossilCLI(binary="/nonexistent/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", side_effect=FileNotFoundError):
            with pytest.raises(FileNotFoundError):
                fossil.http_proxy(repo, b"body")

    def test_nonzero_returncode_does_not_raise(self, tmp_path):
        """Non-zero exit code logs a warning but does not raise."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        wire = b"Content-Type: application/x-fossil\r\n\r\nbody"
        with patch("subprocess.run", return_value=_ok_bytes(stdout=wire, returncode=1)):
            payload, _ = fossil.http_proxy(repo, b"req")
        assert payload == b"body"

    def test_gateway_interface_stripped(self, tmp_path):
        """GATEWAY_INTERFACE must not be in the env passed to fossil http."""
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch.dict(os.environ, {"GATEWAY_INTERFACE": "CGI/1.1"}):
            with patch("subprocess.run", return_value=_ok_bytes(stdout=b"\r\n\r\n")) as run_mock:
                fossil.http_proxy(repo, b"body")
        assert "GATEWAY_INTERFACE" not in run_mock.call_args[1]["env"]
| 1154 | |
| 1155 | |
| 1156 | # --------------------------------------------------------------------------- |
| 1157 | # shun / shun_list |
| 1158 | # --------------------------------------------------------------------------- |
| 1159 | |
| 1160 | |
class TestShun:
    """shun: success/failure flag and stdout+stderr merged into the message."""

    def test_shun_success(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="Shunned")):
            outcome = fossil.shun(repo, "abc123def456")
        assert outcome["success"] is True
        assert "Shunned" in outcome["message"]

    def test_shun_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail(stdout="", stderr="not found")):
            outcome = fossil.shun(repo, "badid")
        assert outcome["success"] is False
        assert "not found" in outcome["message"]

    def test_shun_combines_stdout_stderr(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="out\n", stderr="warning")):
            outcome = fossil.shun(repo, "abc123")
        # Both streams must survive into the combined message.
        for fragment in ("out", "warning"):
            assert fragment in outcome["message"]
| 1185 | |
| 1186 | |
class TestShunList:
    """shun_list: one UUID per stdout line, whitespace-trimmed, [] on failure."""

    def test_returns_uuids(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="abc123\ndef456\nghi789\n")):
            assert fossil.shun_list(repo) == ["abc123", "def456", "ghi789"]

    def test_returns_empty_on_failure(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_fail()):
            assert fossil.shun_list(repo) == []

    def test_strips_whitespace_and_empty_lines(self, tmp_path):
        fossil = FossilCLI(binary="/usr/bin/fossil")
        repo = tmp_path / "repo.fossil"
        with patch("subprocess.run", return_value=_ok(stdout="\n abc123 \n\n def456\n\n")):
            assert fossil.shun_list(repo) == ["abc123", "def456"]
+1059
| --- a/tests/test_integrations.py | ||
| +++ b/tests/test_integrations.py | ||
| @@ -0,0 +1,1059 @@ | ||
| 1 | +"""Tests for fossil/github_api.py, fossil/oauth.py, and core/sanitize.py. | |
| 2 | + | |
| 3 | +Covers: | |
| 4 | +- GitHubClient: rate limiting, issue CRUD, file CRUD, error handling | |
| 5 | +- parse_github_repo: URL format parsing | |
| 6 | +- fossil_status_to_github: status mapping | |
| 7 | +- format_ticket_body: markdown generation | |
| 8 | +- content_hash: deterministic hashing | |
| 9 | +- OAuth: authorize URL builders, token exchange (success + failure) | |
| 10 | +- Sanitize: edge cases not covered in test_security.py | |
| 11 | +""" | |
| 12 | + | |
| 13 | +import hashlib | |
| 14 | +from types import SimpleNamespace | |
| 15 | +from unittest.mock import MagicMock, patch | |
| 16 | + | |
| 17 | +import pytest | |
| 18 | +from django.test import RequestFactory | |
| 19 | + | |
| 20 | +from core.sanitize import ( | |
| 21 | + _is_safe_url, | |
| 22 | + sanitize_html, | |
| 23 | +) | |
| 24 | +from fossil.github_api import ( | |
| 25 | + GitHubClient, | |
| 26 | + content_hash, | |
| 27 | + format_ticket_body, | |
| 28 | + fossil_status_to_github, | |
| 29 | + parse_github_repo, | |
| 30 | +) | |
| 31 | +from fossil.oauth import ( | |
| 32 | + GITHUB_AUTHORIZE_URL, | |
| 33 | + GITLAB_AUTHORIZE_URL, | |
| 34 | + github_authorize_url, | |
| 35 | + github_exchange_token, | |
| 36 | + gitlab_authorize_url, | |
| 37 | + gitlab_exchange_token, | |
| 38 | +) | |
| 39 | + | |
| 40 | +# --------------------------------------------------------------------------- | |
| 41 | +# Helpers | |
| 42 | +# --------------------------------------------------------------------------- | |
| 43 | + | |
| 44 | + | |
| 45 | +def _mock_response(status_code=200, json_data=None, text="", headers=None): | |
| 46 | + """Build a mock requests.Response.""" | |
| 47 | + resp = MagicMock() | |
| 48 | + resp.status_code = status_code | |
| 49 | + resp.json.return_value = json_data or {} | |
| 50 | + resp.text = text | |
| 51 | + resp.ok = 200 <= status_code < 300 | |
| 52 | + resp.headers = headers or {} | |
| 53 | + return resp | |
| 54 | + | |
| 55 | + | |
| 56 | +# =========================================================================== | |
| 57 | +# fossil/github_api.py -- parse_github_repo | |
| 58 | +# =========================================================================== | |
| 59 | + | |
| 60 | + | |
class TestParseGithubRepo:
    """parse_github_repo: (owner, repo) tuples from URLs, None on anything else."""

    def test_https_with_git_suffix(self):
        assert parse_github_repo("https://github.com/owner/repo.git") == ("owner", "repo")

    def test_https_without_git_suffix(self):
        assert parse_github_repo("https://github.com/owner/repo") == ("owner", "repo")

    def test_ssh_url(self):
        assert parse_github_repo("[email protected]:owner/repo.git") == ("owner", "repo")

    def test_ssh_url_without_git_suffix(self):
        assert parse_github_repo("[email protected]:owner/repo") == ("owner", "repo")

    def test_non_github_url_returns_none(self):
        assert parse_github_repo("https://gitlab.com/owner/repo.git") is None

    def test_malformed_url_returns_none(self):
        assert parse_github_repo("not-a-url") is None

    def test_empty_string_returns_none(self):
        assert parse_github_repo("") is None

    def test_owner_with_hyphens_and_dots(self):
        parsed = parse_github_repo("https://github.com/my-org.dev/my-repo.git")
        assert parsed == ("my-org.dev", "my-repo")

    def test_url_with_trailing_slash_returns_none(self):
        # The regex expects owner/repo at end of string, trailing slash breaks it
        assert parse_github_repo("https://github.com/owner/repo/") is None
| 94 | + | |
| 95 | + | |
| 96 | +# =========================================================================== | |
| 97 | +# fossil/github_api.py -- fossil_status_to_github | |
| 98 | +# =========================================================================== | |
| 99 | + | |
| 100 | + | |
class TestFossilStatusToGithub:
    """fossil_status_to_github: every fossil status collapses to open/closed."""

    @pytest.mark.parametrize(
        "status",
        ["closed", "fixed", "resolved", "wontfix", "unable_to_reproduce", "works_as_designed", "deferred"],
    )
    def test_closed_statuses(self, status):
        assert fossil_status_to_github(status) == "closed"

    @pytest.mark.parametrize("status", ["open", "active", "new", "review", "pending"])
    def test_open_statuses(self, status):
        assert fossil_status_to_github(status) == "open"

    def test_case_insensitive(self):
        # Mapping ignores letter case.
        for raw in ("CLOSED", "Fixed"):
            assert fossil_status_to_github(raw) == "closed"

    def test_strips_whitespace(self):
        assert fossil_status_to_github(" closed ") == "closed"
        assert fossil_status_to_github(" open ") == "open"

    def test_empty_string_maps_to_open(self):
        assert fossil_status_to_github("") == "open"
| 123 | + | |
| 124 | + | |
| 125 | +# =========================================================================== | |
| 126 | +# fossil/github_api.py -- content_hash | |
| 127 | +# =========================================================================== | |
| 128 | + | |
| 129 | + | |
class TestContentHash:
    """content_hash: deterministic SHA-256 hex digest of the UTF-8 text."""

    def test_deterministic(self):
        assert content_hash("hello") == content_hash("hello")

    def test_matches_sha256(self):
        assert content_hash("hello") == hashlib.sha256(b"hello").hexdigest()

    def test_different_inputs_different_hashes(self):
        assert content_hash("hello") != content_hash("world")

    def test_empty_string(self):
        assert content_hash("") == hashlib.sha256(b"").hexdigest()
| 144 | + | |
| 145 | + | |
| 146 | +# =========================================================================== | |
| 147 | +# fossil/github_api.py -- format_ticket_body | |
| 148 | +# =========================================================================== | |
| 149 | + | |
| 150 | + | |
class TestFormatTicketBody:
    """format_ticket_body: markdown rendering of a ticket, metadata table, comments."""

    def _ticket(self, **kwargs):
        # Minimal stand-in for a ticket object; tests override fields via kwargs.
        defaults = {
            "body": "Bug description",
            "type": "bug",
            "priority": "high",
            "severity": "critical",
            "subsystem": "auth",
            "resolution": "",
            "owner": "alice",
            "uuid": "abcdef1234567890",
        }
        defaults.update(kwargs)
        return SimpleNamespace(**defaults)

    def test_includes_body(self):
        ticket = self._ticket()
        result = format_ticket_body(ticket)
        assert "Bug description" in result

    def test_includes_metadata_table(self):
        # Each populated metadata field becomes a markdown table row.
        ticket = self._ticket()
        result = format_ticket_body(ticket)
        assert "| Type | bug |" in result
        assert "| Priority | high |" in result
        assert "| Severity | critical |" in result
        assert "| Subsystem | auth |" in result
        assert "| Owner | alice |" in result

    def test_skips_empty_metadata_fields(self):
        # With every metadata field blank, the table section is omitted entirely.
        ticket = self._ticket(type="", priority="", severity="", subsystem="", resolution="", owner="")
        result = format_ticket_body(ticket)
        assert "Fossil metadata" not in result

    def test_includes_uuid_trailer(self):
        # The output embeds (at least) the first 10 chars of the ticket uuid.
        ticket = self._ticket()
        result = format_ticket_body(ticket)
        assert "abcdef1234" in result

    def test_includes_comments(self):
        from datetime import datetime

        ticket = self._ticket()
        comments = [
            {"user": "bob", "timestamp": datetime(2025, 1, 15, 10, 30), "comment": "I can reproduce this."},
            {"user": "alice", "timestamp": datetime(2025, 1, 16, 14, 0), "comment": "Fix incoming."},
        ]
        result = format_ticket_body(ticket, comments=comments)
        assert "bob" in result
        assert "2025-01-15 10:30" in result
        assert "I can reproduce this." in result
        assert "alice" in result
        assert "Fix incoming." in result

    def test_no_comments(self):
        ticket = self._ticket()
        result = format_ticket_body(ticket, comments=None)
        assert "Comments" not in result

    def test_empty_comments_list(self):
        # An empty list behaves like None: no Comments section.
        ticket = self._ticket()
        result = format_ticket_body(ticket, comments=[])
        assert "Comments" not in result

    def test_comment_without_timestamp(self):
        # Timestamp is optional in a comment dict.
        ticket = self._ticket()
        comments = [{"user": "dan", "comment": "No timestamp here."}]
        result = format_ticket_body(ticket, comments=comments)
        assert "dan" in result
        assert "No timestamp here." in result

    def test_resolution_shown_when_set(self):
        ticket = self._ticket(resolution="wontfix")
        result = format_ticket_body(ticket)
        assert "| Resolution | wontfix |" in result

    def test_no_body_ticket(self):
        ticket = self._ticket(body="")
        result = format_ticket_body(ticket)
        # Should still have the uuid trailer
        assert "abcdef1234" in result
| 232 | + | |
| 233 | + | |
| 234 | +# =========================================================================== | |
| 235 | +# fossil/github_api.py -- GitHubClient | |
| 236 | +# =========================================================================== | |
| 237 | + | |
| 238 | + | |
class TestGitHubClientInit:
    def test_session_headers(self):
        """The session carries auth, accept, and API-version headers."""
        headers = GitHubClient("ghp_test123", min_interval=0).session.headers
        assert headers["Authorization"] == "Bearer ghp_test123"
        assert "application/vnd.github+json" in headers["Accept"]
        assert headers["X-GitHub-Api-Version"] == "2022-11-28"
| 245 | + | |
| 246 | + | |
class TestGitHubClientRequest:
    """Tests for _request method: throttle, retry on 403/429."""

    def test_successful_request(self):
        client = GitHubClient("tok", min_interval=0)
        ok = _mock_response(200, {"ok": True})
        with patch.object(client.session, "request", return_value=ok):
            assert client._request("GET", "/repos/owner/repo").status_code == 200

    @patch("fossil.github_api.time.sleep")
    def test_retries_on_429(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        replies = [
            _mock_response(429, headers={"Retry-After": "1"}),
            _mock_response(200, {"ok": True}),
        ]
        with patch.object(client.session, "request", side_effect=replies):
            assert client._request("GET", "/repos/o/r", max_retries=3).status_code == 200
        # Should have slept for the retry
        assert mock_sleep.call_count >= 1

    @patch("fossil.github_api.time.sleep")
    def test_retries_on_403(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        replies = [_mock_response(403, headers={}), _mock_response(200, {"ok": True})]
        with patch.object(client.session, "request", side_effect=replies):
            assert client._request("GET", "/repos/o/r", max_retries=3).status_code == 200

    @patch("fossil.github_api.time.sleep")
    def test_exhausted_retries_returns_last_response(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        throttled = _mock_response(429, headers={})
        with patch.object(client.session, "request", return_value=throttled):
            # Every attempt is rate-limited; the final 429 comes back to the caller.
            assert client._request("GET", "/repos/o/r", max_retries=2).status_code == 429

    def test_absolute_url_not_prefixed(self):
        client = GitHubClient("tok", min_interval=0)
        with patch.object(client.session, "request", return_value=_mock_response(200)) as req_mock:
            client._request("GET", "https://custom.api.com/thing")
        # Should pass the absolute URL through unchanged
        req_mock.assert_called_once()
        assert req_mock.call_args[0][1] == "https://custom.api.com/thing"
| 299 | + | |
| 300 | + | |
class TestGitHubClientCreateIssue:
    @patch("fossil.github_api.time.sleep")
    def test_create_issue_success(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        created = _mock_response(201, {"number": 42, "html_url": "https://github.com/o/r/issues/42"})
        with patch.object(client.session, "request", return_value=created):
            outcome = client.create_issue("o", "r", "Bug title", "Bug body")
        assert outcome["number"] == 42
        assert outcome["url"] == "https://github.com/o/r/issues/42"
        assert outcome["error"] == ""

    @patch("fossil.github_api.time.sleep")
    def test_create_issue_failure(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        rejected = _mock_response(422, text="Validation Failed")
        with patch.object(client.session, "request", return_value=rejected):
            outcome = client.create_issue("o", "r", "Bad", "data")
        assert outcome["number"] == 0
        assert outcome["url"] == ""
        assert "422" in outcome["error"]

    @patch("fossil.github_api.time.sleep")
    def test_create_issue_with_closed_state(self, mock_sleep):
        """Creating an issue with state='closed' should create then close it."""
        client = GitHubClient("tok", min_interval=0)
        replies = [
            _mock_response(201, {"number": 99, "html_url": "https://github.com/o/r/issues/99"}),
            _mock_response(200, {"number": 99}),
        ]
        with patch.object(client.session, "request", side_effect=replies) as req_mock:
            outcome = client.create_issue("o", "r", "Fixed bug", "Already done", state="closed")
        assert outcome["number"] == 99
        # Should have made two requests: POST create + PATCH close
        assert req_mock.call_count == 2
        assert req_mock.call_args_list[1][0][0] == "PATCH"
| 338 | + | |
| 339 | + | |
class TestGitHubClientUpdateIssue:
    @patch("fossil.github_api.time.sleep")
    def test_update_issue_success(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        updated = _mock_response(200, {"number": 42})
        with patch.object(client.session, "request", return_value=updated):
            outcome = client.update_issue("o", "r", 42, title="New title", state="closed")
        assert outcome["success"] is True
        assert outcome["error"] == ""

    @patch("fossil.github_api.time.sleep")
    def test_update_issue_failure(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        missing = _mock_response(404, text="Not Found")
        with patch.object(client.session, "request", return_value=missing):
            outcome = client.update_issue("o", "r", 999, state="closed")
        assert outcome["success"] is False
        assert "404" in outcome["error"]

    @patch("fossil.github_api.time.sleep")
    def test_update_issue_builds_payload_selectively(self, mock_sleep):
        """Only non-empty fields should be in the payload."""
        client = GitHubClient("tok", min_interval=0)
        with patch.object(client.session, "request", return_value=_mock_response(200)) as req_mock:
            client.update_issue("o", "r", 1, title="", body="new body", state="")
        payload = req_mock.call_args[1]["json"]
        assert "title" not in payload
        assert "state" not in payload
        assert payload["body"] == "new body"
| 374 | + | |
| 375 | + | |
class TestGitHubClientGetFileSha:
    """get_file_sha: the blob sha when the file exists, "" when it does not."""

    @patch("fossil.github_api.time.sleep")
    def test_get_file_sha_found(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        found = _mock_response(200, {"sha": "abc123"})
        with patch.object(client.session, "request", return_value=found):
            assert client.get_file_sha("o", "r", "README.md") == "abc123"

    @patch("fossil.github_api.time.sleep")
    def test_get_file_sha_not_found(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        with patch.object(client.session, "request", return_value=_mock_response(404)):
            assert client.get_file_sha("o", "r", "nonexistent.md") == ""
| 394 | + | |
| 395 | + | |
class TestGitHubClientCreateOrUpdateFile:
    """create_or_update_file: sha probe (GET) first, then PUT of base64 content.

    The mocks' side_effect order matters: first response answers the GET,
    second answers the PUT; call_args_list[1] is always the PUT.
    """

    @patch("fossil.github_api.time.sleep")
    def test_create_new_file(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        get_resp = _mock_response(404)  # file does not exist
        put_resp = _mock_response(201, {"content": {"sha": "newsha"}})

        with patch.object(client.session, "request", side_effect=[get_resp, put_resp]) as mock_req:
            result = client.create_or_update_file("o", "r", "docs/new.md", "# New", "Add new doc")
            assert result["success"] is True
            assert result["sha"] == "newsha"
            assert result["error"] == ""
            # PUT payload should NOT have 'sha' key since file is new
            put_call = mock_req.call_args_list[1]
            payload = put_call[1]["json"]
            assert "sha" not in payload

    @patch("fossil.github_api.time.sleep")
    def test_update_existing_file(self, mock_sleep):
        client = GitHubClient("tok", min_interval=0)
        get_resp = _mock_response(200, {"sha": "oldsha"})  # file exists
        put_resp = _mock_response(200, {"content": {"sha": "updatedsha"}})

        with patch.object(client.session, "request", side_effect=[get_resp, put_resp]) as mock_req:
            result = client.create_or_update_file("o", "r", "docs/existing.md", "# Updated", "Update doc")
            assert result["success"] is True
            assert result["sha"] == "updatedsha"
            # PUT payload should include the existing SHA
            put_call = mock_req.call_args_list[1]
            payload = put_call[1]["json"]
            assert payload["sha"] == "oldsha"

    @patch("fossil.github_api.time.sleep")
    def test_create_or_update_file_failure(self, mock_sleep):
        # A failing PUT surfaces the status code in the error string.
        client = GitHubClient("tok", min_interval=0)
        get_resp = _mock_response(404)
        put_resp = _mock_response(422, text="Validation Failed")

        with patch.object(client.session, "request", side_effect=[get_resp, put_resp]):
            result = client.create_or_update_file("o", "r", "bad.md", "content", "msg")
            assert result["success"] is False
            assert "422" in result["error"]

    @patch("fossil.github_api.time.sleep")
    def test_content_is_base64_encoded(self, mock_sleep):
        # File content must round-trip through base64 in the PUT payload.
        import base64

        client = GitHubClient("tok", min_interval=0)
        get_resp = _mock_response(404)
        put_resp = _mock_response(201, {"content": {"sha": "s"}})

        with patch.object(client.session, "request", side_effect=[get_resp, put_resp]) as mock_req:
            client.create_or_update_file("o", "r", "f.md", "hello world", "msg")
            put_call = mock_req.call_args_list[1]
            payload = put_call[1]["json"]
            decoded = base64.b64decode(payload["content"]).decode("utf-8")
            assert decoded == "hello world"
| 453 | + | |
| 454 | + | |
| 455 | +# =========================================================================== | |
| 456 | +# fossil/oauth.py -- authorize URL builders | |
| 457 | +# =========================================================================== | |
| 458 | + | |
| 459 | + | |
@pytest.fixture
def rf():
    """Django RequestFactory for building synthetic requests."""
    return RequestFactory()
| 463 | + | |
| 464 | + | |
@pytest.fixture
def mock_session():
    """A dict-like session for request factory requests."""
    # A plain dict suffices: these tests only set/read keys on it.
    return {}
| 469 | + | |
| 470 | + | |
@pytest.mark.django_db
class TestGithubAuthorizeUrl:
    """github_authorize_url: None without client id, else full authorize URL."""

    @staticmethod
    def _request_with_session(rf, session):
        # Factory-built request with a plain-dict session attached.
        req = rf.get("/")
        req.session = session
        return req

    @staticmethod
    def _config(client_id):
        cfg = MagicMock()
        cfg.GITHUB_OAUTH_CLIENT_ID = client_id
        return cfg

    def test_returns_none_when_no_client_id(self, rf, mock_session):
        request = self._request_with_session(rf, mock_session)
        with patch("constance.config", self._config("")):
            assert github_authorize_url(request, "my-project") is None

    def test_builds_url_with_all_params(self, rf, mock_session):
        request = self._request_with_session(rf, mock_session)
        with patch("constance.config", self._config("client123")):
            url = github_authorize_url(request, "my-proj", mirror_id="77")

        assert url.startswith(GITHUB_AUTHORIZE_URL)
        for fragment in ("client_id=client123", "scope=repo", "state=my-proj:77:", "redirect_uri="):
            assert fragment in url
        assert "oauth_state_nonce" in mock_session

    def test_default_mirror_id_is_new(self, rf, mock_session):
        request = self._request_with_session(rf, mock_session)
        with patch("constance.config", self._config("cid")):
            url = github_authorize_url(request, "slug")

        assert ":new:" in url

    def test_nonce_stored_in_session(self, rf, mock_session):
        request = self._request_with_session(rf, mock_session)
        with patch("constance.config", self._config("cid")):
            github_authorize_url(request, "slug")

        nonce = mock_session["oauth_state_nonce"]
        assert len(nonce) > 20  # token_urlsafe(32) is ~43 chars
| 521 | + | |
| 522 | + | |
@pytest.mark.django_db
class TestGitlabAuthorizeUrl:
    """Tests for gitlab_authorize_url().

    Mirrors the GitHub variant but uses GITLAB_OAUTH_CLIENT_ID,
    ``response_type=code`` and ``scope=api``.
    """

    def test_returns_none_when_no_client_id(self, rf, mock_session):
        # Integration disabled when no client id is configured.
        request = rf.get("/")
        request.session = mock_session
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = ""

        with patch("constance.config", mock_config):
            url = gitlab_authorize_url(request, "proj")
        assert url is None

    def test_builds_url_with_all_params(self, rf, mock_session):
        request = rf.get("/")
        request.session = mock_session
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "gl_client"

        with patch("constance.config", mock_config):
            url = gitlab_authorize_url(request, "proj", mirror_id="5")

        assert url.startswith(GITLAB_AUTHORIZE_URL)
        assert "client_id=gl_client" in url
        assert "response_type=code" in url
        assert "scope=api" in url
        assert "state=proj:5:" in url
        assert "oauth_state_nonce" in mock_session

    def test_default_mirror_id_is_new(self, rf, mock_session):
        # Omitted mirror_id encodes the literal "new" in the state.
        request = rf.get("/")
        request.session = mock_session
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "gl"

        with patch("constance.config", mock_config):
            url = gitlab_authorize_url(request, "slug")

        assert ":new:" in url
| 561 | + | |
| 562 | + | |
| 563 | +# =========================================================================== | |
| 564 | +# fossil/oauth.py -- token exchange | |
| 565 | +# =========================================================================== | |
| 566 | + | |
| 567 | + | |
@pytest.mark.django_db
class TestGithubExchangeToken:
    """Tests for github_exchange_token().

    The helper POSTs the ``?code=`` callback parameter to GitHub, then
    GETs the user endpoint to resolve the login name.  Results come
    back as a dict with "token", "username" and "error" keys.  NOTE:
    @patch decorators apply bottom-up, so the innermost decorator maps
    to the first mock parameter (mock_post before mock_get).
    """

    def test_returns_error_when_no_code(self, rf):
        request = rf.get("/callback/")  # no ?code= param
        mock_config = MagicMock()
        mock_config.GITHUB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITHUB_OAUTH_CLIENT_SECRET = "secret"

        with patch("constance.config", mock_config):
            result = github_exchange_token(request, "slug")

        assert result["error"] == "No code received"
        assert result["token"] == ""

    @patch("fossil.oauth.requests.get")
    @patch("fossil.oauth.requests.post")
    def test_successful_exchange(self, mock_post, mock_get, rf):
        # Happy path: token POST succeeds, user GET resolves the login.
        request = rf.get("/callback/?code=authcode123")
        mock_config = MagicMock()
        mock_config.GITHUB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITHUB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.return_value = _mock_response(200, {"access_token": "ghp_tok456"})
        mock_get.return_value = _mock_response(200, {"login": "octocat"})

        with patch("constance.config", mock_config):
            result = github_exchange_token(request, "slug")

        assert result["token"] == "ghp_tok456"
        assert result["username"] == "octocat"
        assert result["error"] == ""
        mock_post.assert_called_once()
        mock_get.assert_called_once()

    @patch("fossil.oauth.requests.post")
    def test_exchange_no_access_token_in_response(self, mock_post, rf):
        # GitHub can answer 200 with an error payload instead of a token.
        request = rf.get("/callback/?code=badcode")
        mock_config = MagicMock()
        mock_config.GITHUB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITHUB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.return_value = _mock_response(200, {"error": "bad_verification_code", "error_description": "Bad code"})

        with patch("constance.config", mock_config):
            result = github_exchange_token(request, "slug")

        assert result["token"] == ""
        assert result["error"] == "Bad code"

    @patch("fossil.oauth.requests.post")
    def test_exchange_network_error(self, mock_post, rf):
        # Transport-level failures surface through the "error" key.
        request = rf.get("/callback/?code=code")
        mock_config = MagicMock()
        mock_config.GITHUB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITHUB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.side_effect = ConnectionError("Network unreachable")

        with patch("constance.config", mock_config):
            result = github_exchange_token(request, "slug")

        assert result["token"] == ""
        assert "Network unreachable" in result["error"]

    @patch("fossil.oauth.requests.get")
    @patch("fossil.oauth.requests.post")
    def test_exchange_user_api_fails(self, mock_post, mock_get, rf):
        """Token exchange succeeds but user info endpoint fails."""
        request = rf.get("/callback/?code=code")
        mock_config = MagicMock()
        mock_config.GITHUB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITHUB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.return_value = _mock_response(200, {"access_token": "ghp_tok"})
        mock_get.return_value = _mock_response(401, {"message": "Bad credentials"})

        with patch("constance.config", mock_config):
            result = github_exchange_token(request, "slug")

        # Token should still be returned, username will be empty
        assert result["token"] == "ghp_tok"
        assert result["username"] == ""
        assert result["error"] == ""
| 651 | + | |
| 652 | + | |
@pytest.mark.django_db
class TestGitlabExchangeToken:
    """Tests for gitlab_exchange_token().

    GitLab's exchange is a single POST (no separate user lookup is
    asserted here); results come back as a dict with "token" and
    "error" keys.
    """

    def test_returns_error_when_no_code(self, rf):
        # Missing ?code= query parameter short-circuits the exchange.
        request = rf.get("/callback/")
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITLAB_OAUTH_CLIENT_SECRET = "secret"

        with patch("constance.config", mock_config):
            result = gitlab_exchange_token(request, "slug")

        assert result["error"] == "No code received"
        assert result["token"] == ""

    @patch("fossil.oauth.requests.post")
    def test_successful_exchange(self, mock_post, rf):
        request = rf.get("/callback/?code=glcode")
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITLAB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.return_value = _mock_response(200, {"access_token": "glpat_token789"})

        with patch("constance.config", mock_config):
            result = gitlab_exchange_token(request, "slug")

        assert result["token"] == "glpat_token789"
        assert result["error"] == ""

    @patch("fossil.oauth.requests.post")
    def test_exchange_no_access_token(self, mock_post, rf):
        # 200 response carrying an error payload instead of a token.
        request = rf.get("/callback/?code=badcode")
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITLAB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.return_value = _mock_response(200, {"error_description": "Invalid code"})

        with patch("constance.config", mock_config):
            result = gitlab_exchange_token(request, "slug")

        assert result["token"] == ""
        assert result["error"] == "Invalid code"

    @patch("fossil.oauth.requests.post")
    def test_exchange_network_error(self, mock_post, rf):
        # Transport errors are reported via the "error" key.
        request = rf.get("/callback/?code=code")
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "cid"
        mock_config.GITLAB_OAUTH_CLIENT_SECRET = "secret"

        mock_post.side_effect = TimeoutError("Connection timed out")

        with patch("constance.config", mock_config):
            result = gitlab_exchange_token(request, "slug")

        assert result["token"] == ""
        assert "timed out" in result["error"]

    @patch("fossil.oauth.requests.post")
    def test_exchange_sends_correct_payload(self, mock_post, rf):
        """Verify the POST body includes grant_type and redirect_uri for GitLab."""
        request = rf.get("/callback/?code=code")
        mock_config = MagicMock()
        mock_config.GITLAB_OAUTH_CLIENT_ID = "gl_cid"
        mock_config.GITLAB_OAUTH_CLIENT_SECRET = "gl_secret"

        mock_post.return_value = _mock_response(200, {"access_token": "tok"})

        with patch("constance.config", mock_config):
            gitlab_exchange_token(request, "slug")

        # call_args[1] is the kwargs dict of the captured requests.post call.
        call_kwargs = mock_post.call_args[1]
        data = call_kwargs["data"]
        assert data["grant_type"] == "authorization_code"
        assert data["client_id"] == "gl_cid"
        assert data["client_secret"] == "gl_secret"
        assert data["code"] == "code"
        assert "/oauth/callback/gitlab/" in data["redirect_uri"]
| 732 | + | |
| 733 | + | |
| 734 | +# =========================================================================== | |
| 735 | +# core/sanitize.py -- edge cases not in test_security.py | |
| 736 | +# =========================================================================== | |
| 737 | + | |
| 738 | + | |
class TestSanitizeAllowedTags:
    """Specific allowed tags must come through sanitization intact."""

    @pytest.mark.parametrize(
        "tag",
        ["abbr", "acronym", "dd", "del", "details", "dl", "dt", "ins", "kbd", "mark", "q", "s", "samp", "small", "sub", "sup", "tt", "var"],
    )
    def test_inline_tags_preserved(self, tag):
        # Both the opening and the closing form must survive.
        opening, closing = f"<{tag}>", f"</{tag}>"
        cleaned = sanitize_html(opening + "content" + closing)
        assert opening in cleaned
        assert closing in cleaned

    def test_summary_tag_preserved(self):
        markup = '<details open class="info"><summary class="title">Details</summary>Content</details>'
        cleaned = sanitize_html(markup)
        for fragment in ("<details", "<summary", "Details"):
            assert fragment in cleaned
| 758 | + | |
| 759 | + | |
class TestSanitizeAttributeFiltering:
    """Attributes outside the allowlist are dropped; allowed ones survive."""

    def test_strips_non_allowed_attributes(self):
        cleaned = sanitize_html('<p style="color:red" data-custom="x">text</p>')
        assert "style=" not in cleaned
        assert "data-custom=" not in cleaned
        assert "<p>" in cleaned

    def test_table_colspan_preserved(self):
        cleaned = sanitize_html('<table><tr><td colspan="2" class="wide">cell</td></tr></table>')
        assert 'colspan="2"' in cleaned

    def test_ol_start_and_type_preserved(self):
        cleaned = sanitize_html('<ol start="5" type="a"><li>item</li></ol>')
        assert 'start="5"' in cleaned
        assert 'type="a"' in cleaned

    def test_li_value_preserved(self):
        cleaned = sanitize_html('<ul><li value="3">item</li></ul>')
        assert 'value="3"' in cleaned

    def test_heading_id_preserved(self):
        cleaned = sanitize_html('<h2 id="section-1" class="title">Title</h2>')
        assert 'id="section-1"' in cleaned
        assert 'class="title"' in cleaned

    def test_a_name_attribute_preserved(self):
        cleaned = sanitize_html('<a name="anchor">anchor</a>')
        assert 'name="anchor"' in cleaned

    def test_boolean_attribute_no_value(self):
        # Valueless boolean attributes like "open" must round-trip.
        cleaned = sanitize_html("<details open><summary>info</summary>body</details>")
        assert "<details open>" in cleaned
| 801 | + | |
| 802 | + | |
class TestSanitizeUrlSchemes:
    """Test URL protocol validation in href/src attributes.

    `_is_safe_url` should accept benign schemes (http, https, mailto,
    ftp) plus relative, fragment-only and empty URLs, and reject
    script-injection schemes even when control characters or character
    references are smuggled into the scheme name.
    """

    def test_http_allowed(self):
        assert _is_safe_url("http://example.com") is True

    def test_https_allowed(self):
        assert _is_safe_url("https://example.com") is True

    def test_mailto_allowed(self):
        assert _is_safe_url("mailto:[email protected]") is True

    def test_ftp_allowed(self):
        assert _is_safe_url("ftp://files.example.com/doc.txt") is True

    def test_javascript_blocked(self):
        assert _is_safe_url("javascript:alert(1)") is False

    def test_vbscript_blocked(self):
        assert _is_safe_url("vbscript:MsgBox") is False

    def test_data_blocked(self):
        assert _is_safe_url("data:text/html,<script>alert(1)</script>") is False

    def test_entity_encoded_javascript_blocked(self):
        """HTML entity encoding should not bypass protocol check.

        BUG FIX: the payload had been entity-decoded into a literal
        "javascript:" URL, duplicating test_javascript_blocked instead
        of exercising the encoded form.  Restored the encoded scheme.
        """
        assert _is_safe_url("&#106;avascript:alert(1)") is False

    def test_tab_in_protocol_blocked(self):
        """Tabs injected in the protocol name should be stripped before checking."""
        assert _is_safe_url("jav\tascript:alert(1)") is False

    def test_cr_in_protocol_blocked(self):
        assert _is_safe_url("java\rscript:alert(1)") is False

    def test_newline_in_protocol_blocked(self):
        assert _is_safe_url("java\nscript:alert(1)") is False

    def test_null_byte_in_protocol_blocked(self):
        assert _is_safe_url("java\x00script:alert(1)") is False

    def test_fragment_only_allowed(self):
        assert _is_safe_url("#section") is True

    def test_relative_url_allowed(self):
        assert _is_safe_url("/page/about") is True

    def test_empty_url_allowed(self):
        assert _is_safe_url("") is True

    def test_mixed_case_protocol_blocked(self):
        assert _is_safe_url("JaVaScRiPt:alert(1)") is False
| 855 | + | |
| 856 | + | |
class TestSanitizeHrefSrcReplacement:
    """Unsafe URLs inside href/src must be swapped for a bare '#'."""

    def test_javascript_href_neutralized(self):
        cleaned = sanitize_html('<a href="javascript:alert(1)">link</a>')
        assert 'href="#"' in cleaned
        assert "javascript" not in cleaned

    def test_data_src_neutralized(self):
        cleaned = sanitize_html('<img src="data:image/svg+xml,<script>alert(1)</script>">')
        assert 'src="#"' in cleaned

    def test_safe_href_preserved(self):
        cleaned = sanitize_html('<a href="https://example.com">link</a>')
        assert 'href="https://example.com"' in cleaned
| 875 | + | |
| 876 | + | |
class TestSanitizeDangerousTags:
    """Dangerous containers lose tag AND content; dangerous voids lose only the tag."""

    def test_script_content_fully_removed(self):
        cleaned = sanitize_html("<p>before</p><script>var x = 1;</script><p>after</p>")
        assert "var x" not in cleaned
        assert "<p>before</p>" in cleaned
        assert "<p>after</p>" in cleaned

    def test_style_content_fully_removed(self):
        cleaned = sanitize_html("<div>ok</div><style>.evil { display:none }</style><div>fine</div>")
        assert ".evil" not in cleaned
        assert "<div>ok</div>" in cleaned

    def test_iframe_content_fully_removed(self):
        cleaned = sanitize_html('<iframe src="x">text inside iframe</iframe>')
        assert "text inside iframe" not in cleaned
        assert "<iframe" not in cleaned

    def test_nested_dangerous_tags(self):
        """A script nested inside a script is stripped along with its body."""
        cleaned = sanitize_html("<script><script>inner</script></script><p>safe</p>")
        assert "inner" not in cleaned
        assert "<p>safe</p>" in cleaned

    def test_base_tag_stripped(self):
        assert "<base" not in sanitize_html('<base href="https://evil.com/">')

    def test_meta_tag_stripped(self):
        assert "<meta" not in sanitize_html('<meta http-equiv="refresh" content="0;url=https://evil.com">')

    def test_link_tag_stripped(self):
        assert "<link" not in sanitize_html('<link rel="stylesheet" href="https://evil.com/style.css">')
| 920 | + | |
| 921 | + | |
class TestSanitizeTextPreservation:
    """Text inside stripped tags is kept or dropped depending on the tag class."""

    def test_unknown_tag_text_preserved(self):
        """Unknown non-dangerous tags lose their markup but keep their text."""
        cleaned = sanitize_html("<custom>inner text</custom>")
        assert "<custom>" not in cleaned
        assert "inner text" in cleaned

    def test_form_content_fully_removed(self):
        """form is a dangerous container -- everything inside is dropped."""
        assert "login prompt" not in sanitize_html("<form>login prompt</form>")

    def test_object_content_fully_removed(self):
        assert "fallback text" not in sanitize_html("<object>fallback text</object>")

    def test_embed_is_dangerous_container(self):
        assert "text" not in sanitize_html("<embed>text</embed>")
| 947 | + | |
| 948 | + | |
class TestSanitizeEntityHandling:
    """Verify HTML entity passthrough outside dangerous contexts.

    BUG FIX: the entity literals in these tests had been decoded into
    raw characters (e.g. "&amp;" -> "&", "&lt;" -> "<"), which made the
    inputs malformed HTML and the assertions vacuous.  Restored the
    character references so the tests actually exercise passthrough.
    """

    def test_named_entity_preserved(self):
        # Named references must be emitted verbatim, not decoded or dropped.
        result = sanitize_html("<p>&amp; &lt; &gt;</p>")
        assert "&amp;" in result
        assert "&lt;" in result
        assert "&gt;" in result

    def test_numeric_entity_preserved(self):
        # Numeric references likewise pass straight through.
        result = sanitize_html("<p>&#169; &#8212;</p>")
        assert "&#169;" in result
        assert "&#8212;" in result

    def test_entities_inside_script_stripped(self):
        # References buffered inside <script> are discarded with the rest.
        result = sanitize_html("<script>&amp; entity</script>")
        assert "&amp;" not in result
| 969 | + | |
| 970 | + | |
class TestSanitizeComments:
    """HTML comments (including IE conditionals) never reach the output."""

    def test_html_comments_stripped(self):
        cleaned = sanitize_html("<p>before</p><!-- secret comment --><p>after</p>")
        assert "secret comment" not in cleaned
        assert "<!--" not in cleaned
        assert "<p>before</p>" in cleaned
        assert "<p>after</p>" in cleaned

    def test_conditional_comment_stripped(self):
        cleaned = sanitize_html("<!--[if IE]>evil<![endif]--><p>safe</p>")
        assert "evil" not in cleaned
        assert "<p>safe</p>" in cleaned
| 985 | + | |
| 986 | + | |
class TestSanitizeSVG:
    """SVG support for Pikchr diagrams: shapes pass, scripts/handlers do not."""

    def test_svg_with_allowed_attrs(self):
        source = (
            '<svg viewBox="0 0 200 200" xmlns="http://www.w3.org/2000/svg">'
            '<rect x="10" y="10" width="80" height="80" fill="blue"/></svg>'
        )
        cleaned = sanitize_html(source)
        assert "<svg" in cleaned
        assert "<rect" in cleaned
        assert 'fill="blue"' in cleaned

    def test_svg_strips_script_inside(self):
        cleaned = sanitize_html('<svg><script>alert(1)</script><circle cx="50" cy="50" r="40"/></svg>')
        assert "<script" not in cleaned
        assert "alert" not in cleaned
        assert "<circle" in cleaned

    def test_svg_strips_event_handler(self):
        cleaned = sanitize_html('<svg onload="alert(1)"><circle cx="50" cy="50" r="40"/></svg>')
        assert "onload" not in cleaned
        assert "<circle" in cleaned

    def test_svg_path_preserved(self):
        cleaned = sanitize_html('<svg><path d="M10 10 L90 90" stroke="black" stroke-width="2"/></svg>')
        assert "<path" in cleaned
        assert 'stroke="black"' in cleaned

    def test_svg_text_element(self):
        cleaned = sanitize_html('<svg><text x="10" y="20" font-size="14" fill="black">Label</text></svg>')
        assert "<text" in cleaned
        assert "Label" in cleaned

    def test_svg_g_transform(self):
        cleaned = sanitize_html('<svg><g transform="translate(10,20)"><circle cx="0" cy="0" r="5"/></g></svg>')
        assert "<g" in cleaned
        assert 'transform="translate(10,20)"' in cleaned
| 1029 | + | |
| 1030 | + | |
class TestSanitizeAttributeEscaping:
    """Verify attribute values are properly escaped in the serialized output.

    BUG FIX: the expected entities in these assertions had been decoded
    into raw "&" and '"' characters (leaving one line as a garbled
    triple-quoted string), so the tests no longer checked escaping.
    Restored the entity forms.
    """

    def test_ampersand_in_href_escaped(self):
        # A raw "&" in a query string must be re-emitted as "&amp;".
        result = sanitize_html('<a href="https://example.com?a=1&b=2">link</a>')
        assert "&amp;" in result

    def test_quote_in_attribute_escaped(self):
        # Encoded double quotes inside an attribute must stay encoded.
        html_in = '<a href="https://example.com" title="a &quot;quoted&quot; title">link</a>'
        result = sanitize_html(html_in)
        assert "&quot;" in result or "&#34;" in result
| 1043 | + | |
| 1044 | + | |
class TestSanitizeSelfClosingTags:
    """Self-closing (void) tags are normalized but kept."""

    def test_br_self_closing(self):
        cleaned = sanitize_html("line1<br/>line2")
        for fragment in ("<br>", "line1", "line2"):
            assert fragment in cleaned

    def test_img_self_closing_with_attrs(self):
        cleaned = sanitize_html('<img src="photo.jpg" alt="A photo"/>')
        assert 'src="photo.jpg"' in cleaned
        assert 'alt="A photo"' in cleaned
| --- a/tests/test_integrations.py | 
| +++ b/tests/test_integrations.py | 
| @@ -0,0 +1,1059 @@ | 
| 1 | """Tests for fossil/github_api.py, fossil/oauth.py, and core/sanitize.py. |
| 2 | |
| 3 | Covers: |
| 4 | - GitHubClient: rate limiting, issue CRUD, file CRUD, error handling |
| 5 | - parse_github_repo: URL format parsing |
| 6 | - fossil_status_to_github: status mapping |
| 7 | - format_ticket_body: markdown generation |
| 8 | - content_hash: deterministic hashing |
| 9 | - OAuth: authorize URL builders, token exchange (success + failure) |
| 10 | - Sanitize: edge cases not covered in test_security.py |
| 11 | """ |
| 12 | |
| 13 | import hashlib |
| 14 | from types import SimpleNamespace |
| 15 | from unittest.mock import MagicMock, patch |
| 16 | |
| 17 | import pytest |
| 18 | from django.test import RequestFactory |
| 19 | |
| 20 | from core.sanitize import ( |
| 21 | _is_safe_url, |
| 22 | sanitize_html, |
| 23 | ) |
| 24 | from fossil.github_api import ( |
| 25 | GitHubClient, |
| 26 | content_hash, |
| 27 | format_ticket_body, |
| 28 | fossil_status_to_github, |
| 29 | parse_github_repo, |
| 30 | ) |
| 31 | from fossil.oauth import ( |
| 32 | GITHUB_AUTHORIZE_URL, |
| 33 | GITLAB_AUTHORIZE_URL, |
| 34 | github_authorize_url, |
| 35 | github_exchange_token, |
| 36 | gitlab_authorize_url, |
| 37 | gitlab_exchange_token, |
| 38 | ) |
| 39 | |
| 40 | # --------------------------------------------------------------------------- |
| 41 | # Helpers |
| 42 | # --------------------------------------------------------------------------- |
| 43 | |
| 44 | |
| 45 | def _mock_response(status_code=200, json_data=None, text="", headers=None): |
| 46 | """Build a mock requests.Response.""" |
| 47 | resp = MagicMock() |
| 48 | resp.status_code = status_code |
| 49 | resp.json.return_value = json_data or {} |
| 50 | resp.text = text |
| 51 | resp.ok = 200 <= status_code < 300 |
| 52 | resp.headers = headers or {} |
| 53 | return resp |
| 54 | |
| 55 | |
| 56 | # =========================================================================== |
| 57 | # fossil/github_api.py -- parse_github_repo |
| 58 | # =========================================================================== |
| 59 | |
| 60 | |
class TestParseGithubRepo:
    """URL-format coverage for parse_github_repo (https + ssh forms)."""

    def test_https_with_git_suffix(self):
        assert parse_github_repo("https://github.com/owner/repo.git") == ("owner", "repo")

    def test_https_without_git_suffix(self):
        assert parse_github_repo("https://github.com/owner/repo") == ("owner", "repo")

    def test_ssh_url(self):
        assert parse_github_repo("[email protected]:owner/repo.git") == ("owner", "repo")

    def test_ssh_url_without_git_suffix(self):
        assert parse_github_repo("[email protected]:owner/repo") == ("owner", "repo")

    def test_non_github_url_returns_none(self):
        assert parse_github_repo("https://gitlab.com/owner/repo.git") is None

    def test_malformed_url_returns_none(self):
        assert parse_github_repo("not-a-url") is None

    def test_empty_string_returns_none(self):
        assert parse_github_repo("") is None

    def test_owner_with_hyphens_and_dots(self):
        assert parse_github_repo("https://github.com/my-org.dev/my-repo.git") == ("my-org.dev", "my-repo")

    def test_url_with_trailing_slash_returns_none(self):
        # owner/repo must sit at the end of the string; a trailing slash defeats the regex
        assert parse_github_repo("https://github.com/owner/repo/") is None
| 94 | |
| 95 | |
| 96 | # =========================================================================== |
| 97 | # fossil/github_api.py -- fossil_status_to_github |
| 98 | # =========================================================================== |
| 99 | |
| 100 | |
class TestFossilStatusToGithub:
    """Mapping of Fossil ticket statuses onto GitHub's open/closed pair."""

    @pytest.mark.parametrize(
        "status",
        ["closed", "fixed", "resolved", "wontfix", "unable_to_reproduce", "works_as_designed", "deferred"],
    )
    def test_closed_statuses(self, status):
        assert fossil_status_to_github(status) == "closed"

    @pytest.mark.parametrize("status", ["open", "active", "new", "review", "pending"])
    def test_open_statuses(self, status):
        assert fossil_status_to_github(status) == "open"

    def test_case_insensitive(self):
        for variant in ("CLOSED", "Fixed"):
            assert fossil_status_to_github(variant) == "closed"

    def test_strips_whitespace(self):
        assert fossil_status_to_github(" closed ") == "closed"
        assert fossil_status_to_github(" open ") == "open"

    def test_empty_string_maps_to_open(self):
        assert fossil_status_to_github("") == "open"
| 123 | |
| 124 | |
| 125 | # =========================================================================== |
| 126 | # fossil/github_api.py -- content_hash |
| 127 | # =========================================================================== |
| 128 | |
| 129 | |
class TestContentHash:
    """content_hash must be a plain SHA-256 hex digest of the UTF-8 text."""

    def test_deterministic(self):
        assert content_hash("hello") == content_hash("hello")

    def test_matches_sha256(self):
        assert content_hash("hello") == hashlib.sha256(b"hello").hexdigest()

    def test_different_inputs_different_hashes(self):
        assert content_hash("hello") != content_hash("world")

    def test_empty_string(self):
        assert content_hash("") == hashlib.sha256(b"").hexdigest()
| 144 | |
| 145 | |
| 146 | # =========================================================================== |
| 147 | # fossil/github_api.py -- format_ticket_body |
| 148 | # =========================================================================== |
| 149 | |
| 150 | |
class TestFormatTicketBody:
    """format_ticket_body: metadata table, comments section, uuid trailer."""

    def _ticket(self, **kwargs):
        # Baseline ticket fields; keyword overrides win via dict merge.
        fields = {
            "body": "Bug description",
            "type": "bug",
            "priority": "high",
            "severity": "critical",
            "subsystem": "auth",
            "resolution": "",
            "owner": "alice",
            "uuid": "abcdef1234567890",
        }
        return SimpleNamespace(**{**fields, **kwargs})

    def test_includes_body(self):
        rendered = format_ticket_body(self._ticket())
        assert "Bug description" in rendered

    def test_includes_metadata_table(self):
        rendered = format_ticket_body(self._ticket())
        for row in (
            "| Type | bug |",
            "| Priority | high |",
            "| Severity | critical |",
            "| Subsystem | auth |",
            "| Owner | alice |",
        ):
            assert row in rendered

    def test_skips_empty_metadata_fields(self):
        blank = self._ticket(type="", priority="", severity="", subsystem="", resolution="", owner="")
        assert "Fossil metadata" not in format_ticket_body(blank)

    def test_includes_uuid_trailer(self):
        assert "abcdef1234" in format_ticket_body(self._ticket())

    def test_includes_comments(self):
        from datetime import datetime

        thread = [
            {"user": "bob", "timestamp": datetime(2025, 1, 15, 10, 30), "comment": "I can reproduce this."},
            {"user": "alice", "timestamp": datetime(2025, 1, 16, 14, 0), "comment": "Fix incoming."},
        ]
        rendered = format_ticket_body(self._ticket(), comments=thread)
        for expected in ("bob", "2025-01-15 10:30", "I can reproduce this.", "alice", "Fix incoming."):
            assert expected in rendered

    def test_no_comments(self):
        assert "Comments" not in format_ticket_body(self._ticket(), comments=None)

    def test_empty_comments_list(self):
        assert "Comments" not in format_ticket_body(self._ticket(), comments=[])

    def test_comment_without_timestamp(self):
        rendered = format_ticket_body(self._ticket(), comments=[{"user": "dan", "comment": "No timestamp here."}])
        assert "dan" in rendered
        assert "No timestamp here." in rendered

    def test_resolution_shown_when_set(self):
        rendered = format_ticket_body(self._ticket(resolution="wontfix"))
        assert "| Resolution | wontfix |" in rendered

    def test_no_body_ticket(self):
        # Even with an empty body the uuid trailer must survive.
        assert "abcdef1234" in format_ticket_body(self._ticket(body=""))
| 232 | |
| 233 | |
| 234 | # =========================================================================== |
| 235 | # fossil/github_api.py -- GitHubClient |
| 236 | # =========================================================================== |
| 237 | |
| 238 | |
class TestGitHubClientInit:
    """Constructor installs auth, accept, and API-version session headers."""

    def test_session_headers(self):
        headers = GitHubClient("ghp_test123", min_interval=0).session.headers
        assert headers["Authorization"] == "Bearer ghp_test123"
        assert headers["X-GitHub-Api-Version"] == "2022-11-28"
        assert "application/vnd.github+json" in headers["Accept"]
| 245 | |
| 246 | |
class TestGitHubClientRequest:
    """Tests for _request method: throttle, retry on 403/429."""

    def test_successful_request(self):
        api = GitHubClient("tok", min_interval=0)
        ok = _mock_response(200, {"ok": True})

        with patch.object(api.session, "request", return_value=ok):
            assert api._request("GET", "/repos/owner/repo").status_code == 200

    @patch("fossil.github_api.time.sleep")
    def test_retries_on_429(self, mock_sleep):
        api = GitHubClient("tok", min_interval=0)
        replies = [
            _mock_response(429, headers={"Retry-After": "1"}),
            _mock_response(200, {"ok": True}),
        ]

        with patch.object(api.session, "request", side_effect=replies):
            final = api._request("GET", "/repos/o/r", max_retries=3)
        assert final.status_code == 200
        # A sleep must have happened before the retry was issued.
        assert mock_sleep.call_count >= 1

    @patch("fossil.github_api.time.sleep")
    def test_retries_on_403(self, mock_sleep):
        api = GitHubClient("tok", min_interval=0)
        replies = [_mock_response(403, headers={}), _mock_response(200, {"ok": True})]

        with patch.object(api.session, "request", side_effect=replies):
            assert api._request("GET", "/repos/o/r", max_retries=3).status_code == 200

    @patch("fossil.github_api.time.sleep")
    def test_exhausted_retries_returns_last_response(self, mock_sleep):
        api = GitHubClient("tok", min_interval=0)
        throttled = _mock_response(429, headers={})

        with patch.object(api.session, "request", return_value=throttled):
            assert api._request("GET", "/repos/o/r", max_retries=2).status_code == 429

    def test_absolute_url_not_prefixed(self):
        api = GitHubClient("tok", min_interval=0)

        with patch.object(api.session, "request", return_value=_mock_response(200)) as mock_req:
            api._request("GET", "https://custom.api.com/thing")
        # An absolute URL must be passed through unchanged (no base prefix).
        mock_req.assert_called_once()
        assert mock_req.call_args[0][1] == "https://custom.api.com/thing"
| 299 | |
| 300 | |
class TestGitHubClientCreateIssue:
    """create_issue: success, failure, and create-then-close flows."""

    @patch("fossil.github_api.time.sleep")
    def test_create_issue_success(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        created = _mock_response(201, {"number": 42, "html_url": "https://github.com/o/r/issues/42"})

        with patch.object(gh.session, "request", return_value=created):
            outcome = gh.create_issue("o", "r", "Bug title", "Bug body")
        assert outcome["error"] == ""
        assert outcome["number"] == 42
        assert outcome["url"] == "https://github.com/o/r/issues/42"

    @patch("fossil.github_api.time.sleep")
    def test_create_issue_failure(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        rejected = _mock_response(422, text="Validation Failed")

        with patch.object(gh.session, "request", return_value=rejected):
            outcome = gh.create_issue("o", "r", "Bad", "data")
        assert outcome["number"] == 0
        assert outcome["url"] == ""
        assert "422" in outcome["error"]

    @patch("fossil.github_api.time.sleep")
    def test_create_issue_with_closed_state(self, mock_sleep):
        """Creating an issue with state='closed' should create then close it."""
        gh = GitHubClient("tok", min_interval=0)
        responses = [
            _mock_response(201, {"number": 99, "html_url": "https://github.com/o/r/issues/99"}),
            _mock_response(200, {"number": 99}),
        ]

        with patch.object(gh.session, "request", side_effect=responses) as mock_req:
            outcome = gh.create_issue("o", "r", "Fixed bug", "Already done", state="closed")
        assert outcome["number"] == 99
        # Exactly two calls: POST to create, then PATCH to close.
        assert mock_req.call_count == 2
        assert mock_req.call_args_list[1][0][0] == "PATCH"
| 338 | |
| 339 | |
class TestGitHubClientUpdateIssue:
    """update_issue: result reporting and selective payload construction."""

    @patch("fossil.github_api.time.sleep")
    def test_update_issue_success(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        patched = _mock_response(200, {"number": 42})

        with patch.object(gh.session, "request", return_value=patched):
            outcome = gh.update_issue("o", "r", 42, title="New title", state="closed")
        assert outcome["error"] == ""
        assert outcome["success"] is True

    @patch("fossil.github_api.time.sleep")
    def test_update_issue_failure(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        missing = _mock_response(404, text="Not Found")

        with patch.object(gh.session, "request", return_value=missing):
            outcome = gh.update_issue("o", "r", 999, state="closed")
        assert outcome["success"] is False
        assert "404" in outcome["error"]

    @patch("fossil.github_api.time.sleep")
    def test_update_issue_builds_payload_selectively(self, mock_sleep):
        """Only non-empty fields should be in the payload."""
        gh = GitHubClient("tok", min_interval=0)

        with patch.object(gh.session, "request", return_value=_mock_response(200)) as mock_req:
            gh.update_issue("o", "r", 1, title="", body="new body", state="")
        payload = mock_req.call_args[1]["json"]
        assert payload["body"] == "new body"
        assert "title" not in payload
        assert "state" not in payload
| 374 | |
| 375 | |
class TestGitHubClientGetFileSha:
    """get_file_sha returns the blob sha on 200 and '' on 404."""

    @patch("fossil.github_api.time.sleep")
    def test_get_file_sha_found(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        found = _mock_response(200, {"sha": "abc123"})

        with patch.object(gh.session, "request", return_value=found):
            assert gh.get_file_sha("o", "r", "README.md") == "abc123"

    @patch("fossil.github_api.time.sleep")
    def test_get_file_sha_not_found(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)

        with patch.object(gh.session, "request", return_value=_mock_response(404)):
            assert gh.get_file_sha("o", "r", "nonexistent.md") == ""
| 394 | |
| 395 | |
class TestGitHubClientCreateOrUpdateFile:
    """create_or_update_file: GET-for-sha then PUT with base64 content."""

    @patch("fossil.github_api.time.sleep")
    def test_create_new_file(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        lookup = _mock_response(404)  # file does not exist
        stored = _mock_response(201, {"content": {"sha": "newsha"}})

        with patch.object(gh.session, "request", side_effect=[lookup, stored]) as mock_req:
            outcome = gh.create_or_update_file("o", "r", "docs/new.md", "# New", "Add new doc")
        assert outcome["success"] is True
        assert outcome["sha"] == "newsha"
        assert outcome["error"] == ""
        # A brand-new file must not send a 'sha' key in the PUT payload.
        assert "sha" not in mock_req.call_args_list[1][1]["json"]

    @patch("fossil.github_api.time.sleep")
    def test_update_existing_file(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        lookup = _mock_response(200, {"sha": "oldsha"})  # file exists
        stored = _mock_response(200, {"content": {"sha": "updatedsha"}})

        with patch.object(gh.session, "request", side_effect=[lookup, stored]) as mock_req:
            outcome = gh.create_or_update_file("o", "r", "docs/existing.md", "# Updated", "Update doc")
        assert outcome["success"] is True
        assert outcome["sha"] == "updatedsha"
        # Updates must echo the existing blob sha back in the PUT payload.
        assert mock_req.call_args_list[1][1]["json"]["sha"] == "oldsha"

    @patch("fossil.github_api.time.sleep")
    def test_create_or_update_file_failure(self, mock_sleep):
        gh = GitHubClient("tok", min_interval=0)
        responses = [_mock_response(404), _mock_response(422, text="Validation Failed")]

        with patch.object(gh.session, "request", side_effect=responses):
            outcome = gh.create_or_update_file("o", "r", "bad.md", "content", "msg")
        assert outcome["success"] is False
        assert "422" in outcome["error"]

    @patch("fossil.github_api.time.sleep")
    def test_content_is_base64_encoded(self, mock_sleep):
        import base64

        gh = GitHubClient("tok", min_interval=0)
        responses = [_mock_response(404), _mock_response(201, {"content": {"sha": "s"}})]

        with patch.object(gh.session, "request", side_effect=responses) as mock_req:
            gh.create_or_update_file("o", "r", "f.md", "hello world", "msg")
        payload = mock_req.call_args_list[1][1]["json"]
        assert base64.b64decode(payload["content"]).decode("utf-8") == "hello world"
| 453 | |
| 454 | |
| 455 | # =========================================================================== |
| 456 | # fossil/oauth.py -- authorize URL builders |
| 457 | # =========================================================================== |
| 458 | |
| 459 | |
@pytest.fixture
def rf():
    """Django RequestFactory for building bare HTTP requests in tests."""
    return RequestFactory()
| 463 | |
| 464 | |
@pytest.fixture
def mock_session():
    """A dict-like session for request factory requests.

    RequestFactory requests carry no session middleware, so tests attach
    this plain dict as ``request.session`` and inspect what gets written.
    """
    return {}
| 469 | |
| 470 | |
@pytest.mark.django_db
class TestGithubAuthorizeUrl:
    """github_authorize_url: config gating, query parameters, session nonce."""

    @staticmethod
    def _request(rf, session):
        # Attach the dict session the builder writes its nonce into.
        req = rf.get("/")
        req.session = session
        return req

    @staticmethod
    def _cfg(client_id):
        cfg = MagicMock()
        cfg.GITHUB_OAUTH_CLIENT_ID = client_id
        return cfg

    def test_returns_none_when_no_client_id(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("")):
            assert github_authorize_url(req, "my-project") is None

    def test_builds_url_with_all_params(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("client123")):
            url = github_authorize_url(req, "my-proj", mirror_id="77")

        assert url.startswith(GITHUB_AUTHORIZE_URL)
        for fragment in ("client_id=client123", "scope=repo", "state=my-proj:77:", "redirect_uri="):
            assert fragment in url
        assert "oauth_state_nonce" in mock_session

    def test_default_mirror_id_is_new(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("cid")):
            url = github_authorize_url(req, "slug")

        assert ":new:" in url

    def test_nonce_stored_in_session(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("cid")):
            github_authorize_url(req, "slug")

        # token_urlsafe(32) yields roughly 43 characters.
        assert len(mock_session["oauth_state_nonce"]) > 20
| 521 | |
| 522 | |
@pytest.mark.django_db
class TestGitlabAuthorizeUrl:
    """gitlab_authorize_url: config gating, query parameters, session nonce."""

    @staticmethod
    def _request(rf, session):
        req = rf.get("/")
        req.session = session
        return req

    @staticmethod
    def _cfg(client_id):
        cfg = MagicMock()
        cfg.GITLAB_OAUTH_CLIENT_ID = client_id
        return cfg

    def test_returns_none_when_no_client_id(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("")):
            assert gitlab_authorize_url(req, "proj") is None

    def test_builds_url_with_all_params(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("gl_client")):
            url = gitlab_authorize_url(req, "proj", mirror_id="5")

        assert url.startswith(GITLAB_AUTHORIZE_URL)
        for fragment in ("client_id=gl_client", "response_type=code", "scope=api", "state=proj:5:"):
            assert fragment in url
        assert "oauth_state_nonce" in mock_session

    def test_default_mirror_id_is_new(self, rf, mock_session):
        req = self._request(rf, mock_session)

        with patch("constance.config", self._cfg("gl")):
            url = gitlab_authorize_url(req, "slug")

        assert ":new:" in url
| 561 | |
| 562 | |
| 563 | # =========================================================================== |
| 564 | # fossil/oauth.py -- token exchange |
| 565 | # =========================================================================== |
| 566 | |
| 567 | |
@pytest.mark.django_db
class TestGithubExchangeToken:
    """github_exchange_token: code exchange, error paths, user lookup."""

    @staticmethod
    def _cfg():
        cfg = MagicMock()
        cfg.GITHUB_OAUTH_CLIENT_ID = "cid"
        cfg.GITHUB_OAUTH_CLIENT_SECRET = "secret"
        return cfg

    def test_returns_error_when_no_code(self, rf):
        # Callback arrives without a ?code= parameter.
        with patch("constance.config", self._cfg()):
            outcome = github_exchange_token(rf.get("/callback/"), "slug")

        assert outcome["error"] == "No code received"
        assert outcome["token"] == ""

    @patch("fossil.oauth.requests.get")
    @patch("fossil.oauth.requests.post")
    def test_successful_exchange(self, mock_post, mock_get, rf):
        mock_post.return_value = _mock_response(200, {"access_token": "ghp_tok456"})
        mock_get.return_value = _mock_response(200, {"login": "octocat"})

        with patch("constance.config", self._cfg()):
            outcome = github_exchange_token(rf.get("/callback/?code=authcode123"), "slug")

        assert outcome["token"] == "ghp_tok456"
        assert outcome["username"] == "octocat"
        assert outcome["error"] == ""
        mock_post.assert_called_once()
        mock_get.assert_called_once()

    @patch("fossil.oauth.requests.post")
    def test_exchange_no_access_token_in_response(self, mock_post, rf):
        mock_post.return_value = _mock_response(200, {"error": "bad_verification_code", "error_description": "Bad code"})

        with patch("constance.config", self._cfg()):
            outcome = github_exchange_token(rf.get("/callback/?code=badcode"), "slug")

        assert outcome["token"] == ""
        assert outcome["error"] == "Bad code"

    @patch("fossil.oauth.requests.post")
    def test_exchange_network_error(self, mock_post, rf):
        mock_post.side_effect = ConnectionError("Network unreachable")

        with patch("constance.config", self._cfg()):
            outcome = github_exchange_token(rf.get("/callback/?code=code"), "slug")

        assert outcome["token"] == ""
        assert "Network unreachable" in outcome["error"]

    @patch("fossil.oauth.requests.get")
    @patch("fossil.oauth.requests.post")
    def test_exchange_user_api_fails(self, mock_post, mock_get, rf):
        """Token exchange succeeds but user info endpoint fails."""
        mock_post.return_value = _mock_response(200, {"access_token": "ghp_tok"})
        mock_get.return_value = _mock_response(401, {"message": "Bad credentials"})

        with patch("constance.config", self._cfg()):
            outcome = github_exchange_token(rf.get("/callback/?code=code"), "slug")

        # Token should still be returned, username will be empty
        assert outcome["token"] == "ghp_tok"
        assert outcome["username"] == ""
        assert outcome["error"] == ""
| 651 | |
| 652 | |
@pytest.mark.django_db
class TestGitlabExchangeToken:
    """gitlab_exchange_token: exchange success, error paths, request payload."""

    @staticmethod
    def _cfg(client_id="cid", client_secret="secret"):
        cfg = MagicMock()
        cfg.GITLAB_OAUTH_CLIENT_ID = client_id
        cfg.GITLAB_OAUTH_CLIENT_SECRET = client_secret
        return cfg

    def test_returns_error_when_no_code(self, rf):
        with patch("constance.config", self._cfg()):
            outcome = gitlab_exchange_token(rf.get("/callback/"), "slug")

        assert outcome["error"] == "No code received"
        assert outcome["token"] == ""

    @patch("fossil.oauth.requests.post")
    def test_successful_exchange(self, mock_post, rf):
        mock_post.return_value = _mock_response(200, {"access_token": "glpat_token789"})

        with patch("constance.config", self._cfg()):
            outcome = gitlab_exchange_token(rf.get("/callback/?code=glcode"), "slug")

        assert outcome["token"] == "glpat_token789"
        assert outcome["error"] == ""

    @patch("fossil.oauth.requests.post")
    def test_exchange_no_access_token(self, mock_post, rf):
        mock_post.return_value = _mock_response(200, {"error_description": "Invalid code"})

        with patch("constance.config", self._cfg()):
            outcome = gitlab_exchange_token(rf.get("/callback/?code=badcode"), "slug")

        assert outcome["token"] == ""
        assert outcome["error"] == "Invalid code"

    @patch("fossil.oauth.requests.post")
    def test_exchange_network_error(self, mock_post, rf):
        mock_post.side_effect = TimeoutError("Connection timed out")

        with patch("constance.config", self._cfg()):
            outcome = gitlab_exchange_token(rf.get("/callback/?code=code"), "slug")

        assert outcome["token"] == ""
        assert "timed out" in outcome["error"]

    @patch("fossil.oauth.requests.post")
    def test_exchange_sends_correct_payload(self, mock_post, rf):
        """Verify the POST body includes grant_type and redirect_uri for GitLab."""
        mock_post.return_value = _mock_response(200, {"access_token": "tok"})

        with patch("constance.config", self._cfg("gl_cid", "gl_secret")):
            gitlab_exchange_token(rf.get("/callback/?code=code"), "slug")

        body = mock_post.call_args[1]["data"]
        assert body["grant_type"] == "authorization_code"
        assert body["client_id"] == "gl_cid"
        assert body["client_secret"] == "gl_secret"
        assert body["code"] == "code"
        assert "/oauth/callback/gitlab/" in body["redirect_uri"]
| 732 | |
| 733 | |
| 734 | # =========================================================================== |
| 735 | # core/sanitize.py -- edge cases not in test_security.py |
| 736 | # =========================================================================== |
| 737 | |
| 738 | |
class TestSanitizeAllowedTags:
    """Verify specific allowed tags survive sanitization."""

    @pytest.mark.parametrize(
        "tag",
        ["abbr", "acronym", "dd", "del", "details", "dl", "dt", "ins", "kbd", "mark", "q", "s", "samp", "small", "sub", "sup", "tt", "var"],
    )
    def test_inline_tags_preserved(self, tag):
        cleaned = sanitize_html(f"<{tag}>content</{tag}>")
        assert f"<{tag}>" in cleaned
        assert f"</{tag}>" in cleaned

    def test_summary_tag_preserved(self):
        cleaned = sanitize_html('<details open class="info"><summary class="title">Details</summary>Content</details>')
        for expected in ("<details", "<summary", "Details"):
            assert expected in cleaned
| 758 | |
| 759 | |
class TestSanitizeAttributeFiltering:
    """Verify attribute allowlist/blocklist behavior."""

    def test_strips_non_allowed_attributes(self):
        cleaned = sanitize_html('<p style="color:red" data-custom="x">text</p>')
        assert "style=" not in cleaned
        assert "data-custom=" not in cleaned
        assert "<p>" in cleaned

    def test_table_colspan_preserved(self):
        cleaned = sanitize_html('<table><tr><td colspan="2" class="wide">cell</td></tr></table>')
        assert 'colspan="2"' in cleaned

    def test_ol_start_and_type_preserved(self):
        cleaned = sanitize_html('<ol start="5" type="a"><li>item</li></ol>')
        assert 'start="5"' in cleaned
        assert 'type="a"' in cleaned

    def test_li_value_preserved(self):
        cleaned = sanitize_html('<ul><li value="3">item</li></ul>')
        assert 'value="3"' in cleaned

    def test_heading_id_preserved(self):
        cleaned = sanitize_html('<h2 id="section-1" class="title">Title</h2>')
        assert 'id="section-1"' in cleaned
        assert 'class="title"' in cleaned

    def test_a_name_attribute_preserved(self):
        cleaned = sanitize_html('<a name="anchor">anchor</a>')
        assert 'name="anchor"' in cleaned

    def test_boolean_attribute_no_value(self):
        # Valueless boolean attributes must round-trip without '=""'.
        cleaned = sanitize_html("<details open><summary>info</summary>body</details>")
        assert "<details open>" in cleaned
| 801 | |
| 802 | |
class TestSanitizeUrlSchemes:
    """Test URL protocol validation in href/src attributes.

    Exercises _is_safe_url directly: allowlisted schemes (http/https/mailto/ftp)
    and relative/fragment/empty URLs pass; script-bearing schemes are rejected
    even with control-character obfuscation or mixed case.
    """

    def test_http_allowed(self):
        assert _is_safe_url("http://example.com") is True

    def test_https_allowed(self):
        assert _is_safe_url("https://example.com") is True

    def test_mailto_allowed(self):
        assert _is_safe_url("mailto:[email protected]") is True

    def test_ftp_allowed(self):
        assert _is_safe_url("ftp://files.example.com/doc.txt") is True

    def test_javascript_blocked(self):
        assert _is_safe_url("javascript:alert(1)") is False

    def test_vbscript_blocked(self):
        assert _is_safe_url("vbscript:MsgBox") is False

    def test_data_blocked(self):
        assert _is_safe_url("data:text/html,<script>alert(1)</script>") is False

    def test_entity_encoded_javascript_blocked(self):
        """HTML entity encoding should not bypass protocol check.

        NOTE(review): the payload below is the plain string — identical to
        test_javascript_blocked. The entity-encoded form this docstring
        describes (e.g. ``&#106;avascript:``) appears to have been lost;
        confirm against history and restore the encoded payload, or drop
        this duplicate test.
        """
        assert _is_safe_url("javascript:alert(1)") is False

    def test_tab_in_protocol_blocked(self):
        """Tabs injected in the protocol name should be stripped before checking."""
        assert _is_safe_url("jav\tascript:alert(1)") is False

    def test_cr_in_protocol_blocked(self):
        assert _is_safe_url("java\rscript:alert(1)") is False

    def test_newline_in_protocol_blocked(self):
        assert _is_safe_url("java\nscript:alert(1)") is False

    def test_null_byte_in_protocol_blocked(self):
        assert _is_safe_url("java\x00script:alert(1)") is False

    def test_fragment_only_allowed(self):
        assert _is_safe_url("#section") is True

    def test_relative_url_allowed(self):
        assert _is_safe_url("/page/about") is True

    def test_empty_url_allowed(self):
        assert _is_safe_url("") is True

    def test_mixed_case_protocol_blocked(self):
        assert _is_safe_url("JaVaScRiPt:alert(1)") is False
| 855 | |
| 856 | |
class TestSanitizeHrefSrcReplacement:
    """Verify that unsafe URLs in href/src are replaced with '#'."""

    def test_javascript_href_neutralized(self):
        cleaned = sanitize_html('<a href="javascript:alert(1)">link</a>')
        assert 'href="#"' in cleaned
        assert "javascript" not in cleaned

    def test_data_src_neutralized(self):
        cleaned = sanitize_html('<img src="data:image/svg+xml,<script>alert(1)</script>">')
        assert 'src="#"' in cleaned

    def test_safe_href_preserved(self):
        cleaned = sanitize_html('<a href="https://example.com">link</a>')
        assert 'href="https://example.com"' in cleaned
| 875 | |
| 876 | |
class TestSanitizeDangerousTags:
    """Test the container vs void dangerous tag distinction."""

    def test_script_content_fully_removed(self):
        cleaned = sanitize_html("<p>before</p><script>var x = 1;</script><p>after</p>")
        assert "var x" not in cleaned
        assert "<p>before</p>" in cleaned
        assert "<p>after</p>" in cleaned

    def test_style_content_fully_removed(self):
        cleaned = sanitize_html("<div>ok</div><style>.evil { display:none }</style><div>fine</div>")
        assert ".evil" not in cleaned
        assert "<div>ok</div>" in cleaned

    def test_iframe_content_fully_removed(self):
        cleaned = sanitize_html('<iframe src="x">text inside iframe</iframe>')
        assert "text inside iframe" not in cleaned
        assert "<iframe" not in cleaned

    def test_nested_dangerous_tags(self):
        """Nested script tags should be fully stripped."""
        cleaned = sanitize_html("<script><script>inner</script></script><p>safe</p>")
        assert "inner" not in cleaned
        assert "<p>safe</p>" in cleaned

    def test_base_tag_stripped(self):
        assert "<base" not in sanitize_html('<base href="https://evil.com/">')

    def test_meta_tag_stripped(self):
        assert "<meta" not in sanitize_html('<meta http-equiv="refresh" content="0;url=https://evil.com">')

    def test_link_tag_stripped(self):
        assert "<link" not in sanitize_html('<link rel="stylesheet" href="https://evil.com/style.css">')
| 920 | |
| 921 | |
| 922 | class TestSanitizeTextPreservation: |
| 923 | """Verify text inside stripped tags is preserved vs. removed appropriately.""" |
| 924 | |
| 925 | def test_unknown_tag_text_preserved(self): |
| 926 | """Unknown non-dangerous tags are stripped but their text content remains.""" |
| 927 | html_in = "<custom>inner text</custom>" |
| 928 | result = sanitize_html(html_in) |
| 929 | assert "<custom>" not in result |
| 930 | assert "inner text" in result |
| 931 | |
| 932 | def test_form_content_fully_removed(self): |
| 933 | """Form is a dangerous container -- content inside should be dropped.""" |
| 934 | html_in = "<form>login prompt</form>" |
| 935 | result = sanitize_html(html_in) |
| 936 | assert "login prompt" not in result |
| 937 | |
| 938 | def test_object_content_fully_removed(self): |
| 939 | html_in = "<object>fallback text</object>" |
| 940 | result = sanitize_html(html_in) |
| 941 | assert "fallback text" not in result |
| 942 | |
| 943 | def test_embed_is_dangerous_container(self): |
| 944 | html_in = "<embed>text</embed>" |
| 945 | result = sanitize_html(html_in) |
| 946 | assert "text" not in result |
| 947 | |
| 948 | |
| 949 | class TestSanitizeEntityHandling: |
| 950 | """Verify HTML entity passthrough outside dangerous contexts.""" |
| 951 | |
| 952 | def test_named_entity_preserved(self): |
| 953 | html_in = "<p>& < ></p>" |
| 954 | result = sanitize_html(html_in) |
| 955 | assert "&" in result |
| 956 | assert "<" in result |
| 957 | assert ">" in result |
| 958 | |
| 959 | def test_numeric_entity_preserved(self): |
| 960 | html_in = "<p>© —</p>" |
| 961 | result = sanitize_html(html_in) |
| 962 | assert "©" in result |
| 963 | assert "—" in result |
| 964 | |
| 965 | def test_entities_inside_script_stripped(self): |
| 966 | html_in = "<script>& entity</script>" |
| 967 | result = sanitize_html(html_in) |
| 968 | assert "&" not in result |
| 969 | |
| 970 | |
| 971 | class TestSanitizeComments: |
| 972 | def test_html_comments_stripped(self): |
| 973 | html_in = "<p>before</p><!-- secret comment --><p>after</p>" |
| 974 | result = sanitize_html(html_in) |
| 975 | assert "secret comment" not in result |
| 976 | assert "<!--" not in result |
| 977 | assert "<p>before</p>" in result |
| 978 | assert "<p>after</p>" in result |
| 979 | |
| 980 | def test_conditional_comment_stripped(self): |
| 981 | html_in = "<!--[if IE]>evil<![endif]--><p>safe</p>" |
| 982 | result = sanitize_html(html_in) |
| 983 | assert "evil" not in result |
| 984 | assert "<p>safe</p>" in result |
| 985 | |
| 986 | |
| 987 | class TestSanitizeSVG: |
| 988 | """SVG support for Pikchr diagrams.""" |
| 989 | |
| 990 | def test_svg_with_allowed_attrs(self): |
| 991 | html_in = ( |
| 992 | '<svg viewBox="0 0 200 200" xmlns="http://www.w3.org/2000/svg"><rect x="10" y="10" width="80" height="80" fill="blue"/></svg>' |
| 993 | ) |
| 994 | result = sanitize_html(html_in) |
| 995 | assert "<svg" in result |
| 996 | assert "<rect" in result |
| 997 | assert 'fill="blue"' in result |
| 998 | |
| 999 | def test_svg_strips_script_inside(self): |
| 1000 | html_in = '<svg><script>alert(1)</script><circle cx="50" cy="50" r="40"/></svg>' |
| 1001 | result = sanitize_html(html_in) |
| 1002 | assert "<script" not in result |
| 1003 | assert "alert" not in result |
| 1004 | assert "<circle" in result |
| 1005 | |
| 1006 | def test_svg_strips_event_handler(self): |
| 1007 | html_in = '<svg onload="alert(1)"><circle cx="50" cy="50" r="40"/></svg>' |
| 1008 | result = sanitize_html(html_in) |
| 1009 | assert "onload" not in result |
| 1010 | assert "<circle" in result |
| 1011 | |
| 1012 | def test_svg_path_preserved(self): |
| 1013 | html_in = '<svg><path d="M10 10 L90 90" stroke="black" stroke-width="2"/></svg>' |
| 1014 | result = sanitize_html(html_in) |
| 1015 | assert "<path" in result |
| 1016 | assert 'stroke="black"' in result |
| 1017 | |
| 1018 | def test_svg_text_element(self): |
| 1019 | html_in = '<svg><text x="10" y="20" font-size="14" fill="black">Label</text></svg>' |
| 1020 | result = sanitize_html(html_in) |
| 1021 | assert "<text" in result |
| 1022 | assert "Label" in result |
| 1023 | |
| 1024 | def test_svg_g_transform(self): |
| 1025 | html_in = '<svg><g transform="translate(10,20)"><circle cx="0" cy="0" r="5"/></g></svg>' |
| 1026 | result = sanitize_html(html_in) |
| 1027 | assert "<g" in result |
| 1028 | assert 'transform="translate(10,20)"' in result |
| 1029 | |
| 1030 | |
| 1031 | class TestSanitizeAttributeEscaping: |
| 1032 | """Verify attribute values are properly escaped in output.""" |
| 1033 | |
| 1034 | def test_ampersand_in_href_escaped(self): |
| 1035 | html_in = '<a href="https://example.com?a=1&b=2">link</a>' |
| 1036 | result = sanitize_html(html_in) |
| 1037 | assert "&" in result |
| 1038 | |
| 1039 | def test_quote_in_attribute_escaped(self): |
| 1040 | html_in = '<a href="https://example.com" title="a "quoted" title">link</a>' |
| 1041 | result = sanitize_html(html_in) |
| 1042 | assert """ in result or """ in result |
| 1043 | |
| 1044 | |
| 1045 | class TestSanitizeSelfClosingTags: |
| 1046 | """Handle self-closing (void) tags.""" |
| 1047 | |
| 1048 | def test_br_self_closing(self): |
| 1049 | html_in = "line1<br/>line2" |
| 1050 | result = sanitize_html(html_in) |
| 1051 | assert "<br>" in result |
| 1052 | assert "line1" in result |
| 1053 | assert "line2" in result |
| 1054 | |
| 1055 | def test_img_self_closing_with_attrs(self): |
| 1056 | html_in = '<img src="photo.jpg" alt="A photo"/>' |
| 1057 | result = sanitize_html(html_in) |
| 1058 | assert 'src="photo.jpg"' in result |
| 1059 | assert 'alt="A photo"' in result |
+1591
| --- a/tests/test_tasks_and_accounts.py | ||
| +++ b/tests/test_tasks_and_accounts.py | ||
| @@ -0,0 +1,1591 @@ | ||
| 1 | +"""Tests for fossil/tasks.py and accounts/views.py uncovered lines. | |
| 2 | + | |
| 3 | +Targets: | |
| 4 | + - fossil/tasks.py (33% -> higher): sync_metadata, create_snapshot, | |
| 5 | + check_upstream, run_git_sync, dispatch_notifications, | |
| 6 | + sync_tickets_to_github, sync_wiki_to_github | |
| 7 | + - accounts/views.py (77% -> higher): _sanitize_ssh_key, _verify_turnstile, | |
| 8 | + login turnstile flow, ssh key CRUD, notification prefs HTMX, | |
| 9 | + profile_token_create edge cases | |
| 10 | +""" | |
| 11 | + | |
| 12 | +from datetime import UTC, datetime | |
| 13 | +from unittest.mock import MagicMock, PropertyMock, patch | |
| 14 | + | |
| 15 | +import pytest | |
| 16 | + | |
| 17 | +from fossil.models import FossilRepository, FossilSnapshot | |
| 18 | +from fossil.notifications import Notification, NotificationPreference, ProjectWatch | |
| 19 | +from fossil.reader import TicketEntry, TimelineEntry, WikiPage | |
| 20 | +from fossil.sync_models import GitMirror, SyncLog, TicketSyncMapping, WikiSyncMapping | |
| 21 | +from fossil.webhooks import Webhook, WebhookDelivery | |
| 22 | + | |
| 23 | +# --------------------------------------------------------------------------- | |
| 24 | +# Helpers | |
| 25 | +# --------------------------------------------------------------------------- | |
| 26 | + | |
| 27 | +# Reusable patch that makes FossilRepository.exists_on_disk return True | |
| 28 | +_disk_exists = patch( | |
| 29 | + "fossil.models.FossilRepository.exists_on_disk", | |
| 30 | + new_callable=lambda: property(lambda self: True), | |
| 31 | +) | |
| 32 | + | |
| 33 | + | |
| 34 | +def _make_reader_mock(**methods): | |
| 35 | + """Create a context-manager-compatible FossilReader mock.""" | |
| 36 | + mock_cls = MagicMock() | |
| 37 | + instance = MagicMock() | |
| 38 | + mock_cls.return_value = instance | |
| 39 | + instance.__enter__ = MagicMock(return_value=instance) | |
| 40 | + instance.__exit__ = MagicMock(return_value=False) | |
| 41 | + for name, val in methods.items(): | |
| 42 | + getattr(instance, name).return_value = val | |
| 43 | + return mock_cls | |
| 44 | + | |
| 45 | + | |
| 46 | +def _make_timeline_entry(**overrides): | |
| 47 | + defaults = { | |
| 48 | + "rid": 1, | |
| 49 | + "uuid": "abc123def456", | |
| 50 | + "event_type": "ci", | |
| 51 | + "timestamp": datetime.now(UTC), | |
| 52 | + "user": "dev", | |
| 53 | + "comment": "fix typo", | |
| 54 | + "branch": "trunk", | |
| 55 | + } | |
| 56 | + defaults.update(overrides) | |
| 57 | + return TimelineEntry(**defaults) | |
| 58 | + | |
| 59 | + | |
| 60 | +def _make_ticket(**overrides): | |
| 61 | + defaults = { | |
| 62 | + "uuid": "ticket-uuid-001", | |
| 63 | + "title": "Bug report", | |
| 64 | + "status": "open", | |
| 65 | + "type": "bug", | |
| 66 | + "created": datetime.now(UTC), | |
| 67 | + "owner": "dev", | |
| 68 | + "body": "Something is broken", | |
| 69 | + "priority": "high", | |
| 70 | + "severity": "critical", | |
| 71 | + } | |
| 72 | + defaults.update(overrides) | |
| 73 | + return TicketEntry(**defaults) | |
| 74 | + | |
| 75 | + | |
| 76 | +def _make_wiki_page(**overrides): | |
| 77 | + defaults = { | |
| 78 | + "name": "Home", | |
| 79 | + "content": "# Welcome", | |
| 80 | + "last_modified": datetime.now(UTC), | |
| 81 | + "user": "dev", | |
| 82 | + } | |
| 83 | + defaults.update(overrides) | |
| 84 | + return WikiPage(**defaults) | |
| 85 | + | |
| 86 | + | |
| 87 | +# --------------------------------------------------------------------------- | |
| 88 | +# Fixtures | |
| 89 | +# --------------------------------------------------------------------------- | |
| 90 | + | |
| 91 | + | |
| 92 | +@pytest.fixture | |
| 93 | +def fossil_repo_obj(sample_project): | |
| 94 | + """Return the auto-created FossilRepository for sample_project.""" | |
| 95 | + return FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True) | |
| 96 | + | |
| 97 | + | |
| 98 | +@pytest.fixture | |
| 99 | +def mirror(fossil_repo_obj, admin_user): | |
| 100 | + return GitMirror.objects.create( | |
| 101 | + repository=fossil_repo_obj, | |
| 102 | + git_remote_url="https://github.com/testorg/testrepo.git", | |
| 103 | + auth_method="token", | |
| 104 | + auth_credential="ghp_testtoken123", | |
| 105 | + sync_direction="push", | |
| 106 | + sync_mode="scheduled", | |
| 107 | + sync_tickets=False, | |
| 108 | + sync_wiki=False, | |
| 109 | + created_by=admin_user, | |
| 110 | + ) | |
| 111 | + | |
| 112 | + | |
| 113 | +@pytest.fixture | |
| 114 | +def webhook(fossil_repo_obj, admin_user): | |
| 115 | + return Webhook.objects.create( | |
| 116 | + repository=fossil_repo_obj, | |
| 117 | + url="https://hooks.example.com/test", | |
| 118 | + secret="test-secret", | |
| 119 | + events="all", | |
| 120 | + is_active=True, | |
| 121 | + created_by=admin_user, | |
| 122 | + ) | |
| 123 | + | |
| 124 | + | |
| 125 | +# =================================================================== | |
| 126 | +# fossil/tasks.py -- sync_repository_metadata | |
| 127 | +# =================================================================== | |
| 128 | + | |
| 129 | + | |
| 130 | +@pytest.mark.django_db | |
| 131 | +class TestSyncRepositoryMetadata: | |
| 132 | + """Test the sync_metadata periodic task.""" | |
| 133 | + | |
| 134 | + def test_updates_metadata_from_reader(self, fossil_repo_obj): | |
| 135 | + """Task reads the .fossil file and updates checkin_count, file_size, project_code.""" | |
| 136 | + from fossil.tasks import sync_repository_metadata | |
| 137 | + | |
| 138 | + timeline_entry = _make_timeline_entry() | |
| 139 | + reader_mock = _make_reader_mock( | |
| 140 | + get_checkin_count=42, | |
| 141 | + get_timeline=[timeline_entry], | |
| 142 | + get_project_code="abc123project", | |
| 143 | + ) | |
| 144 | + | |
| 145 | + fake_stat = MagicMock() | |
| 146 | + fake_stat.st_size = 98765 | |
| 147 | + | |
| 148 | + with ( | |
| 149 | + _disk_exists, | |
| 150 | + patch("fossil.reader.FossilReader", reader_mock), | |
| 151 | + patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, | |
| 152 | + ): | |
| 153 | + mock_path.return_value = MagicMock() | |
| 154 | + mock_path.return_value.stat.return_value = fake_stat | |
| 155 | + | |
| 156 | + sync_repository_metadata() | |
| 157 | + | |
| 158 | + fossil_repo_obj.refresh_from_db() | |
| 159 | + assert fossil_repo_obj.checkin_count == 42 | |
| 160 | + assert fossil_repo_obj.file_size_bytes == 98765 | |
| 161 | + assert fossil_repo_obj.fossil_project_code == "abc123project" | |
| 162 | + assert fossil_repo_obj.last_checkin_at == timeline_entry.timestamp | |
| 163 | + | |
| 164 | + def test_skips_repo_not_on_disk(self, fossil_repo_obj): | |
| 165 | + """Repos that don't exist on disk should be skipped without error.""" | |
| 166 | + from fossil.tasks import sync_repository_metadata | |
| 167 | + | |
| 168 | + with patch( | |
| 169 | + "fossil.models.FossilRepository.exists_on_disk", | |
| 170 | + new_callable=lambda: property(lambda self: False), | |
| 171 | + ): | |
| 172 | + # Should complete without error | |
| 173 | + sync_repository_metadata() | |
| 174 | + | |
| 175 | + fossil_repo_obj.refresh_from_db() | |
| 176 | + assert fossil_repo_obj.checkin_count == 0 # unchanged | |
| 177 | + | |
| 178 | + def test_handles_empty_timeline(self, fossil_repo_obj): | |
| 179 | + """When timeline is empty, last_checkin_at stays None.""" | |
| 180 | + from fossil.tasks import sync_repository_metadata | |
| 181 | + | |
| 182 | + reader_mock = _make_reader_mock( | |
| 183 | + get_checkin_count=0, | |
| 184 | + get_timeline=[], | |
| 185 | + get_project_code="proj-code", | |
| 186 | + ) | |
| 187 | + | |
| 188 | + fake_stat = MagicMock() | |
| 189 | + fake_stat.st_size = 1024 | |
| 190 | + | |
| 191 | + with ( | |
| 192 | + _disk_exists, | |
| 193 | + patch("fossil.reader.FossilReader", reader_mock), | |
| 194 | + patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, | |
| 195 | + ): | |
| 196 | + mock_path.return_value = MagicMock() | |
| 197 | + mock_path.return_value.stat.return_value = fake_stat | |
| 198 | + | |
| 199 | + sync_repository_metadata() | |
| 200 | + | |
| 201 | + fossil_repo_obj.refresh_from_db() | |
| 202 | + assert fossil_repo_obj.last_checkin_at is None | |
| 203 | + | |
| 204 | + def test_handles_reader_exception(self, fossil_repo_obj): | |
| 205 | + """If FossilReader raises, the task logs and moves on.""" | |
| 206 | + from fossil.tasks import sync_repository_metadata | |
| 207 | + | |
| 208 | + reader_mock = MagicMock(side_effect=Exception("corrupt db")) | |
| 209 | + | |
| 210 | + with ( | |
| 211 | + _disk_exists, | |
| 212 | + patch("fossil.reader.FossilReader", reader_mock), | |
| 213 | + patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, | |
| 214 | + ): | |
| 215 | + mock_path.return_value = MagicMock() | |
| 216 | + mock_path.return_value.stat.side_effect = Exception("stat failed") | |
| 217 | + | |
| 218 | + # Should not raise | |
| 219 | + sync_repository_metadata() | |
| 220 | + | |
| 221 | + | |
| 222 | +# =================================================================== | |
| 223 | +# fossil/tasks.py -- create_snapshot | |
| 224 | +# =================================================================== | |
| 225 | + | |
| 226 | + | |
| 227 | +@pytest.mark.django_db | |
| 228 | +class TestCreateSnapshot: | |
| 229 | + """Test the create_snapshot task.""" | |
| 230 | + | |
| 231 | + def _mock_config(self, store_in_db=True): | |
| 232 | + """Build a constance config mock with FOSSIL_STORE_IN_DB set.""" | |
| 233 | + cfg = MagicMock() | |
| 234 | + cfg.FOSSIL_STORE_IN_DB = store_in_db | |
| 235 | + return cfg | |
| 236 | + | |
| 237 | + def test_creates_snapshot_when_enabled(self, fossil_repo_obj, tmp_path, settings): | |
| 238 | + """Snapshot is created when FOSSIL_STORE_IN_DB is True.""" | |
| 239 | + from fossil.tasks import create_snapshot | |
| 240 | + | |
| 241 | + # Ensure default file storage is configured for the test | |
| 242 | + settings.STORAGES = { | |
| 243 | + **settings.STORAGES, | |
| 244 | + "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"}, | |
| 245 | + } | |
| 246 | + settings.MEDIA_ROOT = str(tmp_path / "media") | |
| 247 | + | |
| 248 | + # Write a fake fossil file | |
| 249 | + fossil_file = tmp_path / "test.fossil" | |
| 250 | + fossil_file.write_bytes(b"FAKE FOSSIL DATA 12345") | |
| 251 | + | |
| 252 | + with ( | |
| 253 | + patch("constance.config", self._mock_config(store_in_db=True)), | |
| 254 | + patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock, return_value=fossil_file), | |
| 255 | + _disk_exists, | |
| 256 | + ): | |
| 257 | + create_snapshot(fossil_repo_obj.pk, note="manual backup") | |
| 258 | + | |
| 259 | + snapshot = FossilSnapshot.objects.filter(repository=fossil_repo_obj).first() | |
| 260 | + assert snapshot is not None | |
| 261 | + assert snapshot.note == "manual backup" | |
| 262 | + assert snapshot.file_size_bytes == len(b"FAKE FOSSIL DATA 12345") | |
| 263 | + assert snapshot.fossil_hash # should be a sha256 hex string | |
| 264 | + assert len(snapshot.fossil_hash) == 64 | |
| 265 | + | |
| 266 | + def test_skips_when_store_in_db_disabled(self, fossil_repo_obj): | |
| 267 | + """No snapshot created when FOSSIL_STORE_IN_DB is False.""" | |
| 268 | + from fossil.tasks import create_snapshot | |
| 269 | + | |
| 270 | + with patch("constance.config", self._mock_config(store_in_db=False)): | |
| 271 | + create_snapshot(fossil_repo_obj.pk, note="should not exist") | |
| 272 | + | |
| 273 | + assert FossilSnapshot.objects.filter(repository=fossil_repo_obj).count() == 0 | |
| 274 | + | |
| 275 | + def test_skips_for_nonexistent_repo(self): | |
| 276 | + """Returns early for a repository ID that doesn't exist.""" | |
| 277 | + from fossil.tasks import create_snapshot | |
| 278 | + | |
| 279 | + with patch("constance.config", self._mock_config(store_in_db=True)): | |
| 280 | + # Should not raise | |
| 281 | + create_snapshot(99999, note="orphan") | |
| 282 | + | |
| 283 | + assert FossilSnapshot.objects.count() == 0 | |
| 284 | + | |
| 285 | + def test_skips_when_not_on_disk(self, fossil_repo_obj): | |
| 286 | + """Returns early when the file doesn't exist on disk.""" | |
| 287 | + from fossil.tasks import create_snapshot | |
| 288 | + | |
| 289 | + with ( | |
| 290 | + patch("constance.config", self._mock_config(store_in_db=True)), | |
| 291 | + patch( | |
| 292 | + "fossil.models.FossilRepository.exists_on_disk", | |
| 293 | + new_callable=lambda: property(lambda self: False), | |
| 294 | + ), | |
| 295 | + ): | |
| 296 | + create_snapshot(fossil_repo_obj.pk) | |
| 297 | + | |
| 298 | + assert FossilSnapshot.objects.filter(repository=fossil_repo_obj).count() == 0 | |
| 299 | + | |
| 300 | + def test_skips_duplicate_hash(self, fossil_repo_obj, tmp_path, admin_user): | |
| 301 | + """If latest snapshot has the same hash, no new snapshot is created.""" | |
| 302 | + import hashlib | |
| 303 | + | |
| 304 | + from fossil.tasks import create_snapshot | |
| 305 | + | |
| 306 | + fossil_file = tmp_path / "test.fossil" | |
| 307 | + data = b"SAME DATA TWICE" | |
| 308 | + fossil_file.write_bytes(data) | |
| 309 | + sha = hashlib.sha256(data).hexdigest() | |
| 310 | + | |
| 311 | + # Create an existing snapshot with the same hash | |
| 312 | + FossilSnapshot.objects.create( | |
| 313 | + repository=fossil_repo_obj, | |
| 314 | + file_size_bytes=len(data), | |
| 315 | + fossil_hash=sha, | |
| 316 | + note="previous", | |
| 317 | + created_by=admin_user, | |
| 318 | + ) | |
| 319 | + | |
| 320 | + with ( | |
| 321 | + patch("constance.config", self._mock_config(store_in_db=True)), | |
| 322 | + patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock, return_value=fossil_file), | |
| 323 | + _disk_exists, | |
| 324 | + ): | |
| 325 | + create_snapshot(fossil_repo_obj.pk, note="duplicate check") | |
| 326 | + | |
| 327 | + # Still only one snapshot | |
| 328 | + assert FossilSnapshot.objects.filter(repository=fossil_repo_obj).count() == 1 | |
| 329 | + | |
| 330 | + | |
| 331 | +# =================================================================== | |
| 332 | +# fossil/tasks.py -- check_upstream_updates | |
| 333 | +# =================================================================== | |
| 334 | + | |
| 335 | + | |
| 336 | +@pytest.mark.django_db | |
| 337 | +class TestCheckUpstreamUpdates: | |
| 338 | + """Test the check_upstream periodic task.""" | |
| 339 | + | |
| 340 | + def test_pulls_and_updates_metadata_when_artifacts_received(self, fossil_repo_obj): | |
| 341 | + """When upstream has new artifacts, metadata is updated after pull.""" | |
| 342 | + from fossil.tasks import check_upstream_updates | |
| 343 | + | |
| 344 | + # Give the repo a remote URL | |
| 345 | + fossil_repo_obj.remote_url = "https://fossil.example.com/repo" | |
| 346 | + fossil_repo_obj.save(update_fields=["remote_url"]) | |
| 347 | + | |
| 348 | + cli_mock = MagicMock() | |
| 349 | + cli_mock.is_available.return_value = True | |
| 350 | + cli_mock.pull.return_value = {"success": True, "artifacts_received": 5, "message": "received: 5"} | |
| 351 | + | |
| 352 | + timeline_entry = _make_timeline_entry() | |
| 353 | + reader_mock = _make_reader_mock( | |
| 354 | + get_checkin_count=50, | |
| 355 | + get_timeline=[timeline_entry], | |
| 356 | + ) | |
| 357 | + | |
| 358 | + fake_stat = MagicMock() | |
| 359 | + fake_stat.st_size = 200000 | |
| 360 | + | |
| 361 | + with ( | |
| 362 | + _disk_exists, | |
| 363 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 364 | + patch("fossil.reader.FossilReader", reader_mock), | |
| 365 | + patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, | |
| 366 | + ): | |
| 367 | + mock_path.return_value = MagicMock() | |
| 368 | + mock_path.return_value.stat.return_value = fake_stat | |
| 369 | + | |
| 370 | + check_upstream_updates() | |
| 371 | + | |
| 372 | + fossil_repo_obj.refresh_from_db() | |
| 373 | + assert fossil_repo_obj.upstream_artifacts_available == 5 | |
| 374 | + assert fossil_repo_obj.checkin_count == 50 | |
| 375 | + assert fossil_repo_obj.last_sync_at is not None | |
| 376 | + assert fossil_repo_obj.file_size_bytes == 200000 | |
| 377 | + | |
| 378 | + def test_zero_artifacts_resets_counter(self, fossil_repo_obj): | |
| 379 | + """When pull returns zero artifacts, upstream count is reset.""" | |
| 380 | + from fossil.tasks import check_upstream_updates | |
| 381 | + | |
| 382 | + fossil_repo_obj.remote_url = "https://fossil.example.com/repo" | |
| 383 | + fossil_repo_obj.upstream_artifacts_available = 10 | |
| 384 | + fossil_repo_obj.save(update_fields=["remote_url", "upstream_artifacts_available"]) | |
| 385 | + | |
| 386 | + cli_mock = MagicMock() | |
| 387 | + cli_mock.is_available.return_value = True | |
| 388 | + cli_mock.pull.return_value = {"success": True, "artifacts_received": 0, "message": "received: 0"} | |
| 389 | + | |
| 390 | + with ( | |
| 391 | + _disk_exists, | |
| 392 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 393 | + ): | |
| 394 | + check_upstream_updates() | |
| 395 | + | |
| 396 | + fossil_repo_obj.refresh_from_db() | |
| 397 | + assert fossil_repo_obj.upstream_artifacts_available == 0 | |
| 398 | + assert fossil_repo_obj.last_sync_at is not None | |
| 399 | + | |
| 400 | + def test_skips_when_fossil_not_available(self, fossil_repo_obj): | |
| 401 | + """When fossil binary is not available, task returns early.""" | |
| 402 | + from fossil.tasks import check_upstream_updates | |
| 403 | + | |
| 404 | + fossil_repo_obj.remote_url = "https://fossil.example.com/repo" | |
| 405 | + fossil_repo_obj.save(update_fields=["remote_url"]) | |
| 406 | + | |
| 407 | + cli_mock = MagicMock() | |
| 408 | + cli_mock.is_available.return_value = False | |
| 409 | + | |
| 410 | + with patch("fossil.cli.FossilCLI", return_value=cli_mock): | |
| 411 | + check_upstream_updates() | |
| 412 | + | |
| 413 | + fossil_repo_obj.refresh_from_db() | |
| 414 | + assert fossil_repo_obj.last_sync_at is None | |
| 415 | + | |
| 416 | + def test_handles_pull_exception(self, fossil_repo_obj): | |
| 417 | + """If pull raises an exception, the task logs and continues.""" | |
| 418 | + from fossil.tasks import check_upstream_updates | |
| 419 | + | |
| 420 | + fossil_repo_obj.remote_url = "https://fossil.example.com/repo" | |
| 421 | + fossil_repo_obj.save(update_fields=["remote_url"]) | |
| 422 | + | |
| 423 | + cli_mock = MagicMock() | |
| 424 | + cli_mock.is_available.return_value = True | |
| 425 | + cli_mock.pull.side_effect = Exception("network error") | |
| 426 | + | |
| 427 | + with ( | |
| 428 | + _disk_exists, | |
| 429 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 430 | + ): | |
| 431 | + # Should not raise | |
| 432 | + check_upstream_updates() | |
| 433 | + | |
| 434 | + def test_skips_repos_without_remote_url(self, fossil_repo_obj): | |
| 435 | + """Repos with empty remote_url are excluded from the queryset.""" | |
| 436 | + from fossil.tasks import check_upstream_updates | |
| 437 | + | |
| 438 | + # fossil_repo_obj.remote_url is "" by default | |
| 439 | + cli_mock = MagicMock() | |
| 440 | + cli_mock.is_available.return_value = True | |
| 441 | + | |
| 442 | + with patch("fossil.cli.FossilCLI", return_value=cli_mock): | |
| 443 | + check_upstream_updates() | |
| 444 | + | |
| 445 | + # pull should never be called since no repos have remote_url | |
| 446 | + cli_mock.pull.assert_not_called() | |
| 447 | + | |
| 448 | + | |
| 449 | +# =================================================================== | |
| 450 | +# fossil/tasks.py -- run_git_sync | |
| 451 | +# =================================================================== | |
| 452 | + | |
| 453 | + | |
| 454 | +@pytest.mark.django_db | |
| 455 | +class TestRunGitSync: | |
| 456 | + """Test the run_git_sync task for Git mirror operations.""" | |
| 457 | + | |
| 458 | + @staticmethod | |
| 459 | + def _git_config(): | |
| 460 | + cfg = MagicMock() | |
| 461 | + cfg.GIT_MIRROR_DIR = "/tmp/git-mirrors" | |
| 462 | + return cfg | |
| 463 | + | |
| 464 | + def test_successful_sync_creates_log(self, mirror, fossil_repo_obj): | |
| 465 | + """A successful git export updates the mirror and creates a success log.""" | |
| 466 | + from fossil.tasks import run_git_sync | |
| 467 | + | |
| 468 | + cli_mock = MagicMock() | |
| 469 | + cli_mock.is_available.return_value = True | |
| 470 | + cli_mock.git_export.return_value = {"success": True, "message": "Exported 10 commits"} | |
| 471 | + | |
| 472 | + with ( | |
| 473 | + _disk_exists, | |
| 474 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 475 | + patch("constance.config", self._git_config()), | |
| 476 | + ): | |
| 477 | + run_git_sync(mirror_id=mirror.pk) | |
| 478 | + | |
| 479 | + log = SyncLog.objects.get(mirror=mirror) | |
| 480 | + assert log.status == "success" | |
| 481 | + assert log.triggered_by == "manual" | |
| 482 | + assert log.completed_at is not None | |
| 483 | + | |
| 484 | + mirror.refresh_from_db() | |
| 485 | + assert mirror.last_sync_status == "success" | |
| 486 | + assert mirror.total_syncs == 1 | |
| 487 | + | |
| 488 | + def test_failed_sync_records_failure(self, mirror, fossil_repo_obj): | |
| 489 | + """A failed git export records the failure in log and mirror.""" | |
| 490 | + from fossil.tasks import run_git_sync | |
| 491 | + | |
| 492 | + cli_mock = MagicMock() | |
| 493 | + cli_mock.is_available.return_value = True | |
| 494 | + cli_mock.git_export.return_value = {"success": False, "message": "Push rejected by remote"} | |
| 495 | + | |
| 496 | + with ( | |
| 497 | + _disk_exists, | |
| 498 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 499 | + patch("constance.config", self._git_config()), | |
| 500 | + ): | |
| 501 | + run_git_sync(mirror_id=mirror.pk) | |
| 502 | + | |
| 503 | + log = SyncLog.objects.get(mirror=mirror) | |
| 504 | + assert log.status == "failed" | |
| 505 | + | |
| 506 | + mirror.refresh_from_db() | |
| 507 | + assert mirror.last_sync_status == "failed" | |
| 508 | + | |
| 509 | + def test_exception_during_sync_creates_failed_log(self, mirror, fossil_repo_obj): | |
| 510 | + """An unexpected exception during sync records a failed log.""" | |
| 511 | + from fossil.tasks import run_git_sync | |
| 512 | + | |
| 513 | + cli_mock = MagicMock() | |
| 514 | + cli_mock.is_available.return_value = True | |
| 515 | + cli_mock.git_export.side_effect = RuntimeError("subprocess crash") | |
| 516 | + | |
| 517 | + with ( | |
| 518 | + _disk_exists, | |
| 519 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 520 | + patch("constance.config", self._git_config()), | |
| 521 | + ): | |
| 522 | + run_git_sync(mirror_id=mirror.pk) | |
| 523 | + | |
| 524 | + log = SyncLog.objects.get(mirror=mirror) | |
| 525 | + assert log.status == "failed" | |
| 526 | + assert "Unexpected error" in log.message | |
| 527 | + | |
| 528 | + def test_credential_redacted_from_log(self, mirror, fossil_repo_obj): | |
| 529 | + """Auth credentials must not appear in sync log messages.""" | |
| 530 | + from fossil.tasks import run_git_sync | |
| 531 | + | |
| 532 | + token = mirror.auth_credential | |
| 533 | + cli_mock = MagicMock() | |
| 534 | + cli_mock.is_available.return_value = True | |
| 535 | + cli_mock.git_export.return_value = {"success": True, "message": f"Push to remote with {token} auth"} | |
| 536 | + | |
| 537 | + with ( | |
| 538 | + _disk_exists, | |
| 539 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 540 | + patch("constance.config", self._git_config()), | |
| 541 | + ): | |
| 542 | + run_git_sync(mirror_id=mirror.pk) | |
| 543 | + | |
| 544 | + log = SyncLog.objects.get(mirror=mirror) | |
| 545 | + assert token not in log.message | |
| 546 | + assert "[REDACTED]" in log.message | |
| 547 | + | |
| 548 | + def test_skips_when_fossil_not_available(self, mirror): | |
| 549 | + """When fossil binary is not available, task returns early.""" | |
| 550 | + from fossil.tasks import run_git_sync | |
| 551 | + | |
| 552 | + cli_mock = MagicMock() | |
| 553 | + cli_mock.is_available.return_value = False | |
| 554 | + | |
| 555 | + with patch("fossil.cli.FossilCLI", return_value=cli_mock): | |
| 556 | + run_git_sync(mirror_id=mirror.pk) | |
| 557 | + | |
| 558 | + assert SyncLog.objects.count() == 0 | |
| 559 | + | |
| 560 | + def test_skips_disabled_mirrors(self, fossil_repo_obj, admin_user): | |
| 561 | + """Mirrors with sync_mode='disabled' are excluded.""" | |
| 562 | + from fossil.tasks import run_git_sync | |
| 563 | + | |
| 564 | + disabled_mirror = GitMirror.objects.create( | |
| 565 | + repository=fossil_repo_obj, | |
| 566 | + git_remote_url="https://github.com/test/disabled.git", | |
| 567 | + sync_mode="disabled", | |
| 568 | + created_by=admin_user, | |
| 569 | + ) | |
| 570 | + | |
| 571 | + cli_mock = MagicMock() | |
| 572 | + cli_mock.is_available.return_value = True | |
| 573 | + | |
| 574 | + with ( | |
| 575 | + _disk_exists, | |
| 576 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 577 | + patch("constance.config", self._git_config()), | |
| 578 | + ): | |
| 579 | + run_git_sync() | |
| 580 | + | |
| 581 | + assert SyncLog.objects.filter(mirror=disabled_mirror).count() == 0 | |
| 582 | + | |
| 583 | + def test_chains_ticket_and_wiki_sync_when_enabled(self, mirror, fossil_repo_obj): | |
| 584 | + """Successful sync chains ticket/wiki sync tasks when enabled.""" | |
| 585 | + from fossil.tasks import run_git_sync | |
| 586 | + | |
| 587 | + mirror.sync_tickets = True | |
| 588 | + mirror.sync_wiki = True | |
| 589 | + mirror.save(update_fields=["sync_tickets", "sync_wiki"]) | |
| 590 | + | |
| 591 | + cli_mock = MagicMock() | |
| 592 | + cli_mock.is_available.return_value = True | |
| 593 | + cli_mock.git_export.return_value = {"success": True, "message": "ok"} | |
| 594 | + | |
| 595 | + with ( | |
| 596 | + _disk_exists, | |
| 597 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 598 | + patch("constance.config", self._git_config()), | |
| 599 | + patch("fossil.tasks.sync_tickets_to_github") as mock_tickets, | |
| 600 | + patch("fossil.tasks.sync_wiki_to_github") as mock_wiki, | |
| 601 | + ): | |
| 602 | + run_git_sync(mirror_id=mirror.pk) | |
| 603 | + | |
| 604 | + mock_tickets.delay.assert_called_once_with(mirror.id) | |
| 605 | + mock_wiki.delay.assert_called_once_with(mirror.id) | |
| 606 | + | |
| 607 | + def test_schedule_triggered_by(self, mirror, fossil_repo_obj): | |
| 608 | + """When called without mirror_id, triggered_by is 'schedule'.""" | |
| 609 | + from fossil.tasks import run_git_sync | |
| 610 | + | |
| 611 | + cli_mock = MagicMock() | |
| 612 | + cli_mock.is_available.return_value = True | |
| 613 | + cli_mock.git_export.return_value = {"success": True, "message": "ok"} | |
| 614 | + | |
| 615 | + with ( | |
| 616 | + _disk_exists, | |
| 617 | + patch("fossil.cli.FossilCLI", return_value=cli_mock), | |
| 618 | + patch("constance.config", self._git_config()), | |
| 619 | + ): | |
| 620 | + run_git_sync() # no mirror_id | |
| 621 | + | |
| 622 | + log = SyncLog.objects.get(mirror=mirror) | |
| 623 | + assert log.triggered_by == "schedule" | |
| 624 | + | |
| 625 | + | |
| 626 | +# =================================================================== | |
| 627 | +# fossil/tasks.py -- dispatch_notifications | |
| 628 | +# =================================================================== | |
| 629 | + | |
| 630 | + | |
@pytest.mark.django_db
class TestDispatchNotifications:
    """Exercise the dispatch_notifications periodic task."""

    def test_creates_notifications_for_recent_events(self, fossil_repo_obj, sample_project, admin_user):
        """Recent timeline events create notifications for project watchers."""
        from fossil.tasks import dispatch_notifications

        # Register admin_user as a watcher with immediate email delivery.
        ProjectWatch.objects.create(
            project=sample_project,
            user=admin_user,
            email_enabled=True,
            created_by=admin_user,
        )
        NotificationPreference.objects.create(user=admin_user, delivery_mode="immediate")

        entry = _make_timeline_entry(
            event_type="ci",
            comment="Added new feature",
            user="dev",
        )
        fake_reader = _make_reader_mock(get_timeline=[entry])

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("django.core.mail.send_mail"),
            patch("django.template.loader.render_to_string", return_value="<html>test</html>"),
        ):
            dispatch_notifications()

        created = Notification.objects.filter(user=admin_user, project=sample_project).first()
        assert created is not None
        assert "Added new feature" in created.title or "dev" in created.title

    def test_skips_when_no_watched_projects(self, fossil_repo_obj):
        """Task returns early when nobody is watching any projects."""
        from fossil.tasks import dispatch_notifications

        # No ProjectWatch rows exist, so the task completes immediately.
        dispatch_notifications()
        assert Notification.objects.count() == 0

    def test_skips_repo_not_on_disk(self, fossil_repo_obj, sample_project, admin_user):
        """Repos that don't exist on disk are skipped."""
        from fossil.tasks import dispatch_notifications

        ProjectWatch.objects.create(
            project=sample_project,
            user=admin_user,
            email_enabled=True,
            created_by=admin_user,
        )

        missing_on_disk = patch(
            "fossil.models.FossilRepository.exists_on_disk",
            new_callable=lambda: property(lambda self: False),
        )
        with missing_on_disk:
            dispatch_notifications()

        assert Notification.objects.count() == 0

    def test_handles_reader_exception(self, fossil_repo_obj, sample_project, admin_user):
        """Reader exceptions are caught and logged per-repo."""
        from fossil.tasks import dispatch_notifications

        ProjectWatch.objects.create(
            project=sample_project,
            user=admin_user,
            email_enabled=True,
            created_by=admin_user,
        )

        crashing_reader = MagicMock(side_effect=Exception("corrupt db"))

        # Per-repo failures are swallowed, so this must not raise.
        with _disk_exists, patch("fossil.reader.FossilReader", crashing_reader):
            dispatch_notifications()
| 714 | + | |
| 715 | + | |
| 716 | +# =================================================================== | |
| 717 | +# fossil/tasks.py -- sync_tickets_to_github | |
| 718 | +# =================================================================== | |
| 719 | + | |
| 720 | + | |
@pytest.mark.django_db
class TestSyncTicketsToGithub:
    """Exercise the sync_tickets_to_github task."""

    def test_creates_new_github_issues(self, mirror, fossil_repo_obj):
        """Unsynced tickets create new GitHub issues with mappings."""
        from fossil.tasks import sync_tickets_to_github

        fake_reader = _make_reader_mock(
            get_tickets=[_make_ticket(uuid="new-ticket-uuid-001")],
            get_ticket_detail=_make_ticket(uuid="new-ticket-uuid-001"),
            get_ticket_comments=[],
        )

        fake_github = MagicMock()
        fake_github.create_issue.return_value = {
            "number": 42,
            "url": "https://github.com/test/42",
            "error": "",
        }

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        mapping = TicketSyncMapping.objects.get(mirror=mirror, fossil_ticket_uuid="new-ticket-uuid-001")
        assert mapping.github_issue_number == 42

        log = SyncLog.objects.get(mirror=mirror, triggered_by="ticket_sync")
        assert log.status == "success"
        assert "1 tickets" in log.message

    def test_updates_existing_github_issue(self, mirror, fossil_repo_obj):
        """Already-synced tickets with changed status update the existing issue."""
        from fossil.tasks import sync_tickets_to_github

        # Seed a mapping that still records the old "open" status.
        TicketSyncMapping.objects.create(
            mirror=mirror,
            fossil_ticket_uuid="existing-ticket-001",
            github_issue_number=10,
            fossil_status="open",
        )

        fake_reader = _make_reader_mock(
            get_tickets=[_make_ticket(uuid="existing-ticket-001", status="closed")],
            get_ticket_detail=_make_ticket(uuid="existing-ticket-001", status="closed"),
            get_ticket_comments=[],
        )

        fake_github = MagicMock()
        fake_github.update_issue.return_value = {"success": True, "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        mapping = TicketSyncMapping.objects.get(mirror=mirror, fossil_ticket_uuid="existing-ticket-001")
        assert mapping.fossil_status == "closed"
        fake_github.update_issue.assert_called_once()

    def test_skips_already_synced_same_status(self, mirror, fossil_repo_obj):
        """Tickets already synced with the same status are skipped."""
        from fossil.tasks import sync_tickets_to_github

        TicketSyncMapping.objects.create(
            mirror=mirror,
            fossil_ticket_uuid="synced-ticket-001",
            github_issue_number=5,
            fossil_status="open",
        )

        fake_reader = _make_reader_mock(get_tickets=[_make_ticket(uuid="synced-ticket-001", status="open")])
        fake_github = MagicMock()

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        # Nothing changed, so neither create nor update was called.
        fake_github.create_issue.assert_not_called()
        fake_github.update_issue.assert_not_called()

    def test_returns_early_for_deleted_mirror(self):
        """Task exits gracefully when mirror doesn't exist."""
        from fossil.tasks import sync_tickets_to_github

        sync_tickets_to_github(99999)
        assert SyncLog.objects.count() == 0

    def test_returns_early_when_no_auth_token(self, mirror, fossil_repo_obj):
        """Task warns and exits when mirror has no auth_credential."""
        from fossil.tasks import sync_tickets_to_github

        mirror.auth_credential = ""
        mirror.save(update_fields=["auth_credential"])

        with (
            _disk_exists,
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        # Returned before SyncLog.objects.create, so no log row exists.
        assert SyncLog.objects.filter(mirror=mirror, triggered_by="ticket_sync").count() == 0

    def test_returns_early_when_url_not_parseable(self, mirror, fossil_repo_obj):
        """Task exits when git_remote_url can't be parsed to owner/repo."""
        from fossil.tasks import sync_tickets_to_github

        with (
            _disk_exists,
            patch("fossil.github_api.parse_github_repo", return_value=None),
        ):
            sync_tickets_to_github(mirror.pk)

        assert SyncLog.objects.filter(mirror=mirror, triggered_by="ticket_sync").count() == 0

    def test_handles_exception_during_sync(self, mirror, fossil_repo_obj):
        """Unexpected exceptions are caught and logged."""
        from fossil.tasks import sync_tickets_to_github

        crashing_reader = MagicMock(side_effect=Exception("reader crash"))

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", crashing_reader),
            patch("fossil.github_api.GitHubClient"),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="ticket_sync")
        assert log.status == "failed"
        assert "Unexpected error" in log.message

    def test_create_issue_error_recorded(self, mirror, fossil_repo_obj):
        """When GitHub create_issue returns an error, it's recorded in the log."""
        from fossil.tasks import sync_tickets_to_github

        fake_reader = _make_reader_mock(
            get_tickets=[_make_ticket(uuid="fail-create-001")],
            get_ticket_detail=_make_ticket(uuid="fail-create-001"),
            get_ticket_comments=[],
        )

        fake_github = MagicMock()
        fake_github.create_issue.return_value = {"number": 0, "url": "", "error": "HTTP 403: Forbidden"}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="ticket_sync")
        assert log.status == "failed"
        assert "Errors" in log.message
| 902 | + | |
| 903 | + | |
| 904 | +# =================================================================== | |
| 905 | +# fossil/tasks.py -- sync_wiki_to_github | |
| 906 | +# =================================================================== | |
| 907 | + | |
| 908 | + | |
@pytest.mark.django_db
class TestSyncWikiToGithub:
    """Exercise the sync_wiki_to_github task."""

    def test_syncs_new_wiki_pages(self, mirror, fossil_repo_obj):
        """New wiki pages are pushed to GitHub and mappings created."""
        from fossil.tasks import sync_wiki_to_github

        fake_reader = _make_reader_mock(
            get_wiki_pages=[_make_wiki_page(name="Home", content="")],
            get_wiki_page=_make_wiki_page(name="Home", content="# Home\nWelcome to the wiki."),
        )

        fake_github = MagicMock()
        fake_github.create_or_update_file.return_value = {"success": True, "sha": "abc123", "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        mapping = WikiSyncMapping.objects.get(mirror=mirror, fossil_page_name="Home")
        assert mapping.github_path == "wiki/Home.md"
        assert mapping.content_hash  # populated with a sha256 hex digest

        log = SyncLog.objects.get(mirror=mirror, triggered_by="wiki_sync")
        assert log.status == "success"
        assert "1 wiki pages" in log.message

    def test_updates_existing_page_mapping(self, mirror, fossil_repo_obj):
        """Changed content updates the existing mapping hash."""
        from fossil.github_api import content_hash
        from fossil.tasks import sync_wiki_to_github

        WikiSyncMapping.objects.create(
            mirror=mirror,
            fossil_page_name="Changelog",
            content_hash=content_hash("old content"),
            github_path="wiki/Changelog.md",
        )

        fresh_body = "# Changelog\nv2.0 release"
        fake_reader = _make_reader_mock(
            get_wiki_pages=[_make_wiki_page(name="Changelog", content="")],
            get_wiki_page=_make_wiki_page(name="Changelog", content=fresh_body),
        )

        fake_github = MagicMock()
        fake_github.create_or_update_file.return_value = {"success": True, "sha": "def456", "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        mapping = WikiSyncMapping.objects.get(mirror=mirror, fossil_page_name="Changelog")
        assert mapping.content_hash == content_hash(fresh_body)

    def test_skips_unchanged_content(self, mirror, fossil_repo_obj):
        """Pages with unchanged content hash are not re-pushed."""
        from fossil.github_api import content_hash
        from fossil.tasks import sync_wiki_to_github

        unchanged_body = "# Home\nSame content."
        WikiSyncMapping.objects.create(
            mirror=mirror,
            fossil_page_name="Home",
            content_hash=content_hash(unchanged_body),
            github_path="wiki/Home.md",
        )

        fake_reader = _make_reader_mock(
            get_wiki_pages=[_make_wiki_page(name="Home", content="")],
            get_wiki_page=_make_wiki_page(name="Home", content=unchanged_body),
        )
        fake_github = MagicMock()

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        fake_github.create_or_update_file.assert_not_called()

    def test_skips_empty_page_content(self, mirror, fossil_repo_obj):
        """Pages with empty content after stripping are skipped."""
        from fossil.tasks import sync_wiki_to_github

        fake_reader = _make_reader_mock(
            get_wiki_pages=[_make_wiki_page(name="Empty", content="")],
            get_wiki_page=_make_wiki_page(name="Empty", content="  \n  "),
        )
        fake_github = MagicMock()

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        fake_github.create_or_update_file.assert_not_called()

    def test_returns_early_for_deleted_mirror(self):
        """Task exits for nonexistent mirror."""
        from fossil.tasks import sync_wiki_to_github

        sync_wiki_to_github(99999)
        assert SyncLog.objects.count() == 0

    def test_returns_early_when_no_auth_token(self, mirror, fossil_repo_obj):
        """Task exits when no auth token available."""
        from fossil.tasks import sync_wiki_to_github

        mirror.auth_credential = ""
        mirror.save(update_fields=["auth_credential"])

        with (
            _disk_exists,
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        assert SyncLog.objects.filter(mirror=mirror, triggered_by="wiki_sync").count() == 0

    def test_handles_github_api_error(self, mirror, fossil_repo_obj):
        """GitHub API errors are recorded in the log."""
        from fossil.tasks import sync_wiki_to_github

        fake_reader = _make_reader_mock(
            get_wiki_pages=[_make_wiki_page(name="Failing", content="")],
            get_wiki_page=_make_wiki_page(name="Failing", content="# Oops"),
        )

        fake_github = MagicMock()
        fake_github.create_or_update_file.return_value = {"success": False, "sha": "", "error": "HTTP 500"}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("fossil.github_api.GitHubClient", return_value=fake_github),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="wiki_sync")
        assert log.status == "failed"
        assert "Errors" in log.message

    def test_handles_exception_during_sync(self, mirror, fossil_repo_obj):
        """Unexpected exceptions are caught and recorded."""
        from fossil.tasks import sync_wiki_to_github

        crashing_reader = MagicMock(side_effect=Exception("reader crash"))

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", crashing_reader),
            patch("fossil.github_api.GitHubClient"),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="wiki_sync")
        assert log.status == "failed"
        assert "Unexpected error" in log.message
| 1103 | + | |
| 1104 | + | |
| 1105 | +# =================================================================== | |
| 1106 | +# fossil/tasks.py -- dispatch_webhook (additional edge cases) | |
| 1107 | +# =================================================================== | |
| 1108 | + | |
| 1109 | + | |
@pytest.mark.django_db
class TestDispatchWebhookEdgeCases:
    """Edge cases for the dispatch_webhook task not covered by test_webhooks.py."""

    def test_unsafe_url_blocked_at_dispatch_time(self, webhook):
        """URLs that fail safety check at dispatch are blocked and logged."""
        from fossil.tasks import dispatch_webhook

        blocked_url = patch(
            "core.url_validation.is_safe_outbound_url",
            return_value=(False, "Private IP detected"),
        )
        with blocked_url:
            dispatch_webhook.apply(args=[webhook.pk, "checkin", {"hash": "abc"}])

        record = WebhookDelivery.objects.get(webhook=webhook)
        assert record.success is False
        assert record.response_status == 0
        assert "Blocked" in record.response_body
        assert "Private IP" in record.response_body

    def test_request_exception_creates_delivery_and_retries(self, webhook):
        """Network errors create a delivery record and trigger retry."""
        import requests as req

        from fossil.tasks import dispatch_webhook

        with (
            patch("core.url_validation.is_safe_outbound_url", return_value=(True, "")),
            patch("requests.post", side_effect=req.ConnectionError("refused")),
        ):
            dispatch_webhook.apply(args=[webhook.pk, "ticket", {"id": "123"}])

        record = WebhookDelivery.objects.filter(webhook=webhook).first()
        assert record is not None
        assert record.success is False
        assert record.response_status == 0
        assert "refused" in record.response_body
| 1144 | + | |
| 1145 | + | |
| 1146 | +# =================================================================== | |
| 1147 | +# accounts/views.py -- _sanitize_ssh_key | |
| 1148 | +# =================================================================== | |
| 1149 | + | |
| 1150 | + | |
class TestSanitizeSSHKey:
    """Unit tests for SSH key validation (no DB needed)."""

    @staticmethod
    def _run(raw):
        # Helper: call the view-level sanitizer and return its (key, error) pair.
        from accounts.views import _sanitize_ssh_key

        return _sanitize_ssh_key(raw)

    def test_rejects_key_with_newlines(self):
        sanitized, message = self._run("ssh-ed25519 AAAA key1\nssh-rsa BBBB key2")
        assert sanitized is None
        assert "Newlines" in message

    def test_rejects_key_with_carriage_return(self):
        sanitized, message = self._run("ssh-ed25519 AAAA key1\rssh-rsa BBBB key2")
        assert sanitized is None
        assert "Newlines" in message

    def test_rejects_key_with_null_byte(self):
        sanitized, message = self._run("ssh-ed25519 AAAA\x00inject")
        assert sanitized is None
        assert "null bytes" in message

    def test_rejects_empty_key(self):
        sanitized, message = self._run("   ")
        assert sanitized is None
        assert "empty" in message.lower()

    def test_rejects_wrong_part_count(self):
        sanitized, message = self._run("ssh-ed25519")
        assert sanitized is None
        assert "format" in message.lower()

    def test_rejects_too_many_parts(self):
        sanitized, message = self._run("ssh-ed25519 AAAA comment extra-part")
        assert sanitized is None
        assert "format" in message.lower()

    def test_rejects_unsupported_key_type(self):
        sanitized, message = self._run("ssh-unknown AAAA comment")
        assert sanitized is None
        assert "Unsupported" in message

    def test_rejects_bad_base64(self):
        sanitized, message = self._run("ssh-ed25519 !!!invalid comment")
        assert sanitized is None
        assert "encoding" in message.lower()

    def test_accepts_valid_ed25519_key(self):
        raw = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFakeKeyDataHere= user@host"
        sanitized, message = self._run(raw)
        assert sanitized == raw
        assert message == ""

    def test_accepts_valid_rsa_key(self):
        raw = "ssh-rsa AAAAB3NzaC1yc2EAAAAFakeBase64Data== user@host"
        sanitized, message = self._run(raw)
        assert sanitized == raw
        assert message == ""

    def test_accepts_ecdsa_key(self):
        raw = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTY= user@host"
        sanitized, message = self._run(raw)
        assert sanitized == raw
        assert message == ""

    def test_strips_whitespace(self):
        raw = "  ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFake=  "
        sanitized, message = self._run(raw)
        assert sanitized is not None
        assert sanitized == raw.strip()
| 1241 | + | |
| 1242 | + | |
| 1243 | +# =================================================================== | |
| 1244 | +# accounts/views.py -- _verify_turnstile | |
| 1245 | +# =================================================================== | |
| 1246 | + | |
| 1247 | + | |
class TestVerifyTurnstile:
    """Unit tests for Turnstile CAPTCHA verification."""

    @staticmethod
    def _turnstile_config(secret_key=""):
        # Build a constance stand-in exposing TURNSTILE_SECRET_KEY.
        fake = MagicMock()
        fake.TURNSTILE_SECRET_KEY = secret_key
        return fake

    def test_returns_false_when_no_secret_key(self):
        from accounts.views import _verify_turnstile

        # Without a configured secret, verification cannot succeed.
        with patch("constance.config", self._turnstile_config(secret_key="")):
            assert _verify_turnstile("some-token", "1.2.3.4") is False

    def test_returns_true_on_success(self):
        from accounts.views import _verify_turnstile

        ok_response = MagicMock(status_code=200)
        ok_response.json.return_value = {"success": True}

        with (
            patch("constance.config", self._turnstile_config(secret_key="secret-key")),
            patch("requests.post", return_value=ok_response),
        ):
            assert _verify_turnstile("valid-token", "1.2.3.4") is True

    def test_returns_false_on_failed_verification(self):
        from accounts.views import _verify_turnstile

        denied_response = MagicMock(status_code=200)
        denied_response.json.return_value = {"success": False}

        with (
            patch("constance.config", self._turnstile_config(secret_key="secret-key")),
            patch("requests.post", return_value=denied_response),
        ):
            assert _verify_turnstile("bad-token", "1.2.3.4") is False

    def test_returns_false_on_network_error(self):
        from accounts.views import _verify_turnstile

        with (
            patch("constance.config", self._turnstile_config(secret_key="secret-key")),
            patch("requests.post", side_effect=Exception("connection refused")),
        ):
            assert _verify_turnstile("token", "1.2.3.4") is False
| 1297 | + | |
| 1298 | + | |
| 1299 | +# =================================================================== | |
| 1300 | +# accounts/views.py -- Login Turnstile flow | |
| 1301 | +# =================================================================== | |
| 1302 | + | |
| 1303 | + | |
| 1304 | +def _login_turnstile_config(): | |
| 1305 | + cfg = MagicMock() | |
| 1306 | + cfg.TURNSTILE_ENABLED = True | |
| 1307 | + cfg.TURNSTILE_SITE_KEY = "site-key-123" | |
| 1308 | + cfg.TURNSTILE_SECRET_KEY = "secret-key" | |
| 1309 | + return cfg | |
| 1310 | + | |
| 1311 | + | |
| 1312 | +@pytest.mark.django_db | |
| 1313 | +class TestLoginTurnstile: | |
| 1314 | + """Test login view with Turnstile CAPTCHA enabled.""" | |
| 1315 | + | |
| 1316 | + def test_turnstile_error_rerenders_form(self, client, admin_user): | |
| 1317 | + """When Turnstile fails, the login form is re-rendered with error.""" | |
| 1318 | + with ( | |
| 1319 | + patch("constance.config", _login_turnstile_config()), | |
| 1320 | + patch("accounts.views._verify_turnstile", return_value=False), | |
| 1321 | + ): | |
| 1322 | + response = client.post( | |
| 1323 | + "/auth/login/", | |
| 1324 | + {"username": "admin", "password": "testpass123", "cf-turnstile-response": "bad-token"}, | |
| 1325 | + ) | |
| 1326 | + | |
| 1327 | + assert response.status_code == 200 | |
| 1328 | + assert b"login" in response.content.lower() | |
| 1329 | + | |
| 1330 | + def test_turnstile_context_passed_to_template(self, client): | |
| 1331 | + """When Turnstile is enabled, context includes turnstile_enabled and site_key.""" | |
| 1332 | + with patch("constance.config", _login_turnstile_config()): | |
| 1333 | + response = client.get("/auth/login/") | |
| 1334 | + | |
| 1335 | + assert response.status_code == 200 | |
| 1336 | + assert response.context["turnstile_enabled"] is True | |
| 1337 | + assert response.context["turnstile_site_key"] == "site-key-123" | |
| 1338 | + | |
| 1339 | + | |
| 1340 | +# =================================================================== | |
| 1341 | +# accounts/views.py -- SSH key management | |
| 1342 | +# =================================================================== | |
| 1343 | + | |
| 1344 | + | |
| 1345 | +@pytest.mark.django_db | |
| 1346 | +class TestSSHKeyViews: | |
| 1347 | + """Test SSH key list, add, and delete views.""" | |
| 1348 | + | |
| 1349 | + def test_list_ssh_keys(self, admin_client, admin_user): | |
| 1350 | + response = admin_client.get("/auth/ssh-keys/") | |
| 1351 | + assert response.status_code == 200 | |
| 1352 | + | |
| 1353 | + def test_add_valid_ssh_key(self, admin_client, admin_user): | |
| 1354 | + """Adding a valid SSH key creates the record and regenerates authorized_keys.""" | |
| 1355 | + from fossil.user_keys import UserSSHKey | |
| 1356 | + | |
| 1357 | + with patch("accounts.views._regenerate_authorized_keys"): | |
| 1358 | + response = admin_client.post( | |
| 1359 | + "/auth/ssh-keys/", | |
| 1360 | + { | |
| 1361 | + "title": "Work Laptop", | |
| 1362 | + "public_key": "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFakeKeyDataHere= user@host", | |
| 1363 | + }, | |
| 1364 | + ) | |
| 1365 | + | |
| 1366 | + assert response.status_code == 302 # redirect after success | |
| 1367 | + key = UserSSHKey.objects.get(user=admin_user, title="Work Laptop") | |
| 1368 | + assert key.key_type == "ed25519" | |
| 1369 | + assert key.fingerprint # SHA256 computed | |
| 1370 | + | |
| 1371 | + def test_add_invalid_ssh_key_shows_error(self, admin_client, admin_user): | |
| 1372 | + """Adding an invalid SSH key shows an error message.""" | |
| 1373 | + response = admin_client.post( | |
| 1374 | + "/auth/ssh-keys/", | |
| 1375 | + { | |
| 1376 | + "title": "Bad Key", | |
| 1377 | + "public_key": "not-a-real-key", | |
| 1378 | + }, | |
| 1379 | + ) | |
| 1380 | + | |
| 1381 | + assert response.status_code == 200 # re-renders form | |
| 1382 | + | |
| 1383 | + def test_add_ssh_key_with_injection_newline(self, admin_client, admin_user): | |
| 1384 | + """Keys with newlines are rejected (injection prevention).""" | |
| 1385 | + from fossil.user_keys import UserSSHKey | |
| 1386 | + | |
| 1387 | + response = admin_client.post( | |
| 1388 | + "/auth/ssh-keys/", | |
| 1389 | + { | |
| 1390 | + "title": "Injected Key", | |
| 1391 | + "public_key": "ssh-ed25519 AAAA key1\nssh-rsa BBBB key2", | |
| 1392 | + }, | |
| 1393 | + ) | |
| 1394 | + | |
| 1395 | + assert response.status_code == 200 | |
| 1396 | + assert UserSSHKey.objects.filter(user=admin_user).count() == 0 | |
| 1397 | + | |
| 1398 | + def test_delete_ssh_key(self, admin_client, admin_user): | |
| 1399 | + """Deleting an SSH key soft-deletes it and regenerates authorized_keys.""" | |
| 1400 | + from fossil.user_keys import UserSSHKey | |
| 1401 | + | |
| 1402 | + key = UserSSHKey.objects.create( | |
| 1403 | + user=admin_user, | |
| 1404 | + title="Delete Me", | |
| 1405 | + public_key="ssh-ed25519 AAAA= test", | |
| 1406 | + created_by=admin_user, | |
| 1407 | + ) | |
| 1408 | + | |
| 1409 | + with patch("accounts.views._regenerate_authorized_keys"): | |
| 1410 | + response = admin_client.post(f"/auth/ssh-keys/{key.pk}/delete/") | |
| 1411 | + | |
| 1412 | + assert response.status_code == 302 | |
| 1413 | + key.refresh_from_db() | |
| 1414 | + assert key.deleted_at is not None | |
| 1415 | + | |
| 1416 | + def test_delete_ssh_key_htmx(self, admin_client, admin_user): | |
| 1417 | + """HTMX delete returns HX-Redirect header.""" | |
| 1418 | + from fossil.user_keys import UserSSHKey | |
| 1419 | + | |
| 1420 | + key = UserSSHKey.objects.create( | |
| 1421 | + user=admin_user, | |
| 1422 | + title="HX Delete", | |
| 1423 | + public_key="ssh-ed25519 AAAA= test", | |
| 1424 | + created_by=admin_user, | |
| 1425 | + ) | |
| 1426 | + | |
| 1427 | + with patch("accounts.views._regenerate_authorized_keys"): | |
| 1428 | + response = admin_client.post( | |
| 1429 | + f"/auth/ssh-keys/{key.pk}/delete/", | |
| 1430 | + HTTP_HX_REQUEST="true", | |
| 1431 | + ) | |
| 1432 | + | |
| 1433 | + assert response.status_code == 200 | |
| 1434 | + assert response["HX-Redirect"] == "/auth/ssh-keys/" | |
| 1435 | + | |
| 1436 | + def test_delete_other_users_key_404(self, admin_client, viewer_user, admin_user): | |
| 1437 | + """Cannot delete another user's SSH key.""" | |
| 1438 | + from fossil.user_keys import UserSSHKey | |
| 1439 | + | |
| 1440 | + key = UserSSHKey.objects.create( | |
| 1441 | + user=viewer_user, | |
| 1442 | + title="Viewer Key", | |
| 1443 | + public_key="ssh-ed25519 AAAA= test", | |
| 1444 | + created_by=viewer_user, | |
| 1445 | + ) | |
| 1446 | + | |
| 1447 | + response = admin_client.post(f"/auth/ssh-keys/{key.pk}/delete/") | |
| 1448 | + assert response.status_code == 404 | |
| 1449 | + | |
| 1450 | + def test_ssh_keys_require_login(self, client): | |
| 1451 | + response = client.get("/auth/ssh-keys/") | |
| 1452 | + assert response.status_code == 302 | |
| 1453 | + assert "/auth/login/" in response.url | |
| 1454 | + | |
| 1455 | + | |
| 1456 | +# =================================================================== | |
| 1457 | +# accounts/views.py -- Notification preferences HTMX | |
| 1458 | +# =================================================================== | |
| 1459 | + | |
| 1460 | + | |
| 1461 | +@pytest.mark.django_db | |
| 1462 | +class TestNotificationPreferencesHTMX: | |
| 1463 | + """Test the HTMX return path for notification preferences.""" | |
| 1464 | + | |
| 1465 | + def test_post_htmx_returns_hx_redirect(self, admin_client, admin_user): | |
| 1466 | + """HTMX POST returns 200 with HX-Redirect header instead of 302.""" | |
| 1467 | + NotificationPreference.objects.create(user=admin_user) | |
| 1468 | + | |
| 1469 | + response = admin_client.post( | |
| 1470 | + "/auth/notifications/", | |
| 1471 | + {"delivery_mode": "weekly"}, | |
| 1472 | + HTTP_HX_REQUEST="true", | |
| 1473 | + ) | |
| 1474 | + | |
| 1475 | + assert response.status_code == 200 | |
| 1476 | + assert response["HX-Redirect"] == "/auth/notifications/" | |
| 1477 | + | |
| 1478 | + | |
| 1479 | +# =================================================================== | |
| 1480 | +# accounts/views.py -- _parse_key_type and _compute_fingerprint | |
| 1481 | +# =================================================================== | |
| 1482 | + | |
| 1483 | + | |
| 1484 | +class TestParseKeyType: | |
| 1485 | + """Unit tests for SSH key type parsing helper.""" | |
| 1486 | + | |
| 1487 | + def test_ed25519(self): | |
| 1488 | + from accounts.views import _parse_key_type | |
| 1489 | + | |
| 1490 | + assert _parse_key_type("ssh-ed25519 AAAA") == "ed25519" | |
| 1491 | + | |
| 1492 | + def test_rsa(self): | |
| 1493 | + from accounts.views import _parse_key_type | |
| 1494 | + | |
| 1495 | + assert _parse_key_type("ssh-rsa AAAA") == "rsa" | |
| 1496 | + | |
| 1497 | + def test_ecdsa_256(self): | |
| 1498 | + from accounts.views import _parse_key_type | |
| 1499 | + | |
| 1500 | + assert _parse_key_type("ecdsa-sha2-nistp256 AAAA") == "ecdsa" | |
| 1501 | + | |
| 1502 | + def test_ecdsa_384(self): | |
| 1503 | + from accounts.views import _parse_key_type | |
| 1504 | + | |
| 1505 | + assert _parse_key_type("ecdsa-sha2-nistp384 AAAA") == "ecdsa" | |
| 1506 | + | |
| 1507 | + def test_dsa(self): | |
| 1508 | + from accounts.views import _parse_key_type | |
| 1509 | + | |
| 1510 | + assert _parse_key_type("ssh-dss AAAA") == "dsa" | |
| 1511 | + | |
| 1512 | + def test_unknown_type(self): | |
| 1513 | + from accounts.views import _parse_key_type | |
| 1514 | + | |
| 1515 | + assert _parse_key_type("custom-type AAAA") == "custom-type" | |
| 1516 | + | |
| 1517 | + def test_empty_string(self): | |
| 1518 | + from accounts.views import _parse_key_type | |
| 1519 | + | |
| 1520 | + assert _parse_key_type("") == "" | |
| 1521 | + | |
| 1522 | + | |
| 1523 | +class TestComputeFingerprint: | |
| 1524 | + """Unit tests for SSH key fingerprint computation.""" | |
| 1525 | + | |
| 1526 | + def test_computes_sha256_fingerprint(self): | |
| 1527 | + from accounts.views import _compute_fingerprint | |
| 1528 | + | |
| 1529 | + # Valid base64 key data | |
| 1530 | + key = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFakeKeyDataHere= user@host" | |
| 1531 | + result = _compute_fingerprint(key) | |
| 1532 | + assert result.startswith("SHA256:") | |
| 1533 | + | |
| 1534 | + def test_invalid_base64_returns_empty(self): | |
| 1535 | + from accounts.views import _compute_fingerprint | |
| 1536 | + | |
| 1537 | + key = "ssh-ed25519 !!!notbase64 user@host" | |
| 1538 | + result = _compute_fingerprint(key) | |
| 1539 | + assert result == "" | |
| 1540 | + | |
| 1541 | + def test_single_part_returns_empty(self): | |
| 1542 | + from accounts.views import _compute_fingerprint | |
| 1543 | + | |
| 1544 | + result = _compute_fingerprint("onlyonepart") | |
| 1545 | + assert result == "" | |
| 1546 | + | |
| 1547 | + | |
| 1548 | +# =================================================================== | |
| 1549 | +# accounts/views.py -- profile_token_create scopes edge cases | |
| 1550 | +# =================================================================== | |
| 1551 | + | |
| 1552 | + | |
| 1553 | +@pytest.mark.django_db | |
| 1554 | +class TestProfileTokenCreateEdgeCases: | |
| 1555 | + """Additional edge cases for token creation.""" | |
| 1556 | + | |
| 1557 | + def test_create_admin_scope_token(self, admin_client, admin_user): | |
| 1558 | + """Admin scope is a valid scope.""" | |
| 1559 | + from accounts.models import PersonalAccessToken | |
| 1560 | + | |
| 1561 | + response = admin_client.post( | |
| 1562 | + "/auth/profile/tokens/create/", | |
| 1563 | + {"name": "Admin Token", "scopes": "read,write,admin"}, | |
| 1564 | + ) | |
| 1565 | + assert response.status_code == 200 | |
| 1566 | + token = PersonalAccessToken.objects.get(user=admin_user, name="Admin Token") | |
| 1567 | + assert "admin" in token.scopes | |
| 1568 | + assert "read" in token.scopes | |
| 1569 | + assert "write" in token.scopes | |
| 1570 | + | |
| 1571 | + def test_create_token_mixed_valid_invalid_scopes(self, admin_client, admin_user): | |
| 1572 | + """Invalid scopes are filtered out, valid ones kept.""" | |
| 1573 | + from accounts.models import PersonalAccessToken | |
| 1574 | + | |
| 1575 | + admin_client.post( | |
| 1576 | + "/auth/profile/tokens/create/", | |
| 1577 | + {"name": "Mixed Scopes", "scopes": "read,destroy,write,hack"}, | |
| 1578 | + ) | |
| 1579 | + token = PersonalAccessToken.objects.get(user=admin_user, name="Mixed Scopes") | |
| 1580 | + assert token.scopes == "read,write" | |
| 1581 | + | |
| 1582 | + def test_create_token_whitespace_scopes(self, admin_client, admin_user): | |
| 1583 | + """Scopes with extra whitespace are handled correctly.""" | |
| 1584 | + from accounts.models import PersonalAccessToken | |
| 1585 | + | |
| 1586 | + admin_client.post( | |
| 1587 | + "/auth/profile/tokens/create/", | |
| 1588 | + {"name": "Whitespace", "scopes": " read , write "}, | |
| 1589 | + ) | |
| 1590 | + token = PersonalAccessToken.objects.get(user=admin_user, name="Whitespace") | |
| 1591 | + assert token.scopes == "read,write" |
| --- a/tests/test_tasks_and_accounts.py | 
| +++ b/tests/test_tasks_and_accounts.py | 
| @@ -0,0 +1,1591 @@ | 
| 1 | """Tests for fossil/tasks.py and accounts/views.py uncovered lines. |
| 2 | |
| 3 | Targets: |
| 4 | - fossil/tasks.py (33% -> higher): sync_metadata, create_snapshot, |
| 5 | check_upstream, run_git_sync, dispatch_notifications, |
| 6 | sync_tickets_to_github, sync_wiki_to_github |
| 7 | - accounts/views.py (77% -> higher): _sanitize_ssh_key, _verify_turnstile, |
| 8 | login turnstile flow, ssh key CRUD, notification prefs HTMX, |
| 9 | profile_token_create edge cases |
| 10 | """ |
| 11 | |
| 12 | from datetime import UTC, datetime |
| 13 | from unittest.mock import MagicMock, PropertyMock, patch |
| 14 | |
| 15 | import pytest |
| 16 | |
| 17 | from fossil.models import FossilRepository, FossilSnapshot |
| 18 | from fossil.notifications import Notification, NotificationPreference, ProjectWatch |
| 19 | from fossil.reader import TicketEntry, TimelineEntry, WikiPage |
| 20 | from fossil.sync_models import GitMirror, SyncLog, TicketSyncMapping, WikiSyncMapping |
| 21 | from fossil.webhooks import Webhook, WebhookDelivery |
| 22 | |
| 23 | # --------------------------------------------------------------------------- |
| 24 | # Helpers |
| 25 | # --------------------------------------------------------------------------- |
| 26 | |
| 27 | # Reusable patch that makes FossilRepository.exists_on_disk return True |
| 28 | _disk_exists = patch( |
| 29 | "fossil.models.FossilRepository.exists_on_disk", |
| 30 | new_callable=lambda: property(lambda self: True), |
| 31 | ) |
| 32 | |
| 33 | |
| 34 | def _make_reader_mock(**methods): |
| 35 | """Create a context-manager-compatible FossilReader mock.""" |
| 36 | mock_cls = MagicMock() |
| 37 | instance = MagicMock() |
| 38 | mock_cls.return_value = instance |
| 39 | instance.__enter__ = MagicMock(return_value=instance) |
| 40 | instance.__exit__ = MagicMock(return_value=False) |
| 41 | for name, val in methods.items(): |
| 42 | getattr(instance, name).return_value = val |
| 43 | return mock_cls |
| 44 | |
| 45 | |
| 46 | def _make_timeline_entry(**overrides): |
| 47 | defaults = { |
| 48 | "rid": 1, |
| 49 | "uuid": "abc123def456", |
| 50 | "event_type": "ci", |
| 51 | "timestamp": datetime.now(UTC), |
| 52 | "user": "dev", |
| 53 | "comment": "fix typo", |
| 54 | "branch": "trunk", |
| 55 | } |
| 56 | defaults.update(overrides) |
| 57 | return TimelineEntry(**defaults) |
| 58 | |
| 59 | |
| 60 | def _make_ticket(**overrides): |
| 61 | defaults = { |
| 62 | "uuid": "ticket-uuid-001", |
| 63 | "title": "Bug report", |
| 64 | "status": "open", |
| 65 | "type": "bug", |
| 66 | "created": datetime.now(UTC), |
| 67 | "owner": "dev", |
| 68 | "body": "Something is broken", |
| 69 | "priority": "high", |
| 70 | "severity": "critical", |
| 71 | } |
| 72 | defaults.update(overrides) |
| 73 | return TicketEntry(**defaults) |
| 74 | |
| 75 | |
| 76 | def _make_wiki_page(**overrides): |
| 77 | defaults = { |
| 78 | "name": "Home", |
| 79 | "content": "# Welcome", |
| 80 | "last_modified": datetime.now(UTC), |
| 81 | "user": "dev", |
| 82 | } |
| 83 | defaults.update(overrides) |
| 84 | return WikiPage(**defaults) |
| 85 | |
| 86 | |
| 87 | # --------------------------------------------------------------------------- |
| 88 | # Fixtures |
| 89 | # --------------------------------------------------------------------------- |
| 90 | |
| 91 | |
| 92 | @pytest.fixture |
| 93 | def fossil_repo_obj(sample_project): |
| 94 | """Return the auto-created FossilRepository for sample_project.""" |
| 95 | return FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True) |
| 96 | |
| 97 | |
| 98 | @pytest.fixture |
| 99 | def mirror(fossil_repo_obj, admin_user): |
| 100 | return GitMirror.objects.create( |
| 101 | repository=fossil_repo_obj, |
| 102 | git_remote_url="https://github.com/testorg/testrepo.git", |
| 103 | auth_method="token", |
| 104 | auth_credential="ghp_testtoken123", |
| 105 | sync_direction="push", |
| 106 | sync_mode="scheduled", |
| 107 | sync_tickets=False, |
| 108 | sync_wiki=False, |
| 109 | created_by=admin_user, |
| 110 | ) |
| 111 | |
| 112 | |
| 113 | @pytest.fixture |
| 114 | def webhook(fossil_repo_obj, admin_user): |
| 115 | return Webhook.objects.create( |
| 116 | repository=fossil_repo_obj, |
| 117 | url="https://hooks.example.com/test", |
| 118 | secret="test-secret", |
| 119 | events="all", |
| 120 | is_active=True, |
| 121 | created_by=admin_user, |
| 122 | ) |
| 123 | |
| 124 | |
| 125 | # =================================================================== |
| 126 | # fossil/tasks.py -- sync_repository_metadata |
| 127 | # =================================================================== |
| 128 | |
| 129 | |
| 130 | @pytest.mark.django_db |
| 131 | class TestSyncRepositoryMetadata: |
| 132 | """Test the sync_metadata periodic task.""" |
| 133 | |
| 134 | def test_updates_metadata_from_reader(self, fossil_repo_obj): |
| 135 | """Task reads the .fossil file and updates checkin_count, file_size, project_code.""" |
| 136 | from fossil.tasks import sync_repository_metadata |
| 137 | |
| 138 | timeline_entry = _make_timeline_entry() |
| 139 | reader_mock = _make_reader_mock( |
| 140 | get_checkin_count=42, |
| 141 | get_timeline=[timeline_entry], |
| 142 | get_project_code="abc123project", |
| 143 | ) |
| 144 | |
| 145 | fake_stat = MagicMock() |
| 146 | fake_stat.st_size = 98765 |
| 147 | |
| 148 | with ( |
| 149 | _disk_exists, |
| 150 | patch("fossil.reader.FossilReader", reader_mock), |
| 151 | patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, |
| 152 | ): |
| 153 | mock_path.return_value = MagicMock() |
| 154 | mock_path.return_value.stat.return_value = fake_stat |
| 155 | |
| 156 | sync_repository_metadata() |
| 157 | |
| 158 | fossil_repo_obj.refresh_from_db() |
| 159 | assert fossil_repo_obj.checkin_count == 42 |
| 160 | assert fossil_repo_obj.file_size_bytes == 98765 |
| 161 | assert fossil_repo_obj.fossil_project_code == "abc123project" |
| 162 | assert fossil_repo_obj.last_checkin_at == timeline_entry.timestamp |
| 163 | |
| 164 | def test_skips_repo_not_on_disk(self, fossil_repo_obj): |
| 165 | """Repos that don't exist on disk should be skipped without error.""" |
| 166 | from fossil.tasks import sync_repository_metadata |
| 167 | |
| 168 | with patch( |
| 169 | "fossil.models.FossilRepository.exists_on_disk", |
| 170 | new_callable=lambda: property(lambda self: False), |
| 171 | ): |
| 172 | # Should complete without error |
| 173 | sync_repository_metadata() |
| 174 | |
| 175 | fossil_repo_obj.refresh_from_db() |
| 176 | assert fossil_repo_obj.checkin_count == 0 # unchanged |
| 177 | |
| 178 | def test_handles_empty_timeline(self, fossil_repo_obj): |
| 179 | """When timeline is empty, last_checkin_at stays None.""" |
| 180 | from fossil.tasks import sync_repository_metadata |
| 181 | |
| 182 | reader_mock = _make_reader_mock( |
| 183 | get_checkin_count=0, |
| 184 | get_timeline=[], |
| 185 | get_project_code="proj-code", |
| 186 | ) |
| 187 | |
| 188 | fake_stat = MagicMock() |
| 189 | fake_stat.st_size = 1024 |
| 190 | |
| 191 | with ( |
| 192 | _disk_exists, |
| 193 | patch("fossil.reader.FossilReader", reader_mock), |
| 194 | patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, |
| 195 | ): |
| 196 | mock_path.return_value = MagicMock() |
| 197 | mock_path.return_value.stat.return_value = fake_stat |
| 198 | |
| 199 | sync_repository_metadata() |
| 200 | |
| 201 | fossil_repo_obj.refresh_from_db() |
| 202 | assert fossil_repo_obj.last_checkin_at is None |
| 203 | |
| 204 | def test_handles_reader_exception(self, fossil_repo_obj): |
| 205 | """If FossilReader raises, the task logs and moves on.""" |
| 206 | from fossil.tasks import sync_repository_metadata |
| 207 | |
| 208 | reader_mock = MagicMock(side_effect=Exception("corrupt db")) |
| 209 | |
| 210 | with ( |
| 211 | _disk_exists, |
| 212 | patch("fossil.reader.FossilReader", reader_mock), |
| 213 | patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, |
| 214 | ): |
| 215 | mock_path.return_value = MagicMock() |
| 216 | mock_path.return_value.stat.side_effect = Exception("stat failed") |
| 217 | |
| 218 | # Should not raise |
| 219 | sync_repository_metadata() |
| 220 | |
| 221 | |
| 222 | # =================================================================== |
| 223 | # fossil/tasks.py -- create_snapshot |
| 224 | # =================================================================== |
| 225 | |
| 226 | |
| 227 | @pytest.mark.django_db |
| 228 | class TestCreateSnapshot: |
| 229 | """Test the create_snapshot task.""" |
| 230 | |
| 231 | def _mock_config(self, store_in_db=True): |
| 232 | """Build a constance config mock with FOSSIL_STORE_IN_DB set.""" |
| 233 | cfg = MagicMock() |
| 234 | cfg.FOSSIL_STORE_IN_DB = store_in_db |
| 235 | return cfg |
| 236 | |
| 237 | def test_creates_snapshot_when_enabled(self, fossil_repo_obj, tmp_path, settings): |
| 238 | """Snapshot is created when FOSSIL_STORE_IN_DB is True.""" |
| 239 | from fossil.tasks import create_snapshot |
| 240 | |
| 241 | # Ensure default file storage is configured for the test |
| 242 | settings.STORAGES = { |
| 243 | **settings.STORAGES, |
| 244 | "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"}, |
| 245 | } |
| 246 | settings.MEDIA_ROOT = str(tmp_path / "media") |
| 247 | |
| 248 | # Write a fake fossil file |
| 249 | fossil_file = tmp_path / "test.fossil" |
| 250 | fossil_file.write_bytes(b"FAKE FOSSIL DATA 12345") |
| 251 | |
| 252 | with ( |
| 253 | patch("constance.config", self._mock_config(store_in_db=True)), |
| 254 | patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock, return_value=fossil_file), |
| 255 | _disk_exists, |
| 256 | ): |
| 257 | create_snapshot(fossil_repo_obj.pk, note="manual backup") |
| 258 | |
| 259 | snapshot = FossilSnapshot.objects.filter(repository=fossil_repo_obj).first() |
| 260 | assert snapshot is not None |
| 261 | assert snapshot.note == "manual backup" |
| 262 | assert snapshot.file_size_bytes == len(b"FAKE FOSSIL DATA 12345") |
| 263 | assert snapshot.fossil_hash # should be a sha256 hex string |
| 264 | assert len(snapshot.fossil_hash) == 64 |
| 265 | |
| 266 | def test_skips_when_store_in_db_disabled(self, fossil_repo_obj): |
| 267 | """No snapshot created when FOSSIL_STORE_IN_DB is False.""" |
| 268 | from fossil.tasks import create_snapshot |
| 269 | |
| 270 | with patch("constance.config", self._mock_config(store_in_db=False)): |
| 271 | create_snapshot(fossil_repo_obj.pk, note="should not exist") |
| 272 | |
| 273 | assert FossilSnapshot.objects.filter(repository=fossil_repo_obj).count() == 0 |
| 274 | |
| 275 | def test_skips_for_nonexistent_repo(self): |
| 276 | """Returns early for a repository ID that doesn't exist.""" |
| 277 | from fossil.tasks import create_snapshot |
| 278 | |
| 279 | with patch("constance.config", self._mock_config(store_in_db=True)): |
| 280 | # Should not raise |
| 281 | create_snapshot(99999, note="orphan") |
| 282 | |
| 283 | assert FossilSnapshot.objects.count() == 0 |
| 284 | |
| 285 | def test_skips_when_not_on_disk(self, fossil_repo_obj): |
| 286 | """Returns early when the file doesn't exist on disk.""" |
| 287 | from fossil.tasks import create_snapshot |
| 288 | |
| 289 | with ( |
| 290 | patch("constance.config", self._mock_config(store_in_db=True)), |
| 291 | patch( |
| 292 | "fossil.models.FossilRepository.exists_on_disk", |
| 293 | new_callable=lambda: property(lambda self: False), |
| 294 | ), |
| 295 | ): |
| 296 | create_snapshot(fossil_repo_obj.pk) |
| 297 | |
| 298 | assert FossilSnapshot.objects.filter(repository=fossil_repo_obj).count() == 0 |
| 299 | |
| 300 | def test_skips_duplicate_hash(self, fossil_repo_obj, tmp_path, admin_user): |
| 301 | """If latest snapshot has the same hash, no new snapshot is created.""" |
| 302 | import hashlib |
| 303 | |
| 304 | from fossil.tasks import create_snapshot |
| 305 | |
| 306 | fossil_file = tmp_path / "test.fossil" |
| 307 | data = b"SAME DATA TWICE" |
| 308 | fossil_file.write_bytes(data) |
| 309 | sha = hashlib.sha256(data).hexdigest() |
| 310 | |
| 311 | # Create an existing snapshot with the same hash |
| 312 | FossilSnapshot.objects.create( |
| 313 | repository=fossil_repo_obj, |
| 314 | file_size_bytes=len(data), |
| 315 | fossil_hash=sha, |
| 316 | note="previous", |
| 317 | created_by=admin_user, |
| 318 | ) |
| 319 | |
| 320 | with ( |
| 321 | patch("constance.config", self._mock_config(store_in_db=True)), |
| 322 | patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock, return_value=fossil_file), |
| 323 | _disk_exists, |
| 324 | ): |
| 325 | create_snapshot(fossil_repo_obj.pk, note="duplicate check") |
| 326 | |
| 327 | # Still only one snapshot |
| 328 | assert FossilSnapshot.objects.filter(repository=fossil_repo_obj).count() == 1 |
| 329 | |
| 330 | |
| 331 | # =================================================================== |
| 332 | # fossil/tasks.py -- check_upstream_updates |
| 333 | # =================================================================== |
| 334 | |
| 335 | |
| 336 | @pytest.mark.django_db |
| 337 | class TestCheckUpstreamUpdates: |
| 338 | """Test the check_upstream periodic task.""" |
| 339 | |
| 340 | def test_pulls_and_updates_metadata_when_artifacts_received(self, fossil_repo_obj): |
| 341 | """When upstream has new artifacts, metadata is updated after pull.""" |
| 342 | from fossil.tasks import check_upstream_updates |
| 343 | |
| 344 | # Give the repo a remote URL |
| 345 | fossil_repo_obj.remote_url = "https://fossil.example.com/repo" |
| 346 | fossil_repo_obj.save(update_fields=["remote_url"]) |
| 347 | |
| 348 | cli_mock = MagicMock() |
| 349 | cli_mock.is_available.return_value = True |
| 350 | cli_mock.pull.return_value = {"success": True, "artifacts_received": 5, "message": "received: 5"} |
| 351 | |
| 352 | timeline_entry = _make_timeline_entry() |
| 353 | reader_mock = _make_reader_mock( |
| 354 | get_checkin_count=50, |
| 355 | get_timeline=[timeline_entry], |
| 356 | ) |
| 357 | |
| 358 | fake_stat = MagicMock() |
| 359 | fake_stat.st_size = 200000 |
| 360 | |
| 361 | with ( |
| 362 | _disk_exists, |
| 363 | patch("fossil.cli.FossilCLI", return_value=cli_mock), |
| 364 | patch("fossil.reader.FossilReader", reader_mock), |
| 365 | patch.object(type(fossil_repo_obj), "full_path", new_callable=PropertyMock) as mock_path, |
| 366 | ): |
| 367 | mock_path.return_value = MagicMock() |
| 368 | mock_path.return_value.stat.return_value = fake_stat |
| 369 | |
| 370 | check_upstream_updates() |
| 371 | |
| 372 | fossil_repo_obj.refresh_from_db() |
| 373 | assert fossil_repo_obj.upstream_artifacts_available == 5 |
| 374 | assert fossil_repo_obj.checkin_count == 50 |
| 375 | assert fossil_repo_obj.last_sync_at is not None |
| 376 | assert fossil_repo_obj.file_size_bytes == 200000 |
| 377 | |
| 378 | def test_zero_artifacts_resets_counter(self, fossil_repo_obj): |
| 379 | """When pull returns zero artifacts, upstream count is reset.""" |
| 380 | from fossil.tasks import check_upstream_updates |
| 381 | |
| 382 | fossil_repo_obj.remote_url = "https://fossil.example.com/repo" |
| 383 | fossil_repo_obj.upstream_artifacts_available = 10 |
| 384 | fossil_repo_obj.save(update_fields=["remote_url", "upstream_artifacts_available"]) |
| 385 | |
| 386 | cli_mock = MagicMock() |
| 387 | cli_mock.is_available.return_value = True |
| 388 | cli_mock.pull.return_value = {"success": True, "artifacts_received": 0, "message": "received: 0"} |
| 389 | |
| 390 | with ( |
| 391 | _disk_exists, |
| 392 | patch("fossil.cli.FossilCLI", return_value=cli_mock), |
| 393 | ): |
| 394 | check_upstream_updates() |
| 395 | |
| 396 | fossil_repo_obj.refresh_from_db() |
| 397 | assert fossil_repo_obj.upstream_artifacts_available == 0 |
| 398 | assert fossil_repo_obj.last_sync_at is not None |
| 399 | |
| 400 | def test_skips_when_fossil_not_available(self, fossil_repo_obj): |
| 401 | """When fossil binary is not available, task returns early.""" |
| 402 | from fossil.tasks import check_upstream_updates |
| 403 | |
| 404 | fossil_repo_obj.remote_url = "https://fossil.example.com/repo" |
| 405 | fossil_repo_obj.save(update_fields=["remote_url"]) |
| 406 | |
| 407 | cli_mock = MagicMock() |
| 408 | cli_mock.is_available.return_value = False |
| 409 | |
| 410 | with patch("fossil.cli.FossilCLI", return_value=cli_mock): |
| 411 | check_upstream_updates() |
| 412 | |
| 413 | fossil_repo_obj.refresh_from_db() |
| 414 | assert fossil_repo_obj.last_sync_at is None |
| 415 | |
| 416 | def test_handles_pull_exception(self, fossil_repo_obj): |
| 417 | """If pull raises an exception, the task logs and continues.""" |
| 418 | from fossil.tasks import check_upstream_updates |
| 419 | |
| 420 | fossil_repo_obj.remote_url = "https://fossil.example.com/repo" |
| 421 | fossil_repo_obj.save(update_fields=["remote_url"]) |
| 422 | |
| 423 | cli_mock = MagicMock() |
| 424 | cli_mock.is_available.return_value = True |
| 425 | cli_mock.pull.side_effect = Exception("network error") |
| 426 | |
| 427 | with ( |
| 428 | _disk_exists, |
| 429 | patch("fossil.cli.FossilCLI", return_value=cli_mock), |
| 430 | ): |
| 431 | # Should not raise |
| 432 | check_upstream_updates() |
| 433 | |
| 434 | def test_skips_repos_without_remote_url(self, fossil_repo_obj): |
| 435 | """Repos with empty remote_url are excluded from the queryset.""" |
| 436 | from fossil.tasks import check_upstream_updates |
| 437 | |
| 438 | # fossil_repo_obj.remote_url is "" by default |
| 439 | cli_mock = MagicMock() |
| 440 | cli_mock.is_available.return_value = True |
| 441 | |
| 442 | with patch("fossil.cli.FossilCLI", return_value=cli_mock): |
| 443 | check_upstream_updates() |
| 444 | |
| 445 | # pull should never be called since no repos have remote_url |
| 446 | cli_mock.pull.assert_not_called() |
| 447 | |
| 448 | |
| 449 | # =================================================================== |
| 450 | # fossil/tasks.py -- run_git_sync |
| 451 | # =================================================================== |
| 452 | |
| 453 | |
@pytest.mark.django_db
class TestRunGitSync:
    """Test the run_git_sync task for Git mirror operations."""

    @staticmethod
    def _git_config():
        """Return a constance-config stand-in exposing GIT_MIRROR_DIR."""
        cfg = MagicMock()
        cfg.GIT_MIRROR_DIR = "/tmp/git-mirrors"
        return cfg

    def test_successful_sync_creates_log(self, mirror, fossil_repo_obj):
        """A successful git export updates the mirror and creates a success log."""
        from fossil.tasks import run_git_sync

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True
        cli_mock.git_export.return_value = {"success": True, "message": "Exported 10 commits"}

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
        ):
            run_git_sync(mirror_id=mirror.pk)

        # Passing an explicit mirror_id attributes the run to a manual trigger.
        log = SyncLog.objects.get(mirror=mirror)
        assert log.status == "success"
        assert log.triggered_by == "manual"
        assert log.completed_at is not None

        mirror.refresh_from_db()
        assert mirror.last_sync_status == "success"
        assert mirror.total_syncs == 1

    def test_failed_sync_records_failure(self, mirror, fossil_repo_obj):
        """A failed git export records the failure in log and mirror."""
        from fossil.tasks import run_git_sync

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True
        cli_mock.git_export.return_value = {"success": False, "message": "Push rejected by remote"}

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
        ):
            run_git_sync(mirror_id=mirror.pk)

        log = SyncLog.objects.get(mirror=mirror)
        assert log.status == "failed"

        mirror.refresh_from_db()
        assert mirror.last_sync_status == "failed"

    def test_exception_during_sync_creates_failed_log(self, mirror, fossil_repo_obj):
        """An unexpected exception during sync records a failed log."""
        from fossil.tasks import run_git_sync

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True
        # Simulate a crash inside the export step itself.
        cli_mock.git_export.side_effect = RuntimeError("subprocess crash")

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
        ):
            run_git_sync(mirror_id=mirror.pk)

        log = SyncLog.objects.get(mirror=mirror)
        assert log.status == "failed"
        assert "Unexpected error" in log.message

    def test_credential_redacted_from_log(self, mirror, fossil_repo_obj):
        """Auth credentials must not appear in sync log messages."""
        from fossil.tasks import run_git_sync

        token = mirror.auth_credential
        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True
        # Deliberately leak the token into the CLI output message.
        cli_mock.git_export.return_value = {"success": True, "message": f"Push to remote with {token} auth"}

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
        ):
            run_git_sync(mirror_id=mirror.pk)

        # The raw token must be scrubbed and replaced by a placeholder.
        log = SyncLog.objects.get(mirror=mirror)
        assert token not in log.message
        assert "[REDACTED]" in log.message

    def test_skips_when_fossil_not_available(self, mirror):
        """When fossil binary is not available, task returns early."""
        from fossil.tasks import run_git_sync

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = False

        with patch("fossil.cli.FossilCLI", return_value=cli_mock):
            run_git_sync(mirror_id=mirror.pk)

        # Early return means no SyncLog row was ever written.
        assert SyncLog.objects.count() == 0

    def test_skips_disabled_mirrors(self, fossil_repo_obj, admin_user):
        """Mirrors with sync_mode='disabled' are excluded."""
        from fossil.tasks import run_git_sync

        disabled_mirror = GitMirror.objects.create(
            repository=fossil_repo_obj,
            git_remote_url="https://github.com/test/disabled.git",
            sync_mode="disabled",
            created_by=admin_user,
        )

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
        ):
            run_git_sync()

        assert SyncLog.objects.filter(mirror=disabled_mirror).count() == 0

    def test_chains_ticket_and_wiki_sync_when_enabled(self, mirror, fossil_repo_obj):
        """Successful sync chains ticket/wiki sync tasks when enabled."""
        from fossil.tasks import run_git_sync

        mirror.sync_tickets = True
        mirror.sync_wiki = True
        mirror.save(update_fields=["sync_tickets", "sync_wiki"])

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True
        cli_mock.git_export.return_value = {"success": True, "message": "ok"}

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
            patch("fossil.tasks.sync_tickets_to_github") as mock_tickets,
            patch("fossil.tasks.sync_wiki_to_github") as mock_wiki,
        ):
            run_git_sync(mirror_id=mirror.pk)

        # Follow-up tasks are queued asynchronously via .delay().
        mock_tickets.delay.assert_called_once_with(mirror.id)
        mock_wiki.delay.assert_called_once_with(mirror.id)

    def test_schedule_triggered_by(self, mirror, fossil_repo_obj):
        """When called without mirror_id, triggered_by is 'schedule'."""
        from fossil.tasks import run_git_sync

        cli_mock = MagicMock()
        cli_mock.is_available.return_value = True
        cli_mock.git_export.return_value = {"success": True, "message": "ok"}

        with (
            _disk_exists,
            patch("fossil.cli.FossilCLI", return_value=cli_mock),
            patch("constance.config", self._git_config()),
        ):
            run_git_sync()  # no mirror_id

        log = SyncLog.objects.get(mirror=mirror)
        assert log.triggered_by == "schedule"
| 624 | |
| 625 | |
| 626 | # =================================================================== |
| 627 | # fossil/tasks.py -- dispatch_notifications |
| 628 | # =================================================================== |
| 629 | |
| 630 | |
@pytest.mark.django_db
class TestDispatchNotifications:
    """Tests for the dispatch_notifications periodic task."""

    def test_creates_notifications_for_recent_events(self, fossil_repo_obj, sample_project, admin_user):
        """A recent timeline entry yields a Notification for a watching user."""
        from fossil.tasks import dispatch_notifications

        # admin_user watches the project and wants immediate delivery.
        ProjectWatch.objects.create(
            project=sample_project,
            user=admin_user,
            email_enabled=True,
            created_by=admin_user,
        )
        NotificationPreference.objects.create(user=admin_user, delivery_mode="immediate")

        entry = _make_timeline_entry(
            event_type="ci",
            comment="Added new feature",
            user="dev",
        )
        fake_reader = _make_reader_mock(get_timeline=[entry])

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", fake_reader),
            patch("django.core.mail.send_mail"),
            patch("django.template.loader.render_to_string", return_value="<html>test</html>"),
        ):
            dispatch_notifications()

        created = Notification.objects.filter(user=admin_user, project=sample_project).first()
        assert created is not None
        assert "Added new feature" in created.title or "dev" in created.title

    def test_skips_when_no_watched_projects(self, fossil_repo_obj):
        """With no ProjectWatch rows the task finishes without doing anything."""
        from fossil.tasks import dispatch_notifications

        dispatch_notifications()

        assert Notification.objects.count() == 0

    def test_skips_repo_not_on_disk(self, fossil_repo_obj, sample_project, admin_user):
        """Repositories that are missing from disk are skipped entirely."""
        from fossil.tasks import dispatch_notifications

        ProjectWatch.objects.create(
            project=sample_project,
            user=admin_user,
            email_enabled=True,
            created_by=admin_user,
        )

        missing_on_disk = patch(
            "fossil.models.FossilRepository.exists_on_disk",
            new_callable=lambda: property(lambda self: False),
        )
        with missing_on_disk:
            dispatch_notifications()

        assert Notification.objects.count() == 0

    def test_handles_reader_exception(self, fossil_repo_obj, sample_project, admin_user):
        """A reader that blows up is caught per-repo; the task never raises."""
        from fossil.tasks import dispatch_notifications

        ProjectWatch.objects.create(
            project=sample_project,
            user=admin_user,
            email_enabled=True,
            created_by=admin_user,
        )

        exploding_reader = MagicMock(side_effect=Exception("corrupt db"))

        with _disk_exists, patch("fossil.reader.FossilReader", exploding_reader):
            dispatch_notifications()  # must complete without raising
| 714 | |
| 715 | |
| 716 | # =================================================================== |
| 717 | # fossil/tasks.py -- sync_tickets_to_github |
| 718 | # =================================================================== |
| 719 | |
| 720 | |
@pytest.mark.django_db
class TestSyncTicketsToGithub:
    """Test the sync_tickets_to_github task."""

    def test_creates_new_github_issues(self, mirror, fossil_repo_obj):
        """Unsynced tickets create new GitHub issues with mappings."""
        from fossil.tasks import sync_tickets_to_github

        ticket = _make_ticket(uuid="new-ticket-uuid-001")
        detail = _make_ticket(uuid="new-ticket-uuid-001")

        reader_mock = _make_reader_mock(
            get_tickets=[ticket],
            get_ticket_detail=detail,
            get_ticket_comments=[],
        )

        gh_client_mock = MagicMock()
        gh_client_mock.create_issue.return_value = {"number": 42, "url": "https://github.com/test/42", "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        # The created issue number from the API response is persisted.
        mapping = TicketSyncMapping.objects.get(mirror=mirror, fossil_ticket_uuid="new-ticket-uuid-001")
        assert mapping.github_issue_number == 42

        log = SyncLog.objects.get(mirror=mirror, triggered_by="ticket_sync")
        assert log.status == "success"
        assert "1 tickets" in log.message

    def test_updates_existing_github_issue(self, mirror, fossil_repo_obj):
        """Already-synced tickets with changed status update the existing issue."""
        from fossil.tasks import sync_tickets_to_github

        # Pre-existing mapping with old status
        TicketSyncMapping.objects.create(
            mirror=mirror,
            fossil_ticket_uuid="existing-ticket-001",
            github_issue_number=10,
            fossil_status="open",
        )

        ticket = _make_ticket(uuid="existing-ticket-001", status="closed")
        detail = _make_ticket(uuid="existing-ticket-001", status="closed")

        reader_mock = _make_reader_mock(
            get_tickets=[ticket],
            get_ticket_detail=detail,
            get_ticket_comments=[],
        )

        gh_client_mock = MagicMock()
        gh_client_mock.update_issue.return_value = {"success": True, "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        # The stored status tracks the new Fossil-side value.
        mapping = TicketSyncMapping.objects.get(mirror=mirror, fossil_ticket_uuid="existing-ticket-001")
        assert mapping.fossil_status == "closed"

        gh_client_mock.update_issue.assert_called_once()

    def test_skips_already_synced_same_status(self, mirror, fossil_repo_obj):
        """Tickets already synced with the same status are skipped."""
        from fossil.tasks import sync_tickets_to_github

        TicketSyncMapping.objects.create(
            mirror=mirror,
            fossil_ticket_uuid="synced-ticket-001",
            github_issue_number=5,
            fossil_status="open",
        )

        ticket = _make_ticket(uuid="synced-ticket-001", status="open")

        reader_mock = _make_reader_mock(get_tickets=[ticket])

        gh_client_mock = MagicMock()

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        # Neither create nor update called
        gh_client_mock.create_issue.assert_not_called()
        gh_client_mock.update_issue.assert_not_called()

    def test_returns_early_for_deleted_mirror(self):
        """Task exits gracefully when mirror doesn't exist."""
        from fossil.tasks import sync_tickets_to_github

        sync_tickets_to_github(99999)
        assert SyncLog.objects.count() == 0

    def test_returns_early_when_no_auth_token(self, mirror, fossil_repo_obj):
        """Task warns and exits when mirror has no auth_credential."""
        from fossil.tasks import sync_tickets_to_github

        mirror.auth_credential = ""
        mirror.save(update_fields=["auth_credential"])

        with (
            _disk_exists,
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        # A log is not created because we return before SyncLog.objects.create
        assert SyncLog.objects.filter(mirror=mirror, triggered_by="ticket_sync").count() == 0

    def test_returns_early_when_url_not_parseable(self, mirror, fossil_repo_obj):
        """Task exits when git_remote_url can't be parsed to owner/repo."""
        from fossil.tasks import sync_tickets_to_github

        with (
            _disk_exists,
            patch("fossil.github_api.parse_github_repo", return_value=None),
        ):
            sync_tickets_to_github(mirror.pk)

        assert SyncLog.objects.filter(mirror=mirror, triggered_by="ticket_sync").count() == 0

    def test_handles_exception_during_sync(self, mirror, fossil_repo_obj):
        """Unexpected exceptions are caught and logged."""
        from fossil.tasks import sync_tickets_to_github

        # Reader constructor itself raises to exercise the outer handler.
        reader_mock = MagicMock(side_effect=Exception("reader crash"))

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient"),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="ticket_sync")
        assert log.status == "failed"
        assert "Unexpected error" in log.message

    def test_create_issue_error_recorded(self, mirror, fossil_repo_obj):
        """When GitHub create_issue returns an error, it's recorded in the log."""
        from fossil.tasks import sync_tickets_to_github

        ticket = _make_ticket(uuid="fail-create-001")
        detail = _make_ticket(uuid="fail-create-001")

        reader_mock = _make_reader_mock(
            get_tickets=[ticket],
            get_ticket_detail=detail,
            get_ticket_comments=[],
        )

        gh_client_mock = MagicMock()
        gh_client_mock.create_issue.return_value = {"number": 0, "url": "", "error": "HTTP 403: Forbidden"}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_tickets_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="ticket_sync")
        assert log.status == "failed"
        assert "Errors" in log.message
| 902 | |
| 903 | |
| 904 | # =================================================================== |
| 905 | # fossil/tasks.py -- sync_wiki_to_github |
| 906 | # =================================================================== |
| 907 | |
| 908 | |
@pytest.mark.django_db
class TestSyncWikiToGithub:
    """Test the sync_wiki_to_github task."""

    def test_syncs_new_wiki_pages(self, mirror, fossil_repo_obj):
        """New wiki pages are pushed to GitHub and mappings created."""
        from fossil.tasks import sync_wiki_to_github

        # Listing returns name-only entries; the full page is fetched separately.
        page_listing = _make_wiki_page(name="Home", content="")
        full_page = _make_wiki_page(name="Home", content="# Home\nWelcome to the wiki.")

        reader_mock = _make_reader_mock(
            get_wiki_pages=[page_listing],
            get_wiki_page=full_page,
        )

        gh_client_mock = MagicMock()
        gh_client_mock.create_or_update_file.return_value = {"success": True, "sha": "abc123", "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        mapping = WikiSyncMapping.objects.get(mirror=mirror, fossil_page_name="Home")
        assert mapping.github_path == "wiki/Home.md"
        assert mapping.content_hash  # should be a sha256 hex string

        log = SyncLog.objects.get(mirror=mirror, triggered_by="wiki_sync")
        assert log.status == "success"
        assert "1 wiki pages" in log.message

    def test_updates_existing_page_mapping(self, mirror, fossil_repo_obj):
        """Changed content updates the existing mapping hash."""
        from fossil.github_api import content_hash
        from fossil.tasks import sync_wiki_to_github

        old_hash = content_hash("old content")
        WikiSyncMapping.objects.create(
            mirror=mirror,
            fossil_page_name="Changelog",
            content_hash=old_hash,
            github_path="wiki/Changelog.md",
        )

        page_listing = _make_wiki_page(name="Changelog", content="")
        full_page = _make_wiki_page(name="Changelog", content="# Changelog\nv2.0 release")

        reader_mock = _make_reader_mock(
            get_wiki_pages=[page_listing],
            get_wiki_page=full_page,
        )

        gh_client_mock = MagicMock()
        gh_client_mock.create_or_update_file.return_value = {"success": True, "sha": "def456", "error": ""}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        # The mapping's hash now reflects the new page content.
        mapping = WikiSyncMapping.objects.get(mirror=mirror, fossil_page_name="Changelog")
        new_hash = content_hash("# Changelog\nv2.0 release")
        assert mapping.content_hash == new_hash

    def test_skips_unchanged_content(self, mirror, fossil_repo_obj):
        """Pages with unchanged content hash are not re-pushed."""
        from fossil.github_api import content_hash
        from fossil.tasks import sync_wiki_to_github

        content = "# Home\nSame content."
        WikiSyncMapping.objects.create(
            mirror=mirror,
            fossil_page_name="Home",
            content_hash=content_hash(content),
            github_path="wiki/Home.md",
        )

        page_listing = _make_wiki_page(name="Home", content="")
        full_page = _make_wiki_page(name="Home", content=content)

        reader_mock = _make_reader_mock(
            get_wiki_pages=[page_listing],
            get_wiki_page=full_page,
        )

        gh_client_mock = MagicMock()

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        gh_client_mock.create_or_update_file.assert_not_called()

    def test_skips_empty_page_content(self, mirror, fossil_repo_obj):
        """Pages with empty content after stripping are skipped."""
        from fossil.tasks import sync_wiki_to_github

        page_listing = _make_wiki_page(name="Empty", content="")
        # Whitespace-only content should be treated as empty.
        full_page = _make_wiki_page(name="Empty", content="   \n  ")

        reader_mock = _make_reader_mock(
            get_wiki_pages=[page_listing],
            get_wiki_page=full_page,
        )

        gh_client_mock = MagicMock()

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        gh_client_mock.create_or_update_file.assert_not_called()

    def test_returns_early_for_deleted_mirror(self):
        """Task exits for nonexistent mirror."""
        from fossil.tasks import sync_wiki_to_github

        sync_wiki_to_github(99999)
        assert SyncLog.objects.count() == 0

    def test_returns_early_when_no_auth_token(self, mirror, fossil_repo_obj):
        """Task exits when no auth token available."""
        from fossil.tasks import sync_wiki_to_github

        mirror.auth_credential = ""
        mirror.save(update_fields=["auth_credential"])

        with (
            _disk_exists,
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        assert SyncLog.objects.filter(mirror=mirror, triggered_by="wiki_sync").count() == 0

    def test_handles_github_api_error(self, mirror, fossil_repo_obj):
        """GitHub API errors are recorded in the log."""
        from fossil.tasks import sync_wiki_to_github

        page_listing = _make_wiki_page(name="Failing", content="")
        full_page = _make_wiki_page(name="Failing", content="# Oops")

        reader_mock = _make_reader_mock(
            get_wiki_pages=[page_listing],
            get_wiki_page=full_page,
        )

        gh_client_mock = MagicMock()
        gh_client_mock.create_or_update_file.return_value = {"success": False, "sha": "", "error": "HTTP 500"}

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient", return_value=gh_client_mock),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="wiki_sync")
        assert log.status == "failed"
        assert "Errors" in log.message

    def test_handles_exception_during_sync(self, mirror, fossil_repo_obj):
        """Unexpected exceptions are caught and recorded."""
        from fossil.tasks import sync_wiki_to_github

        # Reader constructor itself raises to exercise the outer handler.
        reader_mock = MagicMock(side_effect=Exception("reader crash"))

        with (
            _disk_exists,
            patch("fossil.reader.FossilReader", reader_mock),
            patch("fossil.github_api.GitHubClient"),
            patch("fossil.github_api.parse_github_repo", return_value=("testorg", "testrepo")),
        ):
            sync_wiki_to_github(mirror.pk)

        log = SyncLog.objects.get(mirror=mirror, triggered_by="wiki_sync")
        assert log.status == "failed"
        assert "Unexpected error" in log.message
| 1103 | |
| 1104 | |
| 1105 | # =================================================================== |
| 1106 | # fossil/tasks.py -- dispatch_webhook (additional edge cases) |
| 1107 | # =================================================================== |
| 1108 | |
| 1109 | |
@pytest.mark.django_db
class TestDispatchWebhookEdgeCases:
    """Edge cases for the dispatch_webhook task not covered by test_webhooks.py."""

    def test_unsafe_url_blocked_at_dispatch_time(self, webhook):
        """A URL that fails the outbound-safety check is blocked and recorded."""
        from fossil.tasks import dispatch_webhook

        safety_check = patch(
            "core.url_validation.is_safe_outbound_url",
            return_value=(False, "Private IP detected"),
        )
        with safety_check:
            dispatch_webhook.apply(args=[webhook.pk, "checkin", {"hash": "abc"}])

        record = WebhookDelivery.objects.get(webhook=webhook)
        assert record.success is False
        assert record.response_status == 0
        assert "Blocked" in record.response_body
        assert "Private IP" in record.response_body

    def test_request_exception_creates_delivery_and_retries(self, webhook):
        """A network failure still produces a delivery record (and a retry)."""
        import requests as req

        from fossil.tasks import dispatch_webhook

        with (
            patch("core.url_validation.is_safe_outbound_url", return_value=(True, "")),
            patch("requests.post", side_effect=req.ConnectionError("refused")),
        ):
            dispatch_webhook.apply(args=[webhook.pk, "ticket", {"id": "123"}])

        record = WebhookDelivery.objects.filter(webhook=webhook).first()
        assert record is not None
        assert record.success is False
        assert record.response_status == 0
        assert "refused" in record.response_body
| 1144 | |
| 1145 | |
| 1146 | # =================================================================== |
| 1147 | # accounts/views.py -- _sanitize_ssh_key |
| 1148 | # =================================================================== |
| 1149 | |
| 1150 | |
class TestSanitizeSSHKey:
    """Unit tests for SSH public-key validation (pure function, no DB)."""

    def test_rejects_key_with_newlines(self):
        """A newline smuggling a second key must be rejected."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-ed25519 AAAA key1\nssh-rsa BBBB key2")
        assert cleaned is None
        assert "Newlines" in err

    def test_rejects_key_with_carriage_return(self):
        """Carriage returns are treated the same as newlines."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-ed25519 AAAA key1\rssh-rsa BBBB key2")
        assert cleaned is None
        assert "Newlines" in err

    def test_rejects_key_with_null_byte(self):
        """Embedded NUL bytes are refused outright."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-ed25519 AAAA\x00inject")
        assert cleaned is None
        assert "null bytes" in err

    def test_rejects_empty_key(self):
        """Whitespace-only input counts as empty."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("   ")
        assert cleaned is None
        assert "empty" in err.lower()

    def test_rejects_wrong_part_count(self):
        """A key with only the type field has the wrong format."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-ed25519")
        assert cleaned is None
        assert "format" in err.lower()

    def test_rejects_too_many_parts(self):
        """More than three space-separated fields is malformed."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-ed25519 AAAA comment extra-part")
        assert cleaned is None
        assert "format" in err.lower()

    def test_rejects_unsupported_key_type(self):
        """Unknown key-type identifiers are refused."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-unknown AAAA comment")
        assert cleaned is None
        assert "Unsupported" in err

    def test_rejects_bad_base64(self):
        """Key material that is not valid base64 is refused."""
        from accounts.views import _sanitize_ssh_key

        cleaned, err = _sanitize_ssh_key("ssh-ed25519 !!!invalid comment")
        assert cleaned is None
        assert "encoding" in err.lower()

    def test_accepts_valid_ed25519_key(self):
        """A well-formed ed25519 key passes through unchanged."""
        from accounts.views import _sanitize_ssh_key

        raw_key = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFakeKeyDataHere= user@host"
        cleaned, err = _sanitize_ssh_key(raw_key)
        assert cleaned == raw_key
        assert err == ""

    def test_accepts_valid_rsa_key(self):
        """A well-formed RSA key passes through unchanged."""
        from accounts.views import _sanitize_ssh_key

        raw_key = "ssh-rsa AAAAB3NzaC1yc2EAAAAFakeBase64Data== user@host"
        cleaned, err = _sanitize_ssh_key(raw_key)
        assert cleaned == raw_key
        assert err == ""

    def test_accepts_ecdsa_key(self):
        """A well-formed ECDSA key passes through unchanged."""
        from accounts.views import _sanitize_ssh_key

        raw_key = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTY= user@host"
        cleaned, err = _sanitize_ssh_key(raw_key)
        assert cleaned == raw_key
        assert err == ""

    def test_strips_whitespace(self):
        """Surrounding whitespace is trimmed from an otherwise-valid key."""
        from accounts.views import _sanitize_ssh_key

        raw_key = "  ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFake=  "
        cleaned, err = _sanitize_ssh_key(raw_key)
        assert cleaned is not None
        assert cleaned == raw_key.strip()
| 1241 | |
| 1242 | |
| 1243 | # =================================================================== |
| 1244 | # accounts/views.py -- _verify_turnstile |
| 1245 | # =================================================================== |
| 1246 | |
| 1247 | |
| 1248 | class TestVerifyTurnstile: |
| 1249 | """Unit tests for Turnstile CAPTCHA verification.""" |
| 1250 | |
| 1251 | @staticmethod |
| 1252 | def _turnstile_config(secret_key=""): |
| 1253 | cfg = MagicMock() |
| 1254 | cfg.TURNSTILE_SECRET_KEY = secret_key |
| 1255 | return cfg |
| 1256 | |
| 1257 | def test_returns_false_when_no_secret_key(self): |
| 1258 | from accounts.views import _verify_turnstile |
| 1259 | |
| 1260 | with patch("constance.config", self._turnstile_config(secret_key="")): |
| 1261 | assert _verify_turnstile("some-token", "1.2.3.4") is False |
| 1262 | |
| 1263 | def test_returns_true_on_success(self): |
| 1264 | from accounts.views import _verify_turnstile |
| 1265 | |
| 1266 | mock_resp = MagicMock() |
| 1267 | mock_resp.status_code = 200 |
| 1268 | mock_resp.json.return_value = {"success": True} |
| 1269 | |
| 1270 | with ( |
| 1271 | patch("constance.config", self._turnstile_config(secret_key="secret-key")), |
| 1272 | patch("requests.post", return_value=mock_resp), |
| 1273 | ): |
| 1274 | assert _verify_turnstile("valid-token", "1.2.3.4") is True |
| 1275 | |
| 1276 | def test_returns_false_on_failed_verification(self): |
| 1277 | from accounts.views import _verify_turnstile |
| 1278 | |
| 1279 | mock_resp = MagicMock() |
| 1280 | mock_resp.status_code = 200 |
| 1281 | mock_resp.json.return_value = {"success": False} |
| 1282 | |
| 1283 | with ( |
| 1284 | patch("constance.config", self._turnstile_config(secret_key="secret-key")), |
| 1285 | patch("requests.post", return_value=mock_resp), |
| 1286 | ): |
| 1287 | assert _verify_turnstile("bad-token", "1.2.3.4") is False |
| 1288 | |
| 1289 | def test_returns_false_on_network_error(self): |
| 1290 | from accounts.views import _verify_turnstile |
| 1291 | |
| 1292 | with ( |
| 1293 | patch("constance.config", self._turnstile_config(secret_key="secret-key")), |
| 1294 | patch("requests.post", side_effect=Exception("connection refused")), |
| 1295 | ): |
| 1296 | assert _verify_turnstile("token", "1.2.3.4") is False |
| 1297 | |
| 1298 | |
| 1299 | # =================================================================== |
| 1300 | # accounts/views.py -- Login Turnstile flow |
| 1301 | # =================================================================== |
| 1302 | |
| 1303 | |
| 1304 | def _login_turnstile_config(): |
| 1305 | cfg = MagicMock() |
| 1306 | cfg.TURNSTILE_ENABLED = True |
| 1307 | cfg.TURNSTILE_SITE_KEY = "site-key-123" |
| 1308 | cfg.TURNSTILE_SECRET_KEY = "secret-key" |
| 1309 | return cfg |
| 1310 | |
| 1311 | |
@pytest.mark.django_db
class TestLoginTurnstile:
    """Test login view with Turnstile CAPTCHA enabled."""

    def test_turnstile_error_rerenders_form(self, client, admin_user):
        """When Turnstile fails, the login form is re-rendered with error."""
        payload = {
            "username": "admin",
            "password": "testpass123",
            "cf-turnstile-response": "bad-token",
        }
        with (
            patch("constance.config", _login_turnstile_config()),
            patch("accounts.views._verify_turnstile", return_value=False),
        ):
            resp = client.post("/auth/login/", payload)

        assert resp.status_code == 200
        assert b"login" in resp.content.lower()

    def test_turnstile_context_passed_to_template(self, client):
        """When Turnstile is enabled, context includes turnstile_enabled and site_key."""
        with patch("constance.config", _login_turnstile_config()):
            resp = client.get("/auth/login/")

        assert resp.status_code == 200
        assert resp.context["turnstile_enabled"] is True
        assert resp.context["turnstile_site_key"] == "site-key-123"
| 1338 | |
| 1339 | |
| 1340 | # =================================================================== |
| 1341 | # accounts/views.py -- SSH key management |
| 1342 | # =================================================================== |
| 1343 | |
| 1344 | |
@pytest.mark.django_db
class TestSSHKeyViews:
    """Test SSH key list, add, and delete views."""

    def test_list_ssh_keys(self, admin_client, admin_user):
        """The key list page renders for a logged-in user."""
        assert admin_client.get("/auth/ssh-keys/").status_code == 200

    def test_add_valid_ssh_key(self, admin_client, admin_user):
        """Adding a valid SSH key creates the record and regenerates authorized_keys."""
        from fossil.user_keys import UserSSHKey

        form = {
            "title": "Work Laptop",
            "public_key": "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFakeKeyDataHere= user@host",
        }
        with patch("accounts.views._regenerate_authorized_keys"):
            resp = admin_client.post("/auth/ssh-keys/", form)

        assert resp.status_code == 302  # redirect after success
        created = UserSSHKey.objects.get(user=admin_user, title="Work Laptop")
        assert created.key_type == "ed25519"
        assert created.fingerprint  # SHA256 computed

    def test_add_invalid_ssh_key_shows_error(self, admin_client, admin_user):
        """Adding an invalid SSH key shows an error message."""
        resp = admin_client.post(
            "/auth/ssh-keys/",
            {"title": "Bad Key", "public_key": "not-a-real-key"},
        )
        assert resp.status_code == 200  # re-renders form

    def test_add_ssh_key_with_injection_newline(self, admin_client, admin_user):
        """Keys with newlines are rejected (injection prevention)."""
        from fossil.user_keys import UserSSHKey

        resp = admin_client.post(
            "/auth/ssh-keys/",
            {
                "title": "Injected Key",
                "public_key": "ssh-ed25519 AAAA key1\nssh-rsa BBBB key2",
            },
        )
        assert resp.status_code == 200
        assert UserSSHKey.objects.filter(user=admin_user).count() == 0

    def test_delete_ssh_key(self, admin_client, admin_user):
        """Deleting an SSH key soft-deletes it and regenerates authorized_keys."""
        from fossil.user_keys import UserSSHKey

        ssh_key = UserSSHKey.objects.create(
            user=admin_user,
            title="Delete Me",
            public_key="ssh-ed25519 AAAA= test",
            created_by=admin_user,
        )
        with patch("accounts.views._regenerate_authorized_keys"):
            resp = admin_client.post(f"/auth/ssh-keys/{ssh_key.pk}/delete/")

        assert resp.status_code == 302
        ssh_key.refresh_from_db()
        assert ssh_key.deleted_at is not None  # soft delete, row kept

    def test_delete_ssh_key_htmx(self, admin_client, admin_user):
        """HTMX delete returns HX-Redirect header."""
        from fossil.user_keys import UserSSHKey

        ssh_key = UserSSHKey.objects.create(
            user=admin_user,
            title="HX Delete",
            public_key="ssh-ed25519 AAAA= test",
            created_by=admin_user,
        )
        with patch("accounts.views._regenerate_authorized_keys"):
            resp = admin_client.post(
                f"/auth/ssh-keys/{ssh_key.pk}/delete/",
                HTTP_HX_REQUEST="true",
            )

        assert resp.status_code == 200
        assert resp["HX-Redirect"] == "/auth/ssh-keys/"

    def test_delete_other_users_key_404(self, admin_client, viewer_user, admin_user):
        """Cannot delete another user's SSH key."""
        from fossil.user_keys import UserSSHKey

        foreign_key = UserSSHKey.objects.create(
            user=viewer_user,
            title="Viewer Key",
            public_key="ssh-ed25519 AAAA= test",
            created_by=viewer_user,
        )
        resp = admin_client.post(f"/auth/ssh-keys/{foreign_key.pk}/delete/")
        assert resp.status_code == 404

    def test_ssh_keys_require_login(self, client):
        """Anonymous users are redirected to the login page."""
        resp = client.get("/auth/ssh-keys/")
        assert resp.status_code == 302
        assert "/auth/login/" in resp.url
| 1454 | |
| 1455 | |
| 1456 | # =================================================================== |
| 1457 | # accounts/views.py -- Notification preferences HTMX |
| 1458 | # =================================================================== |
| 1459 | |
| 1460 | |
@pytest.mark.django_db
class TestNotificationPreferencesHTMX:
    """Test the HTMX return path for notification preferences."""

    def test_post_htmx_returns_hx_redirect(self, admin_client, admin_user):
        """HTMX POST returns 200 with HX-Redirect header instead of 302."""
        NotificationPreference.objects.create(user=admin_user)

        resp = admin_client.post(
            "/auth/notifications/",
            {"delivery_mode": "weekly"},
            HTTP_HX_REQUEST="true",
        )

        assert resp.status_code == 200
        assert resp["HX-Redirect"] == "/auth/notifications/"
| 1477 | |
| 1478 | |
| 1479 | # =================================================================== |
| 1480 | # accounts/views.py -- _parse_key_type and _compute_fingerprint |
| 1481 | # =================================================================== |
| 1482 | |
| 1483 | |
class TestParseKeyType:
    """Unit tests for SSH key type parsing helper."""

    def test_ed25519(self):
        from accounts.views import _parse_key_type as parse

        assert parse("ssh-ed25519 AAAA") == "ed25519"

    def test_rsa(self):
        from accounts.views import _parse_key_type as parse

        assert parse("ssh-rsa AAAA") == "rsa"

    def test_ecdsa_256(self):
        from accounts.views import _parse_key_type as parse

        assert parse("ecdsa-sha2-nistp256 AAAA") == "ecdsa"

    def test_ecdsa_384(self):
        from accounts.views import _parse_key_type as parse

        assert parse("ecdsa-sha2-nistp384 AAAA") == "ecdsa"

    def test_dsa(self):
        from accounts.views import _parse_key_type as parse

        assert parse("ssh-dss AAAA") == "dsa"

    def test_unknown_type(self):
        """Unrecognised prefixes are passed through as-is."""
        from accounts.views import _parse_key_type as parse

        assert parse("custom-type AAAA") == "custom-type"

    def test_empty_string(self):
        from accounts.views import _parse_key_type as parse

        assert parse("") == ""
| 1521 | |
| 1522 | |
class TestComputeFingerprint:
    """Unit tests for SSH key fingerprint computation."""

    def test_computes_sha256_fingerprint(self):
        """A key with valid base64 data yields a SHA256-prefixed fingerprint."""
        from accounts.views import _compute_fingerprint

        fingerprint = _compute_fingerprint(
            "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFakeKeyDataHere= user@host"
        )
        assert fingerprint.startswith("SHA256:")

    def test_invalid_base64_returns_empty(self):
        """Undecodable key data yields an empty fingerprint."""
        from accounts.views import _compute_fingerprint

        assert _compute_fingerprint("ssh-ed25519 !!!notbase64 user@host") == ""

    def test_single_part_returns_empty(self):
        """Input without a key-data part yields an empty fingerprint."""
        from accounts.views import _compute_fingerprint

        assert _compute_fingerprint("onlyonepart") == ""
| 1546 | |
| 1547 | |
| 1548 | # =================================================================== |
| 1549 | # accounts/views.py -- profile_token_create scopes edge cases |
| 1550 | # =================================================================== |
| 1551 | |
| 1552 | |
@pytest.mark.django_db
class TestProfileTokenCreateEdgeCases:
    """Additional edge cases for token creation."""

    def test_create_admin_scope_token(self, admin_client, admin_user):
        """Admin scope is a valid scope."""
        from accounts.models import PersonalAccessToken

        resp = admin_client.post(
            "/auth/profile/tokens/create/",
            {"name": "Admin Token", "scopes": "read,write,admin"},
        )
        assert resp.status_code == 200
        token = PersonalAccessToken.objects.get(user=admin_user, name="Admin Token")
        for scope in ("admin", "read", "write"):
            assert scope in token.scopes

    def test_create_token_mixed_valid_invalid_scopes(self, admin_client, admin_user):
        """Invalid scopes are filtered out, valid ones kept."""
        from accounts.models import PersonalAccessToken

        admin_client.post(
            "/auth/profile/tokens/create/",
            {"name": "Mixed Scopes", "scopes": "read,destroy,write,hack"},
        )
        token = PersonalAccessToken.objects.get(user=admin_user, name="Mixed Scopes")
        assert token.scopes == "read,write"

    def test_create_token_whitespace_scopes(self, admin_client, admin_user):
        """Scopes with extra whitespace are handled correctly."""
        from accounts.models import PersonalAccessToken

        admin_client.post(
            "/auth/profile/tokens/create/",
            {"name": "Whitespace", "scopes": " read , write "},
        )
        token = PersonalAccessToken.objects.get(user=admin_user, name="Whitespace")
        assert token.scopes == "read,write"
+1499
| --- a/tests/test_views_coverage.py | ||
| +++ b/tests/test_views_coverage.py | ||
| @@ -0,0 +1,1499 @@ | ||
| 1 | +"""Tests for fossil/views.py -- covering uncovered view functions and helpers. | |
| 2 | + | |
| 3 | +Focuses on views that can be tested by mocking FossilReader (so no real | |
| 4 | +.fossil file is needed) and pure Django CRUD views that don't touch Fossil. | |
| 5 | +""" | |
| 6 | + | |
| 7 | +from datetime import UTC, datetime | |
| 8 | +from types import SimpleNamespace | |
| 9 | +from unittest.mock import MagicMock, patch | |
| 10 | + | |
| 11 | +import pytest | |
| 12 | +from django.contrib.auth.models import User | |
| 13 | +from django.test import Client | |
| 14 | + | |
| 15 | +from fossil.models import FossilRepository | |
| 16 | +from fossil.reader import ( | |
| 17 | + CheckinDetail, | |
| 18 | + FileEntry, | |
| 19 | + RepoMetadata, | |
| 20 | + TicketEntry, | |
| 21 | + TimelineEntry, | |
| 22 | + WikiPage, | |
| 23 | +) | |
| 24 | +from organization.models import Team | |
| 25 | +from projects.models import ProjectTeam | |
| 26 | + | |
| 27 | +# --------------------------------------------------------------------------- | |
| 28 | +# Shared fixtures | |
| 29 | +# --------------------------------------------------------------------------- | |
| 30 | + | |
| 31 | + | |
@pytest.fixture
def fossil_repo_obj(sample_project):
    """The live (non-soft-deleted) FossilRepository attached to sample_project."""
    active = FossilRepository.objects.filter(deleted_at__isnull=True)
    return active.get(project=sample_project)
| 35 | + | |
| 36 | + | |
@pytest.fixture
def writer_user(db, admin_user, sample_project):
    """A user granted write access to sample_project via a dedicated team."""
    user = User.objects.create_user(username="writer_vc", password="testpass123")
    vc_team = Team.objects.create(
        name="VC Writers",
        organization=sample_project.organization,
        created_by=admin_user,
    )
    vc_team.members.add(user)
    ProjectTeam.objects.create(
        project=sample_project, team=vc_team, role="write", created_by=admin_user
    )
    return user
| 44 | + | |
| 45 | + | |
@pytest.fixture
def writer_client(writer_user):
    """Django test client already logged in as the writer user."""
    client = Client()
    client.login(username="writer_vc", password="testpass123")
    return client
| 51 | + | |
| 52 | + | |
| 53 | +def _url(slug, path): | |
| 54 | + return f"/projects/{slug}/fossil/{path}" | |
| 55 | + | |
| 56 | + | |
| 57 | +def _mock_reader_ctx(mock_cls, **attrs): | |
| 58 | + """Configure a patched FossilReader class to work as a context manager | |
| 59 | + and attach return values from **attrs to the instance.""" | |
| 60 | + instance = mock_cls.return_value | |
| 61 | + instance.__enter__ = MagicMock(return_value=instance) | |
| 62 | + instance.__exit__ = MagicMock(return_value=False) | |
| 63 | + for key, val in attrs.items(): | |
| 64 | + setattr(instance, key, MagicMock(return_value=val)) | |
| 65 | + return instance | |
| 66 | + | |
| 67 | + | |
def _make_timeline_entry(**overrides):
    """Build a TimelineEntry with sensible defaults; any field may be overridden."""
    fields = {
        "rid": 1,
        "uuid": "abc123def456",
        "event_type": "ci",
        "timestamp": datetime(2026, 3, 1, 12, 0, 0, tzinfo=UTC),
        "user": "testuser",
        "comment": "initial commit",
        "branch": "trunk",
        "parent_rid": 0,
        "is_merge": False,
        "merge_parent_rids": [],
        "rail": 0,
    }
    return TimelineEntry(**{**fields, **overrides})
| 84 | + | |
| 85 | + | |
def _make_file_entry(**overrides):
    """Build a FileEntry with sensible defaults; any field may be overridden."""
    fields = {
        "name": "README.md",
        "uuid": "file-uuid-1",
        "size": 512,
        "is_dir": False,
        "last_commit_message": "initial commit",
        "last_commit_user": "testuser",
        "last_commit_time": datetime(2026, 3, 1, 12, 0, 0, tzinfo=UTC),
    }
    return FileEntry(**{**fields, **overrides})
| 98 | + | |
| 99 | + | |
| 100 | +# --------------------------------------------------------------------------- | |
| 101 | +# Content rendering helpers (_render_fossil_content, _is_markdown, _rewrite_fossil_links) | |
| 102 | +# --------------------------------------------------------------------------- | |
| 103 | + | |
| 104 | + | |
class TestRenderFossilContent:
    """Test the content rendering pipeline that converts Fossil wiki/markdown to HTML."""

    def test_empty_content(self):
        from fossil.views import _render_fossil_content as render

        assert render("") == ""

    def test_markdown_heading(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("# Hello World")
        assert "<h1" in rendered
        assert "Hello World" in rendered

    def test_markdown_fenced_code(self):
        from fossil.views import _render_fossil_content as render

        assert "print" in render("```python\nprint('hello')\n```")

    def test_fossil_wiki_link_converted(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("[/info/abc123 | View Checkin]", project_slug="my-project")
        assert "/projects/my-project/fossil/checkin/abc123/" in rendered

    def test_fossil_wiki_verbatim_block(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<h1>Title</h1>\n<verbatim>code here</verbatim>")
        assert "<pre><code>code here</code></pre>" in rendered

    def test_fossil_wiki_list_bullets(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>List:</p>\n* Item one\n* Item two")
        assert "<ul>" in rendered
        assert "<li>" in rendered
        assert "Item one" in rendered

    def test_fossil_wiki_ordered_list(self):
        from fossil.views import _render_fossil_content as render

        # Must start with an HTML element so _is_markdown returns False
        rendered = render("<p>Steps:</p>\n1. Step one\n2. Step two")
        assert "<ol>" in rendered
        assert "Step one" in rendered

    def test_fossil_wiki_nowiki_block(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>Before</p>\n<nowiki><b>Bold</b></nowiki>")
        assert "<b>Bold</b>" in rendered

    def test_fossil_interwiki_link(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>See [wikipedia:Fossil_(software)]</p>")
        assert "en.wikipedia.org/wiki/Fossil_(software)" in rendered

    def test_fossil_anchor_link(self):
        from fossil.views import _render_fossil_content as render

        assert 'href="#section1"' in render("<p>Jump to [#section1]</p>")

    def test_fossil_bare_wiki_link(self):
        from fossil.views import _render_fossil_content as render

        assert 'href="PageName"' in render("<p>See [PageName]</p>")

    def test_markdown_fossil_link_resolved(self):
        from fossil.views import _render_fossil_content as render

        rendered = render(
            "# Page\n\n[./file.wiki | Link Text]", project_slug="proj", base_path="www/"
        )
        assert "Link Text" in rendered
| 193 | + | |
| 194 | + | |
class TestIsMarkdown:
    """Heuristic detection of markdown vs. Fossil-wiki/HTML content."""

    def test_heading_detected(self):
        from fossil.views import _is_markdown as is_md

        assert is_md("# Title\nSome text") is True

    def test_fenced_code_detected(self):
        from fossil.views import _is_markdown as is_md

        assert is_md("Some text\n```\ncode\n```") is True

    def test_html_start_not_markdown(self):
        from fossil.views import _is_markdown as is_md

        assert is_md("<h1>Title</h1>\n<p>Paragraph</p>") is False

    def test_multiple_markdown_headings(self):
        from fossil.views import _is_markdown as is_md

        assert is_md("Some text\n## Heading\n## Another") is True

    def test_plain_text_is_markdown(self):
        from fossil.views import _is_markdown as is_md

        # Plain text without HTML tags defaults to markdown
        assert is_md("Just plain text") is True
| 222 | + | |
| 223 | + | |
class TestRewriteFossilLinks:
    """URL rewriting of Fossil-internal hrefs to project-scoped routes."""

    def test_info_hash_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/info/abc123">link</a>', "myproj")
        assert "/projects/myproj/fossil/checkin/abc123/" in out

    def test_doc_trunk_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/doc/trunk/www/readme.wiki">docs</a>', "myproj")
        assert "/projects/myproj/fossil/code/file/www/readme.wiki" in out

    def test_wiki_path_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/wiki/HomePage">home</a>', "myproj")
        assert "/projects/myproj/fossil/wiki/page/HomePage" in out

    def test_wiki_query_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/wiki?name=HomePage">home</a>', "myproj")
        assert "/projects/myproj/fossil/wiki/page/HomePage" in out

    def test_tktview_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/tktview/abc123">ticket</a>', "myproj")
        assert "/projects/myproj/fossil/tickets/abc123/" in out

    def test_vdiff_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/vdiff?from=aaa&amp;to=bbb">diff</a>', "myproj")
        assert "/projects/myproj/fossil/compare/?from=aaa&amp;to=bbb" in out

    def test_timeline_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/timeline?n=20">tl</a>', "myproj")
        assert "/projects/myproj/fossil/timeline/" in out

    def test_forumpost_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/forumpost/abc123">post</a>', "myproj")
        assert "/projects/myproj/fossil/forum/abc123/" in out

    def test_forum_base_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/forum">forum</a>', "myproj")
        assert "/projects/myproj/fossil/forum/" in out

    def test_www_path_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/www/index.html">page</a>', "myproj")
        assert "/projects/myproj/fossil/docs/www/index.html" in out

    def test_dir_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/dir">browse</a>', "myproj")
        assert "/projects/myproj/fossil/code/" in out

    def test_help_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/help/clone">help</a>', "myproj")
        assert "/projects/myproj/fossil/docs/www/help.wiki" in out

    def test_external_link_preserved(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="https://example.com/page">ext</a>', "myproj")
        assert "https://example.com/page" in out

    def test_empty_slug_passthrough(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        markup = '<a href="/info/abc">link</a>'
        assert rewrite(markup, "") == markup

    def test_scheme_link_info(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="info:abc123">checkin</a>', "myproj")
        assert "/projects/myproj/fossil/checkin/abc123/" in out

    def test_scheme_link_wiki(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="wiki:PageName">page</a>', "myproj")
        assert "/projects/myproj/fossil/wiki/page/PageName" in out

    def test_builtin_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/builtin/default.css">skin</a>', "myproj")
        assert "/projects/myproj/fossil/code/file/skins/default.css" in out

    def test_setup_link_not_rewritten(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/setup_skin">settings</a>', "myproj")
        assert "/setup_skin" in out

    def test_wiki_file_extension_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="/concepts.wiki">page</a>', "myproj")
        assert "/projects/myproj/fossil/docs/www/concepts.wiki" in out

    def test_external_fossil_scm_rewrite(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="https://fossil-scm.org/home/info/abc123">ext</a>', "myproj")
        assert "/projects/myproj/fossil/checkin/abc123/" in out

    def test_scheme_link_forum(self):
        from fossil.views import _rewrite_fossil_links as rewrite

        out = rewrite('<a href="forum:/forumpost/abc123">post</a>', "myproj")
        assert "/projects/myproj/fossil/forum/abc123/" in out
| 370 | + | |
| 371 | + | |
| 372 | +# --------------------------------------------------------------------------- | |
| 373 | +# Split diff helper | |
| 374 | +# --------------------------------------------------------------------------- | |
| 375 | + | |
| 376 | + | |
class TestComputeSplitLines:
    """Checks for the helper that pairs unified-diff rows into two columns."""

    @staticmethod
    def _split(rows):
        # Lazy import, matching the module's convention for view helpers.
        from fossil.views import _compute_split_lines

        return _compute_split_lines(rows)

    def test_context_lines_both_sides(self):
        rows = [{"text": " same", "type": "context", "old_num": 1, "new_num": 1}]
        left, right = self._split(rows)
        assert len(left) == 1
        assert left[0]["type"] == "context"
        assert right[0]["type"] == "context"

    def test_del_add_paired(self):
        rows = [
            {"text": "-old", "type": "del", "old_num": 1, "new_num": ""},
            {"text": "+new", "type": "add", "old_num": "", "new_num": 1},
        ]
        left, right = self._split(rows)
        assert left[0]["type"] == "del"
        assert right[0]["type"] == "add"

    def test_orphan_add(self):
        rows = [{"text": "+added", "type": "add", "old_num": "", "new_num": 1}]
        left, right = self._split(rows)
        assert left[0]["type"] == "empty"
        assert right[0]["type"] == "add"

    def test_header_hunk_both_sides(self):
        rows = [
            {"text": "--- a/f", "type": "header", "old_num": "", "new_num": ""},
            {"text": "@@ -1 +1 @@", "type": "hunk", "old_num": "", "new_num": ""},
        ]
        left, right = self._split(rows)
        assert len(left) == 2
        assert left[0]["type"] == "header"
        assert left[1]["type"] == "hunk"

    def test_uneven_del_add_padded(self):
        """More deletions than additions: the right column is padded with empties."""
        rows = [
            {"text": "-line1", "type": "del", "old_num": 1, "new_num": ""},
            {"text": "-line2", "type": "del", "old_num": 2, "new_num": ""},
            {"text": "+new1", "type": "add", "old_num": "", "new_num": 1},
        ]
        left, right = self._split(rows)
        assert len(left) == 2
        assert left[0]["type"] == "del"
        assert left[1]["type"] == "del"
        assert right[0]["type"] == "add"
        assert right[1]["type"] == "empty"
| 433 | + | |
| 434 | + | |
| 435 | +# --------------------------------------------------------------------------- | |
| 436 | +# Timeline view (mocked FossilReader) | |
| 437 | +# --------------------------------------------------------------------------- | |
| 438 | + | |
| 439 | + | |
@pytest.mark.django_db
class TestTimelineViewMocked:
    """Timeline view, with FossilReader mocked via _mock_reader_ctx."""

    @staticmethod
    def _get(client, project, entries, path="timeline/", **extra):
        # Patch the reader class and the repo/reader lookup helper, then
        # issue the GET while both patches are still active.
        with patch("fossil.views.FossilReader") as reader_cls:
            _mock_reader_ctx(reader_cls, get_timeline=entries)
            with patch("fossil.views._get_repo_and_reader") as grr:
                repo = FossilRepository.objects.get(project=project)
                grr.return_value = (project, repo, reader_cls.return_value)
                return client.get(_url(project.slug, path), **extra)

    def test_timeline_renders(self, admin_client, sample_project):
        response = self._get(admin_client, sample_project, [_make_timeline_entry(rid=1)])
        assert response.status_code == 200
        assert "initial commit" in response.content.decode()

    def test_timeline_with_type_filter(self, admin_client, sample_project):
        entries = [_make_timeline_entry(rid=1, event_type="w", comment="wiki edit")]
        response = self._get(admin_client, sample_project, entries, path="timeline/?type=w")
        assert response.status_code == 200

    def test_timeline_htmx_partial(self, admin_client, sample_project):
        response = self._get(
            admin_client, sample_project, [_make_timeline_entry(rid=1)], HTTP_HX_REQUEST="true"
        )
        assert response.status_code == 200

    def test_timeline_denied_no_perm(self, no_perm_client, sample_project):
        response = no_perm_client.get(_url(sample_project.slug, "timeline/"))
        assert response.status_code == 403
| 479 | + | |
| 480 | + | |
| 481 | +# --------------------------------------------------------------------------- | |
| 482 | +# Ticket list/detail (mocked) | |
| 483 | +# --------------------------------------------------------------------------- | |
| 484 | + | |
| 485 | + | |
@pytest.mark.django_db
class TestTicketViewsMocked:
    """Ticket list/detail views with the repository reader mocked."""

    @staticmethod
    def _reader(**returns):
        # Context-manager-shaped MagicMock whose named methods return canned values.
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        for method, value in returns.items():
            getattr(rdr, method).return_value = value
        return rdr

    @staticmethod
    def _ticket(**overrides):
        # TicketEntry with the defaults these tests share; override per test.
        fields = {
            "uuid": "t1",
            "title": "A ticket",
            "status": "Open",
            "type": "Code_Defect",
            "created": datetime(2026, 3, 1, tzinfo=UTC),
            "owner": "u",
        }
        fields.update(overrides)
        return TicketEntry(**fields)

    def _get(self, client, project, path, rdr, **extra):
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, path), **extra)

    def test_ticket_list_renders(self, admin_client, sample_project):
        tickets = [self._ticket(uuid="tkt-uuid-1", title="Bug report", owner="testuser")]
        response = self._get(
            admin_client, sample_project, "tickets/", self._reader(get_tickets=tickets)
        )
        assert response.status_code == 200
        assert "Bug report" in response.content.decode()

    def test_ticket_list_search_filter(self, admin_client, sample_project):
        tickets = [
            self._ticket(uuid="t1", title="Login bug"),
            self._ticket(uuid="t2", title="Dashboard fix"),
        ]
        response = self._get(
            admin_client, sample_project, "tickets/?search=login", self._reader(get_tickets=tickets)
        )
        assert response.status_code == 200
        content = response.content.decode()
        assert "Login bug" in content
        # The non-matching ticket must be filtered out of the listing.
        assert "Dashboard fix" not in content

    def test_ticket_list_htmx_partial(self, admin_client, sample_project):
        response = self._get(
            admin_client,
            sample_project,
            "tickets/",
            self._reader(get_tickets=[self._ticket()]),
            HTTP_HX_REQUEST="true",
        )
        assert response.status_code == 200

    def test_ticket_detail_renders(self, admin_client, sample_project):
        ticket = self._ticket(
            uuid="tkt-detail-1", title="Detail test", owner="testuser", body="Some description **bold**"
        )
        comments = [{"user": "dev", "timestamp": datetime(2026, 3, 2, tzinfo=UTC), "comment": "Working on it"}]
        rdr = self._reader(get_ticket_detail=ticket, get_ticket_comments=comments)
        response = self._get(admin_client, sample_project, "tickets/tkt-detail-1/", rdr)
        assert response.status_code == 200
        assert "Detail test" in response.content.decode()

    def test_ticket_detail_not_found(self, admin_client, sample_project):
        rdr = self._reader(get_ticket_detail=None, get_ticket_comments=[])
        response = self._get(admin_client, sample_project, "tickets/nonexistent/", rdr)
        assert response.status_code == 404
| 590 | + | |
| 591 | + | |
| 592 | +# --------------------------------------------------------------------------- | |
| 593 | +# Wiki list/page (mocked) | |
| 594 | +# --------------------------------------------------------------------------- | |
| 595 | + | |
| 596 | + | |
@pytest.mark.django_db
class TestWikiViewsMocked:
    """Wiki list/page views with the repository reader mocked."""

    @staticmethod
    def _pages():
        # The two pages shared by the list tests.
        return [
            WikiPage(name="Home", content="# Home", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="admin"),
            WikiPage(name="Setup", content="Setup guide", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="dev"),
        ]

    @staticmethod
    def _get(client, project, path, *, pages, page):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_wiki_pages.return_value = pages
        rdr.get_wiki_page.return_value = page
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, path))

    def test_wiki_list_renders(self, admin_client, sample_project):
        pages = self._pages()
        response = self._get(admin_client, sample_project, "wiki/", pages=pages, page=pages[0])
        assert response.status_code == 200
        content = response.content.decode()
        assert "Home" in content
        assert "Setup" in content

    def test_wiki_list_search(self, admin_client, sample_project):
        response = self._get(
            admin_client, sample_project, "wiki/?search=setup", pages=self._pages(), page=None
        )
        assert response.status_code == 200

    def test_wiki_page_renders(self, admin_client, sample_project):
        page = WikiPage(
            name="Home",
            content="# Welcome\nHello world",
            last_modified=datetime(2026, 3, 1, tzinfo=UTC),
            user="admin",
        )
        response = self._get(admin_client, sample_project, "wiki/page/Home", pages=[page], page=page)
        assert response.status_code == 200
        assert "Welcome" in response.content.decode()

    def test_wiki_page_not_found(self, admin_client, sample_project):
        response = self._get(
            admin_client, sample_project, "wiki/page/NonexistentPage", pages=[], page=None
        )
        assert response.status_code == 404
| 664 | + | |
| 665 | + | |
| 666 | +# --------------------------------------------------------------------------- | |
| 667 | +# Search view (mocked) | |
| 668 | +# --------------------------------------------------------------------------- | |
| 669 | + | |
| 670 | + | |
@pytest.mark.django_db
class TestSearchViewMocked:
    """Repository search view with the reader mocked."""

    @staticmethod
    def _reader():
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        return rdr

    @staticmethod
    def _get(client, project, path, rdr):
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, path))

    def test_search_with_query(self, admin_client, sample_project):
        rdr = self._reader()
        rdr.search.return_value = [{"type": "ci", "uuid": "abc", "comment": "found it", "user": "dev"}]
        response = self._get(admin_client, sample_project, "search/?q=found", rdr)
        assert response.status_code == 200

    def test_search_empty_query(self, admin_client, sample_project):
        response = self._get(admin_client, sample_project, "search/", self._reader())
        assert response.status_code == 200
| 696 | + | |
| 697 | + | |
| 698 | +# --------------------------------------------------------------------------- | |
| 699 | +# Compare checkins view (mocked) | |
| 700 | +# --------------------------------------------------------------------------- | |
| 701 | + | |
| 702 | + | |
@pytest.mark.django_db
class TestCompareCheckinsViewMocked:
    """Compare-checkins view with the reader mocked."""

    @staticmethod
    def _reader():
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        return rdr

    @staticmethod
    def _get(client, project, path, rdr):
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, path))

    def test_compare_no_params(self, admin_client, sample_project):
        """Compare page renders without from/to params (shows empty form)."""
        response = self._get(admin_client, sample_project, "compare/", self._reader())
        assert response.status_code == 200

    def test_compare_with_params(self, admin_client, sample_project):
        """Compare page with from/to parameters renders diffs."""
        from_detail = CheckinDetail(
            uuid="aaa111",
            timestamp=datetime(2026, 3, 1, tzinfo=UTC),
            user="dev",
            comment="from commit",
            files_changed=[{"name": "f.txt", "uuid": "u1", "prev_uuid": "", "change_type": "A"}],
        )
        to_detail = CheckinDetail(
            uuid="bbb222",
            timestamp=datetime(2026, 3, 2, tzinfo=UTC),
            user="dev",
            comment="to commit",
            files_changed=[{"name": "f.txt", "uuid": "u2", "prev_uuid": "u1", "change_type": "M"}],
        )
        rdr = self._reader()
        # Dispatch on the requested uuid so each side of the comparison resolves.
        rdr.get_checkin_detail.side_effect = lambda uuid: from_detail if "aaa" in uuid else to_detail
        rdr.get_file_content.return_value = b"file content"
        response = self._get(admin_client, sample_project, "compare/?from=aaa111&to=bbb222", rdr)
        assert response.status_code == 200
| 744 | + | |
| 745 | + | |
| 746 | +# --------------------------------------------------------------------------- | |
| 747 | +# Timeline RSS feed (mocked) | |
| 748 | +# --------------------------------------------------------------------------- | |
| 749 | + | |
| 750 | + | |
@pytest.mark.django_db
class TestTimelineRssViewMocked:
    """RSS feed for the timeline, with the reader mocked."""

    def test_rss_feed(self, admin_client, sample_project):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_timeline.return_value = [_make_timeline_entry(rid=1, comment="rss commit")]
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "timeline/rss/"))
        assert response.status_code == 200
        assert response["Content-Type"] == "application/rss+xml"
        body = response.content.decode()
        assert "rss commit" in body
        assert "<rss" in body
| 769 | + | |
| 770 | + | |
| 771 | +# --------------------------------------------------------------------------- | |
| 772 | +# Tickets CSV export (mocked) | |
| 773 | +# --------------------------------------------------------------------------- | |
| 774 | + | |
| 775 | + | |
@pytest.mark.django_db
class TestTicketsCsvViewMocked:
    """CSV export of the ticket list, with the reader mocked."""

    def test_csv_export(self, admin_client, sample_project):
        tickets = [
            TicketEntry(
                uuid="csv-uuid",
                title="Export test",
                status="Open",
                type="Code_Defect",
                created=datetime(2026, 3, 1, tzinfo=UTC),
                owner="testuser",
                priority="High",
                severity="Critical",
            )
        ]
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_tickets.return_value = tickets
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "tickets/export/"))
        assert response.status_code == 200
        assert response["Content-Type"] == "text/csv"
        body = response.content.decode()
        assert "Export test" in body
        assert "csv-uuid" in body
| 805 | + | |
| 806 | + | |
| 807 | +# --------------------------------------------------------------------------- | |
| 808 | +# Branch list view (mocked) | |
| 809 | +# --------------------------------------------------------------------------- | |
| 810 | + | |
| 811 | + | |
@pytest.mark.django_db
class TestBranchListViewMocked:
    """Branch listing view, with the reader mocked."""

    @staticmethod
    def _branch(name, *, count, uuid):
        return SimpleNamespace(
            name=name,
            last_user="dev",
            last_checkin=datetime(2026, 3, 1, tzinfo=UTC),
            checkin_count=count,
            last_uuid=uuid,
        )

    @staticmethod
    def _get(client, project, path, branches):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_branches.return_value = branches
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, path))

    def test_branch_list_renders(self, admin_client, sample_project):
        branches = [self._branch("trunk", count=50, uuid="abc123")]
        response = self._get(admin_client, sample_project, "branches/", branches)
        assert response.status_code == 200
        assert "trunk" in response.content.decode()

    def test_branch_list_search(self, admin_client, sample_project):
        branches = [
            self._branch("trunk", count=50, uuid="abc123"),
            self._branch("feature-x", count=5, uuid="def456"),
        ]
        response = self._get(admin_client, sample_project, "branches/?search=feature", branches)
        assert response.status_code == 200
        assert "feature-x" in response.content.decode()
| 854 | + | |
| 855 | +# --------------------------------------------------------------------------- | |
| 856 | +# Tag list view (mocked) | |
| 857 | +# --------------------------------------------------------------------------- | |
| 858 | + | |
| 859 | + | |
@pytest.mark.django_db
class TestTagListViewMocked:
    """Tag listing view, with the reader mocked."""

    @staticmethod
    def _tag(name, uuid):
        return SimpleNamespace(name=name, uuid=uuid, user="dev", timestamp=datetime(2026, 3, 1, tzinfo=UTC))

    @staticmethod
    def _get(client, project, path, tags):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_tags.return_value = tags
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, path))

    def test_tag_list_renders(self, admin_client, sample_project):
        response = self._get(admin_client, sample_project, "tags/", [self._tag("v1.0", "abc123")])
        assert response.status_code == 200
        assert "v1.0" in response.content.decode()

    def test_tag_list_search(self, admin_client, sample_project):
        tags = [self._tag("v1.0", "abc123"), self._tag("v2.0-beta", "def456")]
        response = self._get(admin_client, sample_project, "tags/?search=beta", tags)
        assert response.status_code == 200
        assert "v2.0-beta" in response.content.decode()
| 895 | + | |
| 896 | + | |
| 897 | +# --------------------------------------------------------------------------- | |
| 898 | +# Stats view (mocked) | |
| 899 | +# --------------------------------------------------------------------------- | |
| 900 | + | |
| 901 | + | |
@pytest.mark.django_db
class TestRepoStatsViewMocked:
    """Repository statistics view, with the reader mocked."""

    def test_stats_renders(self, admin_client, sample_project):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_repo_statistics.return_value = {
            "total_artifacts": 100,
            "checkin_count": 50,
            "wiki_events": 5,
            "ticket_events": 10,
            "forum_events": 2,
            "total_events": 67,
        }
        rdr.get_top_contributors.return_value = [{"user": "dev", "count": 50}]
        # 52 activity buckets — presumably one per week; enough for the view's chart.
        rdr.get_commit_activity.return_value = [{"count": week} for week in range(52)]
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "stats/"))
        assert response.status_code == 200
        content = response.content.decode()
        assert "Checkins" in content or "50" in content
| 922 | + | |
| 923 | + | |
| 924 | +# --------------------------------------------------------------------------- | |
| 925 | +# File history view (mocked) | |
| 926 | +# --------------------------------------------------------------------------- | |
| 927 | + | |
| 928 | + | |
@pytest.mark.django_db
class TestFileHistoryViewMocked:
    """Per-file history view, with the reader mocked."""

    def test_file_history_renders(self, admin_client, sample_project):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_file_history.return_value = [
            {"uuid": "abc", "timestamp": datetime(2026, 3, 1, tzinfo=UTC), "user": "dev", "comment": "edit file"},
        ]
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/history/README.md"))
        assert response.status_code == 200
| 945 | + | |
| 946 | + | |
| 947 | +# --------------------------------------------------------------------------- | |
| 948 | +# Code browser (mocked) -- tests the _build_file_tree helper indirectly | |
| 949 | +# --------------------------------------------------------------------------- | |
| 950 | + | |
| 951 | + | |
@pytest.mark.django_db
class TestCodeBrowserViewMocked:
    """Code browser view; exercises the _build_file_tree helper indirectly."""

    @staticmethod
    def _reader(*, uuid, files, metadata, timeline, content=None):
        rdr = MagicMock()
        rdr.__enter__.return_value = rdr
        rdr.__exit__.return_value = False
        rdr.get_latest_checkin_uuid.return_value = uuid
        rdr.get_files_at_checkin.return_value = files
        rdr.get_metadata.return_value = metadata
        rdr.get_timeline.return_value = timeline
        if content is not None:
            rdr.get_file_content.return_value = content
        return rdr

    @staticmethod
    def _get(client, project, rdr, **extra):
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=project)
            grr.return_value = (project, repo, rdr)
            return client.get(_url(project.slug, "code/"), **extra)

    def test_code_browser_renders(self, admin_client, sample_project):
        rdr = self._reader(
            uuid="abc123",
            files=[_make_file_entry(name="README.md", uuid="f1"), _make_file_entry(name="src/main.py", uuid="f2")],
            metadata=RepoMetadata(project_name="Test", checkin_count=10),
            timeline=[_make_timeline_entry(rid=1)],
            content=b"# README\nHello",
        )
        response = self._get(admin_client, sample_project, rdr)
        assert response.status_code == 200
        assert "README" in response.content.decode()

    def test_code_browser_htmx_partial(self, admin_client, sample_project):
        rdr = self._reader(
            uuid="abc",
            files=[_make_file_entry(name="README.md", uuid="f1")],
            metadata=RepoMetadata(),
            timeline=[],
        )
        response = self._get(admin_client, sample_project, rdr, HTTP_HX_REQUEST="true")
        assert response.status_code == 200
| 994 | + | |
| 995 | + | |
| 996 | +# --------------------------------------------------------------------------- | |
| 997 | +# Code file view (mocked) | |
| 998 | +# --------------------------------------------------------------------------- | |
| 999 | + | |
| 1000 | + | |
@pytest.mark.django_db
class TestCodeFileViewMocked:
    """Code file view: text rendering, 404s, binary detection, rendered mode."""

    def _get_file(self, admin_client, sample_project, files, content, path):
        # GET *path* with a mocked reader serving *files* and *content*.
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_latest_checkin_uuid.return_value = "abc"
        rdr.get_files_at_checkin.return_value = files
        if content is not None:
            rdr.get_file_content.return_value = content
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            return admin_client.get(_url(sample_project.slug, path))

    def test_code_file_renders(self, admin_client, sample_project):
        resp = self._get_file(
            admin_client,
            sample_project,
            [_make_file_entry(name="main.py", uuid="f1")],
            b"print('hello')",
            "code/file/main.py",
        )
        assert resp.status_code == 200
        assert "print" in resp.content.decode()

    def test_code_file_not_found(self, admin_client, sample_project):
        resp = self._get_file(admin_client, sample_project, [], None, "code/file/nonexistent.txt")
        assert resp.status_code == 404

    def test_code_file_binary(self, admin_client, sample_project):
        # Invalid UTF-8 bytes must be detected and shown as a binary file.
        resp = self._get_file(
            admin_client,
            sample_project,
            [_make_file_entry(name="image.png", uuid="f1")],
            b"\x89PNG\r\n\x1a\n\x00\x00",
            "code/file/image.png",
        )
        assert resp.status_code == 200
        assert "Binary file" in resp.content.decode()

    def test_code_file_rendered_mode(self, admin_client, sample_project):
        """Wiki files can be rendered instead of showing source."""
        resp = self._get_file(
            admin_client,
            sample_project,
            [_make_file_entry(name="page.md", uuid="f1")],
            b"# Hello\nWorld",
            "code/file/page.md?mode=rendered",
        )
        assert resp.status_code == 200
| 1065 | + | |
| 1066 | + | |
| 1067 | +# --------------------------------------------------------------------------- | |
| 1068 | +# Code raw download (mocked) | |
| 1069 | +# --------------------------------------------------------------------------- | |
| 1070 | + | |
| 1071 | + | |
@pytest.mark.django_db
class TestCodeRawViewMocked:
    """Raw file download endpoint."""

    def _get_raw(self, admin_client, sample_project, files, content, path):
        # GET *path* against a mocked reader; *content* of None is left unset.
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_latest_checkin_uuid.return_value = "abc"
        rdr.get_files_at_checkin.return_value = files
        if content is not None:
            rdr.get_file_content.return_value = content
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            return admin_client.get(_url(sample_project.slug, path))

    def test_raw_download(self, admin_client, sample_project):
        resp = self._get_raw(
            admin_client,
            sample_project,
            [_make_file_entry(name="data.csv", uuid="f1")],
            b"col1,col2\na,b",
            "code/raw/data.csv",
        )
        assert resp.status_code == 200
        assert resp["Content-Disposition"] == 'attachment; filename="data.csv"'

    def test_raw_file_not_found(self, admin_client, sample_project):
        resp = self._get_raw(admin_client, sample_project, [], None, "code/raw/missing.txt")
        assert resp.status_code == 404
| 1102 | + | |
| 1103 | + | |
| 1104 | +# --------------------------------------------------------------------------- | |
| 1105 | +# Code blame (mocked) | |
| 1106 | +# --------------------------------------------------------------------------- | |
| 1107 | + | |
| 1108 | + | |
@pytest.mark.django_db
class TestCodeBlameViewMocked:
    """Blame view, with both the reader and the fossil CLI mocked."""

    def _get_blame(self, admin_client, sample_project, available, blame_lines=None):
        # GET the blame page with FossilCLI availability forced to *available*.
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        with patch("fossil.views._get_repo_and_reader") as grr, patch("fossil.cli.FossilCLI") as cli_cls:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            cli = cli_cls.return_value
            cli.is_available.return_value = available
            if blame_lines is not None:
                cli.blame.return_value = blame_lines
            return admin_client.get(_url(sample_project.slug, "code/blame/main.py"))

    def test_blame_renders_with_dates(self, admin_client, sample_project):
        lines = [
            {"user": "dev", "date": "2026-01-01", "uuid": "abc", "line_num": 1, "text": "line one"},
            {"user": "dev", "date": "2026-03-01", "uuid": "def", "line_num": 2, "text": "line two"},
        ]
        resp = self._get_blame(admin_client, sample_project, True, lines)
        assert resp.status_code == 200

    def test_blame_no_fossil_binary(self, admin_client, sample_project):
        # The page must still render when the fossil executable is missing.
        resp = self._get_blame(admin_client, sample_project, False)
        assert resp.status_code == 200
| 1143 | + | |
| 1144 | + | |
| 1145 | +# --------------------------------------------------------------------------- | |
| 1146 | +# Toggle watch / notifications | |
| 1147 | +# --------------------------------------------------------------------------- | |
| 1148 | + | |
| 1149 | + | |
@pytest.mark.django_db
class TestToggleWatch:
    """POSTing to the watch endpoint toggles a ProjectWatch row."""

    def test_watch_project(self, admin_client, sample_project, admin_user):
        from fossil.notifications import ProjectWatch

        resp = admin_client.post(_url(sample_project.slug, "watch/"))
        assert resp.status_code == 302
        watches = ProjectWatch.objects.filter(user=admin_user, project=sample_project)
        assert watches.exists()

    def test_unwatch_project(self, admin_client, sample_project, admin_user):
        from fossil.notifications import ProjectWatch

        ProjectWatch.objects.create(user=admin_user, project=sample_project, event_filter="all", created_by=admin_user)
        resp = admin_client.post(_url(sample_project.slug, "watch/"))
        assert resp.status_code == 302
        # The row must be soft-deleted, so a "live" filter finds nothing.
        live = ProjectWatch.objects.filter(user=admin_user, project=sample_project, deleted_at__isnull=True)
        assert not live.exists()

    def test_watch_with_event_filter(self, admin_client, sample_project, admin_user):
        from fossil.notifications import ProjectWatch

        resp = admin_client.post(_url(sample_project.slug, "watch/"), {"event_filter": "checkins"})
        assert resp.status_code == 302
        created = ProjectWatch.objects.get(user=admin_user, project=sample_project)
        assert created.event_filter == "checkins"

    def test_watch_denied_anon(self, client, sample_project):
        # Anonymous POST is redirected to the login page.
        resp = client.post(_url(sample_project.slug, "watch/"))
        assert resp.status_code == 302
| 1179 | + | |
| 1180 | + | |
| 1181 | +# --------------------------------------------------------------------------- | |
| 1182 | +# Checkin detail (mocked) -- the diff computation path | |
| 1183 | +# --------------------------------------------------------------------------- | |
| 1184 | + | |
| 1185 | + | |
@pytest.mark.django_db
class TestCheckinDetailViewMocked:
    """Checkin detail view, including the per-file diff computation path."""

    def test_checkin_detail_with_diffs(self, admin_client, sample_project):
        detail = CheckinDetail(
            uuid="abc123full",
            timestamp=datetime(2026, 3, 1, tzinfo=UTC),
            user="dev",
            comment="fix bug",
            branch="trunk",
            files_changed=[
                {"name": "fix.py", "uuid": "new-uuid", "prev_uuid": "old-uuid", "change_type": "M"},
            ],
        )
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_checkin_detail.return_value = detail
        # Serve distinct old/new blobs so the view computes a real diff.
        blobs = {"old-uuid": b"old line\n"}
        rdr.get_file_content.side_effect = lambda uuid: blobs.get(uuid, b"new line\n")
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            with patch("fossil.ci.StatusCheck") as sc:
                sc.objects.filter.return_value = []
                resp = admin_client.get(_url(sample_project.slug, "checkin/abc123full/"))
        assert resp.status_code == 200
        assert "fix bug" in resp.content.decode()

    def test_checkin_not_found(self, admin_client, sample_project):
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_checkin_detail.return_value = None
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            resp = admin_client.get(_url(sample_project.slug, "checkin/nonexistent/"))
        assert resp.status_code == 404
| 1233 | + | |
| 1234 | + | |
| 1235 | +# --------------------------------------------------------------------------- | |
| 1236 | +# Technote views (mocked) | |
| 1237 | +# --------------------------------------------------------------------------- | |
| 1238 | + | |
| 1239 | + | |
@pytest.mark.django_db
class TestTechnoteViewsMocked:
    """Technote list and detail views."""

    def _get(self, admin_client, sample_project, path, **returns):
        # GET *path* with a mocked reader; each keyword configures a method.
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        for attr, value in returns.items():
            getattr(rdr, attr).return_value = value
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            return admin_client.get(_url(sample_project.slug, path))

    def test_technote_list(self, admin_client, sample_project):
        notes = [SimpleNamespace(uuid="n1", comment="Release notes", user="dev", timestamp=datetime(2026, 3, 1, tzinfo=UTC))]
        resp = self._get(admin_client, sample_project, "technotes/", get_technotes=notes)
        assert resp.status_code == 200

    def test_technote_detail(self, admin_client, sample_project):
        note = {
            "uuid": "n1",
            "comment": "Release v1",
            "body": "## Changes\n- Fix",
            "user": "dev",
            "timestamp": datetime(2026, 3, 1, tzinfo=UTC),
        }
        resp = self._get(admin_client, sample_project, "technotes/n1/", get_technote_detail=note)
        assert resp.status_code == 200

    def test_technote_detail_not_found(self, admin_client, sample_project):
        resp = self._get(admin_client, sample_project, "technotes/nonexistent/", get_technote_detail=None)
        assert resp.status_code == 404
| 1285 | + | |
| 1286 | + | |
| 1287 | +# --------------------------------------------------------------------------- | |
| 1288 | +# Unversioned files list (mocked) | |
| 1289 | +# --------------------------------------------------------------------------- | |
| 1290 | + | |
| 1291 | + | |
@pytest.mark.django_db
class TestUnversionedListViewMocked:
    """Unversioned-files listing, with and without a search filter."""

    def _get_files(self, admin_client, sample_project, files, path):
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_unversioned_files.return_value = files
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            return admin_client.get(_url(sample_project.slug, path))

    def test_unversioned_list(self, admin_client, sample_project):
        files = [SimpleNamespace(name="logo.png", size=1024, mtime=datetime(2026, 3, 1, tzinfo=UTC), hash="abc")]
        resp = self._get_files(admin_client, sample_project, files, "files/")
        assert resp.status_code == 200

    def test_unversioned_search(self, admin_client, sample_project):
        files = [
            SimpleNamespace(name="logo.png", size=1024, mtime=datetime(2026, 3, 1, tzinfo=UTC), hash="abc"),
            SimpleNamespace(name="data.csv", size=512, mtime=datetime(2026, 3, 1, tzinfo=UTC), hash="def"),
        ]
        resp = self._get_files(admin_client, sample_project, files, "files/?search=logo")
        assert resp.status_code == 200
| 1322 | + | |
| 1323 | + | |
| 1324 | +# --------------------------------------------------------------------------- | |
| 1325 | +# Fossil docs views (mocked) | |
| 1326 | +# --------------------------------------------------------------------------- | |
| 1327 | + | |
| 1328 | + | |
@pytest.mark.django_db
class TestFossilDocsViewsMocked:
    """Embedded-docs index and page views."""

    def test_docs_index(self, admin_client, sample_project):
        resp = admin_client.get(_url(sample_project.slug, "docs/"))
        assert resp.status_code == 200

    def _get_doc(self, admin_client, sample_project, files, content, path):
        # GET a docs page with a mocked reader serving *files*/*content*.
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_latest_checkin_uuid.return_value = "abc"
        rdr.get_files_at_checkin.return_value = files
        if content is not None:
            rdr.get_file_content.return_value = content
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            return admin_client.get(_url(sample_project.slug, path))

    def test_doc_page_renders(self, admin_client, sample_project):
        resp = self._get_doc(
            admin_client,
            sample_project,
            [_make_file_entry(name="www/concepts.wiki", uuid="f1")],
            b"<h1>Concepts</h1>\n<p>Text here</p>",
            "docs/www/concepts.wiki",
        )
        assert resp.status_code == 200
        assert "Concepts" in resp.content.decode()

    def test_doc_page_not_found(self, admin_client, sample_project):
        resp = self._get_doc(admin_client, sample_project, [], None, "docs/www/missing.wiki")
        assert resp.status_code == 404
| 1364 | + | |
| 1365 | + | |
| 1366 | +# --------------------------------------------------------------------------- | |
| 1367 | +# User activity view (mocked) | |
| 1368 | +# --------------------------------------------------------------------------- | |
| 1369 | + | |
| 1370 | + | |
@pytest.mark.django_db
class TestUserActivityViewMocked:
    """Per-user activity page."""

    def test_user_activity_renders(self, admin_client, sample_project):
        stats = {
            "checkin_count": 25,
            "checkins": [{"uuid": "a", "comment": "fix", "timestamp": datetime(2026, 3, 1, tzinfo=UTC)}],
            "daily_activity": {"2026-03-01": 5},
        }
        rdr = MagicMock()
        rdr.__enter__ = MagicMock(return_value=rdr)
        rdr.__exit__ = MagicMock(return_value=False)
        rdr.get_user_activity.return_value = stats
        with patch("fossil.views._get_repo_and_reader") as grr:
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            resp = admin_client.get(_url(sample_project.slug, "user/dev/"))
        assert resp.status_code == 200
| 1389 | + | |
| 1390 | + | |
| 1391 | +# --------------------------------------------------------------------------- | |
| 1392 | +# Status badge view | |
| 1393 | +# --------------------------------------------------------------------------- | |
| 1394 | + | |
| 1395 | + | |
@pytest.mark.django_db
class TestStatusBadgeView:
    """SVG status badge reflects the stored StatusCheck state."""

    def _badge(self, http_client, project, uuid):
        # Fetch the badge SVG for a given checkin uuid.
        return http_client.get(_url(project.slug, f"api/status/{uuid}/badge.svg"))

    def test_badge_unknown(self, admin_client, sample_project):
        resp = self._badge(admin_client, sample_project, "abc123")
        assert resp.status_code == 200
        assert resp["Content-Type"] == "image/svg+xml"
        assert "unknown" in resp.content.decode()

    def test_badge_passing(self, admin_client, sample_project, fossil_repo_obj):
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(repository=fossil_repo_obj, checkin_uuid="pass123", context="ci/test", state="success")
        resp = self._badge(admin_client, sample_project, "pass123")
        assert resp.status_code == 200
        assert "passing" in resp.content.decode()

    def test_badge_failing(self, admin_client, sample_project, fossil_repo_obj):
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(repository=fossil_repo_obj, checkin_uuid="fail123", context="ci/test", state="failure")
        resp = self._badge(admin_client, sample_project, "fail123")
        assert resp.status_code == 200
        assert "failing" in resp.content.decode()

    def test_badge_pending(self, admin_client, sample_project, fossil_repo_obj):
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(repository=fossil_repo_obj, checkin_uuid="pend123", context="ci/test", state="pending")
        resp = self._badge(admin_client, sample_project, "pend123")
        assert resp.status_code == 200
        assert "pending" in resp.content.decode()
| 1429 | + | |
| 1430 | + | |
| 1431 | +# --------------------------------------------------------------------------- | |
| 1432 | +# Status check API (GET path) | |
| 1433 | +# --------------------------------------------------------------------------- | |
| 1434 | + | |
| 1435 | + | |
@pytest.mark.django_db
class TestStatusCheckApiGet:
    """GET side of the status-check JSON API."""

    def test_get_status_checks(self, admin_client, sample_project, fossil_repo_obj):
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(
            repository=fossil_repo_obj, checkin_uuid="apicheck", context="ci/lint", state="success", description="OK"
        )
        resp = admin_client.get(_url(sample_project.slug, "api/status?checkin=apicheck"))
        assert resp.status_code == 200
        payload = resp.json()
        assert payload["checkin"] == "apicheck"
        assert len(payload["checks"]) == 1
        assert payload["checks"][0]["context"] == "ci/lint"

    def test_get_status_no_checkin_param(self, admin_client, sample_project, fossil_repo_obj):
        # Missing ?checkin= query parameter is a client error.
        resp = admin_client.get(_url(sample_project.slug, "api/status"))
        assert resp.status_code == 400

    def test_get_status_denied_private(self, client, sample_project, fossil_repo_obj):
        """Anonymous user denied on private project."""
        resp = client.get(_url(sample_project.slug, "api/status?checkin=abc"))
        assert resp.status_code == 403
| 1459 | + | |
| 1460 | + | |
| 1461 | +# --------------------------------------------------------------------------- | |
| 1462 | +# Fossil xfer endpoint | |
| 1463 | +# --------------------------------------------------------------------------- | |
| 1464 | + | |
| 1465 | + | |
@pytest.mark.django_db
class TestFossilXferView:
    """The fossil sync/clone (xfer) endpoint."""

    def test_xfer_get_public_project(self, client, sample_project, fossil_repo_obj):
        """GET on xfer endpoint shows clone info for public projects."""
        sample_project.visibility = "public"
        sample_project.save()
        resp = client.get(_url(sample_project.slug, "xfer"))
        assert resp.status_code == 200
        assert "clone" in resp.content.decode().lower()

    def test_xfer_get_private_denied(self, client, sample_project, fossil_repo_obj):
        """GET on xfer endpoint denied for private projects without auth."""
        resp = client.get(_url(sample_project.slug, "xfer"))
        assert resp.status_code == 403

    def test_xfer_method_not_allowed(self, admin_client, sample_project, fossil_repo_obj):
        """PUT/PATCH not supported."""
        resp = admin_client.put(_url(sample_project.slug, "xfer"))
        assert resp.status_code == 405
| 1485 | + | |
| 1486 | + | |
| 1487 | +# --------------------------------------------------------------------------- | |
| 1488 | +# Build file tree helper | |
| 1489 | +# --------------------------------------------------------------------------- | |
| 1490 | + | |
| 1491 | + | |
| 1492 | +class TestBuildFileTree: | |
| 1493 | + def test_root_listing(self): | |
| 1494 | + from fossil.views import _build_file_tree | |
| 1495 | + | |
| 1496 | + files = [ | |
| 1497 | + _make_file_entry(name="README.md", uuid="f1"), | |
| 1498 | + _make_file_entry(name="src/main.py", uuid="f2"), | |
| 1499 | + _make_file_ent |
| --- a/tests/test_views_coverage.py | 
| +++ b/tests/test_views_coverage.py | 
| @@ -0,0 +1,1499 @@ | 
| 1 | """Tests for fossil/views.py -- covering uncovered view functions and helpers. |
| 2 | |
| 3 | Focuses on views that can be tested by mocking FossilReader (so no real |
| 4 | .fossil file is needed) and pure Django CRUD views that don't touch Fossil. |
| 5 | """ |
| 6 | |
| 7 | from datetime import UTC, datetime |
| 8 | from types import SimpleNamespace |
| 9 | from unittest.mock import MagicMock, patch |
| 10 | |
| 11 | import pytest |
| 12 | from django.contrib.auth.models import User |
| 13 | from django.test import Client |
| 14 | |
| 15 | from fossil.models import FossilRepository |
| 16 | from fossil.reader import ( |
| 17 | CheckinDetail, |
| 18 | FileEntry, |
| 19 | RepoMetadata, |
| 20 | TicketEntry, |
| 21 | TimelineEntry, |
| 22 | WikiPage, |
| 23 | ) |
| 24 | from organization.models import Team |
| 25 | from projects.models import ProjectTeam |
| 26 | |
| 27 | # --------------------------------------------------------------------------- |
| 28 | # Shared fixtures |
| 29 | # --------------------------------------------------------------------------- |
| 30 | |
| 31 | |
@pytest.fixture
def fossil_repo_obj(sample_project):
    """The live (non-soft-deleted) FossilRepository attached to sample_project."""
    repo = FossilRepository.objects.get(project=sample_project, deleted_at__isnull=True)
    return repo
| 35 | |
| 36 | |
@pytest.fixture
def writer_user(db, admin_user, sample_project):
    """A user granted write access to sample_project through a team."""
    user = User.objects.create_user(username="writer_vc", password="testpass123")
    team = Team.objects.create(name="VC Writers", organization=sample_project.organization, created_by=admin_user)
    team.members.add(user)
    # Link the team to the project with the "write" role.
    ProjectTeam.objects.create(project=sample_project, team=team, role="write", created_by=admin_user)
    return user
| 44 | |
| 45 | |
@pytest.fixture
def writer_client(writer_user):
    """A Django test client already authenticated as writer_user."""
    http_client = Client()
    http_client.login(username="writer_vc", password="testpass123")
    return http_client
| 51 | |
| 52 | |
| 53 | def _url(slug, path): |
| 54 | return f"/projects/{slug}/fossil/{path}" |
| 55 | |
| 56 | |
| 57 | def _mock_reader_ctx(mock_cls, **attrs): |
| 58 | """Configure a patched FossilReader class to work as a context manager |
| 59 | and attach return values from **attrs to the instance.""" |
| 60 | instance = mock_cls.return_value |
| 61 | instance.__enter__ = MagicMock(return_value=instance) |
| 62 | instance.__exit__ = MagicMock(return_value=False) |
| 63 | for key, val in attrs.items(): |
| 64 | setattr(instance, key, MagicMock(return_value=val)) |
| 65 | return instance |
| 66 | |
| 67 | |
def _make_timeline_entry(**overrides):
    """A TimelineEntry with sensible defaults; any field can be overridden."""
    base = {
        "rid": 1,
        "uuid": "abc123def456",
        "event_type": "ci",
        "timestamp": datetime(2026, 3, 1, 12, 0, 0, tzinfo=UTC),
        "user": "testuser",
        "comment": "initial commit",
        "branch": "trunk",
        "parent_rid": 0,
        "is_merge": False,
        "merge_parent_rids": [],
        "rail": 0,
    }
    return TimelineEntry(**{**base, **overrides})
| 84 | |
| 85 | |
def _make_file_entry(**overrides):
    """Build a FileEntry with sensible defaults; keyword args replace fields."""
    base = {
        "name": "README.md",
        "uuid": "file-uuid-1",
        "size": 512,
        "is_dir": False,
        "last_commit_message": "initial commit",
        "last_commit_user": "testuser",
        "last_commit_time": datetime(2026, 3, 1, 12, 0, 0, tzinfo=UTC),
    }
    return FileEntry(**{**base, **overrides})
| 98 | |
| 99 | |
| 100 | # --------------------------------------------------------------------------- |
| 101 | # Content rendering helpers (_render_fossil_content, _is_markdown, _rewrite_fossil_links) |
| 102 | # --------------------------------------------------------------------------- |
| 103 | |
| 104 | |
class TestRenderFossilContent:
    """Rendering pipeline: Fossil wiki / markdown source is converted to HTML."""

    def test_empty_content(self):
        from fossil.views import _render_fossil_content as render

        assert render("") == ""

    def test_markdown_heading(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("# Hello World")
        assert "<h1" in rendered
        assert "Hello World" in rendered

    def test_markdown_fenced_code(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("```python\nprint('hello')\n```")
        assert "print" in rendered

    def test_fossil_wiki_link_converted(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("[/info/abc123 | View Checkin]", project_slug="my-project")
        assert "/projects/my-project/fossil/checkin/abc123/" in rendered

    def test_fossil_wiki_verbatim_block(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<h1>Title</h1>\n<verbatim>code here</verbatim>")
        assert "<pre><code>code here</code></pre>" in rendered

    def test_fossil_wiki_list_bullets(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>List:</p>\n* Item one\n* Item two")
        assert "<ul>" in rendered
        assert "<li>" in rendered
        assert "Item one" in rendered

    def test_fossil_wiki_ordered_list(self):
        from fossil.views import _render_fossil_content as render

        # Must start with an HTML element so _is_markdown returns False
        rendered = render("<p>Steps:</p>\n1. Step one\n2. Step two")
        assert "<ol>" in rendered
        assert "Step one" in rendered

    def test_fossil_wiki_nowiki_block(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>Before</p>\n<nowiki><b>Bold</b></nowiki>")
        assert "<b>Bold</b>" in rendered

    def test_fossil_interwiki_link(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>See [wikipedia:Fossil_(software)]</p>")
        assert "en.wikipedia.org/wiki/Fossil_(software)" in rendered

    def test_fossil_anchor_link(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>Jump to [#section1]</p>")
        assert 'href="#section1"' in rendered

    def test_fossil_bare_wiki_link(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("<p>See [PageName]</p>")
        assert 'href="PageName"' in rendered

    def test_markdown_fossil_link_resolved(self):
        from fossil.views import _render_fossil_content as render

        rendered = render("# Page\n\n[./file.wiki | Link Text]", project_slug="proj", base_path="www/")
        assert "Link Text" in rendered
| 193 | |
| 194 | |
class TestIsMarkdown:
    """Heuristic that decides whether content is markdown or fossil-wiki HTML."""

    def test_heading_detected(self):
        from fossil.views import _is_markdown as md

        assert md("# Title\nSome text") is True

    def test_fenced_code_detected(self):
        from fossil.views import _is_markdown as md

        assert md("Some text\n```\ncode\n```") is True

    def test_html_start_not_markdown(self):
        from fossil.views import _is_markdown as md

        assert md("<h1>Title</h1>\n<p>Paragraph</p>") is False

    def test_multiple_markdown_headings(self):
        from fossil.views import _is_markdown as md

        assert md("Some text\n## Heading\n## Another") is True

    def test_plain_text_is_markdown(self):
        from fossil.views import _is_markdown as md

        # Text with no HTML tags falls through to the markdown default.
        assert md("Just plain text") is True
| 222 | |
| 223 | |
class TestRewriteFossilLinks:
    """URL rewriting: fossil-internal hrefs become project-scoped routes.

    Every case shares the same call shape (deferred import + default slug
    "myproj"), so a small helper wraps it instead of repeating three lines
    of boilerplate in all twenty tests.
    """

    @staticmethod
    def _rewrite(html, slug="myproj"):
        """Run _rewrite_fossil_links on *html* for *slug* (imported lazily)."""
        from fossil.views import _rewrite_fossil_links

        return _rewrite_fossil_links(html, slug)

    def test_info_hash_rewrite(self):
        result = self._rewrite('<a href="/info/abc123">link</a>')
        assert "/projects/myproj/fossil/checkin/abc123/" in result

    def test_doc_trunk_rewrite(self):
        result = self._rewrite('<a href="/doc/trunk/www/readme.wiki">docs</a>')
        assert "/projects/myproj/fossil/code/file/www/readme.wiki" in result

    def test_wiki_path_rewrite(self):
        result = self._rewrite('<a href="/wiki/HomePage">home</a>')
        assert "/projects/myproj/fossil/wiki/page/HomePage" in result

    def test_wiki_query_rewrite(self):
        result = self._rewrite('<a href="/wiki?name=HomePage">home</a>')
        assert "/projects/myproj/fossil/wiki/page/HomePage" in result

    def test_tktview_rewrite(self):
        result = self._rewrite('<a href="/tktview/abc123">ticket</a>')
        assert "/projects/myproj/fossil/tickets/abc123/" in result

    def test_vdiff_rewrite(self):
        result = self._rewrite('<a href="/vdiff?from=aaa&to=bbb">diff</a>')
        assert "/projects/myproj/fossil/compare/?from=aaa&to=bbb" in result

    def test_timeline_rewrite(self):
        result = self._rewrite('<a href="/timeline?n=20">tl</a>')
        assert "/projects/myproj/fossil/timeline/" in result

    def test_forumpost_rewrite(self):
        result = self._rewrite('<a href="/forumpost/abc123">post</a>')
        assert "/projects/myproj/fossil/forum/abc123/" in result

    def test_forum_base_rewrite(self):
        result = self._rewrite('<a href="/forum">forum</a>')
        assert "/projects/myproj/fossil/forum/" in result

    def test_www_path_rewrite(self):
        result = self._rewrite('<a href="/www/index.html">page</a>')
        assert "/projects/myproj/fossil/docs/www/index.html" in result

    def test_dir_rewrite(self):
        result = self._rewrite('<a href="/dir">browse</a>')
        assert "/projects/myproj/fossil/code/" in result

    def test_help_rewrite(self):
        result = self._rewrite('<a href="/help/clone">help</a>')
        assert "/projects/myproj/fossil/docs/www/help.wiki" in result

    def test_external_link_preserved(self):
        result = self._rewrite('<a href="https://example.com/page">ext</a>')
        assert "https://example.com/page" in result

    def test_empty_slug_passthrough(self):
        # With no slug the rewriter must leave the markup untouched.
        html = '<a href="/info/abc">link</a>'
        assert self._rewrite(html, "") == html

    def test_scheme_link_info(self):
        result = self._rewrite('<a href="info:abc123">checkin</a>')
        assert "/projects/myproj/fossil/checkin/abc123/" in result

    def test_scheme_link_wiki(self):
        result = self._rewrite('<a href="wiki:PageName">page</a>')
        assert "/projects/myproj/fossil/wiki/page/PageName" in result

    def test_builtin_rewrite(self):
        result = self._rewrite('<a href="/builtin/default.css">skin</a>')
        assert "/projects/myproj/fossil/code/file/skins/default.css" in result

    def test_setup_link_not_rewritten(self):
        result = self._rewrite('<a href="/setup_skin">settings</a>')
        assert "/setup_skin" in result

    def test_wiki_file_extension_rewrite(self):
        result = self._rewrite('<a href="/concepts.wiki">page</a>')
        assert "/projects/myproj/fossil/docs/www/concepts.wiki" in result

    def test_external_fossil_scm_rewrite(self):
        result = self._rewrite('<a href="https://fossil-scm.org/home/info/abc123">ext</a>')
        assert "/projects/myproj/fossil/checkin/abc123/" in result

    def test_scheme_link_forum(self):
        result = self._rewrite('<a href="forum:/forumpost/abc123">post</a>')
        assert "/projects/myproj/fossil/forum/abc123/" in result
| 370 | |
| 371 | |
| 372 | # --------------------------------------------------------------------------- |
| 373 | # Split diff helper |
| 374 | # --------------------------------------------------------------------------- |
| 375 | |
| 376 | |
class TestComputeSplitLines:
    """Pairing of unified-diff rows into left/right columns for split view."""

    def test_context_lines_both_sides(self):
        from fossil.views import _compute_split_lines as split

        rows = [{"text": " same", "type": "context", "old_num": 1, "new_num": 1}]
        lhs, rhs = split(rows)
        assert len(lhs) == 1
        assert lhs[0]["type"] == "context"
        assert rhs[0]["type"] == "context"

    def test_del_add_paired(self):
        from fossil.views import _compute_split_lines as split

        rows = [
            {"text": "-old", "type": "del", "old_num": 1, "new_num": ""},
            {"text": "+new", "type": "add", "old_num": "", "new_num": 1},
        ]
        lhs, rhs = split(rows)
        assert lhs[0]["type"] == "del"
        assert rhs[0]["type"] == "add"

    def test_orphan_add(self):
        from fossil.views import _compute_split_lines as split

        rows = [{"text": "+added", "type": "add", "old_num": "", "new_num": 1}]
        lhs, rhs = split(rows)
        assert lhs[0]["type"] == "empty"
        assert rhs[0]["type"] == "add"

    def test_header_hunk_both_sides(self):
        from fossil.views import _compute_split_lines as split

        rows = [
            {"text": "--- a/f", "type": "header", "old_num": "", "new_num": ""},
            {"text": "@@ -1 +1 @@", "type": "hunk", "old_num": "", "new_num": ""},
        ]
        lhs, rhs = split(rows)
        assert len(lhs) == 2
        assert lhs[0]["type"] == "header"
        assert lhs[1]["type"] == "hunk"

    def test_uneven_del_add_padded(self):
        """More deletions than additions: right column gets empty placeholders."""
        from fossil.views import _compute_split_lines as split

        rows = [
            {"text": "-line1", "type": "del", "old_num": 1, "new_num": ""},
            {"text": "-line2", "type": "del", "old_num": 2, "new_num": ""},
            {"text": "+new1", "type": "add", "old_num": "", "new_num": 1},
        ]
        lhs, rhs = split(rows)
        assert len(lhs) == 2
        assert lhs[0]["type"] == "del"
        assert lhs[1]["type"] == "del"
        assert rhs[0]["type"] == "add"
        assert rhs[1]["type"] == "empty"
| 433 | |
| 434 | |
| 435 | # --------------------------------------------------------------------------- |
| 436 | # Timeline view (mocked FossilReader) |
| 437 | # --------------------------------------------------------------------------- |
| 438 | |
| 439 | |
@pytest.mark.django_db
class TestTimelineViewMocked:
    """Timeline view tests with FossilReader and repo resolution mocked out.

    Each test patches the FossilReader class itself AND _get_repo_and_reader;
    the latter is fed the class mock's return_value so the view receives the
    instance configured by _mock_reader_ctx.
    """

    def test_timeline_renders(self, admin_client, sample_project):
        """Timeline page returns 200 and shows the mocked commit comment."""
        slug = sample_project.slug
        entries = [_make_timeline_entry(rid=1)]
        with patch("fossil.views.FossilReader") as mock_cls:
            # Configure the context-manager protocol + get_timeline on the instance.
            _mock_reader_ctx(mock_cls, get_timeline=entries)
            with patch("fossil.views._get_repo_and_reader") as mock_grr:
                repo = FossilRepository.objects.get(project=sample_project)
                mock_grr.return_value = (sample_project, repo, mock_cls.return_value)
                response = admin_client.get(_url(slug, "timeline/"))
                assert response.status_code == 200
                assert "initial commit" in response.content.decode()

    def test_timeline_with_type_filter(self, admin_client, sample_project):
        """The ?type=w query filter (wiki events) is accepted and renders."""
        slug = sample_project.slug
        entries = [_make_timeline_entry(rid=1, event_type="w", comment="wiki edit")]
        with patch("fossil.views.FossilReader") as mock_cls:
            _mock_reader_ctx(mock_cls, get_timeline=entries)
            with patch("fossil.views._get_repo_and_reader") as mock_grr:
                repo = FossilRepository.objects.get(project=sample_project)
                mock_grr.return_value = (sample_project, repo, mock_cls.return_value)
                response = admin_client.get(_url(slug, "timeline/?type=w"))
                assert response.status_code == 200

    def test_timeline_htmx_partial(self, admin_client, sample_project):
        """Requests with the HX-Request header get a successful partial response."""
        slug = sample_project.slug
        entries = [_make_timeline_entry(rid=1)]
        with patch("fossil.views.FossilReader") as mock_cls:
            _mock_reader_ctx(mock_cls, get_timeline=entries)
            with patch("fossil.views._get_repo_and_reader") as mock_grr:
                repo = FossilRepository.objects.get(project=sample_project)
                mock_grr.return_value = (sample_project, repo, mock_cls.return_value)
                response = admin_client.get(_url(slug, "timeline/"), HTTP_HX_REQUEST="true")
                assert response.status_code == 200

    def test_timeline_denied_no_perm(self, no_perm_client, sample_project):
        """Users without project access are rejected with 403."""
        response = no_perm_client.get(_url(sample_project.slug, "timeline/"))
        assert response.status_code == 403
| 479 | |
| 480 | |
| 481 | # --------------------------------------------------------------------------- |
| 482 | # Ticket list/detail (mocked) |
| 483 | # --------------------------------------------------------------------------- |
| 484 | |
| 485 | |
@pytest.mark.django_db
class TestTicketViewsMocked:
    """Ticket list/detail views with the Fossil reader fully mocked.

    Builds the reader via the module-level _mock_reader_ctx helper instead of
    hand-wiring __enter__/__exit__ on every MagicMock, matching the style of
    the timeline tests.
    """

    def test_ticket_list_renders(self, admin_client, sample_project):
        """Ticket list returns 200 and shows the mocked ticket title."""
        slug = sample_project.slug
        tickets = [
            TicketEntry(
                uuid="tkt-uuid-1",
                title="Bug report",
                status="Open",
                type="Code_Defect",
                created=datetime(2026, 3, 1, tzinfo=UTC),
                owner="testuser",
            )
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), get_tickets=tickets)
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "tickets/"))
        assert response.status_code == 200
        assert "Bug report" in response.content.decode()

    def test_ticket_list_search_filter(self, admin_client, sample_project):
        """The ?search= filter keeps matching tickets and drops the rest."""
        slug = sample_project.slug
        tickets = [
            TicketEntry(
                uuid="t1", title="Login bug", status="Open", type="Code_Defect", created=datetime(2026, 3, 1, tzinfo=UTC), owner="u"
            ),
            TicketEntry(
                uuid="t2", title="Dashboard fix", status="Open", type="Code_Defect", created=datetime(2026, 3, 1, tzinfo=UTC), owner="u"
            ),
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), get_tickets=tickets)
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "tickets/?search=login"))
        assert response.status_code == 200
        content = response.content.decode()
        assert "Login bug" in content
        # Dashboard should be filtered out
        assert "Dashboard fix" not in content

    def test_ticket_list_htmx_partial(self, admin_client, sample_project):
        """HTMX requests get a successful partial response."""
        slug = sample_project.slug
        tickets = [
            TicketEntry(
                uuid="t1", title="A ticket", status="Open", type="Code_Defect", created=datetime(2026, 3, 1, tzinfo=UTC), owner="u"
            ),
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), get_tickets=tickets)
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "tickets/"), HTTP_HX_REQUEST="true")
        assert response.status_code == 200

    def test_ticket_detail_renders(self, admin_client, sample_project):
        """Ticket detail shows the ticket title and its comments."""
        slug = sample_project.slug
        ticket = TicketEntry(
            uuid="tkt-detail-1",
            title="Detail test",
            status="Open",
            type="Code_Defect",
            created=datetime(2026, 3, 1, tzinfo=UTC),
            owner="testuser",
            body="Some description **bold**",
        )
        comments = [
            {"user": "dev", "timestamp": datetime(2026, 3, 2, tzinfo=UTC), "comment": "Working on it"}
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(
                MagicMock(), get_ticket_detail=ticket, get_ticket_comments=comments
            )
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "tickets/tkt-detail-1/"))
        assert response.status_code == 200
        content = response.content.decode()
        assert "Detail test" in content

    def test_ticket_detail_not_found(self, admin_client, sample_project):
        """An unknown ticket UUID yields 404."""
        slug = sample_project.slug
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(
                MagicMock(), get_ticket_detail=None, get_ticket_comments=[]
            )
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "tickets/nonexistent/"))
        assert response.status_code == 404
| 590 | |
| 591 | |
| 592 | # --------------------------------------------------------------------------- |
| 593 | # Wiki list/page (mocked) |
| 594 | # --------------------------------------------------------------------------- |
| 595 | |
| 596 | |
@pytest.mark.django_db
class TestWikiViewsMocked:
    """Wiki list and page views with the Fossil reader mocked.

    Uses the module-level _mock_reader_ctx helper instead of hand-wiring
    __enter__/__exit__ on every MagicMock.
    """

    def test_wiki_list_renders(self, admin_client, sample_project):
        """Wiki index lists all page names."""
        slug = sample_project.slug
        pages = [
            WikiPage(name="Home", content="# Home", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="admin"),
            WikiPage(name="Setup", content="Setup guide", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="dev"),
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(
                MagicMock(), get_wiki_pages=pages, get_wiki_page=pages[0]
            )
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "wiki/"))
        assert response.status_code == 200
        content = response.content.decode()
        assert "Home" in content
        assert "Setup" in content

    def test_wiki_list_search(self, admin_client, sample_project):
        """Wiki index accepts a ?search= query."""
        slug = sample_project.slug
        pages = [
            WikiPage(name="Home", content="# Home", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="admin"),
            WikiPage(name="Setup", content="Setup guide", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="dev"),
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(
                MagicMock(), get_wiki_pages=pages, get_wiki_page=None
            )
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "wiki/?search=setup"))
        assert response.status_code == 200

    def test_wiki_page_renders(self, admin_client, sample_project):
        """A single wiki page renders its (markdown) content."""
        slug = sample_project.slug
        page = WikiPage(name="Home", content="# Welcome\nHello world", last_modified=datetime(2026, 3, 1, tzinfo=UTC), user="admin")
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(
                MagicMock(), get_wiki_page=page, get_wiki_pages=[page]
            )
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "wiki/page/Home"))
        assert response.status_code == 200
        assert "Welcome" in response.content.decode()

    def test_wiki_page_not_found(self, admin_client, sample_project):
        """An unknown page name yields 404."""
        slug = sample_project.slug
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(
                MagicMock(), get_wiki_page=None, get_wiki_pages=[]
            )
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "wiki/page/NonexistentPage"))
        assert response.status_code == 404
| 664 | |
| 665 | |
| 666 | # --------------------------------------------------------------------------- |
| 667 | # Search view (mocked) |
| 668 | # --------------------------------------------------------------------------- |
| 669 | |
| 670 | |
@pytest.mark.django_db
class TestSearchViewMocked:
    """Repository search view with the Fossil reader mocked.

    Uses the module-level _mock_reader_ctx helper for the reader mock.
    """

    def test_search_with_query(self, admin_client, sample_project):
        """A non-empty ?q= runs the reader search and renders results."""
        slug = sample_project.slug
        results = [{"type": "ci", "uuid": "abc", "comment": "found it", "user": "dev"}]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), search=results)
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "search/?q=found"))
        assert response.status_code == 200

    def test_search_empty_query(self, admin_client, sample_project):
        """Without ?q= the page still renders (empty search form)."""
        slug = sample_project.slug
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock())
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "search/"))
        assert response.status_code == 200
| 696 | |
| 697 | |
| 698 | # --------------------------------------------------------------------------- |
| 699 | # Compare checkins view (mocked) |
| 700 | # --------------------------------------------------------------------------- |
| 701 | |
| 702 | |
@pytest.mark.django_db
class TestCompareCheckinsViewMocked:
    """Check-in comparison view with the Fossil reader mocked.

    Uses the module-level _mock_reader_ctx helper for the reader mock.
    """

    def test_compare_no_params(self, admin_client, sample_project):
        """Compare page renders without from/to params (shows empty form)."""
        slug = sample_project.slug
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock())
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "compare/"))
        assert response.status_code == 200

    def test_compare_with_params(self, admin_client, sample_project):
        """Compare page with from/to parameters renders diffs."""
        slug = sample_project.slug
        from_detail = CheckinDetail(
            uuid="aaa111",
            timestamp=datetime(2026, 3, 1, tzinfo=UTC),
            user="dev",
            comment="from commit",
            files_changed=[{"name": "f.txt", "uuid": "u1", "prev_uuid": "", "change_type": "A"}],
        )
        to_detail = CheckinDetail(
            uuid="bbb222",
            timestamp=datetime(2026, 3, 2, tzinfo=UTC),
            user="dev",
            comment="to commit",
            files_changed=[{"name": "f.txt", "uuid": "u2", "prev_uuid": "u1", "change_type": "M"}],
        )
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), get_file_content=b"file content")
            # get_checkin_detail must vary with its argument, so wire a
            # side_effect rather than a fixed return value.
            reader.get_checkin_detail.side_effect = lambda uuid: from_detail if "aaa" in uuid else to_detail
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "compare/?from=aaa111&to=bbb222"))
        assert response.status_code == 200
| 744 | |
| 745 | |
| 746 | # --------------------------------------------------------------------------- |
| 747 | # Timeline RSS feed (mocked) |
| 748 | # --------------------------------------------------------------------------- |
| 749 | |
| 750 | |
@pytest.mark.django_db
class TestTimelineRssViewMocked:
    """Timeline RSS feed with the Fossil reader mocked.

    Uses the module-level _mock_reader_ctx helper for the reader mock.
    """

    def test_rss_feed(self, admin_client, sample_project):
        """Feed responds with RSS content type and includes the commit comment."""
        slug = sample_project.slug
        entries = [_make_timeline_entry(rid=1, comment="rss commit")]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), get_timeline=entries)
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "timeline/rss/"))
        assert response.status_code == 200
        assert response["Content-Type"] == "application/rss+xml"
        content = response.content.decode()
        assert "rss commit" in content
        assert "<rss" in content
| 769 | |
| 770 | |
| 771 | # --------------------------------------------------------------------------- |
| 772 | # Tickets CSV export (mocked) |
| 773 | # --------------------------------------------------------------------------- |
| 774 | |
| 775 | |
@pytest.mark.django_db
class TestTicketsCsvViewMocked:
    """CSV export of tickets with the Fossil reader mocked.

    Uses the module-level _mock_reader_ctx helper for the reader mock.
    """

    def test_csv_export(self, admin_client, sample_project):
        """Export responds with text/csv and includes ticket title and UUID."""
        slug = sample_project.slug
        tickets = [
            TicketEntry(
                uuid="csv-uuid",
                title="Export test",
                status="Open",
                type="Code_Defect",
                created=datetime(2026, 3, 1, tzinfo=UTC),
                owner="testuser",
                priority="High",
                severity="Critical",
            )
        ]
        with patch("fossil.views._get_repo_and_reader") as mock_grr:
            reader = _mock_reader_ctx(MagicMock(), get_tickets=tickets)
            repo = FossilRepository.objects.get(project=sample_project)
            mock_grr.return_value = (sample_project, repo, reader)
            response = admin_client.get(_url(slug, "tickets/export/"))
        assert response.status_code == 200
        assert response["Content-Type"] == "text/csv"
        content = response.content.decode()
        assert "Export test" in content
        assert "csv-uuid" in content
| 805 | |
| 806 | |
| 807 | # --------------------------------------------------------------------------- |
| 808 | # Branch list view (mocked) |
| 809 | # --------------------------------------------------------------------------- |
| 810 | |
| 811 | |
@pytest.mark.django_db
class TestBranchListViewMocked:
    """Branch list page, with branch data supplied by a mocked reader."""

    def test_branch_list_renders(self, admin_client, sample_project):
        """Page renders and shows the branch name."""
        trunk = SimpleNamespace(
            name="trunk", last_user="dev", last_checkin=datetime(2026, 3, 1, tzinfo=UTC), checkin_count=50, last_uuid="abc123"
        )
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_branches.return_value = [trunk]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "branches/"))
        assert response.status_code == 200
        assert "trunk" in response.content.decode()

    def test_branch_list_search(self, admin_client, sample_project):
        """A search query still returns the matching branch in the page."""
        trunk = SimpleNamespace(
            name="trunk", last_user="dev", last_checkin=datetime(2026, 3, 1, tzinfo=UTC), checkin_count=50, last_uuid="abc123"
        )
        feature = SimpleNamespace(
            name="feature-x", last_user="dev", last_checkin=datetime(2026, 3, 1, tzinfo=UTC), checkin_count=5, last_uuid="def456"
        )
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_branches.return_value = [trunk, feature]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "branches/?search=feature"))
        assert response.status_code == 200
        assert "feature-x" in response.content.decode()
| 853 | |
| 854 | |
| 855 | # --------------------------------------------------------------------------- |
| 856 | # Tag list view (mocked) |
| 857 | # --------------------------------------------------------------------------- |
| 858 | |
| 859 | |
@pytest.mark.django_db
class TestTagListViewMocked:
    """Tag list page, with tag data supplied by a mocked reader."""

    def test_tag_list_renders(self, admin_client, sample_project):
        """Page renders and shows the tag name."""
        v1 = SimpleNamespace(name="v1.0", uuid="abc123", user="dev", timestamp=datetime(2026, 3, 1, tzinfo=UTC))
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_tags.return_value = [v1]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "tags/"))
        assert response.status_code == 200
        assert "v1.0" in response.content.decode()

    def test_tag_list_search(self, admin_client, sample_project):
        """A search query still returns the matching tag in the page."""
        v1 = SimpleNamespace(name="v1.0", uuid="abc123", user="dev", timestamp=datetime(2026, 3, 1, tzinfo=UTC))
        beta = SimpleNamespace(name="v2.0-beta", uuid="def456", user="dev", timestamp=datetime(2026, 3, 1, tzinfo=UTC))
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_tags.return_value = [v1, beta]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "tags/?search=beta"))
        assert response.status_code == 200
        assert "v2.0-beta" in response.content.decode()
| 895 | |
| 896 | |
| 897 | # --------------------------------------------------------------------------- |
| 898 | # Stats view (mocked) |
| 899 | # --------------------------------------------------------------------------- |
| 900 | |
| 901 | |
@pytest.mark.django_db
class TestRepoStatsViewMocked:
    """Repository statistics page, with all reader queries mocked."""

    def test_stats_renders(self, admin_client, sample_project):
        """Stats page renders with mocked totals/contributors/activity data."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_repo_statistics.return_value = {"total_artifacts": 100, "checkin_count": 50, "wiki_events": 5, "ticket_events": 10, "forum_events": 2, "total_events": 67}
            rdr.get_top_contributors.return_value = [{"user": "dev", "count": 50}]
            # 52 buckets — presumably one per week; the view only needs iterables.
            rdr.get_commit_activity.return_value = [{"count": c} for c in range(52)]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "stats/"))
        assert response.status_code == 200
        body = response.content.decode()
        assert "Checkins" in body or "50" in body
| 922 | |
| 923 | |
| 924 | # --------------------------------------------------------------------------- |
| 925 | # File history view (mocked) |
| 926 | # --------------------------------------------------------------------------- |
| 927 | |
| 928 | |
@pytest.mark.django_db
class TestFileHistoryViewMocked:
    """File history page, with the history list supplied by a mocked reader."""

    def test_file_history_renders(self, admin_client, sample_project):
        """History page for a path renders successfully."""
        entry = {"uuid": "abc", "timestamp": datetime(2026, 3, 1, tzinfo=UTC), "user": "dev", "comment": "edit file"}
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_file_history.return_value = [entry]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/history/README.md"))
        assert response.status_code == 200
| 945 | |
| 946 | |
| 947 | # --------------------------------------------------------------------------- |
| 948 | # Code browser (mocked) -- tests the _build_file_tree helper indirectly |
| 949 | # --------------------------------------------------------------------------- |
| 950 | |
| 951 | |
@pytest.mark.django_db
class TestCodeBrowserViewMocked:
    """Code browser page; indirectly covers the _build_file_tree helper."""

    def test_code_browser_renders(self, admin_client, sample_project):
        """Browser renders a file tree containing the listed files."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc123"
            rdr.get_files_at_checkin.return_value = [
                _make_file_entry(name="README.md", uuid="f1"),
                _make_file_entry(name="src/main.py", uuid="f2"),
            ]
            rdr.get_metadata.return_value = RepoMetadata(project_name="Test", checkin_count=10)
            rdr.get_timeline.return_value = [_make_timeline_entry(rid=1)]
            rdr.get_file_content.return_value = b"# README\nHello"
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/"))
        assert response.status_code == 200
        assert "README" in response.content.decode()

    def test_code_browser_htmx_partial(self, admin_client, sample_project):
        """An HTMX request (HX-Request header) is also answered with 200."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = [_make_file_entry(name="README.md", uuid="f1")]
            rdr.get_metadata.return_value = RepoMetadata()
            rdr.get_timeline.return_value = []
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/"), HTTP_HX_REQUEST="true")
        assert response.status_code == 200
| 994 | |
| 995 | |
| 996 | # --------------------------------------------------------------------------- |
| 997 | # Code file view (mocked) |
| 998 | # --------------------------------------------------------------------------- |
| 999 | |
| 1000 | |
@pytest.mark.django_db
class TestCodeFileViewMocked:
    """Single-file code view: text, missing, binary and rendered modes."""

    def test_code_file_renders(self, admin_client, sample_project):
        """A text file's content appears in the rendered page."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = [_make_file_entry(name="main.py", uuid="f1")]
            rdr.get_file_content.return_value = b"print('hello')"
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/file/main.py"))
        assert response.status_code == 200
        assert "print" in response.content.decode()

    def test_code_file_not_found(self, admin_client, sample_project):
        """A path absent from the checkin's file list yields 404."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = []
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/file/nonexistent.txt"))
        assert response.status_code == 404

    def test_code_file_binary(self, admin_client, sample_project):
        """Non-UTF-8 content is reported as a binary file rather than shown."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = [_make_file_entry(name="image.png", uuid="f1")]
            # Deliberately invalid UTF-8 to trigger binary detection
            rdr.get_file_content.return_value = b"\x89PNG\r\n\x1a\n\x00\x00"
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/file/image.png"))
        assert response.status_code == 200
        assert "Binary file" in response.content.decode()

    def test_code_file_rendered_mode(self, admin_client, sample_project):
        """Wiki files can be rendered instead of showing source."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = [_make_file_entry(name="page.md", uuid="f1")]
            rdr.get_file_content.return_value = b"# Hello\nWorld"
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/file/page.md?mode=rendered"))
        assert response.status_code == 200
| 1065 | |
| 1066 | |
| 1067 | # --------------------------------------------------------------------------- |
| 1068 | # Code raw download (mocked) |
| 1069 | # --------------------------------------------------------------------------- |
| 1070 | |
| 1071 | |
@pytest.mark.django_db
class TestCodeRawViewMocked:
    """Raw file download endpoint, with file content mocked."""

    def test_raw_download(self, admin_client, sample_project):
        """Download responds 200 with an attachment Content-Disposition."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = [_make_file_entry(name="data.csv", uuid="f1")]
            rdr.get_file_content.return_value = b"col1,col2\na,b"
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/raw/data.csv"))
        assert response.status_code == 200
        assert response["Content-Disposition"] == 'attachment; filename="data.csv"'

    def test_raw_file_not_found(self, admin_client, sample_project):
        """A path absent from the checkin's file list yields 404."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = []
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "code/raw/missing.txt"))
        assert response.status_code == 404
| 1102 | |
| 1103 | |
| 1104 | # --------------------------------------------------------------------------- |
| 1105 | # Code blame (mocked) |
| 1106 | # --------------------------------------------------------------------------- |
| 1107 | |
| 1108 | |
@pytest.mark.django_db
class TestCodeBlameViewMocked:
    """Blame view, which shells out to the fossil CLI (here fully mocked)."""

    def test_blame_renders_with_dates(self, admin_client, sample_project):
        """With a working CLI mock, blame output renders successfully."""
        lines = [
            {"user": "dev", "date": "2026-01-01", "uuid": "abc", "line_num": 1, "text": "line one"},
            {"user": "dev", "date": "2026-03-01", "uuid": "def", "line_num": 2, "text": "line two"},
        ]
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            # Second patch layer: the fossil CLI wrapper itself.
            with patch("fossil.cli.FossilCLI") as cli_cls:
                cli_cls.return_value.is_available.return_value = True
                cli_cls.return_value.blame.return_value = lines
                response = admin_client.get(_url(sample_project.slug, "code/blame/main.py"))
        assert response.status_code == 200

    def test_blame_no_fossil_binary(self, admin_client, sample_project):
        """When the fossil binary is unavailable, the page still returns 200."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            with patch("fossil.cli.FossilCLI") as cli_cls:
                cli_cls.return_value.is_available.return_value = False
                response = admin_client.get(_url(sample_project.slug, "code/blame/main.py"))
        assert response.status_code == 200
| 1143 | |
| 1144 | |
| 1145 | # --------------------------------------------------------------------------- |
| 1146 | # Toggle watch / notifications |
| 1147 | # --------------------------------------------------------------------------- |
| 1148 | |
| 1149 | |
@pytest.mark.django_db
class TestToggleWatch:
    """POSTing to the watch endpoint toggles a ProjectWatch for the user."""

    def test_watch_project(self, admin_client, sample_project, admin_user):
        """First toggle creates a watch row and redirects."""
        from fossil.notifications import ProjectWatch

        resp = admin_client.post(_url(sample_project.slug, "watch/"))
        assert resp.status_code == 302
        created = ProjectWatch.objects.filter(user=admin_user, project=sample_project)
        assert created.exists()

    def test_unwatch_project(self, admin_client, sample_project, admin_user):
        """Toggling an existing watch soft-deletes it (sets deleted_at)."""
        from fossil.notifications import ProjectWatch

        ProjectWatch.objects.create(user=admin_user, project=sample_project, event_filter="all", created_by=admin_user)
        resp = admin_client.post(_url(sample_project.slug, "watch/"))
        assert resp.status_code == 302
        # Should be soft-deleted
        live = ProjectWatch.objects.filter(user=admin_user, project=sample_project, deleted_at__isnull=True)
        assert not live.exists()

    def test_watch_with_event_filter(self, admin_client, sample_project, admin_user):
        """The posted event_filter value is stored on the watch."""
        from fossil.notifications import ProjectWatch

        resp = admin_client.post(_url(sample_project.slug, "watch/"), {"event_filter": "checkins"})
        assert resp.status_code == 302
        assert ProjectWatch.objects.get(user=admin_user, project=sample_project).event_filter == "checkins"

    def test_watch_denied_anon(self, client, sample_project):
        """Anonymous users are redirected (to login) instead of watching."""
        resp = client.post(_url(sample_project.slug, "watch/"))
        assert resp.status_code == 302  # redirect to login
| 1179 | |
| 1180 | |
| 1181 | # --------------------------------------------------------------------------- |
| 1182 | # Checkin detail (mocked) -- the diff computation path |
| 1183 | # --------------------------------------------------------------------------- |
| 1184 | |
| 1185 | |
@pytest.mark.django_db
class TestCheckinDetailViewMocked:
    """Checkin detail page, covering the per-file diff computation path."""

    def test_checkin_detail_with_diffs(self, admin_client, sample_project):
        """A modified file produces a rendered page containing the comment."""
        detail = CheckinDetail(
            uuid="abc123full",
            timestamp=datetime(2026, 3, 1, tzinfo=UTC),
            user="dev",
            comment="fix bug",
            branch="trunk",
            files_changed=[
                {"name": "fix.py", "uuid": "new-uuid", "prev_uuid": "old-uuid", "change_type": "M"},
            ],
        )
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_checkin_detail.return_value = detail
            # Old vs new blob content so the view can compute a diff.
            rdr.get_file_content.side_effect = (
                lambda uuid: b"old line\n" if uuid == "old-uuid" else b"new line\n"
            )
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)

            with patch("fossil.ci.StatusCheck") as sc:
                sc.objects.filter.return_value = []
                response = admin_client.get(_url(sample_project.slug, "checkin/abc123full/"))
        assert response.status_code == 200
        assert "fix bug" in response.content.decode()

    def test_checkin_not_found(self, admin_client, sample_project):
        """An unknown checkin uuid yields 404."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_checkin_detail.return_value = None
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "checkin/nonexistent/"))
        assert response.status_code == 404
| 1233 | |
| 1234 | |
| 1235 | # --------------------------------------------------------------------------- |
| 1236 | # Technote views (mocked) |
| 1237 | # --------------------------------------------------------------------------- |
| 1238 | |
| 1239 | |
@pytest.mark.django_db
class TestTechnoteViewsMocked:
    """Technote list and detail pages, with reader data mocked."""

    def test_technote_list(self, admin_client, sample_project):
        """Technote list page renders with one mocked note."""
        note = SimpleNamespace(uuid="n1", comment="Release notes", user="dev", timestamp=datetime(2026, 3, 1, tzinfo=UTC))
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_technotes.return_value = [note]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "technotes/"))
        assert response.status_code == 200

    def test_technote_detail(self, admin_client, sample_project):
        """Detail page renders for a known technote uuid."""
        note = {
            "uuid": "n1",
            "comment": "Release v1",
            "body": "## Changes\n- Fix",
            "user": "dev",
            "timestamp": datetime(2026, 3, 1, tzinfo=UTC),
        }
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_technote_detail.return_value = note
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "technotes/n1/"))
        assert response.status_code == 200

    def test_technote_detail_not_found(self, admin_client, sample_project):
        """An unknown technote uuid yields 404."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_technote_detail.return_value = None
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "technotes/nonexistent/"))
        assert response.status_code == 404
| 1285 | |
| 1286 | |
| 1287 | # --------------------------------------------------------------------------- |
| 1288 | # Unversioned files list (mocked) |
| 1289 | # --------------------------------------------------------------------------- |
| 1290 | |
| 1291 | |
@pytest.mark.django_db
class TestUnversionedListViewMocked:
    """Unversioned files list page, with file data mocked."""

    def test_unversioned_list(self, admin_client, sample_project):
        """List page renders with one mocked unversioned file."""
        logo = SimpleNamespace(name="logo.png", size=1024, mtime=datetime(2026, 3, 1, tzinfo=UTC), hash="abc")
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_unversioned_files.return_value = [logo]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "files/"))
        assert response.status_code == 200

    def test_unversioned_search(self, admin_client, sample_project):
        """Search query over the file list still renders successfully."""
        logo = SimpleNamespace(name="logo.png", size=1024, mtime=datetime(2026, 3, 1, tzinfo=UTC), hash="abc")
        data = SimpleNamespace(name="data.csv", size=512, mtime=datetime(2026, 3, 1, tzinfo=UTC), hash="def")
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_unversioned_files.return_value = [logo, data]
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "files/?search=logo"))
        assert response.status_code == 200
| 1322 | |
| 1323 | |
| 1324 | # --------------------------------------------------------------------------- |
| 1325 | # Fossil docs views (mocked) |
| 1326 | # --------------------------------------------------------------------------- |
| 1327 | |
| 1328 | |
@pytest.mark.django_db
class TestFossilDocsViewsMocked:
    """Embedded-docs index and page views."""

    def test_docs_index(self, admin_client, sample_project):
        """Docs index renders without any mocking."""
        response = admin_client.get(_url(sample_project.slug, "docs/"))
        assert response.status_code == 200

    def test_doc_page_renders(self, admin_client, sample_project):
        """A doc file found in the checkin renders its content."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = [_make_file_entry(name="www/concepts.wiki", uuid="f1")]
            rdr.get_file_content.return_value = b"<h1>Concepts</h1>\n<p>Text here</p>"
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "docs/www/concepts.wiki"))
        assert response.status_code == 200
        assert "Concepts" in response.content.decode()

    def test_doc_page_not_found(self, admin_client, sample_project):
        """A doc path absent from the checkin's file list yields 404."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_latest_checkin_uuid.return_value = "abc"
            rdr.get_files_at_checkin.return_value = []
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "docs/www/missing.wiki"))
        assert response.status_code == 404
| 1364 | |
| 1365 | |
| 1366 | # --------------------------------------------------------------------------- |
| 1367 | # User activity view (mocked) |
| 1368 | # --------------------------------------------------------------------------- |
| 1369 | |
| 1370 | |
@pytest.mark.django_db
class TestUserActivityViewMocked:
    """Per-user activity page, with activity data mocked."""

    def test_user_activity_renders(self, admin_client, sample_project):
        """Activity page for a username renders successfully."""
        with patch("fossil.views._get_repo_and_reader") as grr:
            rdr = MagicMock()
            rdr.__enter__.return_value = rdr
            rdr.__exit__.return_value = False
            rdr.get_user_activity.return_value = {
                "checkin_count": 25,
                "checkins": [{"uuid": "a", "comment": "fix", "timestamp": datetime(2026, 3, 1, tzinfo=UTC)}],
                "daily_activity": {"2026-03-01": 5},
            }
            repo = FossilRepository.objects.get(project=sample_project)
            grr.return_value = (sample_project, repo, rdr)
            response = admin_client.get(_url(sample_project.slug, "user/dev/"))
        assert response.status_code == 200
| 1389 | |
| 1390 | |
| 1391 | # --------------------------------------------------------------------------- |
| 1392 | # Status badge view |
| 1393 | # --------------------------------------------------------------------------- |
| 1394 | |
| 1395 | |
@pytest.mark.django_db
class TestStatusBadgeView:
    """SVG status badge endpoint, driven by StatusCheck rows in the DB."""

    def test_badge_unknown(self, admin_client, sample_project):
        """With no checks recorded, the badge reads 'unknown'."""
        response = admin_client.get(_url(sample_project.slug, "api/status/abc123/badge.svg"))
        assert response.status_code == 200
        assert response["Content-Type"] == "image/svg+xml"
        assert "unknown" in response.content.decode()

    def test_badge_passing(self, admin_client, sample_project, fossil_repo_obj):
        """A 'success' check renders a 'passing' badge."""
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(repository=fossil_repo_obj, checkin_uuid="pass123", context="ci/test", state="success")
        response = admin_client.get(_url(sample_project.slug, "api/status/pass123/badge.svg"))
        assert response.status_code == 200
        assert "passing" in response.content.decode()

    def test_badge_failing(self, admin_client, sample_project, fossil_repo_obj):
        """A 'failure' check renders a 'failing' badge."""
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(repository=fossil_repo_obj, checkin_uuid="fail123", context="ci/test", state="failure")
        response = admin_client.get(_url(sample_project.slug, "api/status/fail123/badge.svg"))
        assert response.status_code == 200
        assert "failing" in response.content.decode()

    def test_badge_pending(self, admin_client, sample_project, fossil_repo_obj):
        """A 'pending' check renders a 'pending' badge."""
        from fossil.ci import StatusCheck

        StatusCheck.objects.create(repository=fossil_repo_obj, checkin_uuid="pend123", context="ci/test", state="pending")
        response = admin_client.get(_url(sample_project.slug, "api/status/pend123/badge.svg"))
        assert response.status_code == 200
        assert "pending" in response.content.decode()
| 1429 | |
| 1430 | |
| 1431 | # --------------------------------------------------------------------------- |
| 1432 | # Status check API (GET path) |
| 1433 | # --------------------------------------------------------------------------- |
| 1434 | |
| 1435 | |
| 1436 | @pytest.mark.django_db |
| 1437 | class TestStatusCheckApiGet: |
| 1438 | def test_get_status_checks(self, admin_client, sample_project, fossil_repo_obj): |
| 1439 | from fossil.ci import StatusCheck |
| 1440 | |
| 1441 | StatusCheck.objects.create( |
| 1442 | repository=fossil_repo_obj, checkin_uuid="apicheck", context="ci/lint", state="success", description="OK" |
| 1443 | ) |
| 1444 | response = admin_client.get(_url(sample_project.slug, "api/status?checkin=apicheck")) |
| 1445 | assert response.status_code == 200 |
| 1446 | data = response.json() |
| 1447 | assert data["checkin"] == "apicheck" |
| 1448 | assert len(data["checks"]) == 1 |
| 1449 | assert data["checks"][0]["context"] == "ci/lint" |
| 1450 | |
| 1451 | def test_get_status_no_checkin_param(self, admin_client, sample_project, fossil_repo_obj): |
| 1452 | response = admin_client.get(_url(sample_project.slug, "api/status")) |
| 1453 | assert response.status_code == 400 |
| 1454 | |
| 1455 | def test_get_status_denied_private(self, client, sample_project, fossil_repo_obj): |
| 1456 | """Anonymous user denied on private project.""" |
| 1457 | response = client.get(_url(sample_project.slug, "api/status?checkin=abc")) |
| 1458 | assert response.status_code == 403 |
| 1459 | |
| 1460 | |
| 1461 | # --------------------------------------------------------------------------- |
| 1462 | # Fossil xfer endpoint |
| 1463 | # --------------------------------------------------------------------------- |
| 1464 | |
| 1465 | |
| 1466 | @pytest.mark.django_db |
| 1467 | class TestFossilXferView: |
| 1468 | def test_xfer_get_public_project(self, client, sample_project, fossil_repo_obj): |
| 1469 | """GET on xfer endpoint shows clone info for public projects.""" |
| 1470 | sample_project.visibility = "public" |
| 1471 | sample_project.save() |
| 1472 | response = client.get(_url(sample_project.slug, "xfer")) |
| 1473 | assert response.status_code == 200 |
| 1474 | assert "clone" in response.content.decode().lower() |
| 1475 | |
| 1476 | def test_xfer_get_private_denied(self, client, sample_project, fossil_repo_obj): |
| 1477 | """GET on xfer endpoint denied for private projects without auth.""" |
| 1478 | response = client.get(_url(sample_project.slug, "xfer")) |
| 1479 | assert response.status_code == 403 |
| 1480 | |
| 1481 | def test_xfer_method_not_allowed(self, admin_client, sample_project, fossil_repo_obj): |
| 1482 | """PUT/PATCH not supported.""" |
| 1483 | response = admin_client.put(_url(sample_project.slug, "xfer")) |
| 1484 | assert response.status_code == 405 |
| 1485 | |
| 1486 | |
| 1487 | # --------------------------------------------------------------------------- |
| 1488 | # Build file tree helper |
| 1489 | # --------------------------------------------------------------------------- |
| 1490 | |
| 1491 | |
| 1492 | class TestBuildFileTree: |
| 1493 | def test_root_listing(self): |
| 1494 | from fossil.views import _build_file_tree |
| 1495 | |
| 1496 | files = [ |
| 1497 | _make_file_entry(name="README.md", uuid="f1"), |
| 1498 | _make_file_entry(name="src/main.py", uuid="f2"), |
| 1499 | _make_file_ent |
+6
-3
| --- tests/test_webhooks.py | ||
| +++ tests/test_webhooks.py | ||
| @@ -169,11 +169,12 @@ | ||
| 169 | 169 | def test_get_form(self, admin_client, sample_project, fossil_repo_obj): |
| 170 | 170 | response = admin_client.get(f"/projects/{sample_project.slug}/fossil/webhooks/create/") |
| 171 | 171 | assert response.status_code == 200 |
| 172 | 172 | assert "Create Webhook" in response.content.decode() |
| 173 | 173 | |
| 174 | - def test_create_webhook(self, admin_client, sample_project, fossil_repo_obj): | |
| 174 | + @patch("core.url_validation.is_safe_outbound_url", return_value=(True, "")) | |
| 175 | + def test_create_webhook(self, mock_url_check, admin_client, sample_project, fossil_repo_obj): | |
| 175 | 176 | response = admin_client.post( |
| 176 | 177 | f"/projects/{sample_project.slug}/fossil/webhooks/create/", |
| 177 | 178 | {"url": "https://hooks.example.com/test", "secret": "s3cret", "events": ["checkin", "ticket"], "is_active": "on"}, |
| 178 | 179 | ) |
| 179 | 180 | assert response.status_code == 302 |
| @@ -180,11 +181,12 @@ | ||
| 180 | 181 | hook = Webhook.objects.get(url="https://hooks.example.com/test") |
| 181 | 182 | assert hook.secret == "s3cret" |
| 182 | 183 | assert hook.events == "checkin,ticket" |
| 183 | 184 | assert hook.is_active is True |
| 184 | 185 | |
| 185 | - def test_create_webhook_all_events(self, admin_client, sample_project, fossil_repo_obj): | |
| 186 | + @patch("core.url_validation.is_safe_outbound_url", return_value=(True, "")) | |
| 187 | + def test_create_webhook_all_events(self, mock_url_check, admin_client, sample_project, fossil_repo_obj): | |
| 186 | 188 | response = admin_client.post( |
| 187 | 189 | f"/projects/{sample_project.slug}/fossil/webhooks/create/", |
| 188 | 190 | {"url": "https://hooks.example.com/all", "is_active": "on"}, |
| 189 | 191 | ) |
| 190 | 192 | assert response.status_code == 302 |
| @@ -209,11 +211,12 @@ | ||
| 209 | 211 | assert response.status_code == 200 |
| 210 | 212 | content = response.content.decode() |
| 211 | 213 | assert "example.com/webhook" in content |
| 212 | 214 | assert "Update Webhook" in content |
| 213 | 215 | |
| 214 | - def test_edit_webhook(self, admin_client, sample_project, webhook): | |
| 216 | + @patch("core.url_validation.is_safe_outbound_url", return_value=(True, "")) | |
| 217 | + def test_edit_webhook(self, mock_url_check, admin_client, sample_project, webhook): | |
| 215 | 218 | response = admin_client.post( |
| 216 | 219 | f"/projects/{sample_project.slug}/fossil/webhooks/{webhook.pk}/edit/", |
| 217 | 220 | {"url": "https://new-url.example.com/hook", "events": ["wiki"], "is_active": "on"}, |
| 218 | 221 | ) |
| 219 | 222 | assert response.status_code == 302 |
| 220 | 223 |