# test_sparse_patterns.py -- Sparse checkout (full and cone mode) pattern handling
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for dulwich.sparse_patterns."""

import os
import shutil
import tempfile
import time

from dulwich.index import IndexEntry
from dulwich.repo import Repo
from dulwich.sparse_patterns import (
    BlobNotFoundError,
    SparseCheckoutConflictError,
    apply_included_paths,
    compute_included_paths_cone,
    compute_included_paths_full,
    determine_included_paths,
    match_gitignore_patterns,
    parse_sparse_patterns,
)

from . import TestCase
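
# Note: throughout these tests, parse_sparse_patterns() is expected to return
# tuples of the form (pattern, negation, dir_only, anchored), as spelled out
# in the inline comments of the individual test cases below.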


class ParseSparsePatternsTests(TestCase):
    """Test parse_sparse_patterns function."""

    def test_empty_and_comment_lines(self):
        lines = [
            "",
            "# comment here",
            " ",
            "# another comment",
        ]
        parsed = parse_sparse_patterns(lines)
        self.assertEqual(parsed, [])

    def test_simple_patterns(self):
        lines = [
            "*.py",
            "!*.md",
            "/docs/",
            "!/docs/images/",
        ]
        parsed = parse_sparse_patterns(lines)
        self.assertEqual(len(parsed), 4)
        self.assertEqual(parsed[0], ("*.py", False, False, False))  # include *.py
        self.assertEqual(parsed[1], ("*.md", True, False, False))  # exclude *.md
        self.assertEqual(parsed[2], ("docs", False, True, True))  # anchored, dir_only
        self.assertEqual(parsed[3], ("docs/images", True, True, True))

    def test_trailing_slash_dir(self):
        lines = [
            "src/",
        ]
        parsed = parse_sparse_patterns(lines)
        # "src/" => (pattern="src", negation=False, dir_only=True, anchored=False)
        self.assertEqual(parsed, [("src", False, True, False)])

    def test_negation_anchor(self):
        lines = [
            "!/foo.txt",
        ]
        parsed = parse_sparse_patterns(lines)
        # => (pattern="foo.txt", negation=True, dir_only=False, anchored=True)
        self.assertEqual(parsed, [("foo.txt", True, False, True)])


class MatchGitignorePatternsTests(TestCase):
    """Test the match_gitignore_patterns function."""

    def test_no_patterns_returns_excluded(self):
        """If no patterns are provided, by default we treat the path as excluded."""
        self.assertFalse(match_gitignore_patterns("anyfile.py", []))

    def test_last_match_wins(self):
        """Checks that the last pattern to match determines included vs excluded."""
        parsed = parse_sparse_patterns(
            [
                "*.py",  # include
                "!foo.py",  # exclude
            ]
        )
        # "foo.py" matches the first pattern => included,
        # then matches the negation pattern => excluded; the last match wins.
        self.assertFalse(match_gitignore_patterns("foo.py", parsed))

    def test_dir_only(self):
        """A pattern with a trailing slash should only match directories (and paths beneath them)."""
        parsed = parse_sparse_patterns(["docs/"])
        # A file inside "docs/" matches even though path_is_dir=False.
        self.assertTrue(
            match_gitignore_patterns("docs/readme.md", parsed, path_is_dir=False)
        )
        self.assertTrue(match_gitignore_patterns("docs", parsed, path_is_dir=True))
        # A plain file named "docs" does not match the dir-only pattern:
        self.assertFalse(match_gitignore_patterns("docs", parsed, path_is_dir=False))

    def test_anchored(self):
        """Anchored patterns match from the start of the path only."""
        parsed = parse_sparse_patterns(["/foo"])
        self.assertTrue(match_gitignore_patterns("foo", parsed))
        # "some/foo" doesn't match because an anchored pattern must match from the start.
        self.assertFalse(match_gitignore_patterns("some/foo", parsed))

    def test_unanchored_uses_fnmatch(self):
        parsed = parse_sparse_patterns(["foo"])
        self.assertTrue(match_gitignore_patterns("some/foo", parsed))
        self.assertFalse(match_gitignore_patterns("some/bar", parsed))


class ComputeIncludedPathsFullTests(TestCase):
    """Test compute_included_paths_full using a real ephemeral repo index."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    def _add_file_to_index(self, relpath, content=b"test"):
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        # Stage in the index
        self.repo.stage([relpath])

    def test_basic_inclusion_exclusion(self):
        """Given patterns, check correct set of included paths."""
        self._add_file_to_index("foo.py", b"print(1)")
        self._add_file_to_index("bar.md", b"markdown")
        self._add_file_to_index("docs/readme", b"# docs")
        lines = [
            "*.py",  # include all .py
            "!bar.*",  # exclude bar.md
            "docs/",  # include docs dir
        ]
        included = compute_included_paths_full(self.repo, lines)
        self.assertEqual(included, {"foo.py", "docs/readme"})


class ComputeIncludedPathsConeTests(TestCase):
    """Test compute_included_paths_cone with ephemeral repo to see included vs excluded."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    def _add_file_to_index(self, relpath, content=b"test"):
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        self.repo.stage([relpath])

    def test_cone_mode_patterns(self):
        """Simpler pattern handling in cone mode.

        Lines in 'cone' style typically look like:
          - /*      -> include top-level
          - !/*/    -> exclude all subdirs
          - /docs/  -> reinclude 'docs' directory
        """
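        # Expected outcome for these patterns: top-level files stay included,
        # other subdirectories are dropped, and docs/ is re-included.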
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        self._add_file_to_index("lib/code.py", b"stuff")
        lines = [
            "/*",
            "!/*/",
            "/docs/",
        ]
        included = compute_included_paths_cone(self.repo, lines)
        # top-level => includes 'topfile'
        # subdirs => excluded, except docs/
        self.assertEqual(included, {"topfile", "docs/readme.md"})

    def test_no_exclude_subdirs(self):
        """If lines never specify '!/*/', we include everything by default."""
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        self._add_file_to_index("lib/code.py", b"stuff")
        lines = [
            "/*",  # top-level
            "/docs/",  # re-include docs (a no-op here, since subdirs are never excluded)
        ]
        included = compute_included_paths_cone(self.repo, lines)
        # Because exclude_subdirs was never set, everything is included:
        self.assertEqual(
            included,
            {"topfile", "docs/readme.md", "lib/code.py"},
        )


class DetermineIncludedPathsTests(TestCase):
    """Test the top-level determine_included_paths function."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    def _add_file_to_index(self, relpath):
        path = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "wb") as f:
            f.write(b"data")
        self.repo.stage([relpath])

    def test_full_mode(self):
        self._add_file_to_index("foo.py")
        self._add_file_to_index("bar.md")
        lines = ["*.py", "!bar.*"]
        included = determine_included_paths(self.repo, lines, cone=False)
        self.assertEqual(included, {"foo.py"})

    def test_cone_mode(self):
        self._add_file_to_index("topfile")
        self._add_file_to_index("subdir/anotherfile")
        lines = ["/*", "!/*/"]
        included = determine_included_paths(self.repo, lines, cone=True)
        self.assertEqual(included, {"topfile"})


class ApplyIncludedPathsTests(TestCase):
    """Integration tests for apply_included_paths, verifying skip-worktree bits and file removal."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)
        # For testing local_modifications_exist logic, we'll need the normalizer
        # plus some real content in the object store.

    def _commit_blob(self, relpath, content=b"hello"):
        """Create a blob object in object_store, stage an index entry for it."""
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        self.repo.stage([relpath])
        # Actually commit so the object is in the store
        self.repo.do_commit(message=b"Commit " + relpath.encode())
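
    # The tests below exercise apply_included_paths end to end: setting
    # skip-worktree bits in the index, removing excluded files from the
    # working tree, and re-materializing included files from the object store.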

    def test_set_skip_worktree_bits(self):
        """If a path is not in included_paths, skip_worktree bit is set."""
        self._commit_blob("keep.py", b"print('keep')")
        self._commit_blob("exclude.md", b"# exclude")
        included = {"keep.py"}
        apply_included_paths(self.repo, included_paths=included, force=False)

        idx = self.repo.open_index()
        self.assertIn(b"keep.py", idx)
        self.assertFalse(idx[b"keep.py"].skip_worktree)
        self.assertIn(b"exclude.md", idx)
        self.assertTrue(idx[b"exclude.md"].skip_worktree)

        # Also check that the exclude.md file was removed from the working tree
        exclude_path = os.path.join(self.temp_dir, "exclude.md")
        self.assertFalse(os.path.exists(exclude_path))

    def test_conflict_with_local_modifications_no_force(self):
        """If local modifications exist for an excluded path, raise SparseCheckoutConflictError."""
        self._commit_blob("foo.txt", b"original")
        # Modify foo.txt on disk
        with open(os.path.join(self.temp_dir, "foo.txt"), "ab") as f:
            f.write(b" local changes")
        with self.assertRaises(SparseCheckoutConflictError):
            apply_included_paths(self.repo, included_paths=set(), force=False)

    def test_conflict_with_local_modifications_forced_removal(self):
        """With force=True, we remove local modifications and skip_worktree the file."""
        self._commit_blob("foo.txt", b"original")
        with open(os.path.join(self.temp_dir, "foo.txt"), "ab") as f:
            f.write(b" local changes")
        # This time, pass force=True => the file is removed despite local changes
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Check skip-worktree in index
        idx = self.repo.open_index()
        self.assertTrue(idx[b"foo.txt"].skip_worktree)
        # Working tree file removed
        self.assertFalse(os.path.exists(os.path.join(self.temp_dir, "foo.txt")))

    def test_materialize_included_file_if_missing(self):
        """If a path is included but missing from disk, we restore it from the blob in the store."""
        self._commit_blob("restored.txt", b"some content")
        # Manually remove the file from the working tree
        os.remove(os.path.join(self.temp_dir, "restored.txt"))
        apply_included_paths(self.repo, included_paths={"restored.txt"}, force=False)
        # Should have re-created "restored.txt" from the blob
        self.assertTrue(os.path.exists(os.path.join(self.temp_dir, "restored.txt")))
        with open(os.path.join(self.temp_dir, "restored.txt"), "rb") as f:
            self.assertEqual(f.read(), b"some content")

    def test_blob_not_found_raises(self):
        """If the object store is missing the blob for an included path, raise BlobNotFoundError."""
        # We'll create an entry in the index that references a nonexistent sha
        idx = self.repo.open_index()
        fake_sha = b"ab" * 20
        e = IndexEntry(
            ctime=(int(time.time()), 0),  # (seconds, nanoseconds)
            mtime=(int(time.time()), 0),  # (seconds, nanoseconds)
            dev=0,
            ino=0,
            mode=0o100644,  # regular file
            uid=0,
            gid=0,
            size=0,
            sha=fake_sha,  # sha not present in the object store
            flags=0,
            extended_flags=0,
        )
        e.set_skip_worktree(False)
        idx[b"missing_file"] = e
        idx.write()

        with self.assertRaises(BlobNotFoundError):
            apply_included_paths(
                self.repo, included_paths={"missing_file"}, force=False
            )