# test_sparse_patterns.py -- Sparse checkout (full and cone mode) pattern handling
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Tests for dulwich.sparse_patterns."""

import os
import shutil
import tempfile
import time

from dulwich.index import IndexEntry
from dulwich.objects import Blob
from dulwich.repo import Repo
from dulwich.sparse_patterns import (
    BlobNotFoundError,
    SparseCheckoutConflictError,
    apply_included_paths,
    compute_included_paths_cone,
    compute_included_paths_full,
    determine_included_paths,
    match_gitignore_patterns,
    parse_sparse_patterns,
)

from . import TestCase
  40. class ParseSparsePatternsTests(TestCase):
  41. """Test parse_sparse_patterns function."""
  42. def test_empty_and_comment_lines(self):
  43. lines = [
  44. "",
  45. "# comment here",
  46. " ",
  47. "# another comment",
  48. ]
  49. parsed = parse_sparse_patterns(lines)
  50. self.assertEqual(parsed, [])
  51. def test_sparse_pattern_combos(self):
  52. lines = [
  53. "*.py", # Python files anywhere
  54. "!*.md", # markdown files anywhere
  55. "/docs/", # root docs dir
  56. "!/docs/images/", # no root docs/images subdir
  57. "src/", # src dir anywhere
  58. "/*.toml", # root TOML files
  59. "!/*.bak", # no root backup files
  60. "!data/", # no data dirs anywhere
  61. ]
  62. parsed = parse_sparse_patterns(lines)
  63. self.assertEqual(len(parsed), 6)
  64. # Returns a 4-tuple of: (pattern, negation, dir_only, anchored)
  65. self.assertEqual(parsed[0], ("*.py", False, False, False)) # _,_,_
  66. self.assertEqual(parsed[1], ("*.md", True, False, False)) # N,_,_
  67. self.assertEqual(parsed[2], ("docs", False, True, True)) # _,D,A
  68. self.assertEqual(parsed[3], ("docs/images", True, True, True)) # N,D,A
  69. self.assertEqual(parsed[4], [("src", False, True, False)]) # _,D,_
  70. self.assertEqual(parsed[5], [("*.toml", False, False, True)]) # _,_,A
  71. self.assertEqual(parsed[6], [("*.bak", True, False, True)]) # N,_,A
  72. self.assertEqual(parsed[7], [("data", True, True, False)]) # N,D,_
  73. class MatchGitignorePatternsTests(TestCase):
  74. """Test the match_gitignore_patterns function."""
  75. def test_no_patterns_returns_excluded(self):
  76. """If no patterns are provided, by default we treat the path as excluded."""
  77. self.assertFalse(match_gitignore_patterns("anyfile.py", []))
  78. def test_last_match_wins(self):
  79. """Checks that the last pattern to match determines included vs excluded."""
  80. parsed = parse_sparse_patterns(
  81. [
  82. "*.py", # include
  83. "!foo.py", # exclude
  84. ]
  85. )
  86. # "foo.py" matches first pattern => included
  87. # then matches second pattern => excluded
  88. self.assertFalse(match_gitignore_patterns("foo.py", parsed))
  89. def test_dir_only(self):
  90. """A pattern with a trailing slash should only match directories and subdirectories."""
  91. parsed = parse_sparse_patterns(["docs/"])
  92. # Because we set path_is_dir=False, it won't match
  93. self.assertTrue(
  94. match_gitignore_patterns("docs/readme.md", parsed, path_is_dir=False)
  95. )
  96. self.assertTrue(match_gitignore_patterns("docs", parsed, path_is_dir=True))
  97. # Even if the path name is "docs", if it's a file, won't match:
  98. self.assertFalse(match_gitignore_patterns("docs", parsed, path_is_dir=False))
  99. def test_anchored(self):
  100. """Anchored patterns match from the start of the path only."""
  101. parsed = parse_sparse_patterns(["/foo"])
  102. self.assertTrue(match_gitignore_patterns("foo", parsed))
  103. # But "some/foo" doesn't match because anchored requires start
  104. self.assertFalse(match_gitignore_patterns("some/foo", parsed))
  105. def test_unanchored_uses_fnmatch(self):
  106. parsed = parse_sparse_patterns(["foo"])
  107. self.assertTrue(match_gitignore_patterns("some/foo", parsed))
  108. self.assertFalse(match_gitignore_patterns("some/bar", parsed))
  109. def test_anchored_empty_pattern(self):
  110. """Test handling of empty pattern with anchoring (e.g., '/')."""
  111. parsed = parse_sparse_patterns(["/"])
  112. # Check the structure of the parsed empty pattern first
  113. self.assertEqual(parsed, [("", False, False, True)])
  114. # When the pattern is empty with anchoring, it's continued (skipped) in match_gitignore_patterns
  115. # for non-empty paths but for empty string it might match due to empty string comparisons
  116. self.assertFalse(match_gitignore_patterns("foo", parsed))
  117. # An empty string with empty pattern will match (implementation detail)
  118. self.assertTrue(match_gitignore_patterns("", parsed))
  119. def test_anchored_dir_only_exact_match(self):
  120. """Test anchored directory-only patterns with exact matching."""
  121. parsed = parse_sparse_patterns(["/docs/"])
  122. # Test with exact match "docs" and path_is_dir=True
  123. self.assertTrue(match_gitignore_patterns("docs", parsed, path_is_dir=True))
  124. # Test with "docs/" (exact match + trailing slash)
  125. self.assertTrue(match_gitignore_patterns("docs/", parsed, path_is_dir=True))
  126. def test_complex_anchored_patterns(self):
  127. """Test more complex anchored pattern matching."""
  128. parsed = parse_sparse_patterns(["/dir/subdir"])
  129. # Test exact match
  130. self.assertTrue(match_gitignore_patterns("dir/subdir", parsed))
  131. # Test subdirectory path
  132. self.assertTrue(match_gitignore_patterns("dir/subdir/file.txt", parsed))
  133. # Test non-matching path
  134. self.assertFalse(match_gitignore_patterns("otherdir/subdir", parsed))
  135. def test_pattern_matching_edge_cases(self):
  136. """Test various edge cases in pattern matching."""
  137. # Test exact equality with an anchored pattern
  138. parsed = parse_sparse_patterns(["/foo"])
  139. self.assertTrue(match_gitignore_patterns("foo", parsed))
  140. # Test with path_is_dir=True
  141. self.assertTrue(match_gitignore_patterns("foo", parsed, path_is_dir=True))
  142. # Test exact match with pattern with dir_only=True
  143. parsed = parse_sparse_patterns(["/bar/"])
  144. self.assertTrue(match_gitignore_patterns("bar", parsed, path_is_dir=True))
  145. # Test startswith match for anchored pattern
  146. parsed = parse_sparse_patterns(["/prefix"])
  147. self.assertTrue(
  148. match_gitignore_patterns("prefix/subdirectory/file.txt", parsed)
  149. )
  150. class ComputeIncludedPathsFullTests(TestCase):
  151. """Test compute_included_paths_full using a real ephemeral repo index."""
  152. def setUp(self):
  153. super().setUp()
  154. self.temp_dir = tempfile.mkdtemp()
  155. self.addCleanup(shutil.rmtree, self.temp_dir)
  156. self.repo = Repo.init(self.temp_dir)
  157. def _add_file_to_index(self, relpath, content=b"test"):
  158. full = os.path.join(self.temp_dir, relpath)
  159. os.makedirs(os.path.dirname(full), exist_ok=True)
  160. with open(full, "wb") as f:
  161. f.write(content)
  162. # Stage in the index
  163. self.repo.get_worktree().stage([relpath])
  164. def test_basic_inclusion_exclusion(self):
  165. """Given patterns, check correct set of included paths."""
  166. self._add_file_to_index("foo.py", b"print(1)")
  167. self._add_file_to_index("bar.md", b"markdown")
  168. self._add_file_to_index("docs/readme", b"# docs")
  169. lines = [
  170. "*.py", # include all .py
  171. "!bar.*", # exclude bar.md
  172. "docs/", # include docs dir
  173. ]
  174. included = compute_included_paths_full(self.repo.open_index(), lines)
  175. self.assertEqual(included, {"foo.py", "docs/readme"})
  176. def test_full_with_utf8_paths(self):
  177. """Test that UTF-8 encoded paths are handled correctly."""
  178. self._add_file_to_index("unicode/文件.txt", b"unicode content")
  179. self._add_file_to_index("unicode/другой.md", b"more unicode")
  180. # Include all text files
  181. lines = ["*.txt"]
  182. included = compute_included_paths_full(self.repo.open_index(), lines)
  183. self.assertEqual(included, {"unicode/文件.txt"})
  184. class ComputeIncludedPathsConeTests(TestCase):
  185. """Test compute_included_paths_cone with ephemeral repo to see included vs excluded."""
  186. def setUp(self):
  187. super().setUp()
  188. self.temp_dir = tempfile.mkdtemp()
  189. self.addCleanup(shutil.rmtree, self.temp_dir)
  190. self.repo = Repo.init(self.temp_dir)
  191. def _add_file_to_index(self, relpath, content=b"test"):
  192. full = os.path.join(self.temp_dir, relpath)
  193. os.makedirs(os.path.dirname(full), exist_ok=True)
  194. with open(full, "wb") as f:
  195. f.write(content)
  196. self.repo.get_worktree().stage([relpath])
  197. def test_cone_mode_patterns(self):
  198. """Simpler pattern handling in cone mode.
  199. Lines in 'cone' style typically look like:
  200. - /* -> include top-level
  201. - !/*/ -> exclude all subdirs
  202. - /docs/ -> reinclude 'docs' directory
  203. """
  204. self._add_file_to_index("topfile", b"hi")
  205. self._add_file_to_index("docs/readme.md", b"stuff")
  206. self._add_file_to_index("lib/code.py", b"stuff")
  207. lines = [
  208. "/*",
  209. "!/*/",
  210. "/docs/",
  211. ]
  212. included = compute_included_paths_cone(self.repo.open_index(), lines)
  213. # top-level => includes 'topfile'
  214. # subdirs => excluded, except docs/
  215. self.assertEqual(included, {"topfile", "docs/readme.md"})
  216. def test_cone_mode_with_empty_pattern(self):
  217. """Test cone mode with an empty reinclude directory."""
  218. self._add_file_to_index("topfile", b"hi")
  219. self._add_file_to_index("docs/readme.md", b"stuff")
  220. # Include an empty pattern that should be skipped
  221. lines = [
  222. "/*",
  223. "!/*/",
  224. "/", # This empty pattern should be skipped
  225. ]
  226. included = compute_included_paths_cone(self.repo.open_index(), lines)
  227. # Only topfile should be included since the empty pattern is skipped
  228. self.assertEqual(included, {"topfile"})
  229. def test_no_exclude_subdirs(self):
  230. """If lines never specify '!/*/', we include everything by default."""
  231. self._add_file_to_index("topfile", b"hi")
  232. self._add_file_to_index("docs/readme.md", b"stuff")
  233. self._add_file_to_index("lib/code.py", b"stuff")
  234. lines = [
  235. "/*", # top-level
  236. "/docs/", # re-include docs?
  237. ]
  238. included = compute_included_paths_cone(self.repo.open_index(), lines)
  239. # Because exclude_subdirs was never set, everything is included:
  240. self.assertEqual(
  241. included,
  242. {"topfile", "docs/readme.md", "lib/code.py"},
  243. )
  244. def test_only_reinclude_dirs(self):
  245. """Test cone mode when only reinclude directories are specified."""
  246. self._add_file_to_index("topfile", b"hi")
  247. self._add_file_to_index("docs/readme.md", b"stuff")
  248. self._add_file_to_index("lib/code.py", b"stuff")
  249. # Only specify reinclude_dirs, need to explicitly exclude subdirs
  250. lines = ["!/*/", "/docs/"]
  251. included = compute_included_paths_cone(self.repo.open_index(), lines)
  252. # Only docs/* should be included, not topfile or lib/*
  253. self.assertEqual(included, {"docs/readme.md"})
  254. def test_exclude_subdirs_no_toplevel(self):
  255. """Test with exclude_subdirs but without toplevel files."""
  256. self._add_file_to_index("topfile", b"hi")
  257. self._add_file_to_index("docs/readme.md", b"stuff")
  258. self._add_file_to_index("lib/code.py", b"stuff")
  259. # Only exclude subdirs and reinclude docs
  260. lines = ["!/*/", "/docs/"]
  261. included = compute_included_paths_cone(self.repo.open_index(), lines)
  262. # Only docs/* should be included since we didn't include top level
  263. self.assertEqual(included, {"docs/readme.md"})
  264. class DetermineIncludedPathsTests(TestCase):
  265. """Test the top-level determine_included_paths function."""
  266. def setUp(self):
  267. super().setUp()
  268. self.temp_dir = tempfile.mkdtemp()
  269. self.addCleanup(shutil.rmtree, self.temp_dir)
  270. self.repo = Repo.init(self.temp_dir)
  271. def _add_file_to_index(self, relpath):
  272. path = os.path.join(self.temp_dir, relpath)
  273. os.makedirs(os.path.dirname(path), exist_ok=True)
  274. with open(path, "wb") as f:
  275. f.write(b"data")
  276. self.repo.get_worktree().stage([relpath])
  277. def test_full_mode(self):
  278. self._add_file_to_index("foo.py")
  279. self._add_file_to_index("bar.md")
  280. lines = ["*.py", "!bar.*"]
  281. index = self.repo.open_index()
  282. included = determine_included_paths(index, lines, cone=False)
  283. self.assertEqual(included, {"foo.py"})
  284. def test_cone_mode(self):
  285. self._add_file_to_index("topfile")
  286. self._add_file_to_index("subdir/anotherfile")
  287. lines = ["/*", "!/*/"]
  288. index = self.repo.open_index()
  289. included = determine_included_paths(index, lines, cone=True)
  290. self.assertEqual(included, {"topfile"})
class ApplyIncludedPathsTests(TestCase):
    """Integration tests for apply_included_paths, verifying skip-worktree bits and file removal."""

    def setUp(self) -> None:
        super().setUp()
        # Ephemeral repository, removed when the test finishes.
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    # For testing local_modifications_exist logic, we'll need the normalizer
    # plus some real content in the object store.
    def _commit_blob(self, relpath: str, content: bytes = b"hello") -> None:
        """Create a blob object in object_store, stage an index entry for it."""
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        self.repo.get_worktree().stage([relpath])
        # Actually commit so the object is in the store
        self.repo.get_worktree().commit(
            message=b"Commit " + relpath.encode(),
        )

    def test_set_skip_worktree_bits(self) -> None:
        """If a path is not in included_paths, skip_worktree bit is set."""
        self._commit_blob("keep.py", b"print('keep')")
        self._commit_blob("exclude.md", b"# exclude")
        included = {"keep.py"}
        apply_included_paths(self.repo, included_paths=included, force=False)
        idx = self.repo.open_index()
        # Included file: index entry present, skip-worktree bit clear.
        self.assertIn(b"keep.py", idx)
        self.assertFalse(idx[b"keep.py"].skip_worktree)
        # Excluded file: entry kept in the index, skip-worktree bit set.
        self.assertIn(b"exclude.md", idx)
        self.assertTrue(idx[b"exclude.md"].skip_worktree)
        # Also check that the exclude.md file was removed from the working tree
        exclude_path = os.path.join(self.temp_dir, "exclude.md")
        self.assertFalse(os.path.exists(exclude_path))

    def test_conflict_with_local_modifications_no_force(self) -> None:
        """If local modifications exist for an excluded path, raise SparseCheckoutConflictError."""
        self._commit_blob("foo.txt", b"original")
        # Modify foo.txt on disk so it differs from the committed blob
        with open(os.path.join(self.temp_dir, "foo.txt"), "ab") as f:
            f.write(b" local changes")
        with self.assertRaises(SparseCheckoutConflictError):
            apply_included_paths(self.repo, included_paths=set(), force=False)

    def test_conflict_with_local_modifications_forced_removal(self) -> None:
        """With force=True, we remove local modifications and skip_worktree the file."""
        self._commit_blob("foo.txt", b"original")
        with open(os.path.join(self.temp_dir, "foo.txt"), "ab") as f:
            f.write(b" local changes")
        # This time, pass force=True => file is removed despite the edits
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Check skip-worktree in index
        idx = self.repo.open_index()
        self.assertTrue(idx[b"foo.txt"].skip_worktree)
        # Working tree file removed
        self.assertFalse(os.path.exists(os.path.join(self.temp_dir, "foo.txt")))

    def test_materialize_included_file_if_missing(self) -> None:
        """If a path is included but missing from disk, we restore it from the blob in the store."""
        self._commit_blob("restored.txt", b"some content")
        # Manually remove the file from the working tree
        os.remove(os.path.join(self.temp_dir, "restored.txt"))
        apply_included_paths(self.repo, included_paths={"restored.txt"}, force=False)
        # Should have re-created "restored.txt" from the blob
        self.assertTrue(os.path.exists(os.path.join(self.temp_dir, "restored.txt")))
        with open(os.path.join(self.temp_dir, "restored.txt"), "rb") as f:
            self.assertEqual(f.read(), b"some content")

    def test_blob_not_found_raises(self) -> None:
        """If the object store is missing the blob for an included path, raise BlobNotFoundError."""
        # We'll create an entry in the index that references a nonexistent sha
        idx = self.repo.open_index()
        fake_sha = b"ab" * 20  # well-formed 40-char sha, but no such object
        e = IndexEntry(
            ctime=(int(time.time()), 0),  # ctime (s, ns)
            mtime=(int(time.time()), 0),  # mtime (s, ns)
            dev=0,  # dev
            ino=0,  # ino
            mode=0o100644,  # mode (regular file)
            uid=0,  # uid
            gid=0,  # gid
            size=0,  # size
            sha=fake_sha,  # sha
            flags=0,  # flags
            extended_flags=0,
        )
        e.set_skip_worktree(False)
        e.sha = fake_sha
        idx[(b"missing_file")] = e
        idx.write()
        with self.assertRaises(BlobNotFoundError):
            apply_included_paths(
                self.repo, included_paths={"missing_file"}, force=False
            )

    def test_directory_removal(self) -> None:
        """Test handling of directories when removing excluded files."""
        # Create a directory with a file
        dir_path = os.path.join(self.temp_dir, "dir")
        os.makedirs(dir_path, exist_ok=True)
        self._commit_blob("dir/file.txt", b"content")
        # Make sure it exists before we proceed
        self.assertTrue(os.path.exists(os.path.join(dir_path, "file.txt")))
        # Exclude everything
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # The file should be removed, but the directory might remain
        self.assertFalse(os.path.exists(os.path.join(dir_path, "file.txt")))
        # Test when file is actually a directory - should hit the IsADirectoryError case
        another_dir_path = os.path.join(self.temp_dir, "another_dir")
        os.makedirs(another_dir_path, exist_ok=True)
        self._commit_blob("another_dir/subfile.txt", b"content")
        # Create a path with the same name as the file but make it a dir to trigger IsADirectoryError
        subfile_dir_path = os.path.join(another_dir_path, "subfile.txt")
        if os.path.exists(subfile_dir_path):
            # Remove any existing file first
            os.remove(subfile_dir_path)
        os.makedirs(subfile_dir_path, exist_ok=True)
        # Attempt to apply sparse checkout, should trigger IsADirectoryError but not fail
        apply_included_paths(self.repo, included_paths=set(), force=True)

    def test_handling_removed_files(self) -> None:
        """Test that files already removed from disk are handled correctly during exclusion."""
        self._commit_blob("test_file.txt", b"test content")
        # Remove the file manually
        os.remove(os.path.join(self.temp_dir, "test_file.txt"))
        # Should not raise any errors when excluding this file
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Verify skip-worktree bit is set in index
        idx = self.repo.open_index()
        self.assertTrue(idx[b"test_file.txt"].skip_worktree)

    def test_local_modifications_ioerror(self) -> None:
        """Test handling of PermissionError/OSError when checking for local modifications."""
        import sys

        self._commit_blob("special_file.txt", b"content")
        file_path = os.path.join(self.temp_dir, "special_file.txt")
        # On Windows, chmod with 0 doesn't make files unreadable the same way
        # Skip this test on Windows as the permission model is different
        if sys.platform == "win32":
            self.skipTest("File permissions work differently on Windows")
        # Make the file unreadable on Unix-like systems
        os.chmod(file_path, 0)

        # Add a cleanup that checks if file exists first (apply_included_paths
        # may already have removed it by the time cleanup runs)
        def safe_chmod_cleanup() -> None:
            if os.path.exists(file_path):
                try:
                    os.chmod(file_path, 0o644)
                except (FileNotFoundError, PermissionError):
                    pass

        self.addCleanup(safe_chmod_cleanup)
        # Should raise PermissionError with unreadable file and force=False
        with self.assertRaises((PermissionError, OSError)):
            apply_included_paths(self.repo, included_paths=set(), force=False)
        # With force=True, should remove the file anyway
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Verify file is gone and skip-worktree bit is set
        self.assertFalse(os.path.exists(file_path))
        idx = self.repo.open_index()
        self.assertTrue(idx[b"special_file.txt"].skip_worktree)

    def test_checkout_normalization_applied(self) -> None:
        """Test that checkout normalization is applied when materializing files during sparse checkout."""

        # Create a simple filter that converts content to uppercase
        class UppercaseFilter:
            def smudge(self, input_bytes, path=b""):
                return input_bytes.upper()

            def clean(self, input_bytes):
                return input_bytes.lower()

            def cleanup(self):
                pass

            def reuse(self, config, filter_name):
                return False

        # Create .gitattributes file mapping *.txt to the "uppercase" filter
        gitattributes_path = os.path.join(self.temp_dir, ".gitattributes")
        with open(gitattributes_path, "w") as f:
            f.write("*.txt filter=uppercase\n")
        # Add and commit .gitattributes
        self.repo.get_worktree().stage([b".gitattributes"])
        self.repo.get_worktree().commit(
            b"Add gitattributes", committer=b"Test <test@example.com>"
        )
        # Initialize the filter context and register the filter
        _ = self.repo.get_blob_normalizer()
        # Register the filter with the cached filter context
        uppercase_filter = UppercaseFilter()
        self.repo.filter_context.filter_registry.register_driver(
            "uppercase", uppercase_filter
        )
        # Commit a file with lowercase content
        self._commit_blob("test.txt", b"hello world")
        # Remove the file from working tree to force materialization
        os.remove(os.path.join(self.temp_dir, "test.txt"))
        # Apply sparse checkout - this will call get_blob_normalizer() internally
        # which will use the cached filter_context with our registered filter
        apply_included_paths(self.repo, included_paths={"test.txt"}, force=False)
        # Verify file was materialized with uppercase content (checkout normalization applied)
        with open(os.path.join(self.temp_dir, "test.txt"), "rb") as f:
            content = f.read()
        self.assertEqual(content, b"HELLO WORLD")

    def test_checkout_normalization_with_lf_to_crlf(self) -> None:
        """Test that line ending normalization is applied during sparse checkout."""
        # Commit a file with LF line endings
        self._commit_blob("unix_file.txt", b"line1\nline2\nline3\n")
        # Remove the file from working tree
        os.remove(os.path.join(self.temp_dir, "unix_file.txt"))

        # Create a normalizer that converts LF to CRLF on checkout
        class CRLFNormalizer:
            def checkin_normalize(self, data, path):
                # For checkin, return unchanged
                return data

            def checkout_normalize(self, blob, path):
                if isinstance(blob, Blob):
                    # Convert LF to CRLF
                    new_blob = Blob()
                    new_blob.data = blob.data.replace(b"\n", b"\r\n")
                    return new_blob
                return blob

        # Monkey patch the repo to use our normalizer
        original_get_blob_normalizer = self.repo.get_blob_normalizer
        self.repo.get_blob_normalizer = lambda: CRLFNormalizer()
        # Apply sparse checkout
        apply_included_paths(self.repo, included_paths={"unix_file.txt"}, force=False)
        # Verify file was materialized with CRLF line endings
        with open(os.path.join(self.temp_dir, "unix_file.txt"), "rb") as f:
            content = f.read()
        self.assertEqual(content, b"line1\r\nline2\r\nline3\r\n")
        # Restore original method
        # NOTE(review): restoration is skipped if the assertion above fails;
        # addCleanup would be more robust — behavior unchanged here.
        self.repo.get_blob_normalizer = original_get_blob_normalizer

    def test_checkout_normalization_not_applied_without_normalizer(self) -> None:
        """Test that when normalizer returns original blob, no transformation occurs."""
        # Commit a file with specific content
        original_content = b"original content\nwith newlines\n"
        self._commit_blob("no_norm.txt", original_content)
        # Remove the file from working tree
        os.remove(os.path.join(self.temp_dir, "no_norm.txt"))

        # Create a normalizer that returns blob unchanged
        class NoOpNormalizer:
            def checkin_normalize(self, data, path):
                return data

            def checkout_normalize(self, blob, path):
                # Return the blob unchanged
                return blob

        # Monkey patch the repo to use our no-op normalizer
        original_get_blob_normalizer = self.repo.get_blob_normalizer
        self.repo.get_blob_normalizer = lambda: NoOpNormalizer()
        # Apply sparse checkout
        apply_included_paths(self.repo, included_paths={"no_norm.txt"}, force=False)
        # Verify file was materialized with original content (no normalization)
        with open(os.path.join(self.temp_dir, "no_norm.txt"), "rb") as f:
            content = f.read()
        self.assertEqual(content, original_content)
        # Restore original method
        self.repo.get_blob_normalizer = original_get_blob_normalizer