# test_sparse_patterns.py -- Sparse checkout (full and cone mode) pattern handling
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for dulwich.sparse_patterns."""

import os
import shutil
import sys
import tempfile
import time

from dulwich.index import IndexEntry
from dulwich.objects import Blob
from dulwich.repo import Repo
from dulwich.sparse_patterns import (
    BlobNotFoundError,
    SparseCheckoutConflictError,
    apply_included_paths,
    compute_included_paths_cone,
    compute_included_paths_full,
    determine_included_paths,
    match_gitignore_patterns,
    parse_sparse_patterns,
)

from . import TestCase


class ParseSparsePatternsTests(TestCase):
    """Test parse_sparse_patterns function."""

    def test_empty_and_comment_lines(self):
        lines = [
            "",
            "# comment here",
            " ",
            "# another comment",
        ]
        parsed = parse_sparse_patterns(lines)
        self.assertEqual(parsed, [])

    def test_simple_patterns(self):
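        """Each pattern parses to a (pattern, negation, dir_only, anchored) tuple."""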
        lines = [
            "*.py",
            "!*.md",
            "/docs/",
            "!/docs/images/",
        ]
        parsed = parse_sparse_patterns(lines)
        self.assertEqual(len(parsed), 4)
        self.assertEqual(parsed[0], ("*.py", False, False, False))  # include *.py
        self.assertEqual(parsed[1], ("*.md", True, False, False))  # exclude *.md
        self.assertEqual(parsed[2], ("docs", False, True, True))  # anchored, dir_only
        self.assertEqual(parsed[3], ("docs/images", True, True, True))

    def test_trailing_slash_dir(self):
        lines = [
            "src/",
        ]
        parsed = parse_sparse_patterns(lines)
        # "src/" => (pattern="src", negation=False, dir_only=True, anchored=False)
        self.assertEqual(parsed, [("src", False, True, False)])

    def test_negation_anchor(self):
        lines = [
            "!/foo.txt",
        ]
        parsed = parse_sparse_patterns(lines)
        # => (pattern="foo.txt", negation=True, dir_only=False, anchored=True)
        self.assertEqual(parsed, [("foo.txt", True, False, True)])


class MatchGitignorePatternsTests(TestCase):
    """Test the match_gitignore_patterns function."""

    def test_no_patterns_returns_excluded(self):
        """If no patterns are provided, by default we treat the path as excluded."""
        self.assertFalse(match_gitignore_patterns("anyfile.py", []))

    def test_last_match_wins(self):
        """Checks that the last pattern to match determines included vs excluded."""
        parsed = parse_sparse_patterns(
            [
                "*.py",  # include
                "!foo.py",  # exclude
            ]
        )
        # "foo.py" matches first pattern => included
        # then matches second pattern => excluded
        self.assertFalse(match_gitignore_patterns("foo.py", parsed))

    def test_dir_only(self):
        """A pattern with a trailing slash should match directories and any paths beneath them."""
        parsed = parse_sparse_patterns(["docs/"])
        # A file underneath the docs directory still matches the dir pattern
        self.assertTrue(
            match_gitignore_patterns("docs/readme.md", parsed, path_is_dir=False)
        )
        self.assertTrue(match_gitignore_patterns("docs", parsed, path_is_dir=True))
        # Even if the path name is "docs", if it's a file, it won't match:
        self.assertFalse(match_gitignore_patterns("docs", parsed, path_is_dir=False))

    def test_anchored(self):
        """Anchored patterns match from the start of the path only."""
        parsed = parse_sparse_patterns(["/foo"])
        self.assertTrue(match_gitignore_patterns("foo", parsed))
        # But "some/foo" doesn't match because anchoring requires a match at the start
        self.assertFalse(match_gitignore_patterns("some/foo", parsed))

    def test_unanchored_uses_fnmatch(self):
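        """Unanchored patterns match path components anywhere via fnmatch."""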
        parsed = parse_sparse_patterns(["foo"])
        self.assertTrue(match_gitignore_patterns("some/foo", parsed))
        self.assertFalse(match_gitignore_patterns("some/bar", parsed))

    def test_anchored_empty_pattern(self):
        """Test handling of an empty pattern with anchoring (e.g., '/')."""
        parsed = parse_sparse_patterns(["/"])
        # Check the structure of the parsed empty pattern first
        self.assertEqual(parsed, [("", False, False, True)])
        # An empty anchored pattern is skipped in match_gitignore_patterns for
        # non-empty paths, so "foo" stays excluded; an empty path, however,
        # compares equal to the empty pattern and matches (implementation detail)
        self.assertFalse(match_gitignore_patterns("foo", parsed))
        self.assertTrue(match_gitignore_patterns("", parsed))

    def test_anchored_dir_only_exact_match(self):
        """Test anchored directory-only patterns with exact matching."""
        parsed = parse_sparse_patterns(["/docs/"])
        # Test with exact match "docs" and path_is_dir=True
        self.assertTrue(match_gitignore_patterns("docs", parsed, path_is_dir=True))
        # Test with "docs/" (exact match + trailing slash)
        self.assertTrue(match_gitignore_patterns("docs/", parsed, path_is_dir=True))

    def test_complex_anchored_patterns(self):
        """Test more complex anchored pattern matching."""
        parsed = parse_sparse_patterns(["/dir/subdir"])
        # Test exact match
        self.assertTrue(match_gitignore_patterns("dir/subdir", parsed))
        # Test subdirectory path
        self.assertTrue(match_gitignore_patterns("dir/subdir/file.txt", parsed))
        # Test non-matching path
        self.assertFalse(match_gitignore_patterns("otherdir/subdir", parsed))

    def test_pattern_matching_edge_cases(self):
        """Test various edge cases in pattern matching."""
        # Test exact equality with an anchored pattern
        parsed = parse_sparse_patterns(["/foo"])
        self.assertTrue(match_gitignore_patterns("foo", parsed))
        # Test with path_is_dir=True
        self.assertTrue(match_gitignore_patterns("foo", parsed, path_is_dir=True))
        # Test exact match with pattern with dir_only=True
        parsed = parse_sparse_patterns(["/bar/"])
        self.assertTrue(match_gitignore_patterns("bar", parsed, path_is_dir=True))
        # Test startswith match for anchored pattern
        parsed = parse_sparse_patterns(["/prefix"])
        self.assertTrue(
            match_gitignore_patterns("prefix/subdirectory/file.txt", parsed)
        )


class ComputeIncludedPathsFullTests(TestCase):
    """Test compute_included_paths_full using a real ephemeral repo index."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    def _add_file_to_index(self, relpath, content=b"test"):
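        """Write *content* to *relpath* inside the temp dir and stage it in the index."""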
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        # Stage in the index
        self.repo.get_worktree().stage([relpath])

    def test_basic_inclusion_exclusion(self):
        """Given patterns, check correct set of included paths."""
        self._add_file_to_index("foo.py", b"print(1)")
        self._add_file_to_index("bar.md", b"markdown")
        self._add_file_to_index("docs/readme", b"# docs")
        lines = [
            "*.py",  # include all .py
            "!bar.*",  # exclude bar.md
            "docs/",  # include docs dir
        ]
        included = compute_included_paths_full(self.repo.open_index(), lines)
        self.assertEqual(included, {"foo.py", "docs/readme"})

    def test_full_with_utf8_paths(self):
        """Test that UTF-8 encoded paths are handled correctly."""
        self._add_file_to_index("unicode/文件.txt", b"unicode content")
        self._add_file_to_index("unicode/другой.md", b"more unicode")
        # Include all text files
        lines = ["*.txt"]
        included = compute_included_paths_full(self.repo.open_index(), lines)
        self.assertEqual(included, {"unicode/文件.txt"})


class ComputeIncludedPathsConeTests(TestCase):
    """Test compute_included_paths_cone with an ephemeral repo to see included vs excluded."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    def _add_file_to_index(self, relpath, content=b"test"):
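        """Write *content* to *relpath* inside the temp dir and stage it in the index."""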
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        self.repo.get_worktree().stage([relpath])

    def test_cone_mode_patterns(self):
        """Simpler pattern handling in cone mode.

        Lines in 'cone' style typically look like:
          - /*      -> include top-level
          - !/*/    -> exclude all subdirs
          - /docs/  -> reinclude 'docs' directory
        """
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        self._add_file_to_index("lib/code.py", b"stuff")
        lines = [
            "/*",
            "!/*/",
            "/docs/",
        ]
        included = compute_included_paths_cone(self.repo.open_index(), lines)
        # top-level => includes 'topfile'
        # subdirs => excluded, except docs/
        self.assertEqual(included, {"topfile", "docs/readme.md"})

    def test_cone_mode_with_empty_pattern(self):
        """Test cone mode with an empty reinclude directory."""
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        # Include an empty pattern that should be skipped
        lines = [
            "/*",
            "!/*/",
            "/",  # This empty pattern should be skipped
        ]
        included = compute_included_paths_cone(self.repo.open_index(), lines)
        # Only topfile should be included since the empty pattern is skipped
        self.assertEqual(included, {"topfile"})

    def test_no_exclude_subdirs(self):
        """If lines never specify '!/*/', we include everything by default."""
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        self._add_file_to_index("lib/code.py", b"stuff")
        lines = [
            "/*",  # top-level
            "/docs/",  # re-include docs (redundant when subdirs aren't excluded)
        ]
        included = compute_included_paths_cone(self.repo.open_index(), lines)
        # Because exclude_subdirs was never set, everything is included:
        self.assertEqual(
            included,
            {"topfile", "docs/readme.md", "lib/code.py"},
        )

    def test_only_reinclude_dirs(self):
        """Test cone mode when only reinclude directories are specified."""
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        self._add_file_to_index("lib/code.py", b"stuff")
        # Only specify reinclude dirs; subdirs must be excluded explicitly
        lines = ["!/*/", "/docs/"]
        included = compute_included_paths_cone(self.repo.open_index(), lines)
        # Only docs/* should be included, not topfile or lib/*
        self.assertEqual(included, {"docs/readme.md"})

    def test_exclude_subdirs_no_toplevel(self):
        """Test with exclude_subdirs but without including top-level files."""
        self._add_file_to_index("topfile", b"hi")
        self._add_file_to_index("docs/readme.md", b"stuff")
        self._add_file_to_index("lib/code.py", b"stuff")
        # Only exclude subdirs and reinclude docs
        lines = ["!/*/", "/docs/"]
        included = compute_included_paths_cone(self.repo.open_index(), lines)
        # Only docs/* should be included since we didn't include the top level
        self.assertEqual(included, {"docs/readme.md"})


class DetermineIncludedPathsTests(TestCase):
    """Test the top-level determine_included_paths function."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)

    def _add_file_to_index(self, relpath):
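        """Write placeholder data to *relpath* and stage it in the index."""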
        path = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "wb") as f:
            f.write(b"data")
        self.repo.get_worktree().stage([relpath])

    def test_full_mode(self):
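        """With cone=False, full gitignore-style pattern matching is used."""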
        self._add_file_to_index("foo.py")
        self._add_file_to_index("bar.md")
        lines = ["*.py", "!bar.*"]
        index = self.repo.open_index()
        included = determine_included_paths(index, lines, cone=False)
        self.assertEqual(included, {"foo.py"})

    def test_cone_mode(self):
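        """With cone=True, the simpler cone-mode matching is used."""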
        self._add_file_to_index("topfile")
        self._add_file_to_index("subdir/anotherfile")
        lines = ["/*", "!/*/"]
        index = self.repo.open_index()
        included = determine_included_paths(index, lines, cone=True)
        self.assertEqual(included, {"topfile"})


class ApplyIncludedPathsTests(TestCase):
    """Integration tests for apply_included_paths, verifying skip-worktree bits and file removal."""

    def setUp(self):
        super().setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)
        self.repo = Repo.init(self.temp_dir)
        # For testing local_modifications_exist logic, we'll need the normalizer
        # plus some real content in the object store.

    def _commit_blob(self, relpath, content=b"hello"):
        """Create a blob object in object_store, stage an index entry for it."""
        full = os.path.join(self.temp_dir, relpath)
        os.makedirs(os.path.dirname(full), exist_ok=True)
        with open(full, "wb") as f:
            f.write(content)
        self.repo.get_worktree().stage([relpath])
        # Actually commit so the object is in the store
        self.repo.get_worktree().commit(
            message=b"Commit " + relpath.encode(),
        )

    def test_set_skip_worktree_bits(self):
        """If a path is not in included_paths, skip_worktree bit is set."""
        self._commit_blob("keep.py", b"print('keep')")
        self._commit_blob("exclude.md", b"# exclude")
        included = {"keep.py"}
        apply_included_paths(self.repo, included_paths=included, force=False)
        idx = self.repo.open_index()
        self.assertIn(b"keep.py", idx)
        self.assertFalse(idx[b"keep.py"].skip_worktree)
        self.assertIn(b"exclude.md", idx)
        self.assertTrue(idx[b"exclude.md"].skip_worktree)
        # Also check that the exclude.md file was removed from the working tree
        exclude_path = os.path.join(self.temp_dir, "exclude.md")
        self.assertFalse(os.path.exists(exclude_path))

    def test_conflict_with_local_modifications_no_force(self):
        """If local modifications exist for an excluded path, raise SparseCheckoutConflictError."""
        self._commit_blob("foo.txt", b"original")
        # Modify foo.txt on disk
        with open(os.path.join(self.temp_dir, "foo.txt"), "ab") as f:
            f.write(b" local changes")
        with self.assertRaises(SparseCheckoutConflictError):
            apply_included_paths(self.repo, included_paths=set(), force=False)

    def test_conflict_with_local_modifications_forced_removal(self):
        """With force=True, we remove local modifications and skip_worktree the file."""
        self._commit_blob("foo.txt", b"original")
        with open(os.path.join(self.temp_dir, "foo.txt"), "ab") as f:
            f.write(b" local changes")
        # This time, pass force=True => file is removed
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Check skip-worktree in index
        idx = self.repo.open_index()
        self.assertTrue(idx[b"foo.txt"].skip_worktree)
        # Working tree file removed
        self.assertFalse(os.path.exists(os.path.join(self.temp_dir, "foo.txt")))

    def test_materialize_included_file_if_missing(self):
        """If a path is included but missing from disk, we restore it from the blob in the store."""
        self._commit_blob("restored.txt", b"some content")
        # Manually remove the file from the working tree
        os.remove(os.path.join(self.temp_dir, "restored.txt"))
        apply_included_paths(self.repo, included_paths={"restored.txt"}, force=False)
        # Should have re-created "restored.txt" from the blob
        self.assertTrue(os.path.exists(os.path.join(self.temp_dir, "restored.txt")))
        with open(os.path.join(self.temp_dir, "restored.txt"), "rb") as f:
            self.assertEqual(f.read(), b"some content")

    def test_blob_not_found_raises(self):
        """If the object store is missing the blob for an included path, raise BlobNotFoundError."""
        # We'll create an entry in the index that references a nonexistent sha
        idx = self.repo.open_index()
        fake_sha = b"ab" * 20
        e = IndexEntry(
            ctime=(int(time.time()), 0),  # (seconds, nanoseconds)
            mtime=(int(time.time()), 0),  # (seconds, nanoseconds)
            dev=0,
            ino=0,
            mode=0o100644,
            uid=0,
            gid=0,
            size=0,
            sha=fake_sha,
            flags=0,
            extended_flags=0,
        )
        e.set_skip_worktree(False)
        idx[b"missing_file"] = e
        idx.write()
        with self.assertRaises(BlobNotFoundError):
            apply_included_paths(
                self.repo, included_paths={"missing_file"}, force=False
            )

    def test_directory_removal(self):
        """Test handling of directories when removing excluded files."""
        # Create a directory with a file
        dir_path = os.path.join(self.temp_dir, "dir")
        os.makedirs(dir_path, exist_ok=True)
        self._commit_blob("dir/file.txt", b"content")
        # Make sure it exists before we proceed
        self.assertTrue(os.path.exists(os.path.join(dir_path, "file.txt")))
        # Exclude everything
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # The file should be removed, but the directory might remain
        self.assertFalse(os.path.exists(os.path.join(dir_path, "file.txt")))
        # Test when file is actually a directory - should hit the IsADirectoryError case
        another_dir_path = os.path.join(self.temp_dir, "another_dir")
        os.makedirs(another_dir_path, exist_ok=True)
        self._commit_blob("another_dir/subfile.txt", b"content")
        # Create a path with the same name as the file but make it a dir to trigger IsADirectoryError
        subfile_dir_path = os.path.join(another_dir_path, "subfile.txt")
        if os.path.exists(subfile_dir_path):
            # Remove any existing file first
            os.remove(subfile_dir_path)
        os.makedirs(subfile_dir_path, exist_ok=True)
        # Attempt to apply sparse checkout, should trigger IsADirectoryError but not fail
        apply_included_paths(self.repo, included_paths=set(), force=True)

    def test_handling_removed_files(self):
        """Test that files already removed from disk are handled correctly during exclusion."""
        self._commit_blob("test_file.txt", b"test content")
        # Remove the file manually
        os.remove(os.path.join(self.temp_dir, "test_file.txt"))
        # Should not raise any errors when excluding this file
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Verify skip-worktree bit is set in index
        idx = self.repo.open_index()
        self.assertTrue(idx[b"test_file.txt"].skip_worktree)

    def test_local_modifications_ioerror(self):
        """Test handling of PermissionError/OSError when checking for local modifications."""
        self._commit_blob("special_file.txt", b"content")
        file_path = os.path.join(self.temp_dir, "special_file.txt")
        # On Windows, chmod with 0 doesn't make files unreadable the same way,
        # so skip this test there since the permission model is different
        if sys.platform == "win32":
            self.skipTest("File permissions work differently on Windows")
        # Make the file unreadable on Unix-like systems
        os.chmod(file_path, 0)

        # Add a cleanup that checks whether the file still exists first
        def safe_chmod_cleanup():
            if os.path.exists(file_path):
                try:
                    os.chmod(file_path, 0o644)
                except (FileNotFoundError, PermissionError):
                    pass

        self.addCleanup(safe_chmod_cleanup)
        # Should raise PermissionError/OSError with an unreadable file and force=False
        with self.assertRaises((PermissionError, OSError)):
            apply_included_paths(self.repo, included_paths=set(), force=False)
        # With force=True, should remove the file anyway
        apply_included_paths(self.repo, included_paths=set(), force=True)
        # Verify the file is gone and the skip-worktree bit is set
        self.assertFalse(os.path.exists(file_path))
        idx = self.repo.open_index()
        self.assertTrue(idx[b"special_file.txt"].skip_worktree)

    def test_checkout_normalization_applied(self):
        """Test that checkout normalization is applied when materializing files during sparse checkout."""

        # Create a simple filter that converts content to uppercase
        class UppercaseFilter:
            def smudge(self, input_bytes, path=b""):
                return input_bytes.upper()

            def clean(self, input_bytes):
                return input_bytes.lower()

            def cleanup(self):
                pass

            def reuse(self, config, filter_name):
                return False

        # Create .gitattributes file
        gitattributes_path = os.path.join(self.temp_dir, ".gitattributes")
        with open(gitattributes_path, "w") as f:
            f.write("*.txt filter=uppercase\n")
        # Add and commit .gitattributes
        self.repo.get_worktree().stage([b".gitattributes"])
        self.repo.get_worktree().commit(
            b"Add gitattributes", committer=b"Test <test@example.com>"
        )
        # Initialize the filter context and register the filter
        _ = self.repo.get_blob_normalizer()
        # Register the filter with the cached filter context
        uppercase_filter = UppercaseFilter()
        self.repo.filter_context.filter_registry.register_driver(
            "uppercase", uppercase_filter
        )
        # Commit a file with lowercase content
        self._commit_blob("test.txt", b"hello world")
        # Remove the file from the working tree to force materialization
        os.remove(os.path.join(self.temp_dir, "test.txt"))
        # Apply sparse checkout - this will call get_blob_normalizer() internally,
        # which will use the cached filter_context with our registered filter
        apply_included_paths(self.repo, included_paths={"test.txt"}, force=False)
        # Verify file was materialized with uppercase content (checkout normalization applied)
        with open(os.path.join(self.temp_dir, "test.txt"), "rb") as f:
            content = f.read()
        self.assertEqual(content, b"HELLO WORLD")

    def test_checkout_normalization_with_lf_to_crlf(self):
        """Test that line ending normalization is applied during sparse checkout."""
        # Commit a file with LF line endings
        self._commit_blob("unix_file.txt", b"line1\nline2\nline3\n")
        # Remove the file from the working tree
        os.remove(os.path.join(self.temp_dir, "unix_file.txt"))

        # Create a normalizer that converts LF to CRLF on checkout
        class CRLFNormalizer:
            def checkin_normalize(self, data, path):
                # For checkin, return unchanged
                return data

            def checkout_normalize(self, blob, path):
                if isinstance(blob, Blob):
                    # Convert LF to CRLF
                    new_blob = Blob()
                    new_blob.data = blob.data.replace(b"\n", b"\r\n")
                    return new_blob
                return blob

        # Monkey-patch the repo to use our normalizer
        original_get_blob_normalizer = self.repo.get_blob_normalizer
        self.repo.get_blob_normalizer = lambda: CRLFNormalizer()
        # Apply sparse checkout
        apply_included_paths(self.repo, included_paths={"unix_file.txt"}, force=False)
        # Verify file was materialized with CRLF line endings
        with open(os.path.join(self.temp_dir, "unix_file.txt"), "rb") as f:
            content = f.read()
        self.assertEqual(content, b"line1\r\nline2\r\nline3\r\n")
        # Restore original method
        self.repo.get_blob_normalizer = original_get_blob_normalizer

    def test_checkout_normalization_not_applied_without_normalizer(self):
        """Test that when the normalizer returns the original blob, no transformation occurs."""
        # Commit a file with specific content
        original_content = b"original content\nwith newlines\n"
        self._commit_blob("no_norm.txt", original_content)
        # Remove the file from the working tree
        os.remove(os.path.join(self.temp_dir, "no_norm.txt"))

        # Create a normalizer that returns the blob unchanged
        class NoOpNormalizer:
            def checkin_normalize(self, data, path):
                return data

            def checkout_normalize(self, blob, path):
                # Return the blob unchanged
                return blob

        # Monkey-patch the repo to use our no-op normalizer
        original_get_blob_normalizer = self.repo.get_blob_normalizer
        self.repo.get_blob_normalizer = lambda: NoOpNormalizer()
        # Apply sparse checkout
        apply_included_paths(self.repo, included_paths={"no_norm.txt"}, force=False)
        # Verify file was materialized with original content (no normalization)
        with open(os.path.join(self.temp_dir, "no_norm.txt"), "rb") as f:
            content = f.read()
        self.assertEqual(content, original_content)
        # Restore original method
        self.repo.get_blob_normalizer = original_get_blob_normalizer