# test_index.py -- Tests for the git index
# Copyright (C) 2008-2009 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for the index."""

import os
import shutil
import stat
import struct
import sys
import tempfile
from io import BytesIO

from dulwich.index import (
    Index,
    IndexEntry,
    SerializedIndexEntry,
    _compress_path,
    _decode_varint,
    _decompress_path,
    _encode_varint,
    _fs_to_tree_path,
    _tree_to_fs_path,
    build_index_from_tree,
    cleanup_mode,
    commit_tree,
    get_unstaged_changes,
    index_entry_from_directory,
    index_entry_from_path,
    index_entry_from_stat,
    iter_fresh_entries,
    read_index,
    read_index_dict,
    update_working_tree,
    validate_path_element_default,
    validate_path_element_hfs,
    validate_path_element_ntfs,
    write_cache_time,
    write_index,
    write_index_dict,
)
from dulwich.object_store import MemoryObjectStore
from dulwich.objects import S_IFGITLINK, Blob, Commit, Tree
from dulwich.repo import Repo

from . import TestCase, skipIf


def can_symlink() -> bool:
    """Return whether the running process can create symlinks."""
    if sys.platform != "win32":
        # Platforms other than Windows should allow symlinks without issues.
        return True

    test_source = tempfile.mkdtemp()
    test_target = test_source + "can_symlink"
    try:
        os.symlink(test_source, test_target)
    except (NotImplementedError, OSError):
        return False
    return True
class IndexTestCase(TestCase):
    datadir = os.path.join(os.path.dirname(__file__), "../testdata/indexes")

    def get_simple_index(self, name):
        return Index(os.path.join(self.datadir, name))


class SimpleIndexTestCase(IndexTestCase):
    def test_len(self) -> None:
        self.assertEqual(1, len(self.get_simple_index("index")))

    def test_iter(self) -> None:
        self.assertEqual([b"bla"], list(self.get_simple_index("index")))

    def test_iter_skip_hash(self) -> None:
        self.assertEqual([b"bla"], list(self.get_simple_index("index_skip_hash")))

    def test_iterobjects(self) -> None:
        self.assertEqual(
            [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)],
            list(self.get_simple_index("index").iterobjects()),
        )

    def test_getitem(self) -> None:
        self.assertEqual(
            IndexEntry(
                (1230680220, 0),
                (1230680220, 0),
                2050,
                3761020,
                33188,
                1000,
                1000,
                0,
                b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
                0,
                0,
            ),
            self.get_simple_index("index")[b"bla"],
        )

    def test_empty(self) -> None:
        i = self.get_simple_index("notanindex")
        self.assertEqual(0, len(i))
        self.assertFalse(os.path.exists(i._filename))

    def test_against_empty_tree(self) -> None:
        i = self.get_simple_index("index")
        changes = list(i.changes_from_tree(MemoryObjectStore(), None))
        self.assertEqual(1, len(changes))
        (oldname, newname), (oldmode, newmode), (oldsha, newsha) = changes[0]
        self.assertEqual(b"bla", newname)
        self.assertEqual(b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", newsha)

    def test_index_pathlib(self) -> None:
        import tempfile
        from pathlib import Path

        # Create a temporary index file
        with tempfile.NamedTemporaryFile(suffix=".index", delete=False) as f:
            temp_path = f.name
        self.addCleanup(os.unlink, temp_path)

        # Test creating Index with pathlib.Path
        path_obj = Path(temp_path)
        index = Index(path_obj, read=False)
        self.assertEqual(str(path_obj), index.path)

        # Add an entry and write
        index[b"test"] = IndexEntry(
            ctime=(0, 0),
            mtime=(0, 0),
            dev=0,
            ino=0,
            mode=33188,
            uid=0,
            gid=0,
            size=0,
            sha=b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
        )
        index.write()

        # Read it back with pathlib.Path
        index2 = Index(path_obj)
        self.assertIn(b"test", index2)
class SimpleIndexWriterTestCase(IndexTestCase):
    def setUp(self) -> None:
        IndexTestCase.setUp(self)
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self) -> None:
        IndexTestCase.tearDown(self)
        shutil.rmtree(self.tempdir)

    def test_simple_write(self) -> None:
        entries = [
            (
                SerializedIndexEntry(
                    b"barbla",
                    (1230680220, 0),
                    (1230680220, 0),
                    2050,
                    3761020,
                    33188,
                    1000,
                    1000,
                    0,
                    b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
                    0,
                    0,
                )
            )
        ]
        filename = os.path.join(self.tempdir, "test-simple-write-index")
        with open(filename, "wb+") as x:
            write_index(x, entries)
        with open(filename, "rb") as x:
            self.assertEqual(entries, list(read_index(x)))


class ReadIndexDictTests(IndexTestCase):
    def setUp(self) -> None:
        IndexTestCase.setUp(self)
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self) -> None:
        IndexTestCase.tearDown(self)
        shutil.rmtree(self.tempdir)

    def test_simple_write(self) -> None:
        entries = {
            b"barbla": IndexEntry(
                (1230680220, 0),
                (1230680220, 0),
                2050,
                3761020,
                33188,
                1000,
                1000,
                0,
                b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
                0,
                0,
            )
        }
        filename = os.path.join(self.tempdir, "test-simple-write-index")
        with open(filename, "wb+") as x:
            write_index_dict(x, entries)
        with open(filename, "rb") as x:
            self.assertEqual(entries, read_index_dict(x))
class CommitTreeTests(TestCase):
    def setUp(self) -> None:
        super().setUp()
        self.store = MemoryObjectStore()

    def test_single_blob(self) -> None:
        blob = Blob()
        blob.data = b"foo"
        self.store.add_object(blob)
        blobs = [(b"bla", blob.id, stat.S_IFREG)]
        rootid = commit_tree(self.store, blobs)
        self.assertEqual(rootid, b"1a1e80437220f9312e855c37ac4398b68e5c1d50")
        self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"])
        self.assertEqual({rootid, blob.id}, set(self.store._data.keys()))

    def test_nested(self) -> None:
        blob = Blob()
        blob.data = b"foo"
        self.store.add_object(blob)
        blobs = [(b"bla/bar", blob.id, stat.S_IFREG)]
        rootid = commit_tree(self.store, blobs)
        self.assertEqual(rootid, b"d92b959b216ad0d044671981196781b3258fa537")
        dirid = self.store[rootid][b"bla"][1]
        self.assertEqual(dirid, b"c1a1deb9788150829579a8b4efa6311e7b638650")
        self.assertEqual((stat.S_IFDIR, dirid), self.store[rootid][b"bla"])
        self.assertEqual((stat.S_IFREG, blob.id), self.store[dirid][b"bar"])
        self.assertEqual({rootid, dirid, blob.id}, set(self.store._data.keys()))


class CleanupModeTests(TestCase):
    def assertModeEqual(self, expected, got) -> None:
        self.assertEqual(expected, got, f"{expected:o} != {got:o}")

    def test_file(self) -> None:
        self.assertModeEqual(0o100644, cleanup_mode(0o100000))

    def test_executable(self) -> None:
        self.assertModeEqual(0o100755, cleanup_mode(0o100711))
        self.assertModeEqual(0o100755, cleanup_mode(0o100700))

    def test_symlink(self) -> None:
        self.assertModeEqual(0o120000, cleanup_mode(0o120711))

    def test_dir(self) -> None:
        self.assertModeEqual(0o040000, cleanup_mode(0o40531))

    def test_submodule(self) -> None:
        self.assertModeEqual(0o160000, cleanup_mode(0o160744))


class WriteCacheTimeTests(TestCase):
    def test_write_string(self) -> None:
        f = BytesIO()
        self.assertRaises(TypeError, write_cache_time, f, "foo")

    def test_write_int(self) -> None:
        f = BytesIO()
        write_cache_time(f, 434343)
        self.assertEqual(struct.pack(">LL", 434343, 0), f.getvalue())

    def test_write_tuple(self) -> None:
        f = BytesIO()
        write_cache_time(f, (434343, 21))
        self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())

    def test_write_float(self) -> None:
        f = BytesIO()
        write_cache_time(f, 434343.000000021)
        self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
class IndexEntryFromStatTests(TestCase):
    def test_simple(self) -> None:
        st = os.stat_result(
            (
                16877,
                131078,
                64769,
                154,
                1000,
                1000,
                12288,
                1323629595,
                1324180496,
                1324180496,
            )
        )
        entry = index_entry_from_stat(st, b"22" * 20)
        self.assertEqual(
            entry,
            IndexEntry(
                1324180496,
                1324180496,
                64769,
                131078,
                16384,
                1000,
                1000,
                12288,
                b"2222222222222222222222222222222222222222",
                0,
                0,
            ),
        )

    def test_override_mode(self) -> None:
        st = os.stat_result(
            (
                stat.S_IFREG + 0o644,
                131078,
                64769,
                154,
                1000,
                1000,
                12288,
                1323629595,
                1324180496,
                1324180496,
            )
        )
        entry = index_entry_from_stat(st, b"22" * 20, mode=stat.S_IFREG + 0o755)
        self.assertEqual(
            entry,
            IndexEntry(
                1324180496,
                1324180496,
                64769,
                131078,
                33261,
                1000,
                1000,
                12288,
                b"2222222222222222222222222222222222222222",
                0,
                0,
            ),
        )
class BuildIndexTests(TestCase):
    def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha) -> None:
        self.assertEqual(index_entry.mode, mode)  # mode
        self.assertEqual(index_entry.size, filesize)  # filesize
        self.assertEqual(index_entry.sha, sha)  # sha

    def assertFileContents(self, path, contents, symlink=False) -> None:
        if symlink:
            self.assertEqual(os.readlink(path), contents)
        else:
            with open(path, "rb") as f:
                self.assertEqual(f.read(), contents)

    def test_empty(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            tree = Tree()
            repo.object_store.add_object(tree)

            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )

            # Verify index entries
            index = repo.open_index()
            self.assertEqual(len(index), 0)

            # Verify no files
            self.assertEqual([".git"], os.listdir(repo.path))

    def test_git_dir(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Populate repo
            filea = Blob.from_string(b"file a")
            filee = Blob.from_string(b"d")

            tree = Tree()
            tree[b".git/a"] = (stat.S_IFREG | 0o644, filea.id)
            tree[b"c/e"] = (stat.S_IFREG | 0o644, filee.id)

            repo.object_store.add_objects([(o, None) for o in [filea, filee, tree]])

            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )

            # Verify index entries
            index = repo.open_index()
            self.assertEqual(len(index), 1)

            # filea
            apath = os.path.join(repo.path, ".git", "a")
            self.assertFalse(os.path.exists(apath))

            # filee
            epath = os.path.join(repo.path, "c", "e")
            self.assertTrue(os.path.exists(epath))
            self.assertReasonableIndexEntry(
                index[b"c/e"], stat.S_IFREG | 0o644, 1, filee.id
            )
            self.assertFileContents(epath, b"d")

    def test_nonempty(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Populate repo
            filea = Blob.from_string(b"file a")
            fileb = Blob.from_string(b"file b")
            filed = Blob.from_string(b"file d")

            tree = Tree()
            tree[b"a"] = (stat.S_IFREG | 0o644, filea.id)
            tree[b"b"] = (stat.S_IFREG | 0o644, fileb.id)
            tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id)

            repo.object_store.add_objects(
                [(o, None) for o in [filea, fileb, filed, tree]]
            )

            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )

            # Verify index entries
            index = repo.open_index()
            self.assertEqual(len(index), 3)

            # filea
            apath = os.path.join(repo.path, "a")
            self.assertTrue(os.path.exists(apath))
            self.assertReasonableIndexEntry(
                index[b"a"], stat.S_IFREG | 0o644, 6, filea.id
            )
            self.assertFileContents(apath, b"file a")

            # fileb
            bpath = os.path.join(repo.path, "b")
            self.assertTrue(os.path.exists(bpath))
            self.assertReasonableIndexEntry(
                index[b"b"], stat.S_IFREG | 0o644, 6, fileb.id
            )
            self.assertFileContents(bpath, b"file b")

            # filed
            dpath = os.path.join(repo.path, "c", "d")
            self.assertTrue(os.path.exists(dpath))
            self.assertReasonableIndexEntry(
                index[b"c/d"], stat.S_IFREG | 0o644, 6, filed.id
            )
            self.assertFileContents(dpath, b"file d")

            # Verify no extra files
            self.assertEqual([".git", "a", "b", "c"], sorted(os.listdir(repo.path)))
            self.assertEqual(["d"], sorted(os.listdir(os.path.join(repo.path, "c"))))
    @skipIf(not getattr(os, "sync", None), "Requires sync support")
    def test_norewrite(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Populate repo
            filea = Blob.from_string(b"file a")
            filea_path = os.path.join(repo_dir, "a")
            tree = Tree()
            tree[b"a"] = (stat.S_IFREG | 0o644, filea.id)

            repo.object_store.add_objects([(o, None) for o in [filea, tree]])

            # First Write
            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )
            # Use sync as metadata can be cached on some FS
            os.sync()
            mtime = os.stat(filea_path).st_mtime

            # Test Rewrite
            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )
            os.sync()
            self.assertEqual(mtime, os.stat(filea_path).st_mtime)

            # Modify content
            with open(filea_path, "wb") as fh:
                fh.write(b"test a")
            os.sync()
            mtime = os.stat(filea_path).st_mtime

            # Test rewrite
            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )
            os.sync()
            with open(filea_path, "rb") as fh:
                self.assertEqual(b"file a", fh.read())

    @skipIf(not can_symlink(), "Requires symlink support")
    def test_symlink(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Populate repo
            filed = Blob.from_string(b"file d")
            filee = Blob.from_string(b"d")

            tree = Tree()
            tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id)
            tree[b"c/e"] = (stat.S_IFLNK, filee.id)  # symlink

            repo.object_store.add_objects([(o, None) for o in [filed, filee, tree]])

            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )

            # Verify index entries
            index = repo.open_index()

            # symlink to d
            epath = os.path.join(repo.path, "c", "e")
            self.assertTrue(os.path.exists(epath))
            self.assertReasonableIndexEntry(
                index[b"c/e"],
                stat.S_IFLNK,
                0 if sys.platform == "win32" else 1,
                filee.id,
            )
            self.assertFileContents(epath, "d", symlink=True)
    def test_no_decode_encode(self) -> None:
        repo_dir = tempfile.mkdtemp()
        repo_dir_bytes = os.fsencode(repo_dir)
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Populate repo
            file = Blob.from_string(b"foo")

            tree = Tree()
            latin1_name = "À".encode("latin1")
            try:
                latin1_path = os.path.join(repo_dir_bytes, latin1_name)
            except UnicodeDecodeError:
                self.skipTest("can not decode as latin1")
            utf8_name = "À".encode()
            utf8_path = os.path.join(repo_dir_bytes, utf8_name)
            tree[latin1_name] = (stat.S_IFREG | 0o644, file.id)
            tree[utf8_name] = (stat.S_IFREG | 0o644, file.id)

            repo.object_store.add_objects([(o, None) for o in [file, tree]])

            try:
                build_index_from_tree(
                    repo.path, repo.index_path(), repo.object_store, tree.id
                )
            except OSError as e:
                if e.errno == 92 and sys.platform == "darwin":
                    # Our filename isn't supported by the platform :(
                    self.skipTest(f"can not write filename {e.filename!r}")
                else:
                    raise
            except UnicodeDecodeError:
                # This happens e.g. with python3.6 on Windows.
                # It implicitly decodes using utf8, which doesn't work.
                self.skipTest("can not implicitly convert as utf8")

            # Verify index entries
            index = repo.open_index()
            self.assertIn(latin1_name, index)
            self.assertIn(utf8_name, index)

            self.assertTrue(os.path.exists(latin1_path))
            self.assertTrue(os.path.exists(utf8_path))
    def test_git_submodule(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            filea = Blob.from_string(b"file alalala")

            subtree = Tree()
            subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id)

            c = Commit()
            c.tree = subtree.id
            c.committer = c.author = b"Somebody <somebody@example.com>"
            c.commit_time = c.author_time = 42342
            c.commit_timezone = c.author_timezone = 0
            c.parents = []
            c.message = b"Subcommit"

            tree = Tree()
            tree[b"c"] = (S_IFGITLINK, c.id)

            repo.object_store.add_objects([(o, None) for o in [tree]])

            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )

            # Verify index entries
            index = repo.open_index()
            self.assertEqual(len(index), 1)

            # filea
            apath = os.path.join(repo.path, "c/a")
            self.assertFalse(os.path.exists(apath))

            # dir c
            cpath = os.path.join(repo.path, "c")
            self.assertTrue(os.path.isdir(cpath))
            self.assertEqual(index[b"c"].mode, S_IFGITLINK)  # mode
            self.assertEqual(index[b"c"].sha, c.id)  # sha

    def test_git_submodule_exists(self) -> None:
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            filea = Blob.from_string(b"file alalala")

            subtree = Tree()
            subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id)

            c = Commit()
            c.tree = subtree.id
            c.committer = c.author = b"Somebody <somebody@example.com>"
            c.commit_time = c.author_time = 42342
            c.commit_timezone = c.author_timezone = 0
            c.parents = []
            c.message = b"Subcommit"

            tree = Tree()
            tree[b"c"] = (S_IFGITLINK, c.id)

            os.mkdir(os.path.join(repo_dir, "c"))
            repo.object_store.add_objects([(o, None) for o in [tree]])

            build_index_from_tree(
                repo.path, repo.index_path(), repo.object_store, tree.id
            )

            # Verify index entries
            index = repo.open_index()
            self.assertEqual(len(index), 1)

            # filea
            apath = os.path.join(repo.path, "c/a")
            self.assertFalse(os.path.exists(apath))

            # dir c
            cpath = os.path.join(repo.path, "c")
            self.assertTrue(os.path.isdir(cpath))
            self.assertEqual(index[b"c"].mode, S_IFGITLINK)  # mode
            self.assertEqual(index[b"c"].sha, c.id)  # sha

    def test_with_line_ending_normalization(self) -> None:
        """Test that build_index_from_tree applies line-ending normalization."""
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        from dulwich.line_ending import BlobNormalizer

        with Repo.init(repo_dir) as repo:
            # Set up autocrlf config
            config = repo.get_config()
            config.set((b"core",), b"autocrlf", b"true")
            config.write_to_path()

            # Create blob with LF line endings
            content_lf = b"line1\nline2\nline3\n"
            blob = Blob.from_string(content_lf)
            tree = Tree()
            tree[b"test.txt"] = (stat.S_IFREG | 0o644, blob.id)
            repo.object_store.add_objects([(blob, None), (tree, None)])

            # Create blob normalizer
            blob_normalizer = BlobNormalizer(config, {})

            # Build index with normalization
            build_index_from_tree(
                repo.path,
                repo.index_path(),
                repo.object_store,
                tree.id,
                blob_normalizer=blob_normalizer,
            )

            # On Windows with autocrlf=true, file should have CRLF line endings
            test_file = os.path.join(repo.path, "test.txt")
            with open(test_file, "rb") as f:
                content = f.read()

            # autocrlf=true means LF -> CRLF on checkout (on all platforms for testing)
            expected_content = b"line1\r\nline2\r\nline3\r\n"
            self.assertEqual(content, expected_content)
class GetUnstagedChangesTests(TestCase):
    def test_get_unstaged_changes(self) -> None:
        """Unit test for get_unstaged_changes."""
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Commit a dummy file then modify it
            foo1_fullpath = os.path.join(repo_dir, "foo1")
            with open(foo1_fullpath, "wb") as f:
                f.write(b"origstuff")

            foo2_fullpath = os.path.join(repo_dir, "foo2")
            with open(foo2_fullpath, "wb") as f:
                f.write(b"origstuff")

            repo.stage(["foo1", "foo2"])
            repo.do_commit(
                b"test status",
                author=b"author <email>",
                committer=b"committer <email>",
            )

            with open(foo1_fullpath, "wb") as f:
                f.write(b"newstuff")

            # modify access and modify time of path
            os.utime(foo1_fullpath, (0, 0))

            changes = get_unstaged_changes(repo.open_index(), repo_dir)

            self.assertEqual(list(changes), [b"foo1"])

    def test_get_unstaged_changes_with_preload(self) -> None:
        """Unit test for get_unstaged_changes with preload_index=True."""
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Create multiple files to test parallel processing
            files = []
            for i in range(10):
                filename = f"foo{i}"
                fullpath = os.path.join(repo_dir, filename)
                with open(fullpath, "wb") as f:
                    f.write(b"origstuff" + str(i).encode())
                files.append(filename)

            repo.stage(files)
            repo.do_commit(
                b"test status",
                author=b"author <email>",
                committer=b"committer <email>",
            )

            # Modify some files
            modified_files = [b"foo1", b"foo3", b"foo5", b"foo7"]
            for filename in modified_files:
                fullpath = os.path.join(repo_dir, filename.decode())
                with open(fullpath, "wb") as f:
                    f.write(b"newstuff")
                os.utime(fullpath, (0, 0))

            # Test with preload_index=False (serial)
            changes_serial = list(
                get_unstaged_changes(repo.open_index(), repo_dir, preload_index=False)
            )
            changes_serial.sort()

            # Test with preload_index=True (parallel)
            changes_parallel = list(
                get_unstaged_changes(repo.open_index(), repo_dir, preload_index=True)
            )
            changes_parallel.sort()

            # Both should return the same results
            self.assertEqual(changes_serial, changes_parallel)
            self.assertEqual(changes_serial, sorted(modified_files))
    def test_get_unstaged_deleted_changes(self) -> None:
        """Unit test for get_unstaged_changes."""
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Commit a dummy file then remove it
            foo1_fullpath = os.path.join(repo_dir, "foo1")
            with open(foo1_fullpath, "wb") as f:
                f.write(b"origstuff")

            repo.stage(["foo1"])
            repo.do_commit(
                b"test status",
                author=b"author <email>",
                committer=b"committer <email>",
            )

            os.unlink(foo1_fullpath)

            changes = get_unstaged_changes(repo.open_index(), repo_dir)

            self.assertEqual(list(changes), [b"foo1"])

    def test_get_unstaged_changes_removed_replaced_by_directory(self) -> None:
        """Unit test for get_unstaged_changes."""
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Commit a dummy file then modify it
            foo1_fullpath = os.path.join(repo_dir, "foo1")
            with open(foo1_fullpath, "wb") as f:
                f.write(b"origstuff")

            repo.stage(["foo1"])
            repo.do_commit(
                b"test status",
                author=b"author <email>",
                committer=b"committer <email>",
            )

            os.remove(foo1_fullpath)
            os.mkdir(foo1_fullpath)

            changes = get_unstaged_changes(repo.open_index(), repo_dir)

            self.assertEqual(list(changes), [b"foo1"])

    @skipIf(not can_symlink(), "Requires symlink support")
    def test_get_unstaged_changes_removed_replaced_by_link(self) -> None:
        """Unit test for get_unstaged_changes."""
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)
        with Repo.init(repo_dir) as repo:
            # Commit a dummy file then modify it
            foo1_fullpath = os.path.join(repo_dir, "foo1")
            with open(foo1_fullpath, "wb") as f:
                f.write(b"origstuff")

            repo.stage(["foo1"])
            repo.do_commit(
                b"test status",
                author=b"author <email>",
                committer=b"committer <email>",
            )

            os.remove(foo1_fullpath)
            os.symlink(os.path.dirname(foo1_fullpath), foo1_fullpath)

            changes = get_unstaged_changes(repo.open_index(), repo_dir)

            self.assertEqual(list(changes), [b"foo1"])
class TestValidatePathElement(TestCase):
    def test_default(self) -> None:
        self.assertTrue(validate_path_element_default(b"bla"))
        self.assertTrue(validate_path_element_default(b".bla"))
        self.assertFalse(validate_path_element_default(b".git"))
        self.assertFalse(validate_path_element_default(b".giT"))
        self.assertFalse(validate_path_element_default(b".."))
        self.assertTrue(validate_path_element_default(b"git~1"))

    def test_ntfs(self) -> None:
        self.assertTrue(validate_path_element_ntfs(b"bla"))
        self.assertTrue(validate_path_element_ntfs(b".bla"))
        self.assertFalse(validate_path_element_ntfs(b".git"))
        self.assertFalse(validate_path_element_ntfs(b".giT"))
        self.assertFalse(validate_path_element_ntfs(b".."))
        self.assertFalse(validate_path_element_ntfs(b"git~1"))

    def test_hfs(self) -> None:
        # Normal paths should pass
        self.assertTrue(validate_path_element_hfs(b"bla"))
        self.assertTrue(validate_path_element_hfs(b".bla"))

        # Basic .git variations should fail
        self.assertFalse(validate_path_element_hfs(b".git"))
        self.assertFalse(validate_path_element_hfs(b".giT"))
        self.assertFalse(validate_path_element_hfs(b".GIT"))
        self.assertFalse(validate_path_element_hfs(b".."))

        # git~1 should also fail on HFS+
        self.assertFalse(validate_path_element_hfs(b"git~1"))

        # Test HFS+ Unicode normalization attacks
        # .g\u200cit (zero-width non-joiner)
        self.assertFalse(validate_path_element_hfs(b".g\xe2\x80\x8cit"))

        # .gi\u200dt (zero-width joiner)
        self.assertFalse(validate_path_element_hfs(b".gi\xe2\x80\x8dt"))

        # Test other ignorable characters
        # .g\ufeffit (zero-width no-break space)
        self.assertFalse(validate_path_element_hfs(b".g\xef\xbb\xbfit"))

        # Valid Unicode that shouldn't be confused with .git
        self.assertTrue(validate_path_element_hfs(b".g\xc3\xaft"))  # .gït
        self.assertTrue(validate_path_element_hfs(b"git"))  # git without dot
class TestTreeFSPathConversion(TestCase):
    def test_tree_to_fs_path(self) -> None:
        tree_path = "délwíçh/foo".encode()
        fs_path = _tree_to_fs_path(b"/prefix/path", tree_path)
        self.assertEqual(
            fs_path,
            os.fsencode(os.path.join("/prefix/path", "délwíçh", "foo")),
        )

    def test_tree_to_fs_path_windows_separator(self) -> None:
        tree_path = b"path/with/slash"
        original_sep = os.sep.encode("ascii")

        # Temporarily modify os_sep_bytes to test Windows path conversion
        # This simulates Windows behavior on all platforms for testing
        import dulwich.index

        dulwich.index.os_sep_bytes = b"\\"
        self.addCleanup(setattr, dulwich.index, "os_sep_bytes", original_sep)

        fs_path = _tree_to_fs_path(b"/prefix/path", tree_path)

        # The function should join the prefix path with the converted tree path
        # The expected behavior is that the path separators in the tree_path are
        # converted to the platform-specific separator (which we've set to backslash)
        expected_path = os.path.join(b"/prefix/path", b"path\\with\\slash")
        self.assertEqual(fs_path, expected_path)

    def test_fs_to_tree_path_str(self) -> None:
        fs_path = os.path.join(os.path.join("délwíçh", "foo"))
        tree_path = _fs_to_tree_path(fs_path)
        self.assertEqual(tree_path, "délwíçh/foo".encode())

    def test_fs_to_tree_path_bytes(self) -> None:
        fs_path = os.path.join(os.fsencode(os.path.join("délwíçh", "foo")))
        tree_path = _fs_to_tree_path(fs_path)
        self.assertEqual(tree_path, "délwíçh/foo".encode())

    def test_fs_to_tree_path_windows_separator(self) -> None:
        # Test conversion of Windows paths to tree paths
        fs_path = b"path\\with\\backslash"
        original_sep = os.sep.encode("ascii")

        # Temporarily modify os_sep_bytes to test Windows path conversion
        import dulwich.index

        dulwich.index.os_sep_bytes = b"\\"
        self.addCleanup(setattr, dulwich.index, "os_sep_bytes", original_sep)

        tree_path = _fs_to_tree_path(fs_path)
        self.assertEqual(tree_path, b"path/with/backslash")
class TestIndexEntryFromPath(TestCase):
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tempdir)

    def test_index_entry_from_path_file(self) -> None:
        """Test creating index entry from a regular file."""
        # Create a test file
        test_file = os.path.join(self.tempdir, "testfile")
        with open(test_file, "wb") as f:
            f.write(b"test content")

        # Get the index entry
        entry = index_entry_from_path(os.fsencode(test_file))

        # Verify the entry was created with the right mode
        self.assertIsNotNone(entry)
        self.assertEqual(cleanup_mode(os.stat(test_file).st_mode), entry.mode)

    @skipIf(not can_symlink(), "Requires symlink support")
    def test_index_entry_from_path_symlink(self) -> None:
        """Test creating index entry from a symlink."""
        # Create a target file
        target_file = os.path.join(self.tempdir, "target")
        with open(target_file, "wb") as f:
            f.write(b"target content")

        # Create a symlink
        link_file = os.path.join(self.tempdir, "symlink")
        os.symlink(target_file, link_file)

        # Get the index entry
        entry = index_entry_from_path(os.fsencode(link_file))

        # Verify the entry was created with the right mode
        self.assertIsNotNone(entry)
        self.assertEqual(cleanup_mode(os.lstat(link_file).st_mode), entry.mode)

    def test_index_entry_from_path_directory(self) -> None:
        """Test creating index entry from a directory (should return None)."""
        # Create a directory
        test_dir = os.path.join(self.tempdir, "testdir")
        os.mkdir(test_dir)

        # Get the index entry for a directory
        entry = index_entry_from_path(os.fsencode(test_dir))

        # Should return None for regular directories
        self.assertIsNone(entry)

    def test_index_entry_from_directory_regular(self) -> None:
        """Test index_entry_from_directory with a regular directory."""
        # Create a directory
        test_dir = os.path.join(self.tempdir, "testdir")
        os.mkdir(test_dir)

        # Get stat for the directory
        st = os.lstat(test_dir)

        # Get the index entry for a regular directory
        entry = index_entry_from_directory(st, os.fsencode(test_dir))

        # Should return None for regular directories
        self.assertIsNone(entry)

    def test_index_entry_from_directory_git_submodule(self) -> None:
        """Test index_entry_from_directory with a Git submodule."""
        # Create a git repository that will be a submodule
        sub_repo_dir = os.path.join(self.tempdir, "subrepo")
        os.mkdir(sub_repo_dir)

        # Create the .git directory to make it look like a git repo
        git_dir = os.path.join(sub_repo_dir, ".git")
        os.mkdir(git_dir)

        # Create HEAD file with a fake commit SHA
        head_sha = b"1234567890" * 4  # 40-char fake SHA
        with open(os.path.join(git_dir, "HEAD"), "wb") as f:
            f.write(head_sha)

        # Get stat for the submodule directory
        st = os.lstat(sub_repo_dir)

        # Get the index entry for a git submodule directory
        entry = index_entry_from_directory(st, os.fsencode(sub_repo_dir))

        # Since we don't have a proper git setup, this might still return None
        # This test just ensures the code path is executed
        if entry is not None:
            # If an entry is returned, it should have the gitlink mode
            self.assertEqual(entry.mode, S_IFGITLINK)

    def test_index_entry_from_path_with_object_store(self) -> None:
        """Test creating index entry with object store."""
        # Create a test file
        test_file = os.path.join(self.tempdir, "testfile")
        with open(test_file, "wb") as f:
            f.write(b"test content")

        # Create a memory object store
        object_store = MemoryObjectStore()

        # Get the index entry and add to object store
        entry = index_entry_from_path(os.fsencode(test_file), object_store)

        # Verify we can access the blob from the object store
        self.assertIsNotNone(entry)
        blob = object_store[entry.sha]
        self.assertEqual(b"test content", blob.data)

    def test_iter_fresh_entries(self) -> None:
        """Test iterating over fresh entries."""
        # Create some test files
        file1 = os.path.join(self.tempdir, "file1")
        with open(file1, "wb") as f:
            f.write(b"file1 content")

        file2 = os.path.join(self.tempdir, "file2")
        with open(file2, "wb") as f:
            f.write(b"file2 content")

        # Create a memory object store
        object_store = MemoryObjectStore()

        # Get fresh entries
        paths = [b"file1", b"file2", b"nonexistent"]
        entries = dict(
            iter_fresh_entries(paths, os.fsencode(self.tempdir), object_store)
        )

        # Verify both files got entries but nonexistent file is None
        self.assertIn(b"file1", entries)
        self.assertIn(b"file2", entries)
        self.assertIn(b"nonexistent", entries)
        self.assertIsNotNone(entries[b"file1"])
        self.assertIsNotNone(entries[b"file2"])
        self.assertIsNone(entries[b"nonexistent"])

        # Check that blobs were added to object store
        blob1 = object_store[entries[b"file1"].sha]
        self.assertEqual(b"file1 content", blob1.data)

        blob2 = object_store[entries[b"file2"].sha]
        self.assertEqual(b"file2 content", blob2.data)
    def test_read_submodule_head(self) -> None:
        """Test reading the HEAD of a submodule."""
        from dulwich.index import read_submodule_head

        # Create a test repo that will be our "submodule"
        sub_repo_dir = os.path.join(self.tempdir, "subrepo")
        os.mkdir(sub_repo_dir)
        submodule_repo = Repo.init(sub_repo_dir)

        # Create a file and commit it to establish a HEAD
        test_file = os.path.join(sub_repo_dir, "testfile")
        with open(test_file, "wb") as f:
            f.write(b"test content")

        submodule_repo.stage(["testfile"])
        commit_id = submodule_repo.do_commit(b"Test commit for submodule")

        # Test reading the HEAD
        head_sha = read_submodule_head(sub_repo_dir)
        self.assertEqual(commit_id, head_sha)

        # Test with bytes path
        head_sha_bytes = read_submodule_head(os.fsencode(sub_repo_dir))
        self.assertEqual(commit_id, head_sha_bytes)

        # Test with non-existent path
        non_repo_dir = os.path.join(self.tempdir, "nonrepo")
        os.mkdir(non_repo_dir)
        self.assertIsNone(read_submodule_head(non_repo_dir))

        # Test with path that doesn't have a .git directory
        not_git_dir = os.path.join(self.tempdir, "notgit")
        os.mkdir(not_git_dir)
        self.assertIsNone(read_submodule_head(not_git_dir))

    def test_has_directory_changed(self) -> None:
        """Test checking if a directory has changed."""
        from dulwich.index import IndexEntry, _has_directory_changed

        # Setup mock IndexEntry
        mock_entry = IndexEntry(
            (1230680220, 0),
            (1230680220, 0),
            2050,
            3761020,
            33188,
            1000,
            1000,
            0,
            b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
            0,
            0,
        )

        # Test with a regular directory (not a submodule)
        reg_dir = os.path.join(self.tempdir, "regular_dir")
        os.mkdir(reg_dir)

        # Should return True for regular directory
        self.assertTrue(_has_directory_changed(os.fsencode(reg_dir), mock_entry))

        # Create a git repository to test submodule scenarios
        sub_repo_dir = os.path.join(self.tempdir, "subrepo")
        os.mkdir(sub_repo_dir)
        submodule_repo = Repo.init(sub_repo_dir)

        # Create a file and commit it to establish a HEAD
        test_file = os.path.join(sub_repo_dir, "testfile")
        with open(test_file, "wb") as f:
            f.write(b"test content")

        submodule_repo.stage(["testfile"])
        commit_id = submodule_repo.do_commit(b"Test commit for submodule")

        # Create an entry with the correct commit SHA
        correct_entry = IndexEntry(
            (1230680220, 0),
            (1230680220, 0),
            2050,
            3761020,
            33188,
            1000,
            1000,
            0,
            commit_id,
            0,
            0,
        )

        # Create an entry with an incorrect commit SHA
        incorrect_entry = IndexEntry(
            (1230680220, 0),
            (1230680220, 0),
            2050,
            3761020,
            33188,
            1000,
            1000,
            0,
            b"0000000000000000000000000000000000000000",
            0,
            0,
        )

        # Should return False for submodule with correct SHA
        self.assertFalse(
            _has_directory_changed(os.fsencode(sub_repo_dir), correct_entry)
        )

        # Should return True for submodule with incorrect SHA
        self.assertTrue(
            _has_directory_changed(os.fsencode(sub_repo_dir), incorrect_entry)
        )
    def test_get_unstaged_changes(self) -> None:
        """Test detecting unstaged changes in a working tree."""
        from dulwich.index import (
            ConflictedIndexEntry,
            Index,
            IndexEntry,
            get_unstaged_changes,
        )

        # Create a test repo
        repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, repo_dir)

        # Create test index
        index = Index(os.path.join(repo_dir, "index"))

        # Create an actual hash of our test content
        from dulwich.objects import Blob

        test_blob = Blob()
        test_blob.data = b"initial content"

        # Create some test files with known contents
        file1_path = os.path.join(repo_dir, "file1")
        with open(file1_path, "wb") as f:
            f.write(b"initial content")

        file2_path = os.path.join(repo_dir, "file2")
        with open(file2_path, "wb") as f:
            f.write(b"initial content")

        # Add them to index
        entry1 = IndexEntry(
            (1230680220, 0),
            (1230680220, 0),
            2050,
            3761020,
            33188,
            1000,
            1000,
            0,
            b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",  # Not matching actual content
            0,
            0,
        )
        entry2 = IndexEntry(
            (1230680220, 0),
            (1230680220, 0),
            2050,
            3761020,
            33188,
            1000,
            1000,
            0,
            test_blob.id,  # Will be content's real hash
            0,
            0,
        )

        # Add a file that has a conflict
        entry_conflict = ConflictedIndexEntry(b"conflict", {0: None, 1: None, 2: None})

        index._byname = {
            b"file1": entry1,
            b"file2": entry2,
            b"file3": IndexEntry(
                (1230680220, 0),
                (1230680220, 0),
                2050,
                3761020,
                33188,
                1000,
                1000,
                0,
                b"0000000000000000000000000000000000000000",
                0,
                0,
            ),
            b"conflict": entry_conflict,
        }

        # Get unstaged changes
        changes = list(get_unstaged_changes(index, repo_dir))

        # File1 should be unstaged (content doesn't match hash)
        # File3 doesn't exist (deleted)
        # Conflict is always unstaged
        self.assertEqual(sorted(changes), [b"conflict", b"file1", b"file3"])

        # Create directory where there should be a file
        os.mkdir(os.path.join(repo_dir, "file4"))
        index._byname[b"file4"] = entry1

        # Get unstaged changes again
        changes = list(get_unstaged_changes(index, repo_dir))

        # Now file4 should also be unstaged because it's a directory instead of a file
        self.assertEqual(sorted(changes), [b"conflict", b"file1", b"file3", b"file4"])

        # Create a custom blob filter function
        def filter_blob_callback(blob, path):
            # Modify blob to make it look changed
            blob.data = b"modified " + blob.data
            return blob

        # Get unstaged changes with blob filter
        changes = list(get_unstaged_changes(index, repo_dir, filter_blob_callback))

        # Now both file1 and file2 should be unstaged due to the filter
        self.assertEqual(
            sorted(changes), [b"conflict", b"file1", b"file2", b"file3", b"file4"]
        )
class TestManyFilesFeature(TestCase):
    """Tests for the manyFiles feature (index version 4 and skipHash)."""

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tempdir)

    def test_index_version_4_parsing(self):
        """Test that index version 4 files can be parsed."""
        index_path = os.path.join(self.tempdir, "index")

        # Create an index with version 4
        index = Index(index_path, read=False, version=4)

        # Add some entries
        entry = IndexEntry(
            ctime=(1234567890, 0),
            mtime=(1234567890, 0),
            dev=1,
            ino=1,
            mode=0o100644,
            uid=1000,
            gid=1000,
            size=5,
            sha=b"0" * 40,
        )
        index[b"test.txt"] = entry

        # Write and read back
        index.write()

        # Read the index back
        index2 = Index(index_path)
        self.assertEqual(index2._version, 4)
        self.assertIn(b"test.txt", index2)

    def test_skip_hash_feature(self):
        """Test that skipHash feature works correctly."""
        index_path = os.path.join(self.tempdir, "index")

        # Create an index with skipHash enabled
        index = Index(index_path, read=False, skip_hash=True)

        # Add some entries
        entry = IndexEntry(
            ctime=(1234567890, 0),
            mtime=(1234567890, 0),
            dev=1,
            ino=1,
            mode=0o100644,
            uid=1000,
            gid=1000,
            size=5,
            sha=b"0" * 40,
        )
        index[b"test.txt"] = entry

        # Write the index
        index.write()

        # Verify the file was written with zero hash
        with open(index_path, "rb") as f:
            f.seek(-20, 2)  # Seek to last 20 bytes
            trailing_hash = f.read(20)
            self.assertEqual(trailing_hash, b"\x00" * 20)

        # Verify we can still read it back
        index2 = Index(index_path)
        self.assertIn(b"test.txt", index2)

    def test_version_4_no_padding(self):
        """Test that version 4 entries have no padding."""
        # Create entries with names that would show compression benefits
        entries = [
            SerializedIndexEntry(
                name=b"src/main/java/com/example/Service.java",
                ctime=(1234567890, 0),
                mtime=(1234567890, 0),
                dev=1,
                ino=1,
                mode=0o100644,
                uid=1000,
                gid=1000,
                size=5,
                sha=b"0" * 40,
                flags=0,
                extended_flags=0,
            ),
            SerializedIndexEntry(
                name=b"src/main/java/com/example/Controller.java",
                ctime=(1234567890, 0),
                mtime=(1234567890, 0),
                dev=1,
                ino=2,
                mode=0o100644,
                uid=1000,
                gid=1000,
                size=5,
                sha=b"1" * 40,
                flags=0,
                extended_flags=0,
            ),
        ]
        # Test version 2 (with padding, full paths)
        buf_v2 = BytesIO()
        from dulwich.index import write_cache_entry

        previous_path = b""
        for entry in entries:
            # Set proper flags for v2
            entry_v2 = SerializedIndexEntry(
                entry.name,
                entry.ctime,
                entry.mtime,
                entry.dev,
                entry.ino,
                entry.mode,
                entry.uid,
                entry.gid,
                entry.size,
                entry.sha,
                len(entry.name),
                entry.extended_flags,
            )
            write_cache_entry(buf_v2, entry_v2, version=2, previous_path=previous_path)
            previous_path = entry.name
        v2_data = buf_v2.getvalue()
        # Test version 4 (path compression, no padding)
        buf_v4 = BytesIO()
        previous_path = b""
        for entry in entries:
            write_cache_entry(buf_v4, entry, version=4, previous_path=previous_path)
            previous_path = entry.name
        v4_data = buf_v4.getvalue()
        # Version 4 should be shorter due to compression and no padding
        self.assertLess(len(v4_data), len(v2_data))
        # Both should parse correctly
        buf_v2.seek(0)
        from dulwich.index import read_cache_entry

        previous_path = b""
        parsed_v2_entries = []
        for _ in entries:
            parsed = read_cache_entry(buf_v2, version=2, previous_path=previous_path)
            parsed_v2_entries.append(parsed)
            previous_path = parsed.name
        buf_v4.seek(0)
        previous_path = b""
        parsed_v4_entries = []
        for _ in entries:
            parsed = read_cache_entry(buf_v4, version=4, previous_path=previous_path)
            parsed_v4_entries.append(parsed)
            previous_path = parsed.name
        # Both should have the same paths
        for v2_entry, v4_entry in zip(parsed_v2_entries, parsed_v4_entries):
            self.assertEqual(v2_entry.name, v4_entry.name)
            self.assertEqual(v2_entry.sha, v4_entry.sha)
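

# Illustrative sketch only, not part of the test suite: test_skip_hash_feature
# above checks that skipHash leaves the 20-byte trailer zeroed. For a regular
# (non-skipHash) index the trailer is the SHA-1 of everything that precedes it,
# and the helper below shows how that could be re-verified by hand. It assumes
# the classic SHA-1 index layout (20-byte trailing checksum); it is not how
# dulwich itself validates index files.
def _sketch_index_checksum(index_path):
    """Return (stored, computed) trailing checksums for an index file."""
    import hashlib

    with open(index_path, "rb") as f:
        data = f.read()
    stored = data[-20:]  # all zeroes when skipHash is enabled
    computed = hashlib.sha1(data[:-20]).digest()
    return stored, computed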


class TestManyFilesRepoIntegration(TestCase):
    """Tests for manyFiles feature integration with Repo."""

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tempdir)

    def test_repo_with_manyfiles_config(self):
        """Test that a repository with feature.manyFiles=true uses the right settings."""
        # Create a new repository
        repo = Repo.init(self.tempdir)
        # Set feature.manyFiles=true in config
        config = repo.get_config()
        config.set(b"feature", b"manyFiles", b"true")
        config.write_to_path()
        # Open the index - should have skipHash enabled and version 4
        index = repo.open_index()
        self.assertTrue(index._skip_hash)
        self.assertEqual(index._version, 4)

    def test_repo_with_explicit_index_settings(self):
        """Test that explicit index.version and index.skipHash work."""
        # Create a new repository
        repo = Repo.init(self.tempdir)
        # Set explicit index settings
        config = repo.get_config()
        config.set(b"index", b"version", b"3")
        config.set(b"index", b"skipHash", b"false")
        config.write_to_path()
        # Open the index - should respect explicit settings
        index = repo.open_index()
        self.assertFalse(index._skip_hash)
        self.assertEqual(index._version, 3)
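

# For reference, the repository settings exercised by TestManyFilesRepoIntegration
# correspond roughly to the following git CLI configuration. This is illustrative
# only; the assertions above are the source of truth for how these keys are
# interpreted here:
#
#   git config feature.manyFiles true    # per the tests: index version 4 + skipHash
#   git config index.version 3           # explicit index format version
#   git config index.skipHash false      # explicit skipHash setting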


class TestPathPrefixCompression(TestCase):
    """Tests for index version 4 path prefix compression."""

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tempdir)

    def test_varint_encoding_decoding(self):
        """Test variable-width integer encoding and decoding."""
        test_values = [0, 1, 127, 128, 255, 256, 16383, 16384, 65535, 65536]
        for value in test_values:
            encoded = _encode_varint(value)
            decoded, _ = _decode_varint(encoded, 0)
            self.assertEqual(value, decoded, f"Failed for value {value}")

    def test_path_compression_simple(self):
        """Test simple path compression cases."""
        # Test case 1: No common prefix
        compressed = _compress_path(b"file1.txt", b"")
        decompressed, _ = _decompress_path(compressed, 0, b"")
        self.assertEqual(b"file1.txt", decompressed)
        # Test case 2: Common prefix
        compressed = _compress_path(b"src/file2.txt", b"src/file1.txt")
        decompressed, _ = _decompress_path(compressed, 0, b"src/file1.txt")
        self.assertEqual(b"src/file2.txt", decompressed)
        # Test case 3: Completely different paths
        compressed = _compress_path(b"docs/readme.md", b"src/file1.txt")
        decompressed, _ = _decompress_path(compressed, 0, b"src/file1.txt")
        self.assertEqual(b"docs/readme.md", decompressed)

    def test_path_compression_deep_directories(self):
        """Test compression with deep directory structures."""
        path1 = b"src/main/java/com/example/service/UserService.java"
        path2 = b"src/main/java/com/example/service/OrderService.java"
        path3 = b"src/main/java/com/example/model/User.java"
        # Compress path2 relative to path1
        compressed = _compress_path(path2, path1)
        decompressed, _ = _decompress_path(compressed, 0, path1)
        self.assertEqual(path2, decompressed)
        # Compress path3 relative to path2
        compressed = _compress_path(path3, path2)
        decompressed, _ = _decompress_path(compressed, 0, path2)
        self.assertEqual(path3, decompressed)

    def test_index_version_4_with_compression(self):
        """Test full index version 4 write/read with path compression."""
        index_path = os.path.join(self.tempdir, "index")
        # Create an index with version 4
        index = Index(index_path, read=False, version=4)
        # Add multiple entries with common prefixes
        paths = [
            b"src/main/java/App.java",
            b"src/main/java/Utils.java",
            b"src/main/resources/config.properties",
            b"src/test/java/AppTest.java",
            b"docs/README.md",
            b"docs/INSTALL.md",
        ]
        for i, path in enumerate(paths):
            entry = IndexEntry(
                ctime=(1234567890, 0),
                mtime=(1234567890, 0),
                dev=1,
                ino=i + 1,
                mode=0o100644,
                uid=1000,
                gid=1000,
                size=10,
                sha=f"{i:040d}".encode(),
            )
            index[path] = entry
        # Write and read back
        index.write()
        # Read the index back
        index2 = Index(index_path)
        self.assertEqual(index2._version, 4)
        # Verify all paths were preserved correctly
        for path in paths:
            self.assertIn(path, index2)
        # Verify the index file is smaller than version 2 would be
        with open(index_path, "rb") as f:
            v4_size = len(f.read())
        # Create equivalent version 2 index for comparison
        index_v2_path = os.path.join(self.tempdir, "index_v2")
        index_v2 = Index(index_v2_path, read=False, version=2)
        for path in paths:
            entry = IndexEntry(
                ctime=(1234567890, 0),
                mtime=(1234567890, 0),
                dev=1,
                ino=1,
                mode=0o100644,
                uid=1000,
                gid=1000,
                size=10,
                sha=b"0" * 40,
            )
            index_v2[path] = entry
        index_v2.write()
        with open(index_v2_path, "rb") as f:
            v2_size = len(f.read())
        # Version 4 should be smaller due to compression
        self.assertLess(
            v4_size, v2_size, "Version 4 index should be smaller than version 2"
        )

    def test_path_compression_edge_cases(self):
        """Test edge cases in path compression."""
        # Empty paths
        compressed = _compress_path(b"", b"")
        decompressed, _ = _decompress_path(compressed, 0, b"")
        self.assertEqual(b"", decompressed)
        # Path identical to previous
        compressed = _compress_path(b"same.txt", b"same.txt")
        decompressed, _ = _decompress_path(compressed, 0, b"same.txt")
        self.assertEqual(b"same.txt", decompressed)
        # Path shorter than previous
        compressed = _compress_path(b"short", b"very/long/path/file.txt")
        decompressed, _ = _decompress_path(compressed, 0, b"very/long/path/file.txt")
        self.assertEqual(b"short", decompressed)


class TestUpdateWorkingTree(TestCase):
    def setUp(self):
        self.tempdir = tempfile.mkdtemp()

        def cleanup_tempdir():
            """Remove tempdir, handling read-only files on Windows."""

            def remove_readonly(func, path, excinfo):
                """Error handler for Windows read-only files."""
                import stat

                if sys.platform == "win32" and excinfo[0] is PermissionError:
                    os.chmod(path, stat.S_IWRITE)
                    func(path)
                else:
                    raise

            shutil.rmtree(self.tempdir, onerror=remove_readonly)

        self.addCleanup(cleanup_tempdir)
        self.repo = Repo.init(self.tempdir)

    def test_update_working_tree_with_blob_normalizer(self):
        """Test update_working_tree with a blob normalizer."""

        # Create a simple blob normalizer that converts CRLF to LF
        class TestBlobNormalizer:
            def checkout_normalize(self, blob, path):
                # Convert CRLF to LF during checkout
                new_blob = Blob()
                new_blob.data = blob.data.replace(b"\r\n", b"\n")
                return new_blob

        # Create a tree with a file containing CRLF
        blob = Blob()
        blob.data = b"Hello\r\nWorld\r\n"
        self.repo.object_store.add_object(blob)
        tree = Tree()
        tree[b"test.txt"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree)
        # Update working tree with normalizer
        normalizer = TestBlobNormalizer()
        update_working_tree(
            self.repo,
            None,  # old_tree_id
            tree.id,  # new_tree_id
            blob_normalizer=normalizer,
        )
        # Check that the file was written with LF line endings
        test_file = os.path.join(self.tempdir, "test.txt")
        with open(test_file, "rb") as f:
            content = f.read()
            self.assertEqual(b"Hello\nWorld\n", content)
        # Check that the index has the original blob SHA
        index = self.repo.open_index()
        self.assertEqual(blob.id, index[b"test.txt"].sha)

    def test_update_working_tree_without_blob_normalizer(self):
        """Test update_working_tree without a blob normalizer."""
        # Create a tree with a file containing CRLF
        blob = Blob()
        blob.data = b"Hello\r\nWorld\r\n"
        self.repo.object_store.add_object(blob)
        tree = Tree()
        tree[b"test.txt"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree)
        # Update working tree without normalizer
        update_working_tree(
            self.repo,
            None,  # old_tree_id
            tree.id,  # new_tree_id
            blob_normalizer=None,
        )
        # Check that the file was written with original CRLF line endings
        test_file = os.path.join(self.tempdir, "test.txt")
        with open(test_file, "rb") as f:
            content = f.read()
            self.assertEqual(b"Hello\r\nWorld\r\n", content)
        # Check that the index has the blob SHA
        index = self.repo.open_index()
        self.assertEqual(blob.id, index[b"test.txt"].sha)

    def test_update_working_tree_remove_directory(self):
        """Test that update_working_tree properly removes directories."""
        # Create initial tree with a directory containing files
        blob1 = Blob()
        blob1.data = b"content1"
        self.repo.object_store.add_object(blob1)
        blob2 = Blob()
        blob2.data = b"content2"
        self.repo.object_store.add_object(blob2)
        tree1 = Tree()
        tree1[b"dir/file1.txt"] = (0o100644, blob1.id)
        tree1[b"dir/file2.txt"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1 (create directory with files)
        update_working_tree(self.repo, None, tree1.id)
        # Verify directory and files exist
        dir_path = os.path.join(self.tempdir, "dir")
        self.assertTrue(os.path.isdir(dir_path))
        self.assertTrue(os.path.exists(os.path.join(dir_path, "file1.txt")))
        self.assertTrue(os.path.exists(os.path.join(dir_path, "file2.txt")))
        # Create empty tree (remove everything)
        tree2 = Tree()
        self.repo.object_store.add_object(tree2)
        # Update to empty tree
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Verify directory was removed
        self.assertFalse(os.path.exists(dir_path))

    def test_update_working_tree_submodule_to_file(self):
        """Test replacing a submodule directory with a file."""
        # Create tree with submodule
        submodule_sha = b"a" * 40
        tree1 = Tree()
        tree1[b"submodule"] = (S_IFGITLINK, submodule_sha)
        self.repo.object_store.add_object(tree1)
        # Update to tree with submodule
        update_working_tree(self.repo, None, tree1.id)
        # Verify submodule directory exists with .git file
        submodule_path = os.path.join(self.tempdir, "submodule")
        self.assertTrue(os.path.isdir(submodule_path))
        self.assertTrue(os.path.exists(os.path.join(submodule_path, ".git")))
        # Create tree with file at same path
        blob = Blob()
        blob.data = b"file content"
        self.repo.object_store.add_object(blob)
        tree2 = Tree()
        tree2[b"submodule"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree2)
        # Update to tree with file (should remove submodule directory and create file)
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Verify it's now a file
        self.assertTrue(os.path.isfile(submodule_path))
        with open(submodule_path, "rb") as f:
            self.assertEqual(b"file content", f.read())

    def test_update_working_tree_directory_with_nested_subdir(self):
        """Test removing directory with nested subdirectories."""
        # Create tree with nested directories
        blob = Blob()
        blob.data = b"deep content"
        self.repo.object_store.add_object(blob)
        tree1 = Tree()
        tree1[b"a/b/c/file.txt"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Verify nested structure exists
        path_a = os.path.join(self.tempdir, "a")
        path_b = os.path.join(path_a, "b")
        path_c = os.path.join(path_b, "c")
        file_path = os.path.join(path_c, "file.txt")
        self.assertTrue(os.path.exists(file_path))
        # Create empty tree
        tree2 = Tree()
        self.repo.object_store.add_object(tree2)
        # Update to empty tree
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Verify all directories were removed
        self.assertFalse(os.path.exists(path_a))

    def test_update_working_tree_file_replaced_by_dir_not_removed(self):
        """Test that a directory replacing a git file is left alone if not empty."""
        # Create tree with a file
        blob = Blob()
        blob.data = b"file content"
        self.repo.object_store.add_object(blob)
        tree1 = Tree()
        tree1[b"path"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Verify file exists
        file_path = os.path.join(self.tempdir, "path")
        self.assertTrue(os.path.isfile(file_path))
        # Manually replace file with directory containing untracked file
        os.remove(file_path)
        os.mkdir(file_path)
        with open(os.path.join(file_path, "untracked.txt"), "w") as f:
            f.write("untracked content")
        # Create empty tree
        tree2 = Tree()
        self.repo.object_store.add_object(tree2)
        # Update should succeed but leave the directory alone
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Directory should still exist with its contents
        self.assertTrue(os.path.isdir(file_path))
        self.assertTrue(os.path.exists(os.path.join(file_path, "untracked.txt")))

    def test_update_working_tree_file_replaced_by_empty_dir_removed(self):
        """Test that an empty directory replacing a git file is removed."""
        # Create tree with a file
        blob = Blob()
        blob.data = b"file content"
        self.repo.object_store.add_object(blob)
        tree1 = Tree()
        tree1[b"path"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Verify file exists
        file_path = os.path.join(self.tempdir, "path")
        self.assertTrue(os.path.isfile(file_path))
        # Manually replace file with empty directory
        os.remove(file_path)
        os.mkdir(file_path)
        # Create empty tree
        tree2 = Tree()
        self.repo.object_store.add_object(tree2)
        # Update should remove the empty directory
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Directory should be gone
        self.assertFalse(os.path.exists(file_path))

    def test_update_working_tree_symlink_transitions(self):
        """Test transitions involving symlinks."""
        # Skip on Windows where symlinks might not be supported
        if sys.platform == "win32":
            self.skipTest("Symlinks not fully supported on Windows")
        # Create tree with symlink
        blob1 = Blob()
        blob1.data = b"target/path"
        self.repo.object_store.add_object(blob1)
        tree1 = Tree()
        tree1[b"link"] = (0o120000, blob1.id)  # Symlink mode
        self.repo.object_store.add_object(tree1)
        # Update to tree with symlink
        update_working_tree(self.repo, None, tree1.id)
        link_path = os.path.join(self.tempdir, "link")
        self.assertTrue(os.path.islink(link_path))
        self.assertEqual(b"target/path", os.readlink(link_path).encode())
        # Test 1: Replace symlink with regular file
        blob2 = Blob()
        blob2.data = b"file content"
        self.repo.object_store.add_object(blob2)
        tree2 = Tree()
        tree2[b"link"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree2)
        update_working_tree(self.repo, tree1.id, tree2.id)
        self.assertFalse(os.path.islink(link_path))
        self.assertTrue(os.path.isfile(link_path))
        with open(link_path, "rb") as f:
            self.assertEqual(b"file content", f.read())
        # Test 2: Replace file with symlink
        update_working_tree(self.repo, tree2.id, tree1.id)
        self.assertTrue(os.path.islink(link_path))
        self.assertEqual(b"target/path", os.readlink(link_path).encode())
        # Test 3: Replace symlink with directory (manually)
        os.unlink(link_path)
        os.mkdir(link_path)
        # Create empty tree
        tree3 = Tree()
        self.repo.object_store.add_object(tree3)
        # Should remove empty directory
        update_working_tree(self.repo, tree1.id, tree3.id)
        self.assertFalse(os.path.exists(link_path))

    def test_update_working_tree_modified_file_to_dir_transition(self):
        """Test that modified files are not removed when they should become directories."""
        # Create tree with file
        blob1 = Blob()
        blob1.data = b"original content"
        self.repo.object_store.add_object(blob1)
        tree1 = Tree()
        tree1[b"path"] = (0o100644, blob1.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        file_path = os.path.join(self.tempdir, "path")
        # Modify the file locally
        with open(file_path, "w") as f:
            f.write("modified content")
        # Create tree where path is a directory with a file
        blob2 = Blob()
        blob2.data = b"subfile content"
        self.repo.object_store.add_object(blob2)
        tree2 = Tree()
        tree2[b"path/subfile"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree2)
        # Update should fail because a directory cannot be created where a modified file exists
        with self.assertRaises(IOError):
            update_working_tree(self.repo, tree1.id, tree2.id)
        # File should still exist with modifications
        self.assertTrue(os.path.isfile(file_path))
        with open(file_path) as f:
            self.assertEqual("modified content", f.read())

    def test_update_working_tree_executable_transitions(self):
        """Test transitions involving executable bit changes."""
        # Skip on Windows where executable bit is not supported
        if sys.platform == "win32":
            self.skipTest("Executable bit not supported on Windows")
        # Create tree with non-executable file
        blob = Blob()
        blob.data = b"#!/bin/sh\necho hello"
        self.repo.object_store.add_object(blob)
        tree1 = Tree()
        tree1[b"script.sh"] = (0o100644, blob.id)  # Non-executable
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        script_path = os.path.join(self.tempdir, "script.sh")
        self.assertTrue(os.path.isfile(script_path))
        # Check it's not executable
        mode = os.stat(script_path).st_mode
        self.assertFalse(mode & stat.S_IXUSR)
        # Create tree with executable file (same content)
        tree2 = Tree()
        tree2[b"script.sh"] = (0o100755, blob.id)  # Executable
        self.repo.object_store.add_object(tree2)
        # Update to tree2
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Check it's now executable
        mode = os.stat(script_path).st_mode
        self.assertTrue(mode & stat.S_IXUSR)

    def test_update_working_tree_submodule_with_untracked_files(self):
        """Test that submodules with untracked files are not removed."""
        from dulwich.objects import S_IFGITLINK, Tree

        # Create tree with submodule
        submodule_sha = b"a" * 40
        tree1 = Tree()
        tree1[b"submodule"] = (S_IFGITLINK, submodule_sha)
        self.repo.object_store.add_object(tree1)
        # Update to tree with submodule
        update_working_tree(self.repo, None, tree1.id)
        # Add untracked file to submodule directory
        submodule_path = os.path.join(self.tempdir, "submodule")
        untracked_path = os.path.join(submodule_path, "untracked.txt")
        with open(untracked_path, "w") as f:
            f.write("untracked content")
        # Create empty tree
        tree2 = Tree()
        self.repo.object_store.add_object(tree2)
        # Update should not remove submodule directory with untracked files
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Directory should still exist with untracked file
        self.assertTrue(os.path.isdir(submodule_path))
        self.assertTrue(os.path.exists(untracked_path))

    def test_update_working_tree_dir_to_file_with_subdir(self):
        """Test replacing directory structure with a file."""
        # Create tree with nested directory structure
        blob1 = Blob()
        blob1.data = b"content1"
        self.repo.object_store.add_object(blob1)
        blob2 = Blob()
        blob2.data = b"content2"
        self.repo.object_store.add_object(blob2)
        tree1 = Tree()
        tree1[b"dir/subdir/file1"] = (0o100644, blob1.id)
        tree1[b"dir/subdir/file2"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Verify structure exists
        dir_path = os.path.join(self.tempdir, "dir")
        self.assertTrue(os.path.isdir(dir_path))
        # Add an untracked file to make directory truly non-empty
        untracked_path = os.path.join(dir_path, "untracked.txt")
        with open(untracked_path, "w") as f:
            f.write("untracked content")
        # Create tree with file at "dir" path
        blob3 = Blob()
        blob3.data = b"replacement file"
        self.repo.object_store.add_object(blob3)
        tree2 = Tree()
        tree2[b"dir"] = (0o100644, blob3.id)
        self.repo.object_store.add_object(tree2)
        # Update should fail because directory is not empty
        with self.assertRaises(IsADirectoryError):
            update_working_tree(self.repo, tree1.id, tree2.id)
        # Directory should still exist
        self.assertTrue(os.path.isdir(dir_path))

    def test_update_working_tree_case_sensitivity(self):
        """Test handling of case-sensitive filename changes."""
        # Create tree with lowercase file
        blob1 = Blob()
        blob1.data = b"lowercase content"
        self.repo.object_store.add_object(blob1)
        tree1 = Tree()
        tree1[b"readme.txt"] = (0o100644, blob1.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Create tree with uppercase file (different content)
        blob2 = Blob()
        blob2.data = b"uppercase content"
        self.repo.object_store.add_object(blob2)
        tree2 = Tree()
        tree2[b"README.txt"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree2)
        # Update to tree2
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Check what exists (behavior depends on filesystem)
        lowercase_path = os.path.join(self.tempdir, "readme.txt")
        uppercase_path = os.path.join(self.tempdir, "README.txt")
        # On case-insensitive filesystems, one will overwrite the other
        # On case-sensitive filesystems, both may exist
        self.assertTrue(
            os.path.exists(lowercase_path) or os.path.exists(uppercase_path)
        )

    def test_update_working_tree_deeply_nested_removal(self):
        """Test removal of deeply nested directory structures."""
        # Create deeply nested structure
        blob = Blob()
        blob.data = b"deep content"
        self.repo.object_store.add_object(blob)
        tree1 = Tree()
        # Create a very deep path
        deep_path = b"/".join([b"level%d" % i for i in range(10)])
        tree1[deep_path + b"/file.txt"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Verify deep structure exists
        current_path = self.tempdir
        for i in range(10):
            current_path = os.path.join(current_path, f"level{i}")
            self.assertTrue(os.path.isdir(current_path))
        # Create empty tree
        tree2 = Tree()
        self.repo.object_store.add_object(tree2)
        # Update should remove all empty directories
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Verify top level directory is gone
        top_level = os.path.join(self.tempdir, "level0")
        self.assertFalse(os.path.exists(top_level))

    def test_update_working_tree_read_only_files(self):
        """Test handling of read-only files during updates."""
        # Create tree with file
        blob1 = Blob()
        blob1.data = b"original content"
        self.repo.object_store.add_object(blob1)
        tree1 = Tree()
        tree1[b"readonly.txt"] = (0o100644, blob1.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Make file read-only
        file_path = os.path.join(self.tempdir, "readonly.txt")
        os.chmod(file_path, 0o444)  # Read-only
        # Create tree with modified file
        blob2 = Blob()
        blob2.data = b"new content"
        self.repo.object_store.add_object(blob2)
        tree2 = Tree()
        tree2[b"readonly.txt"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree2)
        # Update should handle read-only file
        update_working_tree(self.repo, tree1.id, tree2.id)
        # Verify content was updated
        with open(file_path, "rb") as f:
            self.assertEqual(b"new content", f.read())

    def test_update_working_tree_invalid_filenames(self):
        """Test handling of invalid filenames for the platform."""
        # Create tree with potentially problematic filenames
        blob = Blob()
        blob.data = b"content"
        self.repo.object_store.add_object(blob)
        tree = Tree()
        # Add files with names that might be invalid on some platforms
        tree[b"valid.txt"] = (0o100644, blob.id)
        if sys.platform != "win32":
            # These are invalid on Windows but valid on Unix
            tree[b"file:with:colons.txt"] = (0o100644, blob.id)
            tree[b"file<with>brackets.txt"] = (0o100644, blob.id)
        self.repo.object_store.add_object(tree)
        # Update should skip invalid files based on validation
        update_working_tree(self.repo, None, tree.id)
        # Valid file should exist
        self.assertTrue(os.path.exists(os.path.join(self.tempdir, "valid.txt")))

    def test_update_working_tree_symlink_to_directory(self):
        """Test replacing a symlink pointing to a directory with a real directory."""
        if sys.platform == "win32":
            self.skipTest("Symlinks not fully supported on Windows")
        # Create a target directory
        target_dir = os.path.join(self.tempdir, "target")
        os.mkdir(target_dir)
        with open(os.path.join(target_dir, "file.txt"), "w") as f:
            f.write("target file")
        # Create tree with symlink pointing to directory
        blob1 = Blob()
        blob1.data = b"target"  # Relative path to target directory
        self.repo.object_store.add_object(blob1)
        tree1 = Tree()
        tree1[b"link"] = (0o120000, blob1.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        link_path = os.path.join(self.tempdir, "link")
        self.assertTrue(os.path.islink(link_path))
        # Create tree with actual directory at same path
        blob2 = Blob()
        blob2.data = b"new file content"
        self.repo.object_store.add_object(blob2)
        tree2 = Tree()
        tree2[b"link/newfile.txt"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree2)
        # Update should replace symlink with actual directory
        update_working_tree(self.repo, tree1.id, tree2.id)
        self.assertFalse(os.path.islink(link_path))
        self.assertTrue(os.path.isdir(link_path))
        self.assertTrue(os.path.exists(os.path.join(link_path, "newfile.txt")))
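
    # For orientation in the transition test below, these are the tree entry
    # modes it uses (values as they appear in git tree entries; listed here as a
    # reading aid, derived from the assertions in this class):
    #   0o100644      regular file
    #   0o100755      executable file
    #   0o120000      symlink
    #   S_IFGITLINK   submodule (gitlink)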

    def test_update_working_tree_comprehensive_transitions(self):
        """Test all possible file type transitions comprehensively."""
        # Skip on Windows where symlinks might not be supported
        if sys.platform == "win32":
            self.skipTest("Symlinks not fully supported on Windows")
        # Create blobs for different file types
        file_blob = Blob()
        file_blob.data = b"regular file content"
        self.repo.object_store.add_object(file_blob)
        exec_blob = Blob()
        exec_blob.data = b"#!/bin/sh\necho executable"
        self.repo.object_store.add_object(exec_blob)
        link_blob = Blob()
        link_blob.data = b"target/path"
        self.repo.object_store.add_object(link_blob)
        submodule_sha = b"a" * 40
        # Test 1: Regular file → Submodule
        tree1 = Tree()
        tree1[b"item"] = (0o100644, file_blob.id)
        self.repo.object_store.add_object(tree1)
        tree2 = Tree()
        tree2[b"item"] = (S_IFGITLINK, submodule_sha)
        self.repo.object_store.add_object(tree2)
        update_working_tree(self.repo, None, tree1.id)
        self.assertTrue(os.path.isfile(os.path.join(self.tempdir, "item")))
        update_working_tree(self.repo, tree1.id, tree2.id)
        self.assertTrue(os.path.isdir(os.path.join(self.tempdir, "item")))
        # Test 2: Submodule → Executable file
        tree3 = Tree()
        tree3[b"item"] = (0o100755, exec_blob.id)
        self.repo.object_store.add_object(tree3)
        update_working_tree(self.repo, tree2.id, tree3.id)
        item_path = os.path.join(self.tempdir, "item")
        self.assertTrue(os.path.isfile(item_path))
        if sys.platform != "win32":
            self.assertTrue(os.access(item_path, os.X_OK))
        # Test 3: Executable file → Symlink
        tree4 = Tree()
        tree4[b"item"] = (0o120000, link_blob.id)
        self.repo.object_store.add_object(tree4)
        update_working_tree(self.repo, tree3.id, tree4.id)
        self.assertTrue(os.path.islink(item_path))
        # Test 4: Symlink → Submodule
        tree5 = Tree()
        tree5[b"item"] = (S_IFGITLINK, submodule_sha)
        self.repo.object_store.add_object(tree5)
        update_working_tree(self.repo, tree4.id, tree5.id)
        self.assertTrue(os.path.isdir(item_path))
        # Test 5: Clean up - Submodule → absent
        tree6 = Tree()
        self.repo.object_store.add_object(tree6)
        update_working_tree(self.repo, tree5.id, tree6.id)
        self.assertFalse(os.path.exists(item_path))
        # Test 6: Symlink → Executable file
        tree7 = Tree()
        tree7[b"item2"] = (0o120000, link_blob.id)
        self.repo.object_store.add_object(tree7)
        update_working_tree(self.repo, tree6.id, tree7.id)
        item2_path = os.path.join(self.tempdir, "item2")
        self.assertTrue(os.path.islink(item2_path))
        tree8 = Tree()
        tree8[b"item2"] = (0o100755, exec_blob.id)
        self.repo.object_store.add_object(tree8)
        update_working_tree(self.repo, tree7.id, tree8.id)
        self.assertTrue(os.path.isfile(item2_path))
        if sys.platform != "win32":
            self.assertTrue(os.access(item2_path, os.X_OK))

    def test_update_working_tree_partial_update_failure(self):
        """Test handling when update fails partway through."""
        # Create initial tree
        blob1 = Blob()
        blob1.data = b"file1 content"
        self.repo.object_store.add_object(blob1)
        blob2 = Blob()
        blob2.data = b"file2 content"
        self.repo.object_store.add_object(blob2)
        tree1 = Tree()
        tree1[b"file1.txt"] = (0o100644, blob1.id)
        tree1[b"file2.txt"] = (0o100644, blob2.id)
        self.repo.object_store.add_object(tree1)
        # Update to tree1
        update_working_tree(self.repo, None, tree1.id)
        # Create a directory where file2.txt is, to cause a conflict
        file2_path = os.path.join(self.tempdir, "file2.txt")
        os.remove(file2_path)
        os.mkdir(file2_path)
        # Add untracked file to prevent removal
        with open(os.path.join(file2_path, "blocker.txt"), "w") as f:
            f.write("blocking content")
        # Create tree with updates to both files
        blob3 = Blob()
        blob3.data = b"file1 updated"
        self.repo.object_store.add_object(blob3)
        blob4 = Blob()
        blob4.data = b"file2 updated"
        self.repo.object_store.add_object(blob4)
        tree2 = Tree()
        tree2[b"file1.txt"] = (0o100644, blob3.id)
        tree2[b"file2.txt"] = (0o100644, blob4.id)
        self.repo.object_store.add_object(tree2)
        # Update should partially succeed - file1 updated, file2 blocked
        try:
            update_working_tree(self.repo, tree1.id, tree2.id)
        except IsADirectoryError:
            # Expected to fail on file2 because it's a directory
            pass
        # file1 should be updated
        with open(os.path.join(self.tempdir, "file1.txt"), "rb") as f:
            self.assertEqual(b"file1 updated", f.read())
        # file2 should still be a directory
        self.assertTrue(os.path.isdir(file2_path))