# test_object_store.py -- tests for object_store.py
# Copyright (C) 2008 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for the object store interface."""

import os
import shutil
import stat
import sys
import tempfile
from contextlib import closing
from io import BytesIO

from dulwich.errors import NotTreeError
from dulwich.index import commit_tree
from dulwich.object_store import (
    DiskObjectStore,
    MemoryObjectStore,
    ObjectStoreGraphWalker,
    OverlayObjectStore,
    commit_tree_changes,
    read_packs_file,
    tree_lookup_path,
)
from dulwich.objects import (
    S_IFGITLINK,
    Blob,
    EmptyFileException,
    SubmoduleEncountered,
    Tree,
    TreeEntry,
    sha_to_hex,
)
from dulwich.pack import REF_DELTA, write_pack_objects
from dulwich.tests.test_object_store import ObjectStoreTests, PackBasedObjectStoreTests
from dulwich.tests.utils import build_pack, make_object

from . import TestCase

testobject = make_object(Blob, data=b"yummy data")
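

# The ObjectStoreTests / PackBasedObjectStoreTests mixins imported above carry
# the shared store test methods; each concrete TestCase below only wires up the
# particular store implementation under test in setUp().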
class OverlayObjectStoreTests(ObjectStoreTests, TestCase):
    def setUp(self) -> None:
        TestCase.setUp(self)
        self.bases = [MemoryObjectStore(), MemoryObjectStore()]
        self.store = OverlayObjectStore(self.bases, self.bases[0])


class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
    def setUp(self) -> None:
        TestCase.setUp(self)
        self.store = MemoryObjectStore()

    def test_add_pack(self) -> None:
        o = MemoryObjectStore()
        f, commit, abort = o.add_pack()
        try:
            b = make_object(Blob, data=b"more yummy data")
            write_pack_objects(f.write, [(b, None)])
        except BaseException:
            abort()
            raise
        else:
            commit()

    def test_add_pack_empty(self) -> None:
        o = MemoryObjectStore()
        f, commit, abort = o.add_pack()
        commit()
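
    # A "thin" pack may contain REF_DELTA entries whose base objects are not in
    # the pack itself; add_thin_pack() is expected to resolve those deltas
    # against objects already present in the store.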
    def test_add_thin_pack(self) -> None:
        o = MemoryObjectStore()
        blob = make_object(Blob, data=b"yummy data")
        o.add_object(blob)

        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (blob.id, b"more yummy data")),
            ],
            store=o,
        )
        o.add_thin_pack(f.read, None)
        packed_blob_sha = sha_to_hex(entries[0][3])
        self.assertEqual(
            (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha)
        )

    def test_add_thin_pack_empty(self) -> None:
        o = MemoryObjectStore()
        f = BytesIO()
        entries = build_pack(f, [], store=o)
        self.assertEqual([], entries)
        o.add_thin_pack(f.read, None)

    def test_add_pack_data_with_deltas(self) -> None:
        """Test that add_pack_data properly handles delta objects.

        This test verifies that MemoryObjectStore.add_pack_data can handle
        pack data containing delta objects. Before the fix for issue #1179,
        this would fail with AssertionError when trying to call sha_file()
        on unresolved delta objects.

        The fix routes through add_pack() which properly resolves deltas.
        """
        o1 = MemoryObjectStore()
        o2 = MemoryObjectStore()
        base_blob = make_object(Blob, data=b"base data")
        o1.add_object(base_blob)

        # Create a pack with a delta object
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (base_blob.id, b"more data")),
            ],
            store=o1,
        )

        # Use add_thin_pack which internally calls add_pack_data.
        # This demonstrates the scenario where delta resolution is needed.
        f.seek(0)
        o2.add_object(base_blob)  # Need base object for thin pack
        o2.add_thin_pack(f.read, None)

        # Verify the delta object was properly resolved and added
        packed_blob_sha = sha_to_hex(entries[0][3])
        self.assertIn(packed_blob_sha, o2)
        self.assertEqual((Blob.type_num, b"more data"), o2.get_raw(packed_blob_sha))


class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
    def setUp(self) -> None:
        TestCase.setUp(self)
        self.store_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.store_dir)
        self.store = DiskObjectStore.init(self.store_dir)

    def tearDown(self) -> None:
        TestCase.tearDown(self)
        PackBasedObjectStoreTests.tearDown(self)

    def test_loose_compression_level(self) -> None:
        alternate_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, alternate_dir)
        alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6)
        b2 = make_object(Blob, data=b"yummy data")
        alternate_store.add_object(b2)
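
    # The alternates tests exercise git's objects/info/alternates mechanism:
    # each line of that file names another object directory whose objects
    # become visible through this store without being copied into it.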
    def test_alternates(self) -> None:
        alternate_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, alternate_dir)
        alternate_store = DiskObjectStore(alternate_dir)
        b2 = make_object(Blob, data=b"yummy data")
        alternate_store.add_object(b2)
        store = DiskObjectStore(self.store_dir)
        self.assertRaises(KeyError, store.__getitem__, b2.id)
        store.add_alternate_path(alternate_dir)
        self.assertIn(b2.id, store)
        self.assertEqual(b2, store[b2.id])

    def test_read_alternate_paths(self) -> None:
        store = DiskObjectStore(self.store_dir)

        abs_path = os.path.abspath(os.path.normpath("/abspath"))
        # ensures in particular existence of the alternates file
        store.add_alternate_path(abs_path)
        self.assertEqual(set(store._read_alternate_paths()), {abs_path})

        store.add_alternate_path("relative-path")
        self.assertIn(
            os.path.join(store.path, "relative-path"),
            set(store._read_alternate_paths()),
        )

        # arguably, add_alternate_path() could strip comments.
        # Meanwhile it's more convenient to use it than to import INFODIR.
        store.add_alternate_path("# comment")
        for alt_path in store._read_alternate_paths():
            self.assertNotIn("#", alt_path)
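
    # Loose object files are written read-only: the assertion below expects
    # 0o100444 on POSIX systems, while os.stat() on Windows reports 0o100666
    # because it does not expose the same permission bits.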
    def test_file_modes(self) -> None:
        self.store.add_object(testobject)
        path = self.store._get_shafile_path(testobject.id)
        mode = os.stat(path).st_mode
        packmode = "0o100444" if sys.platform != "win32" else "0o100666"
        self.assertEqual(oct(mode), packmode)

    def test_corrupted_object_raise_exception(self) -> None:
        """Corrupted sha1 disk file should raise specific exception."""
        self.store.add_object(testobject)
        self.assertEqual(
            (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
        )
        self.assertTrue(self.store.contains_loose(testobject.id))
        self.assertIsNotNone(self.store._get_loose_object(testobject.id))

        path = self.store._get_shafile_path(testobject.id)
        old_mode = os.stat(path).st_mode
        os.chmod(path, 0o600)
        with open(path, "wb") as f:  # corrupt the file
            f.write(b"")
        os.chmod(path, old_mode)

        expected_error_msg = "Corrupted empty file detected"
        try:
            self.store.contains_loose(testobject.id)
        except EmptyFileException as e:
            self.assertEqual(str(e), expected_error_msg)
        try:
            self.store._get_loose_object(testobject.id)
        except EmptyFileException as e:
            self.assertEqual(str(e), expected_error_msg)

        # this does not change iteration on loose objects though
        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))

    def test_tempfile_in_loose_store(self) -> None:
        self.store.add_object(testobject)
        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))

        # add temporary files to the loose store
        for i in range(256):
            dirname = os.path.join(self.store_dir, f"{i:02x}")
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            fd, n = tempfile.mkstemp(prefix="tmp_obj_", dir=dirname)
            os.close(fd)
        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))

    def test_add_alternate_path(self) -> None:
        store = DiskObjectStore(self.store_dir)
        self.assertEqual([], list(store._read_alternate_paths()))
        store.add_alternate_path(os.path.abspath("/foo/path"))
        self.assertEqual(
            [os.path.abspath("/foo/path")], list(store._read_alternate_paths())
        )
        if sys.platform == "win32":
            store.add_alternate_path("D:\\bar\\path")
        else:
            store.add_alternate_path("/bar/path")
        if sys.platform == "win32":
            self.assertEqual(
                [os.path.abspath("/foo/path"), "D:\\bar\\path"],
                list(store._read_alternate_paths()),
            )
        else:
            self.assertEqual(
                [os.path.abspath("/foo/path"), "/bar/path"],
                list(store._read_alternate_paths()),
            )

    def test_rel_alternative_path(self) -> None:
        alternate_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, alternate_dir)
        alternate_store = DiskObjectStore(alternate_dir)
        b2 = make_object(Blob, data=b"yummy data")
        alternate_store.add_object(b2)
        store = DiskObjectStore(self.store_dir)
        self.assertRaises(KeyError, store.__getitem__, b2.id)
        store.add_alternate_path(os.path.relpath(alternate_dir, self.store_dir))
        self.assertEqual(list(alternate_store), list(store.alternates[0]))
        self.assertIn(b2.id, store)
        self.assertEqual(b2, store[b2.id])

    def test_pack_dir(self) -> None:
        o = DiskObjectStore(self.store_dir)
        self.assertEqual(os.path.join(self.store_dir, "pack"), o.pack_dir)

    def test_add_pack(self) -> None:
        o = DiskObjectStore(self.store_dir)
        self.addCleanup(o.close)
        f, commit, abort = o.add_pack()
        try:
            b = make_object(Blob, data=b"more yummy data")
            write_pack_objects(f.write, [(b, None)])
        except BaseException:
            abort()
            raise
        else:
            commit()

    def test_add_thin_pack(self) -> None:
        o = DiskObjectStore(self.store_dir)
        try:
            blob = make_object(Blob, data=b"yummy data")
            o.add_object(blob)

            f = BytesIO()
            entries = build_pack(
                f,
                [
                    (REF_DELTA, (blob.id, b"more yummy data")),
                ],
                store=o,
            )

            with o.add_thin_pack(f.read, None) as pack:
                packed_blob_sha = sha_to_hex(entries[0][3])
                pack.check_length_and_checksum()
                self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
                self.assertTrue(o.contains_packed(packed_blob_sha))
                self.assertTrue(o.contains_packed(blob.id))
                self.assertEqual(
                    (Blob.type_num, b"more yummy data"),
                    o.get_raw(packed_blob_sha),
                )
        finally:
            o.close()

    def test_add_thin_pack_empty(self) -> None:
        with closing(DiskObjectStore(self.store_dir)) as o:
            f = BytesIO()
            entries = build_pack(f, [], store=o)
            self.assertEqual([], entries)
            o.add_thin_pack(f.read, None)
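
    # DiskObjectStore.from_config() is expected to honour the pack.indexVersion
    # setting when writing pack indexes; the test below checks index format
    # versions 1 and 3 explicitly.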
    def test_pack_index_version_config(self) -> None:
        # Test that the pack.indexVersion configuration is respected.
        from dulwich.config import ConfigDict
        from dulwich.pack import load_pack_index

        # Create a config with pack.indexVersion = 1
        config = ConfigDict()
        config[(b"pack",)] = {b"indexVersion": b"1"}

        # Create an object store with that config
        store_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, store_dir)
        os.makedirs(os.path.join(store_dir, "pack"))
        store = DiskObjectStore.from_config(store_dir, config)

        # Create some objects to pack
        b1 = make_object(Blob, data=b"blob1")
        b2 = make_object(Blob, data=b"blob2")
        store.add_objects([(b1, None), (b2, None)])

        # Add a pack
        f, commit, abort = store.add_pack()
        try:
            # build_pack expects (type_num, data) tuples
            objects_spec = [
                (b1.type_num, b1.as_raw_string()),
                (b2.type_num, b2.as_raw_string()),
            ]
            build_pack(f, objects_spec, store=store)
            commit()
        except BaseException:
            abort()
            raise

        # Find the created pack index
        pack_dir = os.path.join(store_dir, "pack")
        idx_files = [f for f in os.listdir(pack_dir) if f.endswith(".idx")]
        self.assertEqual(1, len(idx_files))

        # Load it and verify it is version 1
        idx_path = os.path.join(pack_dir, idx_files[0])
        idx = load_pack_index(idx_path)
        self.assertEqual(1, idx.version)

        # Test version 3
        config[(b"pack",)] = {b"indexVersion": b"3"}
        store_dir2 = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, store_dir2)
        os.makedirs(os.path.join(store_dir2, "pack"))
        store2 = DiskObjectStore.from_config(store_dir2, config)

        b3 = make_object(Blob, data=b"blob3")
        store2.add_objects([(b3, None)])

        f2, commit2, abort2 = store2.add_pack()
        try:
            objects_spec2 = [(b3.type_num, b3.as_raw_string())]
            build_pack(f2, objects_spec2, store=store2)
            commit2()
        except BaseException:
            abort2()
            raise

        # Find and verify the version 3 index
        pack_dir2 = os.path.join(store_dir2, "pack")
        idx_files2 = [f for f in os.listdir(pack_dir2) if f.endswith(".idx")]
        self.assertEqual(1, len(idx_files2))
        idx_path2 = os.path.join(pack_dir2, idx_files2[0])
        idx2 = load_pack_index(idx_path2)
        self.assertEqual(3, idx2.version)
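

# tree_lookup_path() walks a path through nested Tree objects. Entries with
# mode S_IFGITLINK are submodules, and descending into one raises
# SubmoduleEncountered.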
class TreeLookupPathTests(TestCase):
    def setUp(self) -> None:
        TestCase.setUp(self)
        self.store = MemoryObjectStore()
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        blob_c = make_object(Blob, data=b"c")
        for blob in [blob_a, blob_b, blob_c]:
            self.store.add_object(blob)

        blobs = [
            (b"a", blob_a.id, 0o100644),
            (b"ad/b", blob_b.id, 0o100644),
            (b"ad/bd/c", blob_c.id, 0o100755),
            (b"ad/c", blob_c.id, 0o100644),
            (b"c", blob_c.id, 0o100644),
            (b"d", blob_c.id, S_IFGITLINK),
        ]
        self.tree_id = commit_tree(self.store, blobs)

    def get_object(self, sha):
        return self.store[sha]

    def test_lookup_blob(self) -> None:
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")[1]
        self.assertIsInstance(self.store[o_id], Blob)

    def test_lookup_tree(self) -> None:
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad")[1]
        self.assertIsInstance(self.store[o_id], Tree)
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd")[1]
        self.assertIsInstance(self.store[o_id], Tree)
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd/")[1]
        self.assertIsInstance(self.store[o_id], Tree)

    def test_lookup_submodule(self) -> None:
        tree_lookup_path(self.get_object, self.tree_id, b"d")[1]
        self.assertRaises(
            SubmoduleEncountered,
            tree_lookup_path,
            self.get_object,
            self.tree_id,
            b"d/a",
        )

    def test_lookup_nonexistent(self) -> None:
        self.assertRaises(
            KeyError, tree_lookup_path, self.get_object, self.tree_id, b"j"
        )

    def test_lookup_not_tree(self) -> None:
        self.assertRaises(
            NotTreeError,
            tree_lookup_path,
            self.get_object,
            self.tree_id,
            b"ad/b/j",
        )
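

# ObjectStoreGraphWalker yields candidate commit SHAs, and callers ack() the
# ones the other side already has, which prunes their ancestors from the walk.
# The tests fake 40-character hex SHAs by repeating a single byte. A rough
# usage sketch (hypothetical `head_sha`, `get_parents`, and `remote_has`;
# not executed here):
#
#     walker = ObjectStoreGraphWalker([head_sha], get_parents)
#     sha = next(walker)
#     while sha is not None:
#         if sha in remote_has:
#             walker.ack(sha)
#         sha = next(walker)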
class ObjectStoreGraphWalkerTests(TestCase):
    def get_walker(self, heads, parent_map):
        new_parent_map = {
            k * 40: [(p * 40) for p in ps] for (k, ps) in parent_map.items()
        }
        return ObjectStoreGraphWalker(
            [x * 40 for x in heads], new_parent_map.__getitem__
        )

    def test_ack_invalid_value(self) -> None:
        gw = self.get_walker([], {})
        self.assertRaises(ValueError, gw.ack, "tooshort")

    def test_empty(self) -> None:
        gw = self.get_walker([], {})
        self.assertIs(None, next(gw))
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_descends(self) -> None:
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
        self.assertEqual(b"a" * 40, next(gw))
        self.assertEqual(b"b" * 40, next(gw))

    def test_present(self) -> None:
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_parent_present(self) -> None:
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
        self.assertEqual(b"a" * 40, next(gw))
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_child_ack_later(self) -> None:
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": [b"c"], b"c": []})
        self.assertEqual(b"a" * 40, next(gw))
        self.assertEqual(b"b" * 40, next(gw))
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_only_once(self) -> None:
        # a   b
        # |   |
        # c   d
        #  \ /
        #   e
        gw = self.get_walker(
            [b"a", b"b"],
            {
                b"a": [b"c"],
                b"b": [b"d"],
                b"c": [b"e"],
                b"d": [b"e"],
                b"e": [],
            },
        )
        walk = []
        acked = False
        walk.append(next(gw))
        walk.append(next(gw))

        # A branch (a, c) or (b, d) may be done after 2 steps or 3 depending on
        # the order walked: 3-step walks include (a, b, c) and (b, a, d), etc.
        if walk == [b"a" * 40, b"c" * 40] or walk == [b"b" * 40, b"d" * 40]:
            gw.ack(walk[0])
            acked = True

        walk.append(next(gw))
        if not acked and walk[2] == b"c" * 40:
            gw.ack(b"a" * 40)
        elif not acked and walk[2] == b"d" * 40:
            gw.ack(b"b" * 40)
        walk.append(next(gw))
        self.assertIs(None, next(gw))

        self.assertEqual([b"a" * 40, b"b" * 40, b"c" * 40, b"d" * 40], sorted(walk))
        self.assertLess(walk.index(b"a" * 40), walk.index(b"c" * 40))
        self.assertLess(walk.index(b"b" * 40), walk.index(b"d" * 40))
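

# commit_tree_changes() applies a list of (path, mode, sha) edits to an
# existing Tree and writes any new subtrees to the store; a change with mode
# and sha of None deletes the entry at that path.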
class CommitTreeChangesTests(TestCase):
    def setUp(self) -> None:
        super().setUp()
        self.store = MemoryObjectStore()
        self.blob_a = make_object(Blob, data=b"a")
        self.blob_b = make_object(Blob, data=b"b")
        self.blob_c = make_object(Blob, data=b"c")
        for blob in [self.blob_a, self.blob_b, self.blob_c]:
            self.store.add_object(blob)

        blobs = [
            (b"a", self.blob_a.id, 0o100644),
            (b"ad/b", self.blob_b.id, 0o100644),
            (b"ad/bd/c", self.blob_c.id, 0o100755),
            (b"ad/c", self.blob_c.id, 0o100644),
            (b"c", self.blob_c.id, 0o100644),
        ]
        self.tree_id = commit_tree(self.store, blobs)

    def test_no_changes(self) -> None:
        self.assertEqual(
            self.store[self.tree_id],
            commit_tree_changes(self.store, self.store[self.tree_id], []),
        )

    def test_add_blob(self) -> None:
        blob_d = make_object(Blob, data=b"d")
        new_tree = commit_tree_changes(
            self.store, self.store[self.tree_id], [(b"d", 0o100644, blob_d.id)]
        )
        self.assertEqual(
            new_tree[b"d"],
            (33188, b"c59d9b6344f1af00e504ba698129f07a34bbed8d"),
        )

    def test_add_blob_in_dir(self) -> None:
        blob_d = make_object(Blob, data=b"d")
        new_tree = commit_tree_changes(
            self.store,
            self.store[self.tree_id],
            [(b"e/f/d", 0o100644, blob_d.id)],
        )
        self.assertEqual(
            new_tree.items(),
            [
                TreeEntry(path=b"a", mode=stat.S_IFREG | 0o100644, sha=self.blob_a.id),
                TreeEntry(
                    path=b"ad",
                    mode=stat.S_IFDIR,
                    sha=b"0e2ce2cd7725ff4817791be31ccd6e627e801f4a",
                ),
                TreeEntry(path=b"c", mode=stat.S_IFREG | 0o100644, sha=self.blob_c.id),
                TreeEntry(
                    path=b"e",
                    mode=stat.S_IFDIR,
                    sha=b"6ab344e288724ac2fb38704728b8896e367ed108",
                ),
            ],
        )
        e_tree = self.store[new_tree[b"e"][1]]
        self.assertEqual(
            e_tree.items(),
            [
                TreeEntry(
                    path=b"f",
                    mode=stat.S_IFDIR,
                    sha=b"24d2c94d8af232b15a0978c006bf61ef4479a0a5",
                )
            ],
        )
        f_tree = self.store[e_tree[b"f"][1]]
        self.assertEqual(
            f_tree.items(),
            [TreeEntry(path=b"d", mode=stat.S_IFREG | 0o100644, sha=blob_d.id)],
        )

    def test_delete_blob(self) -> None:
        new_tree = commit_tree_changes(
            self.store, self.store[self.tree_id], [(b"ad/bd/c", None, None)]
        )
        self.assertEqual(set(new_tree), {b"a", b"ad", b"c"})
        ad_tree = self.store[new_tree[b"ad"][1]]
        self.assertEqual(set(ad_tree), {b"b", b"c"})
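

# read_packs_file() parses an objects/info/packs listing, where each pack is
# announced on its own line in the form "P <pack file name>".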
class TestReadPacksFile(TestCase):
    def test_read_packs(self) -> None:
        self.assertEqual(
            ["pack-1.pack"],
            list(
                read_packs_file(
                    BytesIO(
                        b"""P pack-1.pack
"""
                    )
                )
            ),
        )