  1. # test_pack.py -- Tests for the handling of git packs.
  2. # Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
  3. # Copyright (C) 2008 Jelmer Vernooij <jelmer@jelmer.uk>
  4. #
  5. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  6. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  7. # General Public License as published by the Free Software Foundation; version 2.0
  8. # or (at your option) any later version. You can redistribute it and/or
  9. # modify it under the terms of either of these two licenses.
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. #
  17. # You should have received a copy of the licenses; if not, see
  18. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  19. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  20. # License, Version 2.0.
  21. #
  22. """Tests for Dulwich packs."""
  23. import os
  24. import shutil
  25. import sys
  26. import tempfile
  27. import zlib
  28. from hashlib import sha1
  29. from io import BytesIO
  30. from typing import NoReturn
  31. from dulwich.errors import ApplyDeltaError, ChecksumMismatch
  32. from dulwich.file import GitFile
  33. from dulwich.object_format import DEFAULT_OBJECT_FORMAT
  34. from dulwich.object_store import MemoryObjectStore
  35. from dulwich.objects import Blob, Commit, Tree, hex_to_sha, sha_to_hex
  36. from dulwich.pack import (
  37. OFS_DELTA,
  38. REF_DELTA,
  39. DeltaChainIterator,
  40. MemoryPackIndex,
  41. Pack,
  42. PackData,
  43. PackIndex3,
  44. PackStreamReader,
  45. UnpackedObject,
  46. UnresolvedDeltas,
  47. _create_delta_py,
  48. _delta_encode_size,
  49. _encode_copy_operation,
  50. apply_delta,
  51. compute_file_sha,
  52. create_delta,
  53. deltify_pack_objects,
  54. load_pack_index,
  55. read_zlib_chunks,
  56. unpack_object,
  57. write_pack,
  58. write_pack_header,
  59. write_pack_index_v1,
  60. write_pack_index_v2,
  61. write_pack_index_v3,
  62. write_pack_object,
  63. )
  64. from dulwich.tests.utils import (
  65. build_pack,
  66. ext_functest_builder,
  67. functest_builder,
  68. make_object,
  69. )
  70. try:
  71. from dulwich._pack import create_delta as _create_delta_rs
  72. except ImportError:
  73. _create_delta_rs = None
  74. from . import TestCase
# Hex shas of the fixture pack and the three objects it contains; these are
# used throughout the tests below.
pack1_sha = b"bc63ddad95e7321ee734ea11a7a62d314e0d7481"  # name of the fixture pack
a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"  # blob object
tree_sha = b"b2a2766a2879c209ab1176e7e778b81ae422eeaa"  # tree object
commit_sha = b"f18faa16531ac570a3fdc8c7ca16682548dafd12"  # commit object
# Expected oct() string of the st_mode of freshly written index files;
# Windows reports different permission bits than POSIX platforms.
indexmode = "0o100644" if sys.platform != "win32" else "0o100666"
  80. class PackTests(TestCase):
  81. """Base class for testing packs."""
  82. def setUp(self) -> None:
  83. super().setUp()
  84. self.tempdir = tempfile.mkdtemp()
  85. self.addCleanup(shutil.rmtree, self.tempdir)
  86. datadir = os.path.abspath(
  87. os.path.join(os.path.dirname(__file__), "../testdata/packs")
  88. )
  89. def get_pack_index(self, sha):
  90. """Returns a PackIndex from the datadir with the given sha."""
  91. return load_pack_index(
  92. os.path.join(self.datadir, "pack-{}.idx".format(sha.decode("ascii"))),
  93. DEFAULT_OBJECT_FORMAT,
  94. )
  95. def get_pack_data(self, sha):
  96. """Returns a PackData object from the datadir with the given sha."""
  97. return PackData(
  98. os.path.join(self.datadir, "pack-{}.pack".format(sha.decode("ascii"))),
  99. object_format=DEFAULT_OBJECT_FORMAT,
  100. )
  101. def get_pack(self, sha):
  102. return Pack(
  103. os.path.join(self.datadir, "pack-{}".format(sha.decode("ascii"))),
  104. object_format=DEFAULT_OBJECT_FORMAT,
  105. )
  106. def assertSucceeds(self, func, *args, **kwargs) -> None:
  107. try:
  108. func(*args, **kwargs)
  109. except ChecksumMismatch as e:
  110. self.fail(e)
  111. class PackIndexTests(PackTests):
  112. """Class that tests the index of packfiles."""
  113. def test_object_offset(self) -> None:
  114. """Tests that the correct object offset is returned from the index."""
  115. p = self.get_pack_index(pack1_sha)
  116. self.assertRaises(KeyError, p.object_offset, pack1_sha)
  117. self.assertEqual(p.object_offset(a_sha), 178)
  118. self.assertEqual(p.object_offset(tree_sha), 138)
  119. self.assertEqual(p.object_offset(commit_sha), 12)
  120. def test_object_sha1(self) -> None:
  121. """Tests that the correct object offset is returned from the index."""
  122. p = self.get_pack_index(pack1_sha)
  123. self.assertRaises(KeyError, p.object_sha1, 876)
  124. self.assertEqual(p.object_sha1(178), hex_to_sha(a_sha))
  125. self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha))
  126. self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha))
  127. def test_iter_prefix(self) -> None:
  128. p = self.get_pack_index(pack1_sha)
  129. self.assertEqual([p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha))))
  130. self.assertEqual(
  131. [p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha)[:5]))
  132. )
  133. self.assertEqual(
  134. [p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha)[:2]))
  135. )
  136. def test_index_len(self) -> None:
  137. p = self.get_pack_index(pack1_sha)
  138. self.assertEqual(3, len(p))
  139. def test_get_stored_checksum(self) -> None:
  140. p = self.get_pack_index(pack1_sha)
  141. self.assertEqual(
  142. b"f2848e2ad16f329ae1c92e3b95e91888daa5bd01",
  143. sha_to_hex(p.get_stored_checksum()),
  144. )
  145. self.assertEqual(
  146. b"721980e866af9a5f93ad674144e1459b8ba3e7b7",
  147. sha_to_hex(p.get_pack_checksum()),
  148. )
  149. def test_index_check(self) -> None:
  150. p = self.get_pack_index(pack1_sha)
  151. self.assertSucceeds(p.check)
  152. def test_iterentries(self) -> None:
  153. p = self.get_pack_index(pack1_sha)
  154. entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()]
  155. self.assertEqual(
  156. [
  157. (b"6f670c0fb53f9463760b7295fbb814e965fb20c8", 178, None),
  158. (b"b2a2766a2879c209ab1176e7e778b81ae422eeaa", 138, None),
  159. (b"f18faa16531ac570a3fdc8c7ca16682548dafd12", 12, None),
  160. ],
  161. entries,
  162. )
  163. def test_iter(self) -> None:
  164. p = self.get_pack_index(pack1_sha)
  165. self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
class TestPackDeltas(TestCase):
    """Round-trip and edge-case tests for delta creation and application."""

    # Fixture strings: similar pairs, a short rewrite, and empty/large inputs.
    test_string1 = b"The answer was flailing in the wind"
    test_string2 = b"The answer was falling down the pipe"
    test_string3 = b"zzzzz"
    test_string_empty = b""
    test_string_big = b"Z" * 8192
    test_string_huge = b"Z" * 100000

    def _test_roundtrip(self, base, target) -> None:
        """Assert that applying create_delta(base, target) reproduces target."""
        self.assertEqual(
            target, b"".join(apply_delta(base, list(create_delta(base, target))))
        )

    def test_nochange(self) -> None:
        """Delta from a string to itself round-trips."""
        self._test_roundtrip(self.test_string1, self.test_string1)

    def test_nochange_huge(self) -> None:
        """Identity delta round-trips for a 100000-byte input."""
        self._test_roundtrip(self.test_string_huge, self.test_string_huge)

    def test_change(self) -> None:
        """Delta between two similar strings round-trips."""
        self._test_roundtrip(self.test_string1, self.test_string2)

    def test_rewrite(self) -> None:
        """Delta to a completely different string round-trips."""
        self._test_roundtrip(self.test_string1, self.test_string3)

    def test_empty_to_big(self) -> None:
        """Delta from empty base to an 8 KiB target round-trips."""
        self._test_roundtrip(self.test_string_empty, self.test_string_big)

    def test_empty_to_huge(self) -> None:
        """Delta from empty base to a 100000-byte target round-trips."""
        self._test_roundtrip(self.test_string_empty, self.test_string_huge)

    def test_huge_copy(self) -> None:
        """Delta with a large shared prefix round-trips."""
        self._test_roundtrip(
            self.test_string_huge + self.test_string1,
            self.test_string_huge + self.test_string2,
        )

    def test_dest_overflow(self) -> None:
        """apply_delta rejects deltas whose operations overflow the target size."""
        self.assertRaises(
            ApplyDeltaError,
            apply_delta,
            b"a" * 0x10000,
            b"\x80\x80\x04\x80\x80\x04\x80" + b"a" * 0x10000,
        )
        self.assertRaises(
            ApplyDeltaError, apply_delta, b"", b"\x00\x80\x02\xb0\x11\x11"
        )

    def test_apply_delta_invalid_opcode(self) -> None:
        """Test apply_delta with an invalid opcode."""
        # Create a delta with an invalid opcode (0xff is not valid)
        invalid_delta = [b"\xff\x01\x02"]
        base = b"test base"
        # Should raise ApplyDeltaError
        self.assertRaises(ApplyDeltaError, apply_delta, base, invalid_delta)

    def test_create_delta_insert_only(self) -> None:
        """Test create_delta when only insertions are required."""
        base = b""
        target = b"brand new content"
        delta = list(create_delta(base, target))
        # Apply the delta to verify it works correctly
        result = apply_delta(base, delta)
        self.assertEqual(target, b"".join(result))

    def test_create_delta_copy_only(self) -> None:
        """Test create_delta when only copy operations are required."""
        base = b"content to be copied"
        target = b"content to be copied"  # Identical to base
        delta = list(create_delta(base, target))
        # Apply the delta to verify
        result = apply_delta(base, delta)
        self.assertEqual(target, b"".join(result))

    def test_pypy_issue(self) -> None:
        # Test for https://github.com/jelmer/dulwich/issues/509 /
        # https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work
        chunks = [
            b"tree 03207ccf58880a748188836155ceed72f03d65d6\n"
            b"parent 408fbab530fd4abe49249a636a10f10f44d07a21\n"
            b"author Victor Stinner <victor.stinner@gmail.com> "
            b"1421355207 +0100\n"
            b"committer Victor Stinner <victor.stinner@gmail.com> "
            b"1421355207 +0100\n"
            b"\n"
            b"Backout changeset 3a06020af8cf\n"
            b"\nStreamWriter: close() now clears the reference to the "
            b"transport\n"
            b"\nStreamWriter now raises an exception if it is closed: "
            b"write(), writelines(),\n"
            b"write_eof(), can_write_eof(), get_extra_info(), drain().\n"
        ]
        delta = [
            b"\xcd\x03\xad\x03]tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n"
            b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b\x91]7\x0510738"
            b"\x91\x99@\x0b10738 +0100\x93\x04\x01\xc9"
        ]
        res = apply_delta(chunks, delta)
        expected = [
            b"tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n"
            b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b",
            b"\nauthor Victor Stinner <victor.stinner@gmail.com> 14213",
            b"10738",
            b" +0100\ncommitter Victor Stinner <victor.stinner@gmail.com> 14213",
            b"10738 +0100",
            b"\n\nStreamWriter: close() now clears the reference to the "
            b"transport\n\n"
            b"StreamWriter now raises an exception if it is closed: "
            b"write(), writelines(),\n"
            b"write_eof(), can_write_eof(), get_extra_info(), drain().\n",
        ]
        self.assertEqual(b"".join(expected), b"".join(res))

    def _do_test_create_delta_various_cases(self, create_delta_func):
        """Test create_delta with various input cases for both Python and Rust versions."""
        import types

        # Helper to normalize delta output (Rust returns bytes, Python returns Iterator[bytes])
        def get_delta(base, target):
            result = create_delta_func(base, target)
            # Check if it's a Rust extension (returns bytes directly)
            if isinstance(create_delta_func, types.BuiltinFunctionType):
                return result
            # Python version returns iterator
            return b"".join(result)

        # Test case 1: Identical content
        base = b"hello world"
        target = b"hello world"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 2: Complete rewrite
        base = b"aaaaaaaaaa"
        target = b"bbbbbbbbbb"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 3: Partial replacement
        base = b"The quick brown fox jumps over the lazy dog"
        target = b"The quick brown cat jumps over the lazy dog"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 4: Insertion at end
        base = b"hello"
        target = b"hello world"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 5: Deletion from end
        base = b"hello world"
        target = b"hello"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 6: Empty base
        base = b""
        target = b"new content"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 7: Empty target
        base = b"old content"
        target = b""
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 8: Large content
        base = b"x" * 10000
        target = b"x" * 9000 + b"y" * 1000
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)
        # Test case 9: Multiple changes
        base = b"line1\nline2\nline3\nline4\n"
        target = b"line1\nmodified2\nline3\nmodified4\n"
        delta = get_delta(base, target)
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)

    # Test both Python and Rust versions
    # (functest_builder/ext_functest_builder generate test methods bound to
    # the given implementation; the extension variant is skipped when the
    # Rust module is unavailable — presumably handled inside the builder.)
    test_create_delta_py = functest_builder(
        _do_test_create_delta_various_cases, _create_delta_py
    )
    test_create_delta_extension = ext_functest_builder(
        _do_test_create_delta_various_cases, _create_delta_rs
    )

    def _do_test_create_delta_output_consistency(self, create_delta_func):
        """Test that create_delta produces consistent and valid output."""
        import types

        # Helper to normalize delta output
        def get_delta(base, target):
            result = create_delta_func(base, target)
            if isinstance(create_delta_func, types.BuiltinFunctionType):
                return result
            return b"".join(result)

        test_cases = [
            (b"", b""),
            (b"a", b"a"),
            (b"abc", b"abc"),
            (b"abc", b"def"),
            (b"hello world", b"hello rust"),
            (b"x" * 100, b"y" * 100),
            (b"same prefix but different suffix", b"same prefix with new suffix"),
        ]
        for base, target in test_cases:
            delta = get_delta(base, target)
            # Verify delta can be applied
            result = b"".join(apply_delta(base, delta))
            self.assertEqual(
                target,
                result,
                f"Delta failed for base={base[:20]}... target={target[:20]}...",
            )
            # Verify delta is not empty (should have at least header)
            self.assertGreater(len(delta), 0)

    test_create_delta_output_consistency_py = functest_builder(
        _do_test_create_delta_output_consistency, _create_delta_py
    )
    test_create_delta_output_consistency_extension = ext_functest_builder(
        _do_test_create_delta_output_consistency, _create_delta_rs
    )

    def _do_test_create_delta_produces_valid_deltas(self, create_delta_func):
        """Test that deltas produced are valid Git delta format."""
        import types

        # Helper to normalize delta output
        def get_delta(base, target):
            result = create_delta_func(base, target)
            if isinstance(create_delta_func, types.BuiltinFunctionType):
                return result
            return b"".join(result)

        base = b"The quick brown fox"
        target = b"The slow brown fox"
        delta = get_delta(base, target)
        # A valid delta should have:
        # 1. Base size header
        # 2. Target size header
        # 3. Delta operations
        self.assertGreater(len(delta), 2)  # At minimum 2 header bytes
        # Apply delta to verify it's valid
        result = b"".join(apply_delta(base, delta))
        self.assertEqual(target, result)

    test_create_delta_valid_format_py = functest_builder(
        _do_test_create_delta_produces_valid_deltas, _create_delta_py
    )
    test_create_delta_valid_format_extension = ext_functest_builder(
        _do_test_create_delta_produces_valid_deltas, _create_delta_rs
    )
class TestPackData(PackTests):
    """Tests getting the data from the packfile."""

    def test_create_pack(self) -> None:
        """Opening the fixture pack data succeeds and can be closed."""
        self.get_pack_data(pack1_sha).close()

    def test_from_file(self) -> None:
        """PackData.from_file accepts an open file object plus its size."""
        path = os.path.join(
            self.datadir, "pack-{}.pack".format(pack1_sha.decode("ascii"))
        )
        with open(path, "rb") as f:
            PackData.from_file(f, DEFAULT_OBJECT_FORMAT, os.path.getsize(path))

    def test_pack_len(self) -> None:
        """The fixture pack data holds three objects."""
        with self.get_pack_data(pack1_sha) as p:
            self.assertEqual(3, len(p))

    def test_index_check(self) -> None:
        """check() passes on pristine pack data."""
        with self.get_pack_data(pack1_sha) as p:
            self.assertSucceeds(p.check)

    def test_get_stored_checksum(self) -> None:
        """Test getting the stored checksum of the pack data."""
        with self.get_pack_data(pack1_sha) as p:
            checksum = p.get_stored_checksum()
            self.assertEqual(20, len(checksum))
            # Verify it's a valid SHA1 hash (20 bytes)
            self.assertIsInstance(checksum, bytes)

    # Removed test_check_pack_data_size as it was accessing private attributes

    def test_close_twice(self) -> None:
        """Test that calling close multiple times is safe."""
        p = self.get_pack_data(pack1_sha)
        p.close()
        # Second close should not raise an exception
        p.close()

    def test_iter_unpacked(self) -> None:
        """iter_unpacked yields the raw commit, tree and blob in offset order."""
        with self.get_pack_data(pack1_sha) as p:
            commit_data = (
                b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n"
                b"author James Westby <jw+debian@jameswestby.net> "
                b"1174945067 +0100\n"
                b"committer James Westby <jw+debian@jameswestby.net> "
                b"1174945067 +0100\n"
                b"\n"
                b"Test commit\n"
            )
            blob_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
            # Raw tree entry: mode, name, NUL, then the 20-byte binary sha.
            tree_data = b"100644 a\0" + hex_to_sha(blob_sha)
            actual = list(p.iter_unpacked())
            self.assertEqual(
                [
                    UnpackedObject(
                        offset=12,
                        pack_type_num=1,
                        decomp_chunks=[commit_data],
                        crc32=None,
                    ),
                    UnpackedObject(
                        offset=138,
                        pack_type_num=2,
                        decomp_chunks=[tree_data],
                        crc32=None,
                    ),
                    UnpackedObject(
                        offset=178,
                        pack_type_num=3,
                        decomp_chunks=[b"test 1\n"],
                        crc32=None,
                    ),
                ],
                actual,
            )

    def test_iterentries(self) -> None:
        """iterentries yields (binary sha, offset, crc32) for each object."""
        with self.get_pack_data(pack1_sha) as p:
            entries = {(sha_to_hex(s), o, c) for s, o, c in p.iterentries()}
            self.assertEqual(
                {
                    (
                        b"6f670c0fb53f9463760b7295fbb814e965fb20c8",
                        178,
                        1373561701,
                    ),
                    (
                        b"b2a2766a2879c209ab1176e7e778b81ae422eeaa",
                        138,
                        912998690,
                    ),
                    (
                        b"f18faa16531ac570a3fdc8c7ca16682548dafd12",
                        12,
                        3775879613,
                    ),
                },
                entries,
            )

    def test_create_index_v1(self) -> None:
        """A freshly written v1 index equals the shipped fixture index."""
        with self.get_pack_data(pack1_sha) as p:
            filename = os.path.join(self.tempdir, "v1test.idx")
            p.create_index_v1(filename)
            idx1 = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
            idx2 = self.get_pack_index(pack1_sha)
            self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
            self.assertEqual(idx1, idx2)

    def test_create_index_v2(self) -> None:
        """A freshly written v2 index equals the shipped fixture index."""
        with self.get_pack_data(pack1_sha) as p:
            filename = os.path.join(self.tempdir, "v2test.idx")
            p.create_index_v2(filename)
            idx1 = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
            idx2 = self.get_pack_index(pack1_sha)
            self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
            self.assertEqual(idx1, idx2)

    def test_create_index_v3(self) -> None:
        """A freshly written v3 index equals the fixture and reports version 3."""
        with self.get_pack_data(pack1_sha) as p:
            filename = os.path.join(self.tempdir, "v3test.idx")
            p.create_index_v3(filename)
            idx1 = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
            idx2 = self.get_pack_index(pack1_sha)
            self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
            self.assertEqual(idx1, idx2)
            self.assertIsInstance(idx1, PackIndex3)
            self.assertEqual(idx1.version, 3)

    def test_create_index_version3(self) -> None:
        """create_index(version=3) produces a PackIndex3."""
        with self.get_pack_data(pack1_sha) as p:
            filename = os.path.join(self.tempdir, "version3test.idx")
            p.create_index(filename, version=3)
            idx = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
            self.assertIsInstance(idx, PackIndex3)
            self.assertEqual(idx.version, 3)

    def test_compute_file_sha(self) -> None:
        """compute_file_sha honors buffer_size, start_ofs and end_ofs.

        NOTE(review): the same BytesIO is reused across calls, so
        compute_file_sha presumably seeks to the requested range itself
        rather than relying on the current file position.
        """
        f = BytesIO(b"abcd1234wxyz")
        try:
            self.assertEqual(
                sha1(b"abcd1234wxyz").hexdigest(),
                compute_file_sha(f, DEFAULT_OBJECT_FORMAT.hash_func).hexdigest(),
            )
            self.assertEqual(
                sha1(b"abcd1234wxyz").hexdigest(),
                compute_file_sha(
                    f, DEFAULT_OBJECT_FORMAT.hash_func, buffer_size=5
                ).hexdigest(),
            )
            # Negative end_ofs trims from the end of the file.
            self.assertEqual(
                sha1(b"abcd1234").hexdigest(),
                compute_file_sha(
                    f, DEFAULT_OBJECT_FORMAT.hash_func, end_ofs=-4
                ).hexdigest(),
            )
            self.assertEqual(
                sha1(b"1234wxyz").hexdigest(),
                compute_file_sha(
                    f, DEFAULT_OBJECT_FORMAT.hash_func, start_ofs=4
                ).hexdigest(),
            )
            self.assertEqual(
                sha1(b"1234").hexdigest(),
                compute_file_sha(
                    f, DEFAULT_OBJECT_FORMAT.hash_func, start_ofs=4, end_ofs=-4
                ).hexdigest(),
            )
        finally:
            f.close()

    def test_compute_file_sha_short_file(self) -> None:
        """Offsets beyond the 12-byte file's bounds raise AssertionError."""
        f = BytesIO(b"abcd1234wxyz")
        try:
            self.assertRaises(
                AssertionError,
                compute_file_sha,
                f,
                DEFAULT_OBJECT_FORMAT.hash_func,
                -20,
            )
            self.assertRaises(
                AssertionError,
                compute_file_sha,
                f,
                DEFAULT_OBJECT_FORMAT.hash_func,
                0,
                20,
            )
            self.assertRaises(
                AssertionError,
                compute_file_sha,
                f,
                DEFAULT_OBJECT_FORMAT.hash_func,
                10,
                -12,
            )
        finally:
            f.close()
  582. class TestPack(PackTests):
  583. def test_len(self) -> None:
  584. with self.get_pack(pack1_sha) as p:
  585. self.assertEqual(3, len(p))
  586. def test_contains(self) -> None:
  587. with self.get_pack(pack1_sha) as p:
  588. self.assertIn(tree_sha, p)
  589. def test_get(self) -> None:
  590. with self.get_pack(pack1_sha) as p:
  591. self.assertEqual(type(p[tree_sha]), Tree)
  592. def test_iter(self) -> None:
  593. with self.get_pack(pack1_sha) as p:
  594. self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
  595. def test_iterobjects(self) -> None:
  596. with self.get_pack(pack1_sha) as p:
  597. expected = {p[s] for s in [commit_sha, tree_sha, a_sha]}
  598. self.assertEqual(expected, set(list(p.iterobjects())))
  599. def test_pack_tuples(self) -> None:
  600. with self.get_pack(pack1_sha) as p:
  601. tuples = p.pack_tuples()
  602. expected = {(p[s], None) for s in [commit_sha, tree_sha, a_sha]}
  603. self.assertEqual(expected, set(list(tuples)))
  604. self.assertEqual(expected, set(list(tuples)))
  605. self.assertEqual(3, len(tuples))
  606. # Removed test_pack_tuples_with_progress as it was using parameters not supported by the API
  607. def test_get_object_at(self) -> None:
  608. """Tests random access for non-delta objects."""
  609. with self.get_pack(pack1_sha) as p:
  610. obj = p[a_sha]
  611. self.assertEqual(obj.type_name, b"blob")
  612. self.assertEqual(obj.sha().hexdigest().encode("ascii"), a_sha)
  613. obj = p[tree_sha]
  614. self.assertEqual(obj.type_name, b"tree")
  615. self.assertEqual(obj.sha().hexdigest().encode("ascii"), tree_sha)
  616. obj = p[commit_sha]
  617. self.assertEqual(obj.type_name, b"commit")
  618. self.assertEqual(obj.sha().hexdigest().encode("ascii"), commit_sha)
  619. def test_copy(self) -> None:
  620. with self.get_pack(pack1_sha) as origpack:
  621. self.assertSucceeds(origpack.index.check)
  622. basename = os.path.join(self.tempdir, "Elch")
  623. write_pack(
  624. basename, origpack.pack_tuples(), object_format=DEFAULT_OBJECT_FORMAT
  625. )
  626. with Pack(basename, object_format=DEFAULT_OBJECT_FORMAT) as newpack:
  627. self.assertEqual(origpack, newpack)
  628. self.assertSucceeds(newpack.index.check)
  629. self.assertEqual(origpack.name(), newpack.name())
  630. # Note: We don't compare pack data checksums here because Git does
  631. # not require deterministic object ordering in pack files. The same
  632. # set of objects can be written in different orders (e.g., due to
  633. # dict iteration order differences across Python versions/platforms),
  634. # producing different but equally valid pack files with different
  635. # checksums. The assertEqual above already verifies both packs
  636. # contain the same objects by comparing their indices.
  637. wrong_version = origpack.index.version != newpack.index.version
  638. orig_checksum = origpack.index.get_stored_checksum()
  639. new_checksum = newpack.index.get_stored_checksum()
  640. self.assertTrue(wrong_version or orig_checksum == new_checksum)
  641. def test_commit_obj(self) -> None:
  642. with self.get_pack(pack1_sha) as p:
  643. commit = p[commit_sha]
  644. self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", commit.author)
  645. self.assertEqual([], commit.parents)
  646. def _copy_pack(self, origpack):
  647. basename = os.path.join(self.tempdir, "somepack")
  648. write_pack(
  649. basename, origpack.pack_tuples(), object_format=DEFAULT_OBJECT_FORMAT
  650. )
  651. return Pack(basename, object_format=DEFAULT_OBJECT_FORMAT)
  652. def test_keep_no_message(self) -> None:
  653. with self.get_pack(pack1_sha) as p:
  654. p = self._copy_pack(p)
  655. with p:
  656. keepfile_name = p.keep()
  657. # file should exist
  658. self.assertTrue(os.path.exists(keepfile_name))
  659. with open(keepfile_name) as f:
  660. buf = f.read()
  661. self.assertEqual("", buf)
  662. def test_keep_message(self) -> None:
  663. with self.get_pack(pack1_sha) as p:
  664. p = self._copy_pack(p)
  665. msg = b"some message"
  666. with p:
  667. keepfile_name = p.keep(msg)
  668. # file should exist
  669. self.assertTrue(os.path.exists(keepfile_name))
  670. # and contain the right message, with a linefeed
  671. with open(keepfile_name, "rb") as f:
  672. buf = f.read()
  673. self.assertEqual(msg + b"\n", buf)
  674. def test_name(self) -> None:
  675. with self.get_pack(pack1_sha) as p:
  676. self.assertEqual(pack1_sha, p.name())
    def test_length_mismatch(self) -> None:
        """A pack header advertising the wrong object count fails both the
        lazy data accessor and the explicit length/checksum check."""
        with self.get_pack_data(pack1_sha) as data:
            index = self.get_pack_index(pack1_sha)
            pack = Pack.from_objects(data, index)
            # Sanity: the unmodified pack passes.
            pack.check_length_and_checksum()

            # Rebuild the pack with a bogus object count (9999) in the
            # header; seek(12) skips the original 12-byte pack header.
            data._file.seek(12)
            bad_file = BytesIO()
            write_pack_header(bad_file.write, 9999)
            bad_file.write(data._file.read())
            bad_file = BytesIO(bad_file.getvalue())
            bad_data = PackData("", file=bad_file, object_format=DEFAULT_OBJECT_FORMAT)
            bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
            self.assertRaises(AssertionError, lambda: bad_pack.data)
            self.assertRaises(AssertionError, bad_pack.check_length_and_checksum)
    def test_checksum_mismatch(self) -> None:
        """Corrupting the trailing pack checksum raises ChecksumMismatch."""
        with self.get_pack_data(pack1_sha) as data:
            index = self.get_pack_index(pack1_sha)
            pack = Pack.from_objects(data, index)
            # Sanity: the unmodified pack passes.
            pack.check_length_and_checksum()

            # Replace the final 20-byte checksum trailer with garbage.
            data._file.seek(0)
            bad_file = BytesIO(data._file.read()[:-20] + (b"\xff" * 20))
            bad_data = PackData("", file=bad_file, object_format=DEFAULT_OBJECT_FORMAT)
            bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
            self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
            self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum)
  702. def test_iterobjects_2(self) -> None:
  703. with self.get_pack(pack1_sha) as p:
  704. objs = {o.id: o for o in p.iterobjects()}
  705. self.assertEqual(3, len(objs))
  706. self.assertEqual(sorted(objs), sorted(p.index))
  707. self.assertIsInstance(objs[a_sha], Blob)
  708. self.assertIsInstance(objs[tree_sha], Tree)
  709. self.assertIsInstance(objs[commit_sha], Commit)
  710. def test_iterobjects_subset(self) -> None:
  711. with self.get_pack(pack1_sha) as p:
  712. objs = {o.id: o for o in p.iterobjects_subset([commit_sha])}
  713. self.assertEqual(1, len(objs))
  714. self.assertIsInstance(objs[commit_sha], Commit)
  715. def test_iterobjects_subset_empty(self) -> None:
  716. """Test iterobjects_subset with an empty subset."""
  717. with self.get_pack(pack1_sha) as p:
  718. objs = list(p.iterobjects_subset([]))
  719. self.assertEqual(0, len(objs))
  720. def test_iterobjects_subset_nonexistent(self) -> None:
  721. """Test iterobjects_subset with non-existent object IDs."""
  722. with self.get_pack(pack1_sha) as p:
  723. # Create a fake SHA that doesn't exist in the pack
  724. fake_sha = b"1" * 40
  725. # KeyError is expected when trying to access a non-existent object
  726. # We'll use a try-except block to test the behavior
  727. try:
  728. list(p.iterobjects_subset([fake_sha]))
  729. self.fail("Expected KeyError when accessing non-existent object")
  730. except KeyError:
  731. pass # This is the expected behavior
  732. def test_check_length_and_checksum(self) -> None:
  733. """Test that check_length_and_checksum works correctly."""
  734. with self.get_pack(pack1_sha) as p:
  735. # This should not raise an exception
  736. p.check_length_and_checksum()
class TestThinPack(PackTests):
    """Tests for thin packs: packs whose deltas may reference base objects
    stored outside the pack, resolved through an external object store."""

    def setUp(self) -> None:
        super().setUp()
        self.store = MemoryObjectStore()
        self.blobs = {}
        for blob in (b"foo", b"bar", b"foo1234", b"bar2468"):
            self.blobs[blob] = make_object(Blob, data=blob)
        self.store.add_object(self.blobs[b"foo"])
        self.store.add_object(self.blobs[b"bar"])

        # Build a thin pack. 'foo' is as an external reference, 'bar' an
        # internal reference.
        self.pack_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.pack_dir)
        self.pack_prefix = os.path.join(self.pack_dir, "pack")

        with open(self.pack_prefix + ".pack", "wb") as f:
            build_pack(
                f,
                [
                    (REF_DELTA, (self.blobs[b"foo"].id, b"foo1234")),
                    (Blob.type_num, b"bar"),
                    (REF_DELTA, (self.blobs[b"bar"].id, b"bar2468")),
                ],
                store=self.store,
            )

        # Index the new pack.
        with self.make_pack(True) as pack:
            with PackData(pack._data_path, object_format=DEFAULT_OBJECT_FORMAT) as data:
                data.create_index(
                    self.pack_prefix + ".idx", resolve_ext_ref=pack.resolve_ext_ref
                )

        # Remove 'bar' from the store: it exists inside the pack, so only
        # 'foo' remains as a purely external delta base.
        del self.store[self.blobs[b"bar"].id]

    def make_pack(self, resolve_ext_ref):
        """Open the pack, optionally wiring the store in as the external
        reference resolver."""
        return Pack(
            self.pack_prefix,
            object_format=DEFAULT_OBJECT_FORMAT,
            resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None,
        )

    def test_get_raw(self) -> None:
        """Without external resolution the foo1234 delta base is missing."""
        with self.make_pack(False) as p:
            self.assertRaises(KeyError, p.get_raw, self.blobs[b"foo1234"].id)
        with self.make_pack(True) as p:
            # (3, ...) is the resolved object's type number and raw data.
            self.assertEqual((3, b"foo1234"), p.get_raw(self.blobs[b"foo1234"].id))

    def test_get_unpacked_object(self) -> None:
        """get_unpacked_object returns the raw (unresolved) REF_DELTA either
        way; the expected bytes are the fixture's exact delta encoding."""
        self.maxDiff = None
        with self.make_pack(False) as p:
            # Pack type 7 with a 20-byte delta_base — a REF_DELTA entry.
            expected = UnpackedObject(
                7,
                delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
                decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
            )
            expected.offset = 12
            got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
            self.assertEqual(expected, got)
        with self.make_pack(True) as p:
            expected = UnpackedObject(
                7,
                delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
                decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
            )
            expected.offset = 12
            got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
            self.assertEqual(
                expected,
                got,
            )

    def test_iterobjects(self) -> None:
        """Iterating a thin pack fails without an external resolver and
        yields all three objects with one."""
        with self.make_pack(False) as p:
            self.assertRaises(UnresolvedDeltas, list, p.iterobjects())
        with self.make_pack(True) as p:
            self.assertEqual(
                sorted(
                    [
                        self.blobs[b"foo1234"].id,
                        self.blobs[b"bar"].id,
                        self.blobs[b"bar2468"].id,
                    ]
                ),
                sorted(o.id for o in p.iterobjects()),
            )
class WritePackTests(TestCase):
    """Tests for the low-level pack writing helpers."""

    def test_write_pack_header(self) -> None:
        """write_pack_header emits 'PACK', version 2, and the object count."""
        f = BytesIO()
        try:
            write_pack_header(f.write, 42)
            # Trailing b"*" is 0x2a == 42, the count in big-endian.
            self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00*", f.getvalue())
        finally:
            f.close()

    def test_write_pack_object(self) -> None:
        """A written object round-trips through unpack_object with its CRC."""
        f = BytesIO()
        try:
            f.write(b"header")
            offset = f.tell()
            crc32 = write_pack_object(
                f.write, Blob.type_num, b"blob", object_format=DEFAULT_OBJECT_FORMAT
            )
            # The CRC covers only the object bytes, not the 6-byte prefix.
            self.assertEqual(crc32, zlib.crc32(f.getvalue()[6:]) & 0xFFFFFFFF)

            f.write(b"x")  # unpack_object needs extra trailing data.
            f.seek(offset)
            unpacked, unused = unpack_object(
                f.read, DEFAULT_OBJECT_FORMAT.hash_func, compute_crc32=True
            )
            self.assertEqual(Blob.type_num, unpacked.pack_type_num)
            self.assertEqual(Blob.type_num, unpacked.obj_type_num)
            self.assertEqual([b"blob"], unpacked.decomp_chunks)
            self.assertEqual(crc32, unpacked.crc32)
            self.assertEqual(b"x", unused)
        finally:
            f.close()

    def test_write_pack_object_sha(self) -> None:
        """The sha argument is updated with exactly the bytes written."""
        f = BytesIO()
        f.write(b"header")
        offset = f.tell()
        sha_a = sha1(b"foo")
        sha_b = sha_a.copy()
        write_pack_object(
            f.write,
            Blob.type_num,
            b"blob",
            sha=sha_a,
            object_format=DEFAULT_OBJECT_FORMAT,
        )
        # sha_a diverged from its pre-write copy...
        self.assertNotEqual(sha_a.digest(), sha_b.digest())
        # ...and feeding the copy the written bytes makes them agree again.
        sha_b.update(f.getvalue()[offset:])
        self.assertEqual(sha_a.digest(), sha_b.digest())

    def test_write_pack_object_compression_level(self) -> None:
        """An explicit compression_level still feeds written bytes to sha."""
        f = BytesIO()
        f.write(b"header")
        offset = f.tell()
        sha_a = sha1(b"foo")
        sha_b = sha_a.copy()
        write_pack_object(
            f.write,
            Blob.type_num,
            b"blob",
            sha=sha_a,
            compression_level=6,
            object_format=DEFAULT_OBJECT_FORMAT,
        )
        self.assertNotEqual(sha_a.digest(), sha_b.digest())
        sha_b.update(f.getvalue()[offset:])
        self.assertEqual(sha_a.digest(), sha_b.digest())
# Fixed pack checksum shared by the index-writing test classes below.
pack_checksum = hex_to_sha("721980e866af9a5f93ad674144e1459b8ba3e7b7")
  879. class BaseTestPackIndexWriting:
  880. def assertSucceeds(self, func, *args, **kwargs) -> None:
  881. try:
  882. func(*args, **kwargs)
  883. except ChecksumMismatch as e:
  884. self.fail(e)
  885. def index(self, filename, entries, pack_checksum) -> NoReturn:
  886. raise NotImplementedError(self.index)
  887. def test_empty(self) -> None:
  888. idx = self.index("empty.idx", [], pack_checksum)
  889. self.assertEqual(idx.get_pack_checksum(), pack_checksum)
  890. self.assertEqual(0, len(idx))
  891. def test_large(self) -> None:
  892. entry1_sha = hex_to_sha("4e6388232ec39792661e2e75db8fb117fc869ce6")
  893. entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2")
  894. entries = [
  895. (entry1_sha, 0xF2972D0830529B87, 24),
  896. (entry2_sha, (~0xF2972D0830529B87) & (2**64 - 1), 92),
  897. ]
  898. if not self._supports_large:
  899. self.assertRaises(
  900. TypeError, self.index, "single.idx", entries, pack_checksum
  901. )
  902. return
  903. idx = self.index("single.idx", entries, pack_checksum)
  904. self.assertEqual(idx.get_pack_checksum(), pack_checksum)
  905. self.assertEqual(2, len(idx))
  906. actual_entries = list(idx.iterentries())
  907. self.assertEqual(len(entries), len(actual_entries))
  908. for mine, actual in zip(entries, actual_entries):
  909. my_sha, my_offset, my_crc = mine
  910. actual_sha, actual_offset, actual_crc = actual
  911. self.assertEqual(my_sha, actual_sha)
  912. self.assertEqual(my_offset, actual_offset)
  913. if self._has_crc32_checksum:
  914. self.assertEqual(my_crc, actual_crc)
  915. else:
  916. self.assertIsNone(actual_crc)
  917. def test_single(self) -> None:
  918. entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8")
  919. my_entries = [(entry_sha, 178, 42)]
  920. idx = self.index("single.idx", my_entries, pack_checksum)
  921. self.assertEqual(idx.get_pack_checksum(), pack_checksum)
  922. self.assertEqual(1, len(idx))
  923. actual_entries = list(idx.iterentries())
  924. self.assertEqual(len(my_entries), len(actual_entries))
  925. for mine, actual in zip(my_entries, actual_entries):
  926. my_sha, my_offset, my_crc = mine
  927. actual_sha, actual_offset, actual_crc = actual
  928. self.assertEqual(my_sha, actual_sha)
  929. self.assertEqual(my_offset, actual_offset)
  930. if self._has_crc32_checksum:
  931. self.assertEqual(my_crc, actual_crc)
  932. else:
  933. self.assertIsNone(actual_crc)
  934. class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting):
  935. def setUp(self) -> None:
  936. self.tempdir = tempfile.mkdtemp()
  937. def tearDown(self) -> None:
  938. shutil.rmtree(self.tempdir)
  939. def index(self, filename, entries, pack_checksum):
  940. path = os.path.join(self.tempdir, filename)
  941. self.writeIndex(path, entries, pack_checksum)
  942. idx = load_pack_index(path, DEFAULT_OBJECT_FORMAT)
  943. self.assertSucceeds(idx.check)
  944. self.assertEqual(idx.version, self._expected_version)
  945. return idx
  946. def writeIndex(self, filename, entries, pack_checksum) -> None:
  947. # FIXME: Write to BytesIO instead rather than hitting disk ?
  948. with GitFile(filename, "wb") as f:
  949. self._write_fn(f, entries, pack_checksum)
  950. class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):
  951. def setUp(self) -> None:
  952. TestCase.setUp(self)
  953. self._has_crc32_checksum = True
  954. self._supports_large = True
  955. def index(self, filename, entries, pack_checksum):
  956. from dulwich.object_format import DEFAULT_OBJECT_FORMAT
  957. return MemoryPackIndex(entries, DEFAULT_OBJECT_FORMAT, pack_checksum)
  958. def tearDown(self) -> None:
  959. TestCase.tearDown(self)
  960. class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting):
  961. def setUp(self) -> None:
  962. TestCase.setUp(self)
  963. BaseTestFilePackIndexWriting.setUp(self)
  964. self._has_crc32_checksum = False
  965. self._expected_version = 1
  966. self._supports_large = False
  967. self._write_fn = write_pack_index_v1
  968. def tearDown(self) -> None:
  969. TestCase.tearDown(self)
  970. BaseTestFilePackIndexWriting.tearDown(self)
  971. class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):
  972. def setUp(self) -> None:
  973. TestCase.setUp(self)
  974. BaseTestFilePackIndexWriting.setUp(self)
  975. self._has_crc32_checksum = True
  976. self._supports_large = True
  977. self._expected_version = 2
  978. self._write_fn = write_pack_index_v2
  979. def tearDown(self) -> None:
  980. TestCase.tearDown(self)
  981. BaseTestFilePackIndexWriting.tearDown(self)
class TestPackIndexWritingv3(TestCase, BaseTestFilePackIndexWriting):
    """File-based index-writing tests for version 3 indexes, which add an
    explicit hash-algorithm field."""

    def setUp(self) -> None:
        TestCase.setUp(self)
        BaseTestFilePackIndexWriting.setUp(self)
        self._has_crc32_checksum = True
        self._supports_large = True
        self._expected_version = 3
        self._write_fn = write_pack_index_v3

    def tearDown(self) -> None:
        TestCase.tearDown(self)
        BaseTestFilePackIndexWriting.tearDown(self)

    def test_load_v3_index_returns_packindex3(self) -> None:
        """Test that loading a v3 index file returns a PackIndex3 instance."""
        entries = [(b"abcd" * 5, 0, zlib.crc32(b""))]
        filename = os.path.join(self.tempdir, "test.idx")
        self.writeIndex(filename, entries, b"1234567890" * 2)
        idx = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
        self.assertIsInstance(idx, PackIndex3)
        self.assertEqual(idx.version, 3)
        self.assertEqual(idx.hash_format, 1)  # SHA-1
        self.assertEqual(idx.hash_size, 20)
        self.assertEqual(idx.shortened_oid_len, 20)

    def test_v3_hash_algorithm(self) -> None:
        """Test v3 index correctly handles hash algorithm field."""
        entries = [(b"a" * 20, 42, zlib.crc32(b"data"))]
        filename = os.path.join(self.tempdir, "test_hash.idx")
        # Write v3 index with SHA-1 (algorithm=1)
        with GitFile(filename, "wb") as f:
            write_pack_index_v3(f, entries, b"1" * 20, hash_format=1)
        idx = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
        self.assertEqual(idx.hash_format, 1)
        self.assertEqual(idx.hash_size, 20)

    def test_v3_sha256_length(self) -> None:
        """Test v3 index with SHA-256 hash length."""
        # For now, test that SHA-256 is not yet implemented.
        entries = [(b"a" * 32, 42, zlib.crc32(b"data"))]
        filename = os.path.join(self.tempdir, "test_sha256.idx")
        # SHA-256 (hash_format=2) should raise NotImplementedError.
        with self.assertRaises(NotImplementedError) as cm:
            with GitFile(filename, "wb") as f:
                write_pack_index_v3(f, entries, b"1" * 32, hash_format=2)
        self.assertIn("SHA-256", str(cm.exception))

    def test_v3_invalid_hash_algorithm(self) -> None:
        """Test v3 index with invalid hash algorithm."""
        entries = [(b"a" * 20, 42, zlib.crc32(b"data"))]
        filename = os.path.join(self.tempdir, "test_invalid.idx")
        # An unrecognized hash algorithm id must be rejected.
        with self.assertRaises(ValueError) as cm:
            with GitFile(filename, "wb") as f:
                write_pack_index_v3(f, entries, b"1" * 20, hash_format=99)
        self.assertIn("Unknown hash algorithm", str(cm.exception))

    def test_v3_wrong_hash_length(self) -> None:
        """Test v3 index with mismatched hash length."""
        # Entry with wrong hash length for SHA-1 (15 bytes instead of 20).
        entries = [(b"a" * 15, 42, zlib.crc32(b"data"))]  # Too short
        filename = os.path.join(self.tempdir, "test_wrong_len.idx")
        with self.assertRaises(ValueError) as cm:
            with GitFile(filename, "wb") as f:
                write_pack_index_v3(f, entries, b"1" * 20, hash_format=1)
        self.assertIn("wrong length", str(cm.exception))
  1042. class WritePackIndexTests(TestCase):
  1043. """Tests for the configurable write_pack_index function."""
  1044. def test_default_pack_index_version_constant(self) -> None:
  1045. from dulwich.pack import DEFAULT_PACK_INDEX_VERSION
  1046. # Ensure the constant is set to version 2 (current Git default)
  1047. self.assertEqual(2, DEFAULT_PACK_INDEX_VERSION)
  1048. def test_write_pack_index_defaults_to_v2(self) -> None:
  1049. import tempfile
  1050. from dulwich.pack import (
  1051. DEFAULT_PACK_INDEX_VERSION,
  1052. load_pack_index,
  1053. write_pack_index,
  1054. )
  1055. tempdir = tempfile.mkdtemp()
  1056. self.addCleanup(shutil.rmtree, tempdir)
  1057. entries = [(b"1" * 20, 42, zlib.crc32(b"data"))]
  1058. filename = os.path.join(tempdir, "test_default.idx")
  1059. with GitFile(filename, "wb") as f:
  1060. write_pack_index(f, entries, b"P" * 20)
  1061. idx = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
  1062. self.assertEqual(DEFAULT_PACK_INDEX_VERSION, idx.version)
  1063. def test_write_pack_index_version_1(self) -> None:
  1064. import tempfile
  1065. from dulwich.pack import load_pack_index, write_pack_index
  1066. tempdir = tempfile.mkdtemp()
  1067. self.addCleanup(shutil.rmtree, tempdir)
  1068. entries = [(b"1" * 20, 42, zlib.crc32(b"data"))]
  1069. filename = os.path.join(tempdir, "test_v1.idx")
  1070. with GitFile(filename, "wb") as f:
  1071. write_pack_index(f, entries, b"P" * 20, version=1)
  1072. idx = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
  1073. self.assertEqual(1, idx.version)
  1074. def test_write_pack_index_version_3(self) -> None:
  1075. import tempfile
  1076. from dulwich.pack import load_pack_index, write_pack_index
  1077. tempdir = tempfile.mkdtemp()
  1078. self.addCleanup(shutil.rmtree, tempdir)
  1079. entries = [(b"1" * 20, 42, zlib.crc32(b"data"))]
  1080. filename = os.path.join(tempdir, "test_v3.idx")
  1081. with GitFile(filename, "wb") as f:
  1082. write_pack_index(f, entries, b"P" * 20, version=3)
  1083. idx = load_pack_index(filename, DEFAULT_OBJECT_FORMAT)
  1084. self.assertEqual(3, idx.version)
  1085. def test_write_pack_index_invalid_version(self) -> None:
  1086. import tempfile
  1087. from dulwich.pack import write_pack_index
  1088. tempdir = tempfile.mkdtemp()
  1089. self.addCleanup(shutil.rmtree, tempdir)
  1090. entries = [(b"1" * 20, 42, zlib.crc32(b"data"))]
  1091. filename = os.path.join(tempdir, "test_invalid.idx")
  1092. with self.assertRaises(ValueError) as cm:
  1093. with GitFile(filename, "wb") as f:
  1094. write_pack_index(f, entries, b"P" * 20, version=99)
  1095. self.assertIn("Unsupported pack index version: 99", str(cm.exception))
  1096. class MockFileWithoutFileno:
  1097. """Mock file-like object without fileno method."""
  1098. def __init__(self, content):
  1099. self.content = content
  1100. self.position = 0
  1101. def read(self, size=None):
  1102. if size is None:
  1103. result = self.content[self.position :]
  1104. self.position = len(self.content)
  1105. else:
  1106. result = self.content[self.position : self.position + size]
  1107. self.position += size
  1108. return result
  1109. def seek(self, position):
  1110. self.position = position
  1111. def tell(self):
  1112. return self.position
  1113. # Removed the PackWithoutMmapTests class since it was using private methods
class ReadZlibTests(TestCase):
    """Tests for read_zlib_chunks, the incremental zlib stream reader."""

    # Reference plaintext: a realistic commit payload.
    decomp = (
        b"tree 4ada885c9196b6b6fa08744b5862bf92896fc002\n"
        b"parent None\n"
        b"author Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n"
        b"committer Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n"
        b"\n"
        b"Provide replacement for mmap()'s offset argument."
    )
    comp = zlib.compress(decomp)
    # Bytes following the zlib stream, as would occur inside a pack file.
    extra = b"nextobject"

    def setUp(self) -> None:
        super().setUp()
        self.read = BytesIO(self.comp + self.extra).read
        self.unpacked = UnpackedObject(
            Tree.type_num, decomp_len=len(self.decomp), crc32=0
        )

    def test_decompress_size(self) -> None:
        """A negative decomp_len is rejected; off-by-one lengths fail."""
        good_decomp_len = len(self.decomp)
        self.unpacked.decomp_len = -1
        self.assertRaises(ValueError, read_zlib_chunks, self.read, self.unpacked)
        self.unpacked.decomp_len = good_decomp_len - 1
        self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)
        self.unpacked.decomp_len = good_decomp_len + 1
        self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)

    def test_decompress_truncated(self) -> None:
        """Truncated input fails; so does a stream with no trailing data."""
        read = BytesIO(self.comp[:10]).read
        self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)

        # The complete stream with nothing after it also raises — the
        # reader expects at least one byte beyond the zlib stream.
        read = BytesIO(self.comp).read
        self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)

    def test_decompress_empty(self) -> None:
        """An empty stream decompresses to nothing; extra bytes survive."""
        unpacked = UnpackedObject(Tree.type_num, decomp_len=0)
        comp = zlib.compress(b"")
        read = BytesIO(comp + self.extra).read
        unused = read_zlib_chunks(read, unpacked)
        self.assertEqual(b"", b"".join(unpacked.decomp_chunks))
        # Some of the trailing data was over-read into `unused`; together
        # with the remainder it reconstitutes `extra` exactly.
        self.assertNotEqual(b"", unused)
        self.assertEqual(self.extra, unused + read())

    def test_decompress_no_crc32(self) -> None:
        """With crc32 unset, no checksum is computed."""
        self.unpacked.crc32 = None
        read_zlib_chunks(self.read, self.unpacked)
        self.assertEqual(None, self.unpacked.crc32)

    def _do_decompress_test(self, buffer_size, **kwargs) -> None:
        """Decompress with the given buffer size and verify all outputs."""
        unused = read_zlib_chunks(
            self.read, self.unpacked, buffer_size=buffer_size, **kwargs
        )
        self.assertEqual(self.decomp, b"".join(self.unpacked.decomp_chunks))
        self.assertEqual(zlib.crc32(self.comp), self.unpacked.crc32)
        self.assertNotEqual(b"", unused)
        self.assertEqual(self.extra, unused + self.read())

    def test_simple_decompress(self) -> None:
        self._do_decompress_test(4096)
        # Compressed chunks are not retained unless include_comp is set.
        self.assertEqual(None, self.unpacked.comp_chunks)

    # These buffer sizes are not intended to be realistic, but rather simulate
    # larger buffer sizes that may end at various places.
    def test_decompress_buffer_size_1(self) -> None:
        self._do_decompress_test(1)

    def test_decompress_buffer_size_2(self) -> None:
        self._do_decompress_test(2)

    def test_decompress_buffer_size_3(self) -> None:
        self._do_decompress_test(3)

    def test_decompress_buffer_size_4(self) -> None:
        self._do_decompress_test(4)

    def test_decompress_include_comp(self) -> None:
        """include_comp=True preserves the raw compressed bytes."""
        self._do_decompress_test(4096, include_comp=True)
        self.assertEqual(self.comp, b"".join(self.unpacked.comp_chunks))
class DeltifyTests(TestCase):
    """Tests for deltify_pack_objects."""

    def test_empty(self) -> None:
        """No input objects produce no output."""
        self.assertEqual([], list(deltify_pack_objects([])))

    def test_single(self) -> None:
        """A lone object is emitted whole, with no delta base."""
        b = Blob.from_string(b"foo")
        self.assertEqual(
            [
                UnpackedObject(
                    b.type_num,
                    sha=b.sha().digest(),
                    delta_base=None,
                    decomp_chunks=b.as_raw_chunks(),
                )
            ],
            list(deltify_pack_objects([(b, b"")])),
        )

    def test_simple_delta(self) -> None:
        """Of two near-identical blobs, the second is stored as a delta
        against the first."""
        b1 = Blob.from_string(b"a" * 101)
        b2 = Blob.from_string(b"a" * 100)
        delta = list(create_delta(b1.as_raw_chunks(), b2.as_raw_chunks()))
        self.assertEqual(
            [
                # Base object: full content, no delta.
                UnpackedObject(
                    b1.type_num,
                    sha=b1.sha().digest(),
                    delta_base=None,
                    decomp_chunks=b1.as_raw_chunks(),
                ),
                # Second object: delta chunks against b1's SHA.
                UnpackedObject(
                    b2.type_num,
                    sha=b2.sha().digest(),
                    delta_base=b1.sha().digest(),
                    decomp_chunks=delta,
                ),
            ],
            list(deltify_pack_objects([(b1, b""), (b2, b"")])),
        )
  1217. class TestPackStreamReader(TestCase):
  1218. def test_read_objects_emtpy(self) -> None:
  1219. f = BytesIO()
  1220. build_pack(f, [])
  1221. reader = PackStreamReader(DEFAULT_OBJECT_FORMAT.hash_func, f.read)
  1222. self.assertEqual(0, len(list(reader.read_objects())))
  1223. def test_read_objects(self) -> None:
  1224. f = BytesIO()
  1225. entries = build_pack(
  1226. f,
  1227. [
  1228. (Blob.type_num, b"blob"),
  1229. (OFS_DELTA, (0, b"blob1")),
  1230. ],
  1231. )
  1232. reader = PackStreamReader(DEFAULT_OBJECT_FORMAT.hash_func, f.read)
  1233. objects = list(reader.read_objects(compute_crc32=True))
  1234. self.assertEqual(2, len(objects))
  1235. unpacked_blob, unpacked_delta = objects
  1236. self.assertEqual(entries[0][0], unpacked_blob.offset)
  1237. self.assertEqual(Blob.type_num, unpacked_blob.pack_type_num)
  1238. self.assertEqual(Blob.type_num, unpacked_blob.obj_type_num)
  1239. self.assertEqual(None, unpacked_blob.delta_base)
  1240. self.assertEqual(b"blob", b"".join(unpacked_blob.decomp_chunks))
  1241. self.assertEqual(entries[0][4], unpacked_blob.crc32)
  1242. self.assertEqual(entries[1][0], unpacked_delta.offset)
  1243. self.assertEqual(OFS_DELTA, unpacked_delta.pack_type_num)
  1244. self.assertEqual(None, unpacked_delta.obj_type_num)
  1245. self.assertEqual(
  1246. unpacked_delta.offset - unpacked_blob.offset,
  1247. unpacked_delta.delta_base,
  1248. )
  1249. delta = create_delta(b"blob", b"blob1")
  1250. self.assertEqual(b"".join(delta), b"".join(unpacked_delta.decomp_chunks))
  1251. self.assertEqual(entries[1][4], unpacked_delta.crc32)
  1252. def test_read_objects_buffered(self) -> None:
  1253. f = BytesIO()
  1254. build_pack(
  1255. f,
  1256. [
  1257. (Blob.type_num, b"blob"),
  1258. (OFS_DELTA, (0, b"blob1")),
  1259. ],
  1260. )
  1261. reader = PackStreamReader(
  1262. DEFAULT_OBJECT_FORMAT.hash_func, f.read, zlib_bufsize=4
  1263. )
  1264. self.assertEqual(2, len(list(reader.read_objects())))
  1265. def test_read_objects_empty(self) -> None:
  1266. reader = PackStreamReader(DEFAULT_OBJECT_FORMAT.hash_func, BytesIO().read)
  1267. self.assertRaises(AssertionError, list, reader.read_objects())
class TestPackIterator(DeltaChainIterator):
    """DeltaChainIterator subclass used by DeltaChainIteratorTests.

    Tracks every offset it resolves and asserts no offset is inflated
    twice, catching accidental re-inflation of delta bases.
    """

    _compute_crc32 = True

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Offsets already resolved; used to detect duplicate inflation.
        self._unpacked_offsets: set[int] = set()

    def _result(self, unpacked):
        """Return entries in the same format as build_pack."""
        return (
            unpacked.offset,
            unpacked.obj_type_num,
            b"".join(unpacked.obj_chunks),
            unpacked.sha(),
            unpacked.crc32,
        )

    def _resolve_object(self, offset, pack_type_num, base_chunks):
        """Resolve one object, asserting this offset was not seen before."""
        assert offset not in self._unpacked_offsets, (
            f"Attempted to re-inflate offset {offset}"
        )
        self._unpacked_offsets.add(offset)
        return super()._resolve_object(offset, pack_type_num, base_chunks)
  1288. class DeltaChainIteratorTests(TestCase):
    def setUp(self) -> None:
        """Create a fresh in-memory store and reset the fetch tracker."""
        super().setUp()
        self.store = MemoryObjectStore()
        # Hex SHAs already handed out by get_raw_no_repeat.
        self.fetched = set()
  1293. def store_blobs(self, blobs_data):
  1294. blobs = []
  1295. for data in blobs_data:
  1296. blob = make_object(Blob, data=data)
  1297. blobs.append(blob)
  1298. self.store.add_object(blob)
  1299. return blobs
    def get_raw_no_repeat(self, bin_sha):
        """Wrapper around store.get_raw that doesn't allow repeat lookups."""
        hex_sha = sha_to_hex(bin_sha)
        # Fail loudly if an external base is fetched more than once.
        self.assertNotIn(
            hex_sha, self.fetched, f"Attempted to re-fetch object {hex_sha}"
        )
        self.fetched.add(hex_sha)
        return self.store.get_raw(hex_sha)
    def make_pack_iter(self, f, thin=None):
        """Build a TestPackIterator over the pack data in *f*.

        When *thin* is unset, the pack is treated as thin whenever the
        store already holds objects (they can serve as external bases).
        """
        if thin is None:
            thin = bool(list(self.store))
        resolve_ext_ref = (thin and self.get_raw_no_repeat) or None
        data = PackData("test.pack", file=f, object_format=DEFAULT_OBJECT_FORMAT)
        self.addCleanup(data.close)
        return TestPackIterator.for_pack_data(data, resolve_ext_ref=resolve_ext_ref)
    def make_pack_iter_subset(self, f, subset, thin=None):
        """Build a TestPackIterator restricted to the SHAs in *subset*.

        Like make_pack_iter, but constructs a full Pack (data + in-memory
        index) so subset iteration can look objects up by SHA.
        """
        if thin is None:
            thin = bool(list(self.store))
        resolve_ext_ref = (thin and self.get_raw_no_repeat) or None
        data = PackData("test.pack", file=f, object_format=DEFAULT_OBJECT_FORMAT)
        assert data
        index = MemoryPackIndex.for_pack(data)
        pack = Pack.from_objects(data, index)
        self.addCleanup(pack.close)
        return TestPackIterator.for_pack_subset(
            pack, subset, resolve_ext_ref=resolve_ext_ref
        )
  1327. def assertEntriesMatch(self, expected_indexes, entries, pack_iter) -> None:
  1328. expected = [entries[i] for i in expected_indexes]
  1329. self.assertEqual(expected, list(pack_iter._walk_all_chains()))
    def test_no_deltas(self) -> None:
        """Without deltas, full iteration follows pack order; subset
        iteration may reorder."""
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Commit.type_num, b"commit"),
                (Blob.type_num, b"blob"),
                (Tree.type_num, b"tree"),
            ],
        )
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))
        f.seek(0)
        self.assertEntriesMatch([], entries, self.make_pack_iter_subset(f, []))
        f.seek(0)
        # Subset iteration yields [1, 0]: ordering follows the subset
        # machinery rather than the pack's offset order.
        self.assertEntriesMatch(
            [1, 0],
            entries,
            self.make_pack_iter_subset(f, [entries[0][3], entries[1][3]]),
        )
        f.seek(0)
        # Hex SHAs are accepted in the subset as well as binary ones.
        self.assertEntriesMatch(
            [1, 0],
            entries,
            self.make_pack_iter_subset(
                f, [sha_to_hex(entries[0][3]), sha_to_hex(entries[1][3])]
            ),
        )
    def test_ofs_deltas(self) -> None:
        """Two OFS_DELTAs sharing one base resolve in DFS order."""
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (OFS_DELTA, (0, b"blob1")),
                (OFS_DELTA, (0, b"blob2")),
            ],
        )
        # Delta resolution changed to DFS
        self.assertEntriesMatch([0, 2, 1], entries, self.make_pack_iter(f))
        f.seek(0)
        self.assertEntriesMatch(
            [0, 2, 1],
            entries,
            self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]),
        )
    def test_ofs_deltas_chain(self) -> None:
        """A linear OFS_DELTA chain resolves base-first, in order."""
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (Blob.type_num, b"blob"),
                (OFS_DELTA, (0, b"blob1")),
                (OFS_DELTA, (1, b"blob2")),
            ],
        )
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))
    def test_ref_deltas(self) -> None:
        """REF_DELTAs against an in-pack base resolve after the base."""
        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (1, b"blob1")),
                (Blob.type_num, (b"blob")),
                (REF_DELTA, (1, b"blob2")),
            ],
        )
        # Delta resolution changed to DFS
        self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))
  1398. def test_ref_deltas_chain(self) -> None:
  1399. f = BytesIO()
  1400. entries = build_pack(
  1401. f,
  1402. [
  1403. (REF_DELTA, (2, b"blob1")),
  1404. (Blob.type_num, (b"blob")),
  1405. (REF_DELTA, (1, b"blob2")),
  1406. ],
  1407. )
  1408. self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))
  1409. def test_ofs_and_ref_deltas(self) -> None:
  1410. # Deltas pending on this offset are popped before deltas depending on
  1411. # this ref.
  1412. f = BytesIO()
  1413. entries = build_pack(
  1414. f,
  1415. [
  1416. (REF_DELTA, (1, b"blob1")),
  1417. (Blob.type_num, (b"blob")),
  1418. (OFS_DELTA, (1, b"blob2")),
  1419. ],
  1420. )
  1421. # Delta resolution changed to DFS
  1422. self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f))
  1423. def test_mixed_chain(self) -> None:
  1424. f = BytesIO()
  1425. entries = build_pack(
  1426. f,
  1427. [
  1428. (Blob.type_num, b"blob"),
  1429. (REF_DELTA, (2, b"blob2")),
  1430. (OFS_DELTA, (0, b"blob1")),
  1431. (OFS_DELTA, (1, b"blob3")),
  1432. (OFS_DELTA, (0, b"bob")),
  1433. ],
  1434. )
  1435. # Delta resolution changed to DFS
  1436. self.assertEntriesMatch([0, 4, 2, 1, 3], entries, self.make_pack_iter(f))
  1437. def test_long_chain(self) -> None:
  1438. n = 100
  1439. objects_spec = [(Blob.type_num, b"blob")]
  1440. for i in range(n):
  1441. objects_spec.append((OFS_DELTA, (i, b"blob" + str(i).encode("ascii"))))
  1442. f = BytesIO()
  1443. entries = build_pack(f, objects_spec)
  1444. self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f))
  1445. def test_branchy_chain(self) -> None:
  1446. n = 100
  1447. objects_spec = [(Blob.type_num, b"blob")]
  1448. for i in range(n):
  1449. objects_spec.append((OFS_DELTA, (0, b"blob" + str(i).encode("ascii"))))
  1450. f = BytesIO()
  1451. entries = build_pack(f, objects_spec)
  1452. # Delta resolution changed to DFS
  1453. indices = [0, *list(range(100, 0, -1))]
  1454. self.assertEntriesMatch(indices, entries, self.make_pack_iter(f))
  1455. def test_ext_ref(self) -> None:
  1456. (blob,) = self.store_blobs([b"blob"])
  1457. f = BytesIO()
  1458. entries = build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
  1459. pack_iter = self.make_pack_iter(f)
  1460. self.assertEntriesMatch([0], entries, pack_iter)
  1461. self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
  1462. def test_ext_ref_chain(self) -> None:
  1463. (blob,) = self.store_blobs([b"blob"])
  1464. f = BytesIO()
  1465. entries = build_pack(
  1466. f,
  1467. [
  1468. (REF_DELTA, (1, b"blob2")),
  1469. (REF_DELTA, (blob.id, b"blob1")),
  1470. ],
  1471. store=self.store,
  1472. )
  1473. pack_iter = self.make_pack_iter(f)
  1474. self.assertEntriesMatch([1, 0], entries, pack_iter)
  1475. self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
  1476. def test_ext_ref_chain_degenerate(self) -> None:
  1477. # Test a degenerate case where the sender is sending a REF_DELTA
  1478. # object that expands to an object already in the repository.
  1479. (blob,) = self.store_blobs([b"blob"])
  1480. (blob2,) = self.store_blobs([b"blob2"])
  1481. assert blob.id < blob2.id
  1482. f = BytesIO()
  1483. entries = build_pack(
  1484. f,
  1485. [
  1486. (REF_DELTA, (blob.id, b"blob2")),
  1487. (REF_DELTA, (0, b"blob3")),
  1488. ],
  1489. store=self.store,
  1490. )
  1491. pack_iter = self.make_pack_iter(f)
  1492. self.assertEntriesMatch([0, 1], entries, pack_iter)
  1493. self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
  1494. def test_ext_ref_multiple_times(self) -> None:
  1495. (blob,) = self.store_blobs([b"blob"])
  1496. f = BytesIO()
  1497. entries = build_pack(
  1498. f,
  1499. [
  1500. (REF_DELTA, (blob.id, b"blob1")),
  1501. (REF_DELTA, (blob.id, b"blob2")),
  1502. ],
  1503. store=self.store,
  1504. )
  1505. pack_iter = self.make_pack_iter(f)
  1506. self.assertEntriesMatch([0, 1], entries, pack_iter)
  1507. self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
  1508. def test_multiple_ext_refs(self) -> None:
  1509. b1, b2 = self.store_blobs([b"foo", b"bar"])
  1510. f = BytesIO()
  1511. entries = build_pack(
  1512. f,
  1513. [
  1514. (REF_DELTA, (b1.id, b"foo1")),
  1515. (REF_DELTA, (b2.id, b"bar2")),
  1516. ],
  1517. store=self.store,
  1518. )
  1519. pack_iter = self.make_pack_iter(f)
  1520. self.assertEntriesMatch([0, 1], entries, pack_iter)
  1521. self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)], pack_iter.ext_refs())
  1522. def test_bad_ext_ref_non_thin_pack(self) -> None:
  1523. (blob,) = self.store_blobs([b"blob"])
  1524. f = BytesIO()
  1525. build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
  1526. pack_iter = self.make_pack_iter(f, thin=False)
  1527. try:
  1528. list(pack_iter._walk_all_chains())
  1529. self.fail()
  1530. except UnresolvedDeltas as e:
  1531. self.assertEqual([blob.id], e.shas)
  1532. def test_bad_ext_ref_thin_pack(self) -> None:
  1533. b1, b2, b3 = self.store_blobs([b"foo", b"bar", b"baz"])
  1534. f = BytesIO()
  1535. build_pack(
  1536. f,
  1537. [
  1538. (REF_DELTA, (1, b"foo99")),
  1539. (REF_DELTA, (b1.id, b"foo1")),
  1540. (REF_DELTA, (b2.id, b"bar2")),
  1541. (REF_DELTA, (b3.id, b"baz3")),
  1542. ],
  1543. store=self.store,
  1544. )
  1545. del self.store[b2.id]
  1546. del self.store[b3.id]
  1547. pack_iter = self.make_pack_iter(f)
  1548. try:
  1549. list(pack_iter._walk_all_chains())
  1550. self.fail()
  1551. except UnresolvedDeltas as e:
  1552. self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.shas),))
  1553. def test_ext_ref_deltified_object_based_on_itself(self) -> None:
  1554. b1_content = b"foo"
  1555. (b1,) = self.store_blobs([b1_content])
  1556. f = BytesIO()
  1557. build_pack(
  1558. f,
  1559. [
  1560. # b1's content refers to bl1's object ID as delta base
  1561. (REF_DELTA, (b1.id, b1_content)),
  1562. ],
  1563. store=self.store,
  1564. )
  1565. fsize = f.tell()
  1566. f.seek(0)
  1567. packdata = PackData.from_file(f, DEFAULT_OBJECT_FORMAT, fsize)
  1568. td = tempfile.mkdtemp()
  1569. idx_path = os.path.join(td, "test.idx")
  1570. self.addCleanup(shutil.rmtree, td)
  1571. packdata.create_index(
  1572. idx_path,
  1573. version=2,
  1574. resolve_ext_ref=self.get_raw_no_repeat,
  1575. )
  1576. packindex = load_pack_index(idx_path, DEFAULT_OBJECT_FORMAT)
  1577. pack = Pack.from_objects(packdata, packindex)
  1578. try:
  1579. # Attempting to open this REF_DELTA object would loop forever
  1580. pack[b1.id]
  1581. except UnresolvedDeltas as e:
  1582. self.assertEqual([b1.id], [sha_to_hex(sha) for sha in e.shas])
  1583. class DeltaEncodeSizeTests(TestCase):
  1584. def test_basic(self) -> None:
  1585. self.assertEqual(b"\x00", _delta_encode_size(0))
  1586. self.assertEqual(b"\x01", _delta_encode_size(1))
  1587. self.assertEqual(b"\xfa\x01", _delta_encode_size(250))
  1588. self.assertEqual(b"\xe8\x07", _delta_encode_size(1000))
  1589. self.assertEqual(b"\xa0\x8d\x06", _delta_encode_size(100000))
class EncodeCopyOperationTests(TestCase):
    """Tests for the delta copy-operation encoder."""

    def test_basic(self) -> None:
        # Expected encoding (inferred from these cases): a command byte
        # with the high bit set, whose flag bits select which non-zero
        # little-endian bytes of (offset, size) follow — TODO confirm
        # against the encoder's implementation.
        self.assertEqual(b"\x80", _encode_copy_operation(0, 0))
        self.assertEqual(b"\x91\x01\x0a", _encode_copy_operation(1, 10))
        self.assertEqual(b"\xb1\x64\xe8\x03", _encode_copy_operation(100, 1000))
        self.assertEqual(b"\x93\xe8\x03\x01", _encode_copy_operation(1000, 1))