# test_pack.py -- Tests for the handling of git packs.
# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
# Copyright (C) 2008 Jelmer Vernooij <jelmer@samba.org>
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for Dulwich packs."""

from io import BytesIO
from hashlib import sha1
import os
import shutil
import tempfile
import zlib

from dulwich.errors import (
    ApplyDeltaError,
    ChecksumMismatch,
    )
from dulwich.file import (
    GitFile,
    )
from dulwich.object_store import (
    MemoryObjectStore,
    )
from dulwich.objects import (
    hex_to_sha,
    sha_to_hex,
    Commit,
    Tree,
    Blob,
    )
from dulwich.pack import (
    OFS_DELTA,
    REF_DELTA,
    MemoryPackIndex,
    Pack,
    PackData,
    apply_delta,
    create_delta,
    deltify_pack_objects,
    load_pack_index,
    UnpackedObject,
    read_zlib_chunks,
    write_pack_header,
    write_pack_index_v1,
    write_pack_index_v2,
    write_pack_object,
    write_pack,
    unpack_object,
    compute_file_sha,
    PackStreamReader,
    DeltaChainIterator,
    _delta_encode_size,
    _encode_copy_operation,
    )
from dulwich.tests import (
    TestCase,
    )
from dulwich.tests.utils import (
    make_object,
    build_pack,
    )

pack1_sha = b'bc63ddad95e7321ee734ea11a7a62d314e0d7481'
a_sha = b'6f670c0fb53f9463760b7295fbb814e965fb20c8'
tree_sha = b'b2a2766a2879c209ab1176e7e778b81ae422eeaa'
commit_sha = b'f18faa16531ac570a3fdc8c7ca16682548dafd12'


class PackTests(TestCase):
    """Base class for testing packs"""

    def setUp(self):
        super(PackTests, self).setUp()
        self.tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tempdir)

    datadir = os.path.abspath(os.path.join(os.path.dirname(__file__),
        'data/packs'))

    def get_pack_index(self, sha):
        """Returns a PackIndex from the datadir with the given sha"""
        return load_pack_index(os.path.join(
            self.datadir, 'pack-%s.idx' % sha.decode('ascii')))

    def get_pack_data(self, sha):
        """Returns a PackData object from the datadir with the given sha"""
        return PackData(os.path.join(
            self.datadir, 'pack-%s.pack' % sha.decode('ascii')))

    def get_pack(self, sha):
        return Pack(os.path.join(
            self.datadir, 'pack-%s' % sha.decode('ascii')))

    def assertSucceeds(self, func, *args, **kwargs):
        try:
            func(*args, **kwargs)
        except ChecksumMismatch as e:
            self.fail(e)


class PackIndexTests(PackTests):
    """Class that tests the index of packfiles"""

    def test_object_index(self):
        """Tests that the correct object offset is returned from the index."""
        p = self.get_pack_index(pack1_sha)
        self.assertRaises(KeyError, p.object_index, pack1_sha)
        self.assertEqual(p.object_index(a_sha), 178)
        self.assertEqual(p.object_index(tree_sha), 138)
        self.assertEqual(p.object_index(commit_sha), 12)

    def test_index_len(self):
        p = self.get_pack_index(pack1_sha)
        self.assertEqual(3, len(p))

    def test_get_stored_checksum(self):
        p = self.get_pack_index(pack1_sha)
        self.assertEqual(b'f2848e2ad16f329ae1c92e3b95e91888daa5bd01',
                         sha_to_hex(p.get_stored_checksum()))
        self.assertEqual(b'721980e866af9a5f93ad674144e1459b8ba3e7b7',
                         sha_to_hex(p.get_pack_checksum()))

    def test_index_check(self):
        p = self.get_pack_index(pack1_sha)
        self.assertSucceeds(p.check)

    def test_iterentries(self):
        p = self.get_pack_index(pack1_sha)
        entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()]
        self.assertEqual([
            (b'6f670c0fb53f9463760b7295fbb814e965fb20c8', 178, None),
            (b'b2a2766a2879c209ab1176e7e778b81ae422eeaa', 138, None),
            (b'f18faa16531ac570a3fdc8c7ca16682548dafd12', 12, None)
            ], entries)

    def test_iter(self):
        p = self.get_pack_index(pack1_sha)
        self.assertEqual(set([tree_sha, commit_sha, a_sha]), set(p))


class TestPackDeltas(TestCase):

    test_string1 = b'The answer was flailing in the wind'
    test_string2 = b'The answer was falling down the pipe'
    test_string3 = b'zzzzz'

    test_string_empty = b''
    test_string_big = b'Z' * 8192
    test_string_huge = b'Z' * 100000

    def _test_roundtrip(self, base, target):
        self.assertEqual(
            target,
            b''.join(apply_delta(base, create_delta(base, target))))

    def test_nochange(self):
        self._test_roundtrip(self.test_string1, self.test_string1)

    def test_nochange_huge(self):
        self._test_roundtrip(self.test_string_huge, self.test_string_huge)

    def test_change(self):
        self._test_roundtrip(self.test_string1, self.test_string2)

    def test_rewrite(self):
        self._test_roundtrip(self.test_string1, self.test_string3)

    def test_empty_to_big(self):
        self._test_roundtrip(self.test_string_empty, self.test_string_big)

    def test_empty_to_huge(self):
        self._test_roundtrip(self.test_string_empty, self.test_string_huge)

    def test_huge_copy(self):
        self._test_roundtrip(self.test_string_huge + self.test_string1,
                             self.test_string_huge + self.test_string2)

    def test_dest_overflow(self):
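        # The deltas below are hand-crafted.  A git delta starts with the
        # source and target sizes, each as a little-endian base-128 varint,
        # followed by copy/insert instructions.  The first delta appears to
        # declare 0x10000-byte source and target sizes (b'\x80\x80\x04' each)
        # and then supply more data than fits in the declared target; the
        # second declares an empty source but asks to copy 0x1111 bytes.
        # Both should be rejected with ApplyDeltaError.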
        self.assertRaises(
            ApplyDeltaError,
            apply_delta, b'a'*0x10000, b'\x80\x80\x04\x80\x80\x04\x80' + b'a'*0x10000)
        self.assertRaises(
            ApplyDeltaError,
            apply_delta, b'', b'\x00\x80\x02\xb0\x11\x11')


class TestPackData(PackTests):
    """Tests getting the data from the packfile."""

    def test_create_pack(self):
        self.get_pack_data(pack1_sha).close()

    def test_from_file(self):
        path = os.path.join(self.datadir,
                            'pack-%s.pack' % pack1_sha.decode('ascii'))
        with open(path, 'rb') as f:
            PackData.from_file(f, os.path.getsize(path))

    def test_pack_len(self):
        with self.get_pack_data(pack1_sha) as p:
            self.assertEqual(3, len(p))

    def test_index_check(self):
        with self.get_pack_data(pack1_sha) as p:
            self.assertSucceeds(p.check)

    def test_iterobjects(self):
        with self.get_pack_data(pack1_sha) as p:
            commit_data = (b'tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n'
                           b'author James Westby <jw+debian@jameswestby.net> '
                           b'1174945067 +0100\n'
                           b'committer James Westby <jw+debian@jameswestby.net> '
                           b'1174945067 +0100\n'
                           b'\n'
                           b'Test commit\n')
            blob_sha = b'6f670c0fb53f9463760b7295fbb814e965fb20c8'
            tree_data = b'100644 a\0' + hex_to_sha(blob_sha)
            actual = []
            for offset, type_num, chunks, crc32 in p.iterobjects():
                actual.append((offset, type_num, b''.join(chunks), crc32))
            self.assertEqual([
                (12, 1, commit_data, 3775879613),
                (138, 2, tree_data, 912998690),
                (178, 3, b'test 1\n', 1373561701)
                ], actual)

    def test_iterentries(self):
        with self.get_pack_data(pack1_sha) as p:
            entries = set((sha_to_hex(s), o, c) for s, o, c in p.iterentries())
            self.assertEqual(set([
                (b'6f670c0fb53f9463760b7295fbb814e965fb20c8', 178, 1373561701),
                (b'b2a2766a2879c209ab1176e7e778b81ae422eeaa', 138, 912998690),
                (b'f18faa16531ac570a3fdc8c7ca16682548dafd12', 12, 3775879613),
                ]), entries)

    def test_create_index_v1(self):
        with self.get_pack_data(pack1_sha) as p:
            filename = os.path.join(self.tempdir, 'v1test.idx')
            p.create_index_v1(filename)
            idx1 = load_pack_index(filename)
            idx2 = self.get_pack_index(pack1_sha)
            self.assertEqual(idx1, idx2)

    def test_create_index_v2(self):
        with self.get_pack_data(pack1_sha) as p:
            filename = os.path.join(self.tempdir, 'v2test.idx')
            p.create_index_v2(filename)
            idx1 = load_pack_index(filename)
            idx2 = self.get_pack_index(pack1_sha)
            self.assertEqual(idx1, idx2)

    def test_compute_file_sha(self):
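        # compute_file_sha checksums the bytes between start_ofs and end_ofs;
        # as exercised below, a negative end_ofs counts back from the end of
        # the file, mirroring slice semantics.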
        f = BytesIO(b'abcd1234wxyz')
        self.assertEqual(sha1(b'abcd1234wxyz').hexdigest(),
                         compute_file_sha(f).hexdigest())
        self.assertEqual(sha1(b'abcd1234wxyz').hexdigest(),
                         compute_file_sha(f, buffer_size=5).hexdigest())
        self.assertEqual(sha1(b'abcd1234').hexdigest(),
                         compute_file_sha(f, end_ofs=-4).hexdigest())
        self.assertEqual(sha1(b'1234wxyz').hexdigest(),
                         compute_file_sha(f, start_ofs=4).hexdigest())
        self.assertEqual(
            sha1(b'1234').hexdigest(),
            compute_file_sha(f, start_ofs=4, end_ofs=-4).hexdigest())

    def test_compute_file_sha_short_file(self):
        f = BytesIO(b'abcd1234wxyz')
        self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=-20)
        self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=20)
        self.assertRaises(AssertionError, compute_file_sha, f, start_ofs=10,
                          end_ofs=-12)


class TestPack(PackTests):

    def test_len(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(3, len(p))

    def test_contains(self):
        with self.get_pack(pack1_sha) as p:
            self.assertTrue(tree_sha in p)

    def test_get(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(type(p[tree_sha]), Tree)

    def test_iter(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(set([tree_sha, commit_sha, a_sha]), set(p))

    def test_iterobjects(self):
        with self.get_pack(pack1_sha) as p:
            expected = set([p[s] for s in [commit_sha, tree_sha, a_sha]])
            self.assertEqual(expected, set(list(p.iterobjects())))

    def test_pack_tuples(self):
        with self.get_pack(pack1_sha) as p:
            tuples = p.pack_tuples()
            expected = set(
                [(p[s], None) for s in [commit_sha, tree_sha, a_sha]])
            self.assertEqual(expected, set(list(tuples)))
            self.assertEqual(expected, set(list(tuples)))
            self.assertEqual(3, len(tuples))

    def test_get_object_at(self):
        """Tests random access for non-delta objects"""
        with self.get_pack(pack1_sha) as p:
            obj = p[a_sha]
            self.assertEqual(obj.type_name, b'blob')
            self.assertEqual(obj.sha().hexdigest().encode('ascii'), a_sha)
            obj = p[tree_sha]
            self.assertEqual(obj.type_name, b'tree')
            self.assertEqual(obj.sha().hexdigest().encode('ascii'), tree_sha)
            obj = p[commit_sha]
            self.assertEqual(obj.type_name, b'commit')
            self.assertEqual(obj.sha().hexdigest().encode('ascii'), commit_sha)

    def test_copy(self):
        with self.get_pack(pack1_sha) as origpack:
            self.assertSucceeds(origpack.index.check)
            basename = os.path.join(self.tempdir, 'Elch')
            write_pack(basename, origpack.pack_tuples())

            with Pack(basename) as newpack:
                self.assertEqual(origpack, newpack)
                self.assertSucceeds(newpack.index.check)
                self.assertEqual(origpack.name(), newpack.name())
                self.assertEqual(origpack.index.get_pack_checksum(),
                                 newpack.index.get_pack_checksum())

                wrong_version = origpack.index.version != newpack.index.version
                orig_checksum = origpack.index.get_stored_checksum()
                new_checksum = newpack.index.get_stored_checksum()
                self.assertTrue(wrong_version or orig_checksum == new_checksum)

    def test_commit_obj(self):
        with self.get_pack(pack1_sha) as p:
            commit = p[commit_sha]
            self.assertEqual(b'James Westby <jw+debian@jameswestby.net>',
                             commit.author)
            self.assertEqual([], commit.parents)

    def _copy_pack(self, origpack):
        basename = os.path.join(self.tempdir, 'somepack')
        write_pack(basename, origpack.pack_tuples())
        return Pack(basename)

    def test_keep_no_message(self):
        with self.get_pack(pack1_sha) as p:
            p = self._copy_pack(p)

        with p:
            keepfile_name = p.keep()

        # file should exist
        self.assertTrue(os.path.exists(keepfile_name))

        with open(keepfile_name, 'r') as f:
            buf = f.read()
            self.assertEqual('', buf)

    def test_keep_message(self):
        with self.get_pack(pack1_sha) as p:
            p = self._copy_pack(p)

        msg = b'some message'
        with p:
            keepfile_name = p.keep(msg)

        # file should exist
        self.assertTrue(os.path.exists(keepfile_name))

        # and contain the right message, with a linefeed
        with open(keepfile_name, 'rb') as f:
            buf = f.read()
            self.assertEqual(msg + b'\n', buf)

    def test_name(self):
        with self.get_pack(pack1_sha) as p:
            self.assertEqual(pack1_sha, p.name())

    def test_length_mismatch(self):
        with self.get_pack_data(pack1_sha) as data:
            index = self.get_pack_index(pack1_sha)
            Pack.from_objects(data, index).check_length_and_checksum()

            data._file.seek(12)
            bad_file = BytesIO()
            write_pack_header(bad_file, 9999)
            bad_file.write(data._file.read())
            bad_file = BytesIO(bad_file.getvalue())
            bad_data = PackData('', file=bad_file)
            bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
            self.assertRaises(AssertionError, lambda: bad_pack.data)
            self.assertRaises(AssertionError,
                              lambda: bad_pack.check_length_and_checksum())

    def test_checksum_mismatch(self):
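        # A pack file ends with a SHA-1 checksum of all preceding bytes;
        # clobbering those final 20 bytes must make verification fail.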
        with self.get_pack_data(pack1_sha) as data:
            index = self.get_pack_index(pack1_sha)
            Pack.from_objects(data, index).check_length_and_checksum()

            data._file.seek(0)
            bad_file = BytesIO(data._file.read()[:-20] + (b'\xff' * 20))
            bad_data = PackData('', file=bad_file)
            bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
            self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
            self.assertRaises(ChecksumMismatch, lambda:
                              bad_pack.check_length_and_checksum())

    def test_iterobjects_2(self):
        with self.get_pack(pack1_sha) as p:
            objs = dict((o.id, o) for o in p.iterobjects())
            self.assertEqual(3, len(objs))
            self.assertEqual(sorted(objs), sorted(p.index))
            self.assertTrue(isinstance(objs[a_sha], Blob))
            self.assertTrue(isinstance(objs[tree_sha], Tree))
            self.assertTrue(isinstance(objs[commit_sha], Commit))


class TestThinPack(PackTests):
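    # A "thin" pack may contain REF_DELTA objects whose bases are not in the
    # pack itself; the receiving side resolves them from its object store
    # (here via resolve_ext_ref) before the pack can stand on its own.
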
    def setUp(self):
        super(TestThinPack, self).setUp()
        self.store = MemoryObjectStore()
        self.blobs = {}
        for blob in (b'foo', b'bar', b'foo1234', b'bar2468'):
            self.blobs[blob] = make_object(Blob, data=blob)
        self.store.add_object(self.blobs[b'foo'])
        self.store.add_object(self.blobs[b'bar'])

        # Build a thin pack. 'foo' is an external reference, 'bar' an
        # internal reference.
        self.pack_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.pack_dir)
        self.pack_prefix = os.path.join(self.pack_dir, 'pack')

        with open(self.pack_prefix + '.pack', 'wb') as f:
            build_pack(f, [
                (REF_DELTA, (self.blobs[b'foo'].id, b'foo1234')),
                (Blob.type_num, b'bar'),
                (REF_DELTA, (self.blobs[b'bar'].id, b'bar2468'))],
                store=self.store)

        # Index the new pack.
        with self.make_pack(True) as pack:
            with PackData(pack._data_path) as data:
                data.pack = pack
                data.create_index(self.pack_prefix + '.idx')

        del self.store[self.blobs[b'bar'].id]

    def make_pack(self, resolve_ext_ref):
        return Pack(
            self.pack_prefix,
            resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None)

    def test_get_raw(self):
        with self.make_pack(False) as p:
            self.assertRaises(
                KeyError, p.get_raw, self.blobs[b'foo1234'].id)
        with self.make_pack(True) as p:
            self.assertEqual(
                (3, b'foo1234'),
                p.get_raw(self.blobs[b'foo1234'].id))

    def test_iterobjects(self):
        with self.make_pack(False) as p:
            self.assertRaises(KeyError, list, p.iterobjects())
        with self.make_pack(True) as p:
            self.assertEqual(
                sorted([self.blobs[b'foo1234'].id, self.blobs[b'bar'].id,
                        self.blobs[b'bar2468'].id]),
                sorted(o.id for o in p.iterobjects()))


class WritePackTests(TestCase):

    def test_write_pack_header(self):
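        # A pack header is the 4-byte magic b'PACK' followed by the version
        # (2) and the object count, both as big-endian 32-bit integers;
        # 42 is 0x2a, hence the trailing b'*'.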
        f = BytesIO()
        write_pack_header(f, 42)
        self.assertEqual(b'PACK\x00\x00\x00\x02\x00\x00\x00*',
                         f.getvalue())

    def test_write_pack_object(self):
        f = BytesIO()
        f.write(b'header')
        offset = f.tell()
        crc32 = write_pack_object(f, Blob.type_num, b'blob')
        self.assertEqual(crc32, zlib.crc32(f.getvalue()[6:]) & 0xffffffff)

        f.write(b'x')  # unpack_object needs extra trailing data.
        f.seek(offset)
        unpacked, unused = unpack_object(f.read, compute_crc32=True)
        self.assertEqual(Blob.type_num, unpacked.pack_type_num)
        self.assertEqual(Blob.type_num, unpacked.obj_type_num)
        self.assertEqual([b'blob'], unpacked.decomp_chunks)
        self.assertEqual(crc32, unpacked.crc32)
        self.assertEqual(b'x', unused)

    def test_write_pack_object_sha(self):
        f = BytesIO()
        f.write(b'header')
        offset = f.tell()
        sha_a = sha1(b'foo')
        sha_b = sha_a.copy()
        write_pack_object(f, Blob.type_num, b'blob', sha=sha_a)
        self.assertNotEqual(sha_a.digest(), sha_b.digest())
        sha_b.update(f.getvalue()[offset:])
        self.assertEqual(sha_a.digest(), sha_b.digest())


pack_checksum = hex_to_sha('721980e866af9a5f93ad674144e1459b8ba3e7b7')


class BaseTestPackIndexWriting(object):
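    # Subclasses set _has_crc32_checksum and _supports_large to reflect the
    # index format under test: version 1 indexes store neither per-object
    # CRC32s nor offsets that need more than 32 bits, while version 2 (and
    # the in-memory index) support both.
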
    def assertSucceeds(self, func, *args, **kwargs):
        try:
            func(*args, **kwargs)
        except ChecksumMismatch as e:
            self.fail(e)

    def index(self, filename, entries, pack_checksum):
        raise NotImplementedError(self.index)

    def test_empty(self):
        idx = self.index('empty.idx', [], pack_checksum)
        self.assertEqual(idx.get_pack_checksum(), pack_checksum)
        self.assertEqual(0, len(idx))

    def test_large(self):
        entry1_sha = hex_to_sha('4e6388232ec39792661e2e75db8fb117fc869ce6')
        entry2_sha = hex_to_sha('e98f071751bd77f59967bfa671cd2caebdccc9a2')
        entries = [(entry1_sha, 0xf2972d0830529b87, 24),
                   (entry2_sha, (~0xf2972d0830529b87) & (2 ** 64 - 1), 92)]
        if not self._supports_large:
            self.assertRaises(TypeError, self.index, 'single.idx',
                              entries, pack_checksum)
            return
        idx = self.index('single.idx', entries, pack_checksum)
        self.assertEqual(idx.get_pack_checksum(), pack_checksum)
        self.assertEqual(2, len(idx))
        actual_entries = list(idx.iterentries())
        self.assertEqual(len(entries), len(actual_entries))
        for mine, actual in zip(entries, actual_entries):
            my_sha, my_offset, my_crc = mine
            actual_sha, actual_offset, actual_crc = actual
            self.assertEqual(my_sha, actual_sha)
            self.assertEqual(my_offset, actual_offset)
            if self._has_crc32_checksum:
                self.assertEqual(my_crc, actual_crc)
            else:
                self.assertTrue(actual_crc is None)

    def test_single(self):
        entry_sha = hex_to_sha('6f670c0fb53f9463760b7295fbb814e965fb20c8')
        my_entries = [(entry_sha, 178, 42)]
        idx = self.index('single.idx', my_entries, pack_checksum)
        self.assertEqual(idx.get_pack_checksum(), pack_checksum)
        self.assertEqual(1, len(idx))
        actual_entries = list(idx.iterentries())
        self.assertEqual(len(my_entries), len(actual_entries))
        for mine, actual in zip(my_entries, actual_entries):
            my_sha, my_offset, my_crc = mine
            actual_sha, actual_offset, actual_crc = actual
            self.assertEqual(my_sha, actual_sha)
            self.assertEqual(my_offset, actual_offset)
            if self._has_crc32_checksum:
                self.assertEqual(my_crc, actual_crc)
            else:
                self.assertTrue(actual_crc is None)


class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting):

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def index(self, filename, entries, pack_checksum):
        path = os.path.join(self.tempdir, filename)
        self.writeIndex(path, entries, pack_checksum)
        idx = load_pack_index(path)
        self.assertSucceeds(idx.check)
        self.assertEqual(idx.version, self._expected_version)
        return idx

    def writeIndex(self, filename, entries, pack_checksum):
        # FIXME: Write to a BytesIO rather than hitting disk?
        with GitFile(filename, "wb") as f:
            self._write_fn(f, entries, pack_checksum)


class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):

    def setUp(self):
        TestCase.setUp(self)
        self._has_crc32_checksum = True
        self._supports_large = True

    def index(self, filename, entries, pack_checksum):
        return MemoryPackIndex(entries, pack_checksum)

    def tearDown(self):
        TestCase.tearDown(self)


class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting):

    def setUp(self):
        TestCase.setUp(self)
        BaseTestFilePackIndexWriting.setUp(self)
        self._has_crc32_checksum = False
        self._expected_version = 1
        self._supports_large = False
        self._write_fn = write_pack_index_v1

    def tearDown(self):
        TestCase.tearDown(self)
        BaseTestFilePackIndexWriting.tearDown(self)


class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):

    def setUp(self):
        TestCase.setUp(self)
        BaseTestFilePackIndexWriting.setUp(self)
        self._has_crc32_checksum = True
        self._supports_large = True
        self._expected_version = 2
        self._write_fn = write_pack_index_v2

    def tearDown(self):
        TestCase.tearDown(self)
        BaseTestFilePackIndexWriting.tearDown(self)


class ReadZlibTests(TestCase):

    decomp = (
        b'tree 4ada885c9196b6b6fa08744b5862bf92896fc002\n'
        b'parent None\n'
        b'author Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n'
        b'committer Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n'
        b'\n'
        b"Provide replacement for mmap()'s offset argument.")
    comp = zlib.compress(decomp)
    extra = b'nextobject'
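    # 'extra' stands in for whatever follows the compressed object in a pack
    # stream; the tests below check that read_zlib_chunks hands back exactly
    # the bytes it over-read past the end of the zlib data.
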
    def setUp(self):
        super(ReadZlibTests, self).setUp()
        self.read = BytesIO(self.comp + self.extra).read
        self.unpacked = UnpackedObject(Tree.type_num, None, len(self.decomp), 0)

    def test_decompress_size(self):
        good_decomp_len = len(self.decomp)
        self.unpacked.decomp_len = -1
        self.assertRaises(ValueError, read_zlib_chunks, self.read,
                          self.unpacked)
        self.unpacked.decomp_len = good_decomp_len - 1
        self.assertRaises(zlib.error, read_zlib_chunks, self.read,
                          self.unpacked)
        self.unpacked.decomp_len = good_decomp_len + 1
        self.assertRaises(zlib.error, read_zlib_chunks, self.read,
                          self.unpacked)

    def test_decompress_truncated(self):
        read = BytesIO(self.comp[:10]).read
        self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)

        read = BytesIO(self.comp).read
        self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)

    def test_decompress_empty(self):
        unpacked = UnpackedObject(Tree.type_num, None, 0, None)
        comp = zlib.compress(b'')
        read = BytesIO(comp + self.extra).read
        unused = read_zlib_chunks(read, unpacked)
        self.assertEqual(b'', b''.join(unpacked.decomp_chunks))
        self.assertNotEqual(b'', unused)
        self.assertEqual(self.extra, unused + read())

    def test_decompress_no_crc32(self):
        self.unpacked.crc32 = None
        read_zlib_chunks(self.read, self.unpacked)
        self.assertEqual(None, self.unpacked.crc32)

    def _do_decompress_test(self, buffer_size, **kwargs):
        unused = read_zlib_chunks(self.read, self.unpacked,
                                  buffer_size=buffer_size, **kwargs)
        self.assertEqual(self.decomp, b''.join(self.unpacked.decomp_chunks))
        self.assertEqual(zlib.crc32(self.comp), self.unpacked.crc32)
        self.assertNotEqual(b'', unused)
        self.assertEqual(self.extra, unused + self.read())

    def test_simple_decompress(self):
        self._do_decompress_test(4096)
        self.assertEqual(None, self.unpacked.comp_chunks)

    # These buffer sizes are not intended to be realistic, but rather simulate
    # larger buffer sizes that may end at various places.
    def test_decompress_buffer_size_1(self):
        self._do_decompress_test(1)

    def test_decompress_buffer_size_2(self):
        self._do_decompress_test(2)

    def test_decompress_buffer_size_3(self):
        self._do_decompress_test(3)

    def test_decompress_buffer_size_4(self):
        self._do_decompress_test(4)

    def test_decompress_include_comp(self):
        self._do_decompress_test(4096, include_comp=True)
        self.assertEqual(self.comp, b''.join(self.unpacked.comp_chunks))


class DeltifyTests(TestCase):
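    # As the expected values below show, deltify_pack_objects yields
    # (type_num, sha, delta_base, body) tuples: an object stored whole has a
    # delta base of None, while a deltified object carries the digest of the
    # object it was deltified against.
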
    def test_empty(self):
        self.assertEqual([], list(deltify_pack_objects([])))

    def test_single(self):
        b = Blob.from_string(b"foo")
        self.assertEqual(
            [(b.type_num, b.sha().digest(), None, b.as_raw_string())],
            list(deltify_pack_objects([(b, b"")])))

    def test_simple_delta(self):
        b1 = Blob.from_string(b"a" * 101)
        b2 = Blob.from_string(b"a" * 100)
        delta = create_delta(b1.as_raw_string(), b2.as_raw_string())
        self.assertEqual([
            (b1.type_num, b1.sha().digest(), None, b1.as_raw_string()),
            (b2.type_num, b2.sha().digest(), b1.sha().digest(), delta)
            ],
            list(deltify_pack_objects([(b1, b""), (b2, b"")])))


class TestPackStreamReader(TestCase):

    def test_read_objects_emtpy(self):
        f = BytesIO()
        build_pack(f, [])
        reader = PackStreamReader(f.read)
        self.assertEqual(0, len(list(reader.read_objects())))

    def test_read_objects(self):
        f = BytesIO()
        entries = build_pack(f, [
            (Blob.type_num, b'blob'),
            (OFS_DELTA, (0, b'blob1')),
            ])
        reader = PackStreamReader(f.read)
        objects = list(reader.read_objects(compute_crc32=True))
        self.assertEqual(2, len(objects))

        unpacked_blob, unpacked_delta = objects

        self.assertEqual(entries[0][0], unpacked_blob.offset)
        self.assertEqual(Blob.type_num, unpacked_blob.pack_type_num)
        self.assertEqual(Blob.type_num, unpacked_blob.obj_type_num)
        self.assertEqual(None, unpacked_blob.delta_base)
        self.assertEqual(b'blob', b''.join(unpacked_blob.decomp_chunks))
        self.assertEqual(entries[0][4], unpacked_blob.crc32)

        self.assertEqual(entries[1][0], unpacked_delta.offset)
        self.assertEqual(OFS_DELTA, unpacked_delta.pack_type_num)
        self.assertEqual(None, unpacked_delta.obj_type_num)
        self.assertEqual(unpacked_delta.offset - unpacked_blob.offset,
                         unpacked_delta.delta_base)
        delta = create_delta(b'blob', b'blob1')
        self.assertEqual(delta, b''.join(unpacked_delta.decomp_chunks))
        self.assertEqual(entries[1][4], unpacked_delta.crc32)

    def test_read_objects_buffered(self):
        f = BytesIO()
        build_pack(f, [
            (Blob.type_num, b'blob'),
            (OFS_DELTA, (0, b'blob1')),
            ])
        reader = PackStreamReader(f.read, zlib_bufsize=4)
        self.assertEqual(2, len(list(reader.read_objects())))

    def test_read_objects_empty(self):
        reader = PackStreamReader(BytesIO().read)
        self.assertEqual([], list(reader.read_objects()))


class TestPackIterator(DeltaChainIterator):

    _compute_crc32 = True

    def __init__(self, *args, **kwargs):
        super(TestPackIterator, self).__init__(*args, **kwargs)
        self._unpacked_offsets = set()

    def _result(self, unpacked):
        """Return entries in the same format as build_pack."""
        return (unpacked.offset, unpacked.obj_type_num,
                b''.join(unpacked.obj_chunks), unpacked.sha(), unpacked.crc32)

    def _resolve_object(self, offset, pack_type_num, base_chunks):
        assert offset not in self._unpacked_offsets, (
            'Attempted to re-inflate offset %i' % offset)
        self._unpacked_offsets.add(offset)
        return super(TestPackIterator, self)._resolve_object(
            offset, pack_type_num, base_chunks)


class DeltaChainIteratorTests(TestCase):
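    # In the build_pack specs used below, an integer delta base names another
    # entry in the same spec list (by index), while a SHA names an object that
    # lives outside the pack, i.e. a thin-pack external reference.
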
    def setUp(self):
        super(DeltaChainIteratorTests, self).setUp()
        self.store = MemoryObjectStore()
        self.fetched = set()

    def store_blobs(self, blobs_data):
        blobs = []
        for data in blobs_data:
            blob = make_object(Blob, data=data)
            blobs.append(blob)
            self.store.add_object(blob)
        return blobs

    def get_raw_no_repeat(self, bin_sha):
        """Wrapper around store.get_raw that doesn't allow repeat lookups."""
        hex_sha = sha_to_hex(bin_sha)
        self.assertFalse(hex_sha in self.fetched,
                         'Attempted to re-fetch object %s' % hex_sha)
        self.fetched.add(hex_sha)
        return self.store.get_raw(hex_sha)

    def make_pack_iter(self, f, thin=None):
        if thin is None:
            thin = bool(list(self.store))
        resolve_ext_ref = thin and self.get_raw_no_repeat or None
        data = PackData('test.pack', file=f)
        return TestPackIterator.for_pack_data(
            data, resolve_ext_ref=resolve_ext_ref)

    def assertEntriesMatch(self, expected_indexes, entries, pack_iter):
        expected = [entries[i] for i in expected_indexes]
        self.assertEqual(expected, list(pack_iter._walk_all_chains()))

    def test_no_deltas(self):
        f = BytesIO()
        entries = build_pack(f, [
            (Commit.type_num, b'commit'),
            (Blob.type_num, b'blob'),
            (Tree.type_num, b'tree'),
            ])
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))

    def test_ofs_deltas(self):
        f = BytesIO()
        entries = build_pack(f, [
            (Blob.type_num, b'blob'),
            (OFS_DELTA, (0, b'blob1')),
            (OFS_DELTA, (0, b'blob2')),
            ])
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))

    def test_ofs_deltas_chain(self):
        f = BytesIO()
        entries = build_pack(f, [
            (Blob.type_num, b'blob'),
            (OFS_DELTA, (0, b'blob1')),
            (OFS_DELTA, (1, b'blob2')),
            ])
        self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))

    def test_ref_deltas(self):
        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (1, b'blob1')),
            (Blob.type_num, (b'blob')),
            (REF_DELTA, (1, b'blob2')),
            ])
        self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f))

    def test_ref_deltas_chain(self):
        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (2, b'blob1')),
            (Blob.type_num, (b'blob')),
            (REF_DELTA, (1, b'blob2')),
            ])
        self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))

    def test_ofs_and_ref_deltas(self):
        # Deltas pending on this offset are popped before deltas depending on
        # this ref.
        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (1, b'blob1')),
            (Blob.type_num, (b'blob')),
            (OFS_DELTA, (1, b'blob2')),
            ])
        self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))

    def test_mixed_chain(self):
        f = BytesIO()
        entries = build_pack(f, [
            (Blob.type_num, b'blob'),
            (REF_DELTA, (2, b'blob2')),
            (OFS_DELTA, (0, b'blob1')),
            (OFS_DELTA, (1, b'blob3')),
            (OFS_DELTA, (0, b'bob')),
            ])
        self.assertEntriesMatch([0, 2, 4, 1, 3], entries,
                                self.make_pack_iter(f))

    def test_long_chain(self):
        n = 100
        objects_spec = [(Blob.type_num, b'blob')]
        for i in range(n):
            objects_spec.append(
                (OFS_DELTA, (i, b'blob' + str(i).encode('ascii'))))
        f = BytesIO()
        entries = build_pack(f, objects_spec)
        self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f))

    def test_branchy_chain(self):
        n = 100
        objects_spec = [(Blob.type_num, b'blob')]
        for i in range(n):
            objects_spec.append(
                (OFS_DELTA, (0, b'blob' + str(i).encode('ascii'))))
        f = BytesIO()
        entries = build_pack(f, objects_spec)
        self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f))

    def test_ext_ref(self):
        blob, = self.store_blobs([b'blob'])
        f = BytesIO()
        entries = build_pack(f, [(REF_DELTA, (blob.id, b'blob1'))],
                             store=self.store)
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_ext_ref_chain(self):
        blob, = self.store_blobs([b'blob'])
        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (1, b'blob2')),
            (REF_DELTA, (blob.id, b'blob1')),
            ], store=self.store)
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([1, 0], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_ext_ref_chain_degenerate(self):
        # Test a degenerate case where the sender is sending a REF_DELTA
        # object that expands to an object already in the repository.
        blob, = self.store_blobs([b'blob'])
        blob2, = self.store_blobs([b'blob2'])
        assert blob.id < blob2.id

        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (blob.id, b'blob2')),
            (REF_DELTA, (0, b'blob3')),
            ], store=self.store)
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0, 1], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_ext_ref_multiple_times(self):
        blob, = self.store_blobs([b'blob'])
        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (blob.id, b'blob1')),
            (REF_DELTA, (blob.id, b'blob2')),
            ], store=self.store)
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0, 1], entries, pack_iter)
        self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())

    def test_multiple_ext_refs(self):
        b1, b2 = self.store_blobs([b'foo', b'bar'])
        f = BytesIO()
        entries = build_pack(f, [
            (REF_DELTA, (b1.id, b'foo1')),
            (REF_DELTA, (b2.id, b'bar2')),
            ], store=self.store)
        pack_iter = self.make_pack_iter(f)
        self.assertEntriesMatch([0, 1], entries, pack_iter)
        self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)],
                         pack_iter.ext_refs())

    def test_bad_ext_ref_non_thin_pack(self):
        blob, = self.store_blobs([b'blob'])
        f = BytesIO()
        entries = build_pack(f, [(REF_DELTA, (blob.id, b'blob1'))],
                             store=self.store)
        pack_iter = self.make_pack_iter(f, thin=False)
        try:
            list(pack_iter._walk_all_chains())
            self.fail()
        except KeyError as e:
            self.assertEqual(([blob.id],), e.args)

    def test_bad_ext_ref_thin_pack(self):
        b1, b2, b3 = self.store_blobs([b'foo', b'bar', b'baz'])
        f = BytesIO()
        build_pack(f, [
            (REF_DELTA, (1, b'foo99')),
            (REF_DELTA, (b1.id, b'foo1')),
            (REF_DELTA, (b2.id, b'bar2')),
            (REF_DELTA, (b3.id, b'baz3')),
            ], store=self.store)
        del self.store[b2.id]
        del self.store[b3.id]
        pack_iter = self.make_pack_iter(f)
        try:
            list(pack_iter._walk_all_chains())
            self.fail()
        except KeyError as e:
            self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.args[0]),))
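

# _delta_encode_size emits the delta header varint: seven value bits per byte,
# least significant group first, with the high bit set on every byte except
# the last (so 1000 = 0b111_1101000 becomes b'\xe8\x07').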
class DeltaEncodeSizeTests(TestCase):

    def test_basic(self):
        self.assertEqual(b'\x00', _delta_encode_size(0))
        self.assertEqual(b'\x01', _delta_encode_size(1))
        self.assertEqual(b'\xfa\x01', _delta_encode_size(250))
        self.assertEqual(b'\xe8\x07', _delta_encode_size(1000))
        self.assertEqual(b'\xa0\x8d\x06', _delta_encode_size(100000))
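

# _encode_copy_operation packs a copy instruction: the leading byte has its
# high bit set, and each of its low seven bits flags whether a byte of the
# offset (bits 0-3) or of the length (bits 4-6) follows; zero bytes are
# simply omitted.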
class EncodeCopyOperationTests(TestCase):

    def test_basic(self):
        self.assertEqual(b'\x80', _encode_copy_operation(0, 0))
        self.assertEqual(b'\x91\x01\x0a', _encode_copy_operation(1, 10))
        self.assertEqual(b'\xb1\x64\xe8\x03', _encode_copy_operation(100, 1000))
        self.assertEqual(b'\x93\xe8\x03\x01', _encode_copy_operation(1000, 1))