# utils.py -- Test utilities for Dulwich.
# Copyright (C) 2010 Google, Inc.
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Utility functions common to Dulwich tests."""

import datetime
import os
import shutil
import tempfile
import time
import types
import warnings

from dulwich.tests import SkipTest

from ..index import commit_tree
from ..objects import Commit, FixedSha, Tag, object_class
from ..pack import (
    DELTA_TYPES,
    OFS_DELTA,
    REF_DELTA,
    SHA1Writer,
    create_delta,
    obj_sha,
    write_pack_header,
    write_pack_object,
)
from ..repo import Repo

# Plain files are very frequently used in tests, so let the mode be very short.
F = 0o100644  # Shorthand mode for Files.


def open_repo(name, temp_dir=None):
    """Open a copy of a repo in a temporary directory.

    Use this function for accessing repos in testdata/repos to avoid
    accidentally or intentionally modifying those repos in place. Use
    tear_down_repo to delete any temp files created.

    Args:
      name: The name of the repository, relative to
        testdata/repos
      temp_dir: temporary directory to initialize to. If not provided, a
        temporary directory will be created.
    Returns: An initialized Repo object that lives in a temporary directory.
    """
    if temp_dir is None:
        temp_dir = tempfile.mkdtemp()
    repo_dir = os.path.join(
        os.path.dirname(__file__), "..", "..", "testdata", "repos", name
    )
    temp_repo_dir = os.path.join(temp_dir, name)
    shutil.copytree(repo_dir, temp_repo_dir, symlinks=True)
    return Repo(temp_repo_dir)


def tear_down_repo(repo):
    """Tear down a test repository."""
    repo.close()
    temp_dir = os.path.dirname(repo.path.rstrip(os.sep))
    shutil.rmtree(temp_dir)

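
# Illustrative usage sketch (not part of the original module): a test would
# typically pair open_repo with tear_down_repo so the temporary copy is always
# removed. The repository name 'a.git' is assumed to exist under
# testdata/repos.
def _example_open_and_tear_down_repo():
    repo = open_repo("a.git")
    try:
        head = repo.head()
    finally:
        tear_down_repo(repo)
    return head
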
def make_object(cls, **attrs):
    """Make an object for testing and assign some members.

    This function creates a new subclass to allow arbitrary attribute
    reassignment, which is not otherwise possible with objects having
    __slots__.

    Args:
      attrs: dict of attributes to set on the new object.
    Returns: A newly initialized object of type cls.
    """

    class TestObject(cls):
        """Class that inherits from the given class, but without __slots__.

        Note that classes with __slots__ can't have arbitrary attributes
        monkey-patched in, so this is a class that is exactly the same only
        with a __dict__ instead of __slots__.
        """

    TestObject.__name__ = "TestObject_" + cls.__name__

    obj = TestObject()
    for name, value in attrs.items():
        if name == "id":
            # id property is read-only, so we overwrite sha instead.
            sha = FixedSha(value)
            obj.sha = lambda: sha
        else:
            setattr(obj, name, value)
    return obj

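
# Illustrative usage sketch (not part of the original module): make_object can
# pin an object's id, which a plain Blob (having __slots__) would not allow.
# The 40-character sha below is an arbitrary example value.
def _example_make_blob_with_fixed_id():
    from ..objects import Blob

    blob = make_object(Blob, data=b"some data", id=b"e" * 40)
    assert blob.id == b"e" * 40
    return blob
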
def make_commit(**attrs):
    """Make a Commit object with a default set of members.

    Args:
      attrs: dict of attributes to overwrite from the default values.
    Returns: A newly initialized Commit object.
    """
    default_time = 1262304000  # 2010-01-01 00:00:00
    all_attrs = {
        "author": b"Test Author <test@nodomain.com>",
        "author_time": default_time,
        "author_timezone": 0,
        "committer": b"Test Committer <test@nodomain.com>",
        "commit_time": default_time,
        "commit_timezone": 0,
        "message": b"Test message.",
        "parents": [],
        "tree": b"0" * 40,
    }
    all_attrs.update(attrs)
    return make_object(Commit, **all_attrs)

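
# Illustrative usage sketch (not part of the original module): any default can
# be overridden by keyword; here only the message and parent list change, and
# everything else keeps the defaults above. parent_sha is a hypothetical
# 40-byte hex SHA.
def _example_make_child_commit(parent_sha):
    return make_commit(message=b"Fix a bug.", parents=[parent_sha])
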
def make_tag(target, **attrs):
    """Make a Tag object with a default set of values.

    Args:
      target: object to be tagged (Commit, Blob, Tree, etc)
      attrs: dict of attributes to overwrite from the default values.
    Returns: A newly initialized Tag object.
    """
    target_id = target.id
    target_type = object_class(target.type_name)
    default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple()))
    all_attrs = {
        "tagger": b"Test Author <test@nodomain.com>",
        "tag_time": default_time,
        "tag_timezone": 0,
        "message": b"Test message.",
        "object": (target_type, target_id),
        "name": b"Test Tag",
    }
    all_attrs.update(attrs)
    return make_object(Tag, **all_attrs)

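
# Illustrative usage sketch (not part of the original module): tag an existing
# commit object, overriding only the tag name.
def _example_make_release_tag(commit):
    return make_tag(commit, name=b"v1.0")
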
def functest_builder(method, func):
    """Generate a test method that tests the given function."""

    def do_test(self):
        method(self, func)

    return do_test


def ext_functest_builder(method, func):
    """Generate a test method that tests the given extension function.

    This is intended to generate test methods that test both a pure-Python
    version and an extension version using common test code. The extension test
    will raise SkipTest if the extension is not found.

    Sample usage:

      class MyTest(TestCase):
          def _do_some_test(self, func_impl):
              self.assertEqual('foo', func_impl())

          test_foo = functest_builder(_do_some_test, foo_py)
          test_foo_extension = ext_functest_builder(_do_some_test, _foo_c)

    Args:
      method: The method to run. It must take two parameters, self and the
        function implementation to test.
      func: The function implementation to pass to method.
    """

    def do_test(self):
        if not isinstance(func, types.BuiltinFunctionType):
            raise SkipTest("%s extension not found" % func)
        method(self, func)

    return do_test

def build_pack(f, objects_spec, store=None):
    """Write test pack data from a concise spec.

    Args:
      f: A file-like object to write the pack to.
      objects_spec: A list of (type_num, obj). For non-delta types, obj
        is the string of that object's data.
        For delta types, obj is a tuple of (base, data), where:

        * base can be either an index in objects_spec of the base for that
          delta; or for a ref delta, a SHA, in which case the resulting pack
          will be thin and the base will be an external ref.
        * data is a string of the full, non-deltified data for that object.

        Note that offsets/refs and deltas are computed within this function.
      store: An optional ObjectStore for looking up external refs.
    Returns: A list of tuples in the order specified by objects_spec:
        (offset, type num, data, sha, CRC32)
    """
    sf = SHA1Writer(f)
    num_objects = len(objects_spec)
    write_pack_header(sf.write, num_objects)

    full_objects = {}
    offsets = {}
    crc32s = {}

    # First pass: resolve the full (non-delta) data and SHA for every entry,
    # looping until delta bases defined later in the spec have been resolved.
    while len(full_objects) < num_objects:
        for i, (type_num, data) in enumerate(objects_spec):
            if type_num not in DELTA_TYPES:
                full_objects[i] = (type_num, data, obj_sha(type_num, [data]))
                continue

            base, data = data
            if isinstance(base, int):
                if base not in full_objects:
                    continue
                base_type_num, _, _ = full_objects[base]
            else:
                base_type_num, _ = store.get_raw(base)
            full_objects[i] = (
                base_type_num,
                data,
                obj_sha(base_type_num, [data]),
            )

    # Second pass: write each object, computing deltas against their bases.
    for i, (type_num, obj) in enumerate(objects_spec):
        offset = f.tell()
        if type_num == OFS_DELTA:
            base_index, data = obj
            base = offset - offsets[base_index]
            _, base_data, _ = full_objects[base_index]
            obj = (base, list(create_delta(base_data, data)))
        elif type_num == REF_DELTA:
            base_ref, data = obj
            if isinstance(base_ref, int):
                _, base_data, base = full_objects[base_ref]
            else:
                base_type_num, base_data = store.get_raw(base_ref)
                base = obj_sha(base_type_num, base_data)
            obj = (base, list(create_delta(base_data, data)))
        crc32 = write_pack_object(sf.write, type_num, obj)
        offsets[i] = offset
        crc32s[i] = crc32

    expected = []
    for i in range(num_objects):
        type_num, data, sha = full_objects[i]
        assert len(sha) == 20
        expected.append((offsets[i], type_num, data, sha, crc32s[i]))

    sf.write_sha()
    f.seek(0)
    return expected

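
# Illustrative usage sketch (not part of the original module): build a tiny
# pack with one full blob and one OFS_DELTA whose base is entry 0. BytesIO and
# Blob.type_num are assumptions made for this example; the spec layout follows
# the docstring above.
def _example_build_small_pack():
    from io import BytesIO

    from ..objects import Blob

    f = BytesIO()
    entries = build_pack(
        f,
        [
            (Blob.type_num, b"base blob data"),
            (OFS_DELTA, (0, b"base blob data, slightly changed")),
        ],
    )
    # Each entry is (offset, type_num, data, sha, crc32), in spec order.
    return f, entries
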
def build_commit_graph(object_store, commit_spec, trees=None, attrs=None):
    """Build a commit graph from a concise specification.

    Sample usage:
    >>> c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
    >>> store[store[c3].parents[0]] == c1
    True
    >>> store[store[c3].parents[1]] == c2
    True

    If not otherwise specified, commits will refer to the empty tree and have
    commit times increasing in the same order as the commit spec.

    Args:
      object_store: An ObjectStore to commit objects to.
      commit_spec: An iterable of iterables of ints defining the commit
        graph. Each entry defines one commit, and entries must be in
        topological order. The first element of each entry is a commit number,
        and the remaining elements are its parents. The commit numbers are
        only meaningful within this call; since real commit objects are
        created, they will get created with real, opaque SHAs.
      trees: An optional dict of commit number -> tree spec for building
        trees for commits. The tree spec is an iterable of (path, blob, mode)
        or (path, blob) entries; if mode is omitted, it defaults to the normal
        file mode (0100644).
      attrs: A dict of commit number -> (dict of attribute -> value) for
        assigning additional values to the commits.
    Returns: The list of commit objects created.

    Raises:
      ValueError: If an undefined commit identifier is listed as a parent.
    """
    if trees is None:
        trees = {}
    if attrs is None:
        attrs = {}
    commit_time = 0
    nums = {}
    commits = []

    for commit in commit_spec:
        commit_num = commit[0]
        try:
            parent_ids = [nums[pn] for pn in commit[1:]]
        except KeyError as exc:
            (missing_parent,) = exc.args
            raise ValueError("Unknown parent %i" % missing_parent) from exc

        blobs = []
        for entry in trees.get(commit_num, []):
            if len(entry) == 2:
                path, blob = entry
                entry = (path, blob, F)
            path, blob, mode = entry
            blobs.append((path, blob.id, mode))
            object_store.add_object(blob)
        tree_id = commit_tree(object_store, blobs)

        commit_attrs = {
            "message": ("Commit %i" % commit_num).encode("ascii"),
            "parents": parent_ids,
            "tree": tree_id,
            "commit_time": commit_time,
        }
        commit_attrs.update(attrs.get(commit_num, {}))
        commit_obj = make_commit(**commit_attrs)

        # By default, increment the time by a lot. Out-of-order commits should
        # be closer together than this because their main cause is clock skew.
        commit_time = commit_attrs["commit_time"] + 100
        nums[commit_num] = commit_obj.id
        object_store.add_object(commit_obj)
        commits.append(commit_obj)

    return commits

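
# Illustrative usage sketch (not part of the original module): give commit 1 a
# one-file tree via the trees argument. MemoryObjectStore and Blob are assumed
# here purely for the example.
def _example_build_commit_graph_with_tree():
    from ..object_store import MemoryObjectStore
    from ..objects import Blob

    store = MemoryObjectStore()
    blob = make_object(Blob, data=b"hello")
    c1, c2 = build_commit_graph(
        store, [[1], [2, 1]], trees={1: [(b"hello.txt", blob)]}
    )
    return store, c1, c2
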
def setup_warning_catcher():
    """Wrap warnings.showwarning with code that records warnings."""
    caught_warnings = []
    original_showwarning = warnings.showwarning

    def custom_showwarning(*args, **kwargs):
        caught_warnings.append(args[0])

    warnings.showwarning = custom_showwarning

    def restore_showwarning():
        warnings.showwarning = original_showwarning

    return caught_warnings, restore_showwarning

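
# Illustrative usage sketch (not part of the original module): record warnings
# raised by code under test and restore the original handler afterwards.
def _example_catch_warnings():
    caught, restore = setup_warning_catcher()
    try:
        warnings.warn("something worth flagging", UserWarning)
    finally:
        restore()
    # caught now holds the warning instances seen while the catcher was active.
    return caught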