# diff_tree.py -- Utilities for diffing files and trees.
# Copyright (C) 2010 Google, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# or (at your option) any later version of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.

"""Utilities for diffing files and trees."""

from cStringIO import StringIO
import itertools
import stat

from dulwich._compat import (
    defaultdict,
    namedtuple,
    )
from dulwich.objects import (
    S_ISGITLINK,
    TreeEntry,
    )

# TreeChange type constants.
CHANGE_ADD = 'add'
CHANGE_MODIFY = 'modify'
CHANGE_DELETE = 'delete'
CHANGE_RENAME = 'rename'
CHANGE_COPY = 'copy'
CHANGE_UNCHANGED = 'unchanged'

RENAME_CHANGE_TYPES = (CHANGE_RENAME, CHANGE_COPY)

_NULL_ENTRY = TreeEntry(None, None, None)

_MAX_SCORE = 100
RENAME_THRESHOLD = 60
MAX_FILES = 200
REWRITE_THRESHOLD = None


class TreeChange(namedtuple('TreeChange', ['type', 'old', 'new'])):
    """Named tuple representing a single change between two trees."""

    @classmethod
    def add(cls, new):
        return cls(CHANGE_ADD, _NULL_ENTRY, new)

    @classmethod
    def delete(cls, old):
        return cls(CHANGE_DELETE, old, _NULL_ENTRY)


def _tree_entries(path, tree):
    result = []
    if not tree:
        return result
    for entry in tree.iteritems(name_order=True):
        result.append(entry.in_path(path))
    return result


def _merge_entries(path, tree1, tree2):
    """Merge the entries of two trees.

    :param path: A path to prepend to all tree entry names.
    :param tree1: The first Tree object to iterate, or None.
    :param tree2: The second Tree object to iterate, or None.
    :return: A list of pairs of TreeEntry objects for each pair of entries in
        the trees. If an entry exists in one tree but not the other, the other
        entry will have all attributes set to None. If neither entry's path is
        None, they are guaranteed to match.
    """
    entries1 = _tree_entries(path, tree1)
    entries2 = _tree_entries(path, tree2)
    i1 = i2 = 0
    len1 = len(entries1)
    len2 = len(entries2)

    result = []
    while i1 < len1 and i2 < len2:
        entry1 = entries1[i1]
        entry2 = entries2[i2]
        if entry1.path < entry2.path:
            result.append((entry1, _NULL_ENTRY))
            i1 += 1
        elif entry1.path > entry2.path:
            result.append((_NULL_ENTRY, entry2))
            i2 += 1
        else:
            result.append((entry1, entry2))
            i1 += 1
            i2 += 1
    for i in xrange(i1, len1):
        result.append((entries1[i], _NULL_ENTRY))
    for i in xrange(i2, len2):
        result.append((_NULL_ENTRY, entries2[i]))
    return result


def _is_tree(entry):
    mode = entry.mode
    if mode is None:
        return False
    return stat.S_ISDIR(mode)


def walk_trees(store, tree1_id, tree2_id, prune_identical=False):
    """Recursively walk all the entries of two trees.

    Iteration is depth-first pre-order, as in e.g. os.walk.

    :param store: An ObjectStore for looking up objects.
    :param tree1_id: The SHA of the first Tree object to iterate, or None.
    :param tree2_id: The SHA of the second Tree object to iterate, or None.
    :param prune_identical: If True, identical subtrees will not be walked.
    :return: Iterator over pairs of TreeEntry objects for each pair of entries
        in the trees and their subtrees recursively. If an entry exists in one
        tree but not the other, the other entry will have all attributes set
        to None. If neither entry's path is None, they are guaranteed to
        match.
    """
    # This could be fairly easily generalized to >2 trees if we find a use
    # case.
    mode1 = tree1_id and stat.S_IFDIR or None
    mode2 = tree2_id and stat.S_IFDIR or None
    todo = [(TreeEntry('', mode1, tree1_id), TreeEntry('', mode2, tree2_id))]
    while todo:
        entry1, entry2 = todo.pop()
        is_tree1 = _is_tree(entry1)
        is_tree2 = _is_tree(entry2)
        if prune_identical and is_tree1 and is_tree2 and entry1 == entry2:
            continue

        tree1 = is_tree1 and store[entry1.sha] or None
        tree2 = is_tree2 and store[entry2.sha] or None
        path = entry1.path or entry2.path
        todo.extend(reversed(_merge_entries(path, tree1, tree2)))
        yield entry1, entry2


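# Illustrative usage sketch, not part of the module API: assumes a dulwich
# Repo at a hypothetical path, with `old_sha` and `new_sha` standing in for
# two commit SHAs present in that repository.
#
#   from dulwich.repo import Repo
#   repo = Repo('/path/to/repo')
#   old_tree = repo[old_sha].tree
#   new_tree = repo[new_sha].tree
#   for entry1, entry2 in walk_trees(repo.object_store, old_tree, new_tree,
#                                    prune_identical=True):
#       # Each item is a pair of TreeEntry(path, mode, sha); one side has all
#       # attributes set to None when the path exists in only one tree.
#       pass

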
def _skip_tree(entry):
    if entry.mode is None or stat.S_ISDIR(entry.mode):
        return _NULL_ENTRY
    return entry


def tree_changes(store, tree1_id, tree2_id, want_unchanged=False,
                 rename_detector=None):
    """Find the differences between the contents of two trees.

    :param store: An ObjectStore for looking up objects.
    :param tree1_id: The SHA of the source tree.
    :param tree2_id: The SHA of the target tree.
    :param want_unchanged: If True, include TreeChanges for unmodified entries
        as well.
    :param rename_detector: RenameDetector object for detecting renames.
    :return: Iterator over TreeChange instances for each change between the
        source and target tree.
    """
    if (rename_detector is not None and tree1_id is not None and
            tree2_id is not None):
        for change in rename_detector.changes_with_renames(
                tree1_id, tree2_id, want_unchanged=want_unchanged):
            yield change
        return

    entries = walk_trees(store, tree1_id, tree2_id,
                         prune_identical=(not want_unchanged))
    for entry1, entry2 in entries:
        if entry1 == entry2 and not want_unchanged:
            continue

        # Treat entries for trees as missing.
        entry1 = _skip_tree(entry1)
        entry2 = _skip_tree(entry2)

        if entry1 != _NULL_ENTRY and entry2 != _NULL_ENTRY:
            if stat.S_IFMT(entry1.mode) != stat.S_IFMT(entry2.mode):
                # File type changed: report as delete/add.
                yield TreeChange.delete(entry1)
                entry1 = _NULL_ENTRY
                change_type = CHANGE_ADD
            elif entry1 == entry2:
                change_type = CHANGE_UNCHANGED
            else:
                change_type = CHANGE_MODIFY
        elif entry1 != _NULL_ENTRY:
            change_type = CHANGE_DELETE
        elif entry2 != _NULL_ENTRY:
            change_type = CHANGE_ADD
        else:
            # Both were None because at least one was a tree.
            continue
        yield TreeChange(change_type, entry1, entry2)


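# Illustrative sketch of consuming tree_changes (hypothetical `store`,
# `tree1_id` and `tree2_id`):
#
#   for change in tree_changes(store, tree1_id, tree2_id):
#       # change.type is one of the CHANGE_* constants; change.old and
#       # change.new are TreeEntry(path, mode, sha) tuples, with the missing
#       # side equal to _NULL_ENTRY for adds and deletes.
#       if change.type == CHANGE_ADD:
#           print 'A %s' % change.new.path
#       elif change.type == CHANGE_DELETE:
#           print 'D %s' % change.old.path
#       elif change.type == CHANGE_MODIFY:
#           print 'M %s' % change.new.path

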
def _all_eq(seq, key, value):
    for e in seq:
        if key(e) != value:
            return False
    return True


def _all_same(seq, key):
    return _all_eq(seq[1:], key, key(seq[0]))


def _matches_any_parent(store, parent_tree_ids, changes):
    have = [c for c in changes if c is not None]
    assert have
    new = have[0].new
    # Look in changes for parents we already have first.
    for change in have:
        if new.sha == change.old.sha:
            return True
    # A change may be None if that path was unchanged, so we need to actually
    # look up the SHA for that path in any parent trees.
    # TODO: We could precompute these old_shas (e.g. by passing want_unchanged
    # to tree_changes), but the assumption is that the cost of tree lookups
    # due to conflicts is less than the savings we're getting by pruning
    # identical subtrees.
    missing = [p for p, c in zip(parent_tree_ids, changes) if c is None]
    get = store.__getitem__
    for parent_tree_id in missing:
        tree = get(parent_tree_id)
        try:
            _, old_sha = tree.lookup_path(get, new.path)
        except KeyError:
            continue
        if new.sha == old_sha:
            return True
    return False


def tree_changes_for_merge(store, parent_tree_ids, tree_id,
                           rename_detector=None):
    """Get the tree changes for a merge tree relative to all its parents.

    :param store: An ObjectStore for looking up objects.
    :param parent_tree_ids: An iterable of the SHAs of the parent trees.
    :param tree_id: The SHA of the merge tree.
    :param rename_detector: RenameDetector object for detecting renames.
    :yield: Lists of TreeChange objects, one per conflicted path in the merge.

        Each list contains one element per parent, with the TreeChange for
        that path relative to that parent. An element may be None if it never
        existed in one parent and was deleted in two others.

        A path is only included in the output if it is a conflict, i.e. its
        SHA in the merge tree is not found in any of the parents, or in the
        case of deletes, if not all of the old SHAs match.
    """
    all_parent_changes = [tree_changes(store, t, tree_id,
                                       rename_detector=rename_detector)
                          for t in parent_tree_ids]
    num_parents = len(parent_tree_ids)
    changes_by_path = defaultdict(lambda: [None] * num_parents)

    # Organize by path.
    for i, parent_changes in enumerate(all_parent_changes):
        for change in parent_changes:
            if change.type == CHANGE_DELETE:
                path = change.old.path
            else:
                path = change.new.path
            changes_by_path[path][i] = change

    old_sha = lambda c: c.old.sha
    change_type = lambda c: c.type

    # Yield only conflicting changes.
    for _, changes in sorted(changes_by_path.iteritems()):
        assert len(changes) == num_parents
        have = [c for c in changes if c is not None]
        if _all_eq(have, change_type, CHANGE_DELETE):
            if not _all_same(have, old_sha):
                yield changes
        elif not _matches_any_parent(store, parent_tree_ids, changes):
            yield changes


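# Illustrative sketch for a merge commit (hypothetical `store` and
# `merge_sha`; the parents and tree are looked up through the store):
#
#   merge = store[merge_sha]
#   parent_trees = [store[p].tree for p in merge.parents]
#   for conflict in tree_changes_for_merge(store, parent_trees, merge.tree):
#       # `conflict` holds one TreeChange (or None) per parent for a single
#       # conflicted path.
#       pass

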
_BLOCK_SIZE = 64


def _count_blocks(obj):
    """Count the blocks in an object.

    Splits the data into blocks either on lines or <=64-byte chunks of lines.

    :param obj: The object to count blocks for.
    :return: A dict of block hashcode -> total bytes occurring.
    """
    block_counts = defaultdict(int)
    block = StringIO()
    n = 0

    # Cache attrs as locals to avoid expensive lookups in the inner loop.
    block_write = block.write
    block_seek = block.seek
    block_truncate = block.truncate
    block_getvalue = block.getvalue

    for c in itertools.chain(*obj.as_raw_chunks()):
        block_write(c)
        n += 1
        if c == '\n' or n == _BLOCK_SIZE:
            value = block_getvalue()
            block_counts[hash(value)] += len(value)
            block_seek(0)
            block_truncate()
            n = 0
    if n > 0:
        last_block = block_getvalue()
        block_counts[hash(last_block)] += len(last_block)
    return block_counts


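# For intuition (illustrative): a blob containing 'foo\nbar\nbar\n' splits
# into the blocks 'foo\n', 'bar\n' and 'bar\n', so the result maps
# hash('foo\n') -> 4 and hash('bar\n') -> 8. Lines longer than _BLOCK_SIZE
# bytes are cut into <=64-byte pieces.

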
def _common_bytes(blocks1, blocks2):
    """Count the number of common bytes in two block count dicts.

    :param blocks1: The first dict of block hashcode -> total bytes.
    :param blocks2: The second dict of block hashcode -> total bytes.
    :return: The number of bytes in common between blocks1 and blocks2. This
        is only approximate due to possible hash collisions.
    """
    # Iterate over the smaller of the two dicts, since this is symmetrical.
    if len(blocks1) > len(blocks2):
        blocks1, blocks2 = blocks2, blocks1
    score = 0
    for block, count1 in blocks1.iteritems():
        count2 = blocks2.get(block)
        if count2:
            score += min(count1, count2)
    return score


def _similarity_score(obj1, obj2, block_cache=None):
    """Compute a similarity score for two objects.

    :param obj1: The first object to score.
    :param obj2: The second object to score.
    :param block_cache: An optional dict of SHA to block counts to cache
        results between calls.
    :return: The similarity score between the two objects, defined as the
        number of bytes in common between the two objects divided by the
        maximum size, scaled to the range 0-100.
    """
    if block_cache is None:
        block_cache = {}
    if obj1.id not in block_cache:
        block_cache[obj1.id] = _count_blocks(obj1)
    if obj2.id not in block_cache:
        block_cache[obj2.id] = _count_blocks(obj2)

    common_bytes = _common_bytes(block_cache[obj1.id], block_cache[obj2.id])
    max_size = max(obj1.raw_length(), obj2.raw_length())
    if not max_size:
        return _MAX_SCORE
    return int(float(common_bytes) * _MAX_SCORE / max_size)


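# Worked example (illustrative): for blobs 'foo\nbar\n' and 'foo\nbaz\n'
# (8 bytes each), the only common block is 'foo\n' (4 bytes), so the score is
# int(4.0 * 100 / 8) == 50.

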
def _tree_change_key(entry):
    # Sort by old path, then new path. If only one exists, use it for both
    # keys.
    path1 = entry.old.path
    path2 = entry.new.path
    if path1 is None:
        path1 = path2
    if path2 is None:
        path2 = path1
    return (path1, path2)


class RenameDetector(object):
    """Object for handling rename detection between two trees."""

    def __init__(self, store, rename_threshold=RENAME_THRESHOLD,
                 max_files=MAX_FILES,
                 rewrite_threshold=REWRITE_THRESHOLD,
                 find_copies_harder=False):
        """Initialize the rename detector.

        :param store: An ObjectStore for looking up objects.
        :param rename_threshold: The threshold similarity score for
            considering an add/delete pair to be a rename/copy; see
            _similarity_score.
        :param max_files: The maximum number of adds and deletes to consider,
            or None for no limit. The detector is guaranteed to compare no
            more than max_files ** 2 add/delete pairs. This limit is provided
            because rename detection can be quadratic in the project size. If
            the limit is exceeded, no content rename detection is attempted.
        :param rewrite_threshold: The threshold similarity score below which a
            modify should be considered a delete/add, or None to not break
            modifies; see _similarity_score.
        :param find_copies_harder: If True, consider unmodified files when
            detecting copies.
        """
        self._store = store
        self._rename_threshold = rename_threshold
        self._rewrite_threshold = rewrite_threshold
        self._max_files = max_files
        self._find_copies_harder = find_copies_harder
        self._want_unchanged = False

    def _reset(self):
        self._adds = []
        self._deletes = []
        self._changes = []

    def _should_split(self, change):
        if (self._rewrite_threshold is None or change.type != CHANGE_MODIFY or
                change.old.sha == change.new.sha):
            return False
        old_obj = self._store[change.old.sha]
        new_obj = self._store[change.new.sha]
        return _similarity_score(old_obj, new_obj) < self._rewrite_threshold

    def _collect_changes(self, tree1_id, tree2_id):
        want_unchanged = self._find_copies_harder or self._want_unchanged
        for change in tree_changes(self._store, tree1_id, tree2_id,
                                   want_unchanged=want_unchanged):
            if change.type == CHANGE_ADD:
                self._adds.append(change)
            elif change.type == CHANGE_DELETE:
                self._deletes.append(change)
            elif self._should_split(change):
                self._deletes.append(TreeChange.delete(change.old))
                self._adds.append(TreeChange.add(change.new))
            elif ((self._find_copies_harder and change.type == CHANGE_UNCHANGED)
                  or change.type == CHANGE_MODIFY):
                # Treat all modifies as potential deletes for rename
                # detection, but don't split them (to avoid spurious renames).
                # Setting find_copies_harder means we treat unchanged the same
                # as modified.
                self._deletes.append(change)
            else:
                self._changes.append(change)

    def _prune(self, add_paths, delete_paths):
        self._adds = [a for a in self._adds if a.new.path not in add_paths]
        self._deletes = [d for d in self._deletes
                         if d.old.path not in delete_paths]

    def _find_exact_renames(self):
        add_map = defaultdict(list)
        for add in self._adds:
            add_map[add.new.sha].append(add.new)
        delete_map = defaultdict(list)
        for delete in self._deletes:
            # Keep track of whether the delete was actually marked as a
            # delete. If not, it needs to be marked as a copy.
            is_delete = delete.type == CHANGE_DELETE
            delete_map[delete.old.sha].append((delete.old, is_delete))

        add_paths = set()
        delete_paths = set()
        for sha, sha_deletes in delete_map.iteritems():
            sha_adds = add_map[sha]
            for (old, is_delete), new in itertools.izip(sha_deletes, sha_adds):
                if stat.S_IFMT(old.mode) != stat.S_IFMT(new.mode):
                    continue
                if is_delete:
                    delete_paths.add(old.path)
                add_paths.add(new.path)
                new_type = is_delete and CHANGE_RENAME or CHANGE_COPY
                self._changes.append(TreeChange(new_type, old, new))

            num_extra_adds = len(sha_adds) - len(sha_deletes)
            # TODO(dborowitz): Less arbitrary way of dealing with extra
            # copies.
            old = sha_deletes[0][0]
            if num_extra_adds > 0:
                for new in sha_adds[-num_extra_adds:]:
                    add_paths.add(new.path)
                    self._changes.append(TreeChange(CHANGE_COPY, old, new))
        self._prune(add_paths, delete_paths)

    def _find_content_renames(self):
        # TODO: Optimizations:
        # - Compare object sizes before counting blocks.
        # - Skip if delete's S_IFMT differs from all adds.
        # - Skip if adds or deletes is empty.
        # Match C git's behavior of not attempting to find content renames if
        # the matrix size exceeds the threshold.
        if (self._max_files is not None and
                len(self._adds) * len(self._deletes) > self._max_files ** 2):
            return

        check_paths = self._rename_threshold is not None
        candidates = []
        for delete in self._deletes:
            if S_ISGITLINK(delete.old.mode):
                continue  # Git links don't exist in this repo.
            old_sha = delete.old.sha
            old_obj = self._store[old_sha]
            old_blocks = _count_blocks(old_obj)
            for add in self._adds:
                if stat.S_IFMT(delete.old.mode) != stat.S_IFMT(add.new.mode):
                    continue
                new_obj = self._store[add.new.sha]
                score = _similarity_score(old_obj, new_obj,
                                          block_cache={old_sha: old_blocks})
                if score > self._rename_threshold:
                    if check_paths and delete.old.path == add.new.path:
                        # If the paths match, this must be a split modify, so
                        # make sure it comes out as a modify.
                        new_type = CHANGE_MODIFY
                    elif delete.type != CHANGE_DELETE:
                        # If it's in deletes but not marked as a delete, it
                        # must have been added due to find_copies_harder, and
                        # needs to be marked as a copy.
                        new_type = CHANGE_COPY
                    else:
                        new_type = CHANGE_RENAME
                    rename = TreeChange(new_type, delete.old, add.new)
                    candidates.append((-score, rename))

        # Sort scores from highest to lowest, but keep names in ascending
        # order.
        candidates.sort()
        delete_paths = set()
        add_paths = set()
        for _, change in candidates:
            new_path = change.new.path
            if new_path in add_paths:
                continue
            old_path = change.old.path
            orig_type = change.type
            if old_path in delete_paths:
                change = TreeChange(CHANGE_COPY, change.old, change.new)

            # If the candidate was originally a copy, that means it came from
            # a modified or unchanged path, so we don't want to prune it.
            if orig_type != CHANGE_COPY:
                delete_paths.add(old_path)
            add_paths.add(new_path)
            self._changes.append(change)
        self._prune(add_paths, delete_paths)

    def _join_modifies(self):
        if self._rewrite_threshold is None:
            return

        modifies = {}
        delete_map = dict((d.old.path, d) for d in self._deletes)
        for add in self._adds:
            path = add.new.path
            delete = delete_map.get(path)
            if (delete is not None and
                    stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
                modifies[path] = TreeChange(CHANGE_MODIFY, delete.old, add.new)

        self._adds = [a for a in self._adds if a.new.path not in modifies]
        self._deletes = [a for a in self._deletes if a.new.path not in modifies]
        self._changes += modifies.values()

    def _sorted_changes(self):
        result = []
        result.extend(self._adds)
        result.extend(self._deletes)
        result.extend(self._changes)
        result.sort(key=_tree_change_key)
        return result

    def _prune_unchanged(self):
        if self._want_unchanged:
            return
        self._deletes = [d for d in self._deletes
                         if d.type != CHANGE_UNCHANGED]

    def changes_with_renames(self, tree1_id, tree2_id, want_unchanged=False):
        """Iterate TreeChanges between two tree SHAs, with rename detection."""
        self._reset()
        self._want_unchanged = want_unchanged
        self._collect_changes(tree1_id, tree2_id)
        self._find_exact_renames()
        self._find_content_renames()
        self._join_modifies()
        self._prune_unchanged()
        return self._sorted_changes()


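# Illustrative usage sketch (hypothetical `store`, `tree1_id` and `tree2_id`):
#
#   detector = RenameDetector(store, rewrite_threshold=50,
#                             find_copies_harder=True)
#   for change in tree_changes(store, tree1_id, tree2_id,
#                              rename_detector=detector):
#       if change.type in RENAME_CHANGE_TYPES:
#           print '%s: %s -> %s' % (change.type, change.old.path,
#                                   change.new.path)

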
# Hold on to the pure-python implementations for testing.
_is_tree_py = _is_tree
_merge_entries_py = _merge_entries
_count_blocks_py = _count_blocks
try:
    # Try to import C versions
    from dulwich._diff_tree import _is_tree, _merge_entries, _count_blocks
except ImportError:
    pass