commit_graph.py 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606
  1. # commit_graph.py -- Git commit graph file format support
  2. # Copyright (C) 2024 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  5. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  6. # General Public License as public by the Free Software Foundation; version 2.0
  7. # or (at your option) any later version. You can redistribute it and/or
  8. # modify it under the terms of either of these two licenses.
  9. """Git commit graph file format support.
  10. Git's commit graph files store commit metadata and generation numbers
  11. for faster graph traversal operations like merge-base computation.
  12. The commit graph format is documented at:
  13. https://git-scm.com/docs/gitformat-commit-graph
  14. """
  15. import os
  16. import struct
  17. from collections.abc import Iterator
  18. from typing import TYPE_CHECKING, BinaryIO, Optional, Union
  19. if TYPE_CHECKING:
  20. from .object_store import BaseObjectStore
  21. from .objects import Commit, ObjectID, hex_to_sha, sha_to_hex
# File format constants
COMMIT_GRAPH_SIGNATURE = b"CGPH"  # magic bytes at offset 0 of every commit-graph file
COMMIT_GRAPH_VERSION = 1  # only format version 1 is supported by this module
HASH_VERSION_SHA1 = 1  # 20-byte binary hashes
HASH_VERSION_SHA256 = 2  # 32-byte binary hashes

# Chunk IDs — 4-byte identifiers used in the file's table of contents
CHUNK_OID_FANOUT = b"OIDF"  # 256-entry cumulative fanout table
CHUNK_OID_LOOKUP = b"OIDL"  # sorted binary commit OIDs
CHUNK_COMMIT_DATA = b"CDAT"  # per-commit tree/parents/generation/time records
CHUNK_GENERATION_DATA = b"GDA2"
CHUNK_GENERATION_DATA_OVERFLOW = b"GDO2"
CHUNK_EXTRA_EDGE_LIST = b"EDGE"  # overflow parent list for octopus merges (3+ parents)
CHUNK_BLOOM_FILTER_INDEX = b"BIDX"
CHUNK_BLOOM_FILTER_DATA = b"BDAT"
CHUNK_BASE_GRAPHS_LIST = b"BASE"  # used by commit-graph chain files

# Generation number constants
GENERATION_NUMBER_INFINITY = 0xFFFFFFFF
GENERATION_NUMBER_ZERO = 0
GENERATION_NUMBER_V1_MAX = 0x3FFFFFFF  # generation field is 30 bits wide

# Parent encoding constants
# NOTE: MISSING and NONE share the same sentinel value; both mean
# "no parent stored in this slot".
GRAPH_PARENT_MISSING = 0x70000000
GRAPH_PARENT_NONE = 0x70000000
# High bit of the second parent slot flags an EDGE-chunk overflow list;
# the same bit marks the final entry inside that list.
GRAPH_EXTRA_EDGES_NEEDED = 0x80000000
GRAPH_LAST_EDGE = 0x80000000
  46. class CommitGraphEntry:
  47. """Represents a single commit entry in the commit graph."""
  48. def __init__(
  49. self,
  50. commit_id: ObjectID,
  51. tree_id: ObjectID,
  52. parents: list[ObjectID],
  53. generation: int,
  54. commit_time: int,
  55. ) -> None:
  56. self.commit_id = commit_id
  57. self.tree_id = tree_id
  58. self.parents = parents
  59. self.generation = generation
  60. self.commit_time = commit_time
  61. def __repr__(self) -> str:
  62. return (
  63. f"CommitGraphEntry(commit_id={self.commit_id!r}, "
  64. f"tree_id={self.tree_id!r}, parents={self.parents!r}, "
  65. f"generation={self.generation}, commit_time={self.commit_time})"
  66. )
  67. class CommitGraphChunk:
  68. """Represents a chunk in the commit graph file."""
  69. def __init__(self, chunk_id: bytes, data: bytes) -> None:
  70. self.chunk_id = chunk_id
  71. self.data = data
  72. def __repr__(self) -> str:
  73. return f"CommitGraphChunk(chunk_id={self.chunk_id!r}, size={len(self.data)})"
class CommitGraph:
    """Git commit graph file reader/writer.

    Reads the binary commit-graph format (header, chunk table of
    contents, then the chunks themselves) into ``CommitGraphEntry``
    objects, and can serialize entries back out as a minimal
    three-chunk (OIDF/OIDL/CDAT) graph file.
    """

    def __init__(self, hash_version: int = HASH_VERSION_SHA1) -> None:
        self.hash_version = hash_version
        # Raw chunks keyed by their 4-byte chunk ID (e.g. b"OIDL").
        self.chunks: dict[bytes, CommitGraphChunk] = {}
        # Parsed entries, one per commit, in OID-lookup (sorted) order.
        self.entries: list[CommitGraphEntry] = []
        # Maps binary (not hex) commit SHA -> index into self.entries.
        self._oid_to_index: dict[ObjectID, int] = {}
        self._hash_size = 20 if hash_version == HASH_VERSION_SHA1 else 32

    @classmethod
    def from_file(cls, f: BinaryIO) -> "CommitGraph":
        """Read commit graph from file.

        Args:
          f: Seekable binary file positioned at the start of the graph.
        Returns:
          A fully parsed CommitGraph instance.
        """
        graph = cls()
        graph._read_from_file(f)
        return graph

    def _read_from_file(self, f: BinaryIO) -> None:
        """Read commit graph data from file.

        Raises:
          ValueError: on a bad signature, unsupported version, or
            unsupported hash version.
        """
        # Read header
        signature = f.read(4)
        if signature != COMMIT_GRAPH_SIGNATURE:
            raise ValueError(f"Invalid commit graph signature: {signature!r}")
        version = struct.unpack(">B", f.read(1))[0]
        if version != COMMIT_GRAPH_VERSION:
            raise ValueError(f"Unsupported commit graph version: {version}")
        self.hash_version = struct.unpack(">B", f.read(1))[0]
        if self.hash_version not in (HASH_VERSION_SHA1, HASH_VERSION_SHA256):
            raise ValueError(f"Unsupported hash version: {self.hash_version}")
        # Hash size follows directly from the hash version byte.
        self._hash_size = 20 if self.hash_version == HASH_VERSION_SHA1 else 32
        num_chunks = struct.unpack(">B", f.read(1))[0]
        # Base-graph count byte (commit-graph chains); read and discarded.
        struct.unpack(">B", f.read(1))[0]
        # Read table of contents
        toc_entries = []
        for _ in range(num_chunks + 1):  # +1 for terminating entry
            chunk_id = f.read(4)
            offset = struct.unpack(">Q", f.read(8))[0]
            toc_entries.append((chunk_id, offset))
        # Read chunks
        # Offsets in TOC are absolute from start of file; each chunk's
        # size is the distance to the next TOC entry's offset.
        for i in range(num_chunks):
            chunk_id, offset = toc_entries[i]
            next_offset = toc_entries[i + 1][1]
            chunk_size = next_offset - offset
            f.seek(offset)
            chunk_data = f.read(chunk_size)
            self.chunks[chunk_id] = CommitGraphChunk(chunk_id, chunk_data)
        # Parse chunks
        self._parse_chunks()

    def _parse_chunks(self) -> None:
        """Parse chunk data into entries.

        Raises:
          ValueError: when a required chunk is missing, the commit-data
            chunk has the wrong size, or a parent position is invalid.
        """
        if CHUNK_OID_LOOKUP not in self.chunks:
            raise ValueError("Missing required OID lookup chunk")
        if CHUNK_COMMIT_DATA not in self.chunks:
            raise ValueError("Missing required commit data chunk")
        # Parse OID lookup chunk: a flat array of sorted binary OIDs.
        oid_lookup_data = self.chunks[CHUNK_OID_LOOKUP].data
        num_commits = len(oid_lookup_data) // self._hash_size
        oids = []
        for i in range(num_commits):
            start = i * self._hash_size
            end = start + self._hash_size
            oid = oid_lookup_data[start:end]
            oids.append(oid)
            self._oid_to_index[oid] = i
        # Parse commit data chunk: per commit, tree OID + 16 fixed bytes.
        commit_data = self.chunks[CHUNK_COMMIT_DATA].data
        expected_size = num_commits * (self._hash_size + 16)
        if len(commit_data) != expected_size:
            raise ValueError(
                f"Invalid commit data chunk size: {len(commit_data)}, expected {expected_size}"
            )
        self.entries = []
        for i in range(num_commits):
            offset = i * (self._hash_size + 16)
            # Tree OID
            tree_id = commit_data[offset : offset + self._hash_size]
            offset += self._hash_size
            # Parent positions (2 x 4 bytes)
            parent1_pos, parent2_pos = struct.unpack(
                ">LL", commit_data[offset : offset + 8]
            )
            offset += 8
            # Generation number and commit time (2 x 4 bytes)
            gen_and_time = struct.unpack(">LL", commit_data[offset : offset + 8])
            generation = gen_and_time[0] >> 2  # Upper 30 bits
            commit_time = gen_and_time[1] | (
                (gen_and_time[0] & 0x3) << 32
            )  # 34 bits total
            # Parse parents: positions below the sentinel are indices
            # into the OID lookup table.
            parents = []
            if parent1_pos < GRAPH_PARENT_MISSING:
                if parent1_pos >= len(oids):
                    raise ValueError(f"Invalid parent1 position: {parent1_pos}")
                parents.append(oids[parent1_pos])
            if parent2_pos < GRAPH_PARENT_MISSING:
                if parent2_pos >= len(oids):
                    raise ValueError(f"Invalid parent2 position: {parent2_pos}")
                parents.append(oids[parent2_pos])
            elif parent2_pos >= GRAPH_EXTRA_EDGES_NEEDED:
                # Handle extra edges (3+ parents)
                # NOTE(review): the commit-graph spec defines this value
                # as an index of 4-byte records in the EDGE chunk, but it
                # is passed straight to _parse_extra_edges, which treats
                # it as a byte offset — confirm (index * 4 may be needed).
                edge_offset = parent2_pos & ~GRAPH_EXTRA_EDGES_NEEDED
                parents.extend(self._parse_extra_edges(edge_offset, oids))
            entry = CommitGraphEntry(
                commit_id=sha_to_hex(oids[i]),
                tree_id=sha_to_hex(tree_id),
                parents=[sha_to_hex(p) for p in parents],
                generation=generation,
                commit_time=commit_time,
            )
            self.entries.append(entry)

    def _parse_extra_edges(self, offset: int, oids: list[bytes]) -> list[bytes]:
        """Parse extra parent edges for commits with 3+ parents.

        Args:
          offset: Starting position within the EDGE chunk data.
          oids: OID lookup table used to resolve parent positions.
        Returns:
          Binary OIDs of the additional parents (may be empty).
        """
        if CHUNK_EXTRA_EDGE_LIST not in self.chunks:
            return []
        edge_data = self.chunks[CHUNK_EXTRA_EDGE_LIST].data
        parents = []
        while offset < len(edge_data):
            parent_pos = struct.unpack(">L", edge_data[offset : offset + 4])[0]
            offset += 4
            if parent_pos & GRAPH_LAST_EDGE:
                # High bit marks the final edge; mask it off and stop.
                parent_pos &= ~GRAPH_LAST_EDGE
                if parent_pos < len(oids):
                    parents.append(oids[parent_pos])
                break
            else:
                # Out-of-range positions are silently skipped.
                if parent_pos < len(oids):
                    parents.append(oids[parent_pos])
        return parents

    def get_entry_by_oid(self, oid: ObjectID) -> Optional[CommitGraphEntry]:
        """Get commit graph entry by commit OID.

        Accepts either a 40-byte hex ObjectID or a binary SHA; returns
        None when the commit is not present in the graph.
        """
        # Convert hex ObjectID to binary if needed for lookup
        if isinstance(oid, bytes) and len(oid) == 40:
            # Input is hex ObjectID, convert to binary for internal lookup
            lookup_oid = hex_to_sha(oid)
        else:
            # Input is already binary
            lookup_oid = oid
        index = self._oid_to_index.get(lookup_oid)
        if index is not None:
            return self.entries[index]
        return None

    def get_generation_number(self, oid: ObjectID) -> Optional[int]:
        """Get generation number for a commit, or None if not in the graph."""
        entry = self.get_entry_by_oid(oid)
        return entry.generation if entry else None

    def get_parents(self, oid: ObjectID) -> Optional[list[ObjectID]]:
        """Get parent commit IDs for a commit, or None if not in the graph."""
        entry = self.get_entry_by_oid(oid)
        return entry.parents if entry else None

    def write_to_file(self, f: BinaryIO) -> None:
        """Write commit graph to file.

        Serializes a minimal graph with three chunks (fanout, OID
        lookup, commit data); no EDGE chunk is emitted.

        Raises:
          ValueError: if there are no entries to write.
        """
        if not self.entries:
            raise ValueError("Cannot write empty commit graph")
        # Sort entries by commit ID for consistent output
        sorted_entries = sorted(self.entries, key=lambda e: e.commit_id)
        # Build OID lookup chunk
        oid_lookup_data = b""
        for entry in sorted_entries:
            oid_lookup_data += hex_to_sha(entry.commit_id)
        # Build commit data chunk
        commit_data = b""
        # Create OID to index mapping for parent lookups
        oid_to_index = {entry.commit_id: i for i, entry in enumerate(sorted_entries)}
        for entry in sorted_entries:
            # Tree OID (20 bytes)
            commit_data += hex_to_sha(entry.tree_id)
            # Parent positions (2 x 4 bytes); parents outside the graph
            # fall back to the MISSING sentinel.
            if len(entry.parents) == 0:
                parent1_pos = GRAPH_PARENT_MISSING
                parent2_pos = GRAPH_PARENT_MISSING
            elif len(entry.parents) == 1:
                parent1_pos = oid_to_index.get(entry.parents[0], GRAPH_PARENT_MISSING)
                parent2_pos = GRAPH_PARENT_MISSING
            elif len(entry.parents) == 2:
                parent1_pos = oid_to_index.get(entry.parents[0], GRAPH_PARENT_MISSING)
                parent2_pos = oid_to_index.get(entry.parents[1], GRAPH_PARENT_MISSING)
            else:
                # More than 2 parents - would need extra edge list chunk
                # For now, just store first two parents
                # NOTE(review): this drops parents 3+ of octopus merges
                # on round-trip; acceptable only if callers never write
                # such commits — confirm.
                parent1_pos = oid_to_index.get(entry.parents[0], GRAPH_PARENT_MISSING)
                parent2_pos = oid_to_index.get(entry.parents[1], GRAPH_PARENT_MISSING)
            commit_data += struct.pack(">LL", parent1_pos, parent2_pos)
            # Generation and commit time (2 x 4 bytes): 30-bit generation
            # plus the top 2 bits of the 34-bit commit time, then the
            # low 32 bits of the commit time.
            gen_and_time = (entry.generation << 2) | (entry.commit_time >> 32)
            commit_time_lower = entry.commit_time & 0xFFFFFFFF
            commit_data += struct.pack(">LL", gen_and_time, commit_time_lower)
        # Build fanout table: entry k = number of commits whose first
        # OID byte is <= k (cumulative counts).
        fanout_data = b""
        fanout_counts = [0] * 256
        for i, entry in enumerate(sorted_entries):
            commit_oid_bytes = hex_to_sha(entry.commit_id)
            fanout_counts[commit_oid_bytes[0]] = i + 1
        # Fill in gaps - each fanout entry should be cumulative
        for i in range(1, 256):
            if fanout_counts[i] == 0:
                fanout_counts[i] = fanout_counts[i - 1]
        for count in fanout_counts:
            fanout_data += struct.pack(">L", count)
        # Calculate chunk offsets (absolute from start of file)
        header_size = (
            8  # signature + version + hash_version + num_chunks + base_graph_count
        )
        toc_size = 4 * 12  # 4 entries (3 chunks + terminator) * 12 bytes each
        chunk1_offset = header_size + toc_size  # OID Fanout
        chunk2_offset = chunk1_offset + len(fanout_data)  # OID Lookup
        chunk3_offset = chunk2_offset + len(oid_lookup_data)  # Commit Data
        terminator_offset = chunk3_offset + len(commit_data)
        # Write header
        f.write(COMMIT_GRAPH_SIGNATURE)
        f.write(struct.pack(">B", COMMIT_GRAPH_VERSION))
        f.write(struct.pack(">B", self.hash_version))
        f.write(struct.pack(">B", 3))  # 3 chunks
        f.write(struct.pack(">B", 0))  # 0 base graphs
        # Write table of contents
        f.write(CHUNK_OID_FANOUT + struct.pack(">Q", chunk1_offset))
        f.write(CHUNK_OID_LOOKUP + struct.pack(">Q", chunk2_offset))
        f.write(CHUNK_COMMIT_DATA + struct.pack(">Q", chunk3_offset))
        f.write(b"\x00\x00\x00\x00" + struct.pack(">Q", terminator_offset))
        # Write chunks
        f.write(fanout_data)
        f.write(oid_lookup_data)
        f.write(commit_data)

    def __len__(self) -> int:
        """Return number of commits in the graph."""
        return len(self.entries)

    def __iter__(self) -> Iterator["CommitGraphEntry"]:
        """Iterate over commit graph entries."""
        return iter(self.entries)
  300. def read_commit_graph(path: Union[str, bytes]) -> Optional[CommitGraph]:
  301. """Read commit graph from file path."""
  302. if isinstance(path, str):
  303. path = path.encode()
  304. if not os.path.exists(path):
  305. return None
  306. with open(path, "rb") as f:
  307. return CommitGraph.from_file(f)
  308. def find_commit_graph_file(git_dir: Union[str, bytes]) -> Optional[bytes]:
  309. """Find commit graph file in a Git repository."""
  310. if isinstance(git_dir, str):
  311. git_dir = git_dir.encode()
  312. # Standard location: .git/objects/info/commit-graph
  313. commit_graph_path = os.path.join(git_dir, b"objects", b"info", b"commit-graph")
  314. if os.path.exists(commit_graph_path):
  315. return commit_graph_path
  316. # Chain files in .git/objects/info/commit-graphs/
  317. commit_graphs_dir = os.path.join(git_dir, b"objects", b"info", b"commit-graphs")
  318. if os.path.exists(commit_graphs_dir):
  319. # Look for graph-{hash}.graph files
  320. for filename in os.listdir(commit_graphs_dir):
  321. if filename.startswith(b"graph-") and filename.endswith(b".graph"):
  322. return os.path.join(commit_graphs_dir, filename)
  323. return None
  324. def generate_commit_graph(
  325. object_store: "BaseObjectStore", commit_ids: list[ObjectID]
  326. ) -> CommitGraph:
  327. """Generate a commit graph from a set of commits.
  328. Args:
  329. object_store: Object store to retrieve commits from
  330. commit_ids: List of commit IDs to include in the graph
  331. Returns:
  332. CommitGraph object containing the specified commits
  333. """
  334. graph = CommitGraph()
  335. if not commit_ids:
  336. return graph
  337. # Ensure all commit_ids are in the correct format for object store access
  338. # DiskObjectStore expects hex ObjectIDs (40-byte hex strings)
  339. normalized_commit_ids = []
  340. for commit_id in commit_ids:
  341. if isinstance(commit_id, bytes) and len(commit_id) == 40:
  342. # Already hex ObjectID
  343. normalized_commit_ids.append(commit_id)
  344. elif isinstance(commit_id, bytes) and len(commit_id) == 20:
  345. # Binary SHA, convert to hex ObjectID
  346. normalized_commit_ids.append(sha_to_hex(commit_id))
  347. else:
  348. # Assume it's already correct format
  349. normalized_commit_ids.append(commit_id)
  350. # Build a map of all commits and their metadata
  351. commit_map: dict[bytes, Commit] = {}
  352. for commit_id in normalized_commit_ids:
  353. try:
  354. commit_obj = object_store[commit_id]
  355. if commit_obj.type_name != b"commit":
  356. continue
  357. assert isinstance(commit_obj, Commit)
  358. commit_map[commit_id] = commit_obj
  359. except KeyError:
  360. # Commit not found, skip
  361. continue
  362. # Calculate generation numbers using topological sort
  363. generation_map: dict[bytes, int] = {}
  364. def calculate_generation(commit_id: ObjectID) -> int:
  365. if commit_id in generation_map:
  366. return generation_map[commit_id]
  367. if commit_id not in commit_map:
  368. # Unknown commit, assume generation 0
  369. generation_map[commit_id] = 0
  370. return 0
  371. commit_obj = commit_map[commit_id]
  372. if not commit_obj.parents:
  373. # Root commit
  374. generation_map[commit_id] = 1
  375. return 1
  376. # Calculate based on parents
  377. max_parent_gen = 0
  378. for parent_id in commit_obj.parents:
  379. parent_gen = calculate_generation(parent_id)
  380. max_parent_gen = max(max_parent_gen, parent_gen)
  381. generation = max_parent_gen + 1
  382. generation_map[commit_id] = generation
  383. return generation
  384. # Calculate generation numbers for all commits
  385. for commit_id in commit_map:
  386. calculate_generation(commit_id)
  387. # Build commit graph entries
  388. for commit_id, commit_obj in commit_map.items():
  389. # commit_id is already hex ObjectID from normalized_commit_ids
  390. commit_hex = commit_id
  391. # Handle tree ID - might already be hex ObjectID
  392. if isinstance(commit_obj.tree, bytes) and len(commit_obj.tree) == 40:
  393. tree_hex = commit_obj.tree # Already hex ObjectID
  394. else:
  395. tree_hex = sha_to_hex(commit_obj.tree) # Binary, convert to hex
  396. # Handle parent IDs - might already be hex ObjectIDs
  397. parents_hex = []
  398. for parent_id in commit_obj.parents:
  399. if isinstance(parent_id, bytes) and len(parent_id) == 40:
  400. parents_hex.append(parent_id) # Already hex ObjectID
  401. else:
  402. parents_hex.append(sha_to_hex(parent_id)) # Binary, convert to hex
  403. entry = CommitGraphEntry(
  404. commit_id=commit_hex,
  405. tree_id=tree_hex,
  406. parents=parents_hex,
  407. generation=generation_map[commit_id],
  408. commit_time=commit_obj.commit_time,
  409. )
  410. graph.entries.append(entry)
  411. # Build the OID to index mapping for lookups
  412. graph._oid_to_index = {}
  413. for i, entry in enumerate(graph.entries):
  414. binary_oid = hex_to_sha(entry.commit_id.decode())
  415. graph._oid_to_index[binary_oid] = i
  416. return graph
  417. def write_commit_graph(
  418. git_dir: Union[str, bytes],
  419. object_store: "BaseObjectStore",
  420. commit_ids: list[ObjectID],
  421. ) -> None:
  422. """Write a commit graph file for the given commits.
  423. Args:
  424. git_dir: Git directory path
  425. object_store: Object store to retrieve commits from
  426. commit_ids: List of commit IDs to include in the graph
  427. """
  428. if isinstance(git_dir, str):
  429. git_dir = git_dir.encode()
  430. # Generate the commit graph
  431. graph = generate_commit_graph(object_store, commit_ids)
  432. if not graph.entries:
  433. return # Nothing to write
  434. # Ensure the objects/info directory exists
  435. info_dir = os.path.join(git_dir, b"objects", b"info")
  436. os.makedirs(info_dir, exist_ok=True)
  437. # Write using GitFile for atomic operation
  438. from .file import GitFile
  439. graph_path = os.path.join(info_dir, b"commit-graph")
  440. with GitFile(graph_path, "wb") as f:
  441. from typing import BinaryIO, cast
  442. graph.write_to_file(cast(BinaryIO, f))
  443. def get_reachable_commits(
  444. object_store: "BaseObjectStore", start_commits: list[ObjectID]
  445. ) -> list[ObjectID]:
  446. """Get all commits reachable from the given starting commits.
  447. Args:
  448. object_store: Object store to retrieve commits from
  449. start_commits: List of starting commit IDs
  450. Returns:
  451. List of all reachable commit IDs (including the starting commits)
  452. """
  453. visited = set()
  454. reachable = []
  455. stack = []
  456. # Normalize commit IDs for object store access and tracking
  457. for commit_id in start_commits:
  458. if isinstance(commit_id, bytes) and len(commit_id) == 40:
  459. # Hex ObjectID - use directly for object store access
  460. if commit_id not in visited:
  461. stack.append(commit_id)
  462. elif isinstance(commit_id, bytes) and len(commit_id) == 20:
  463. # Binary SHA, convert to hex ObjectID for object store access
  464. hex_id = sha_to_hex(commit_id)
  465. if hex_id not in visited:
  466. stack.append(hex_id)
  467. else:
  468. # Assume it's already correct format
  469. if commit_id not in visited:
  470. stack.append(commit_id)
  471. while stack:
  472. commit_id = stack.pop()
  473. if commit_id in visited:
  474. continue
  475. visited.add(commit_id)
  476. try:
  477. commit_obj = object_store[commit_id]
  478. if not isinstance(commit_obj, Commit):
  479. continue
  480. # Add to reachable list (commit_id is already hex ObjectID)
  481. reachable.append(commit_id)
  482. # Add parents to stack
  483. for parent_id in commit_obj.parents:
  484. if parent_id not in visited:
  485. stack.append(parent_id)
  486. except KeyError:
  487. # Commit not found, skip
  488. continue
  489. return reachable