Parcourir la source

Add more docstrings

Jelmer Vernooij il y a 5 mois
Parent
commit
5e58e37a74

+ 8 - 0
dulwich/__main__.py

@@ -1,3 +1,11 @@
+"""Entry point for running dulwich as a module.
+
+This module allows dulwich to be run as a Python module using the -m flag:
+    python -m dulwich
+
+It serves as the main entry point for the dulwich command-line interface.
+"""
+
 from . import cli
 
 if __name__ == "__main__":

+ 169 - 1
dulwich/cli.py

@@ -444,6 +444,8 @@ class Command:
 
 
 class cmd_archive(Command):
+    """Create an archive of files from a named tree."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -470,6 +472,8 @@ class cmd_archive(Command):
 
 
 class cmd_add(Command):
+    """Add file contents to the index."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("path", nargs="+")
@@ -484,6 +488,8 @@ class cmd_add(Command):
 
 
 class cmd_annotate(Command):
+    """Annotate each line in a file with commit information."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("path", help="Path to file to annotate")
@@ -501,12 +507,16 @@ class cmd_annotate(Command):
 
 
 class cmd_blame(Command):
+    """Show what revision and author last modified each line of a file."""
+
     def run(self, argv) -> None:
         # blame is an alias for annotate
         cmd_annotate().run(argv)
 
 
 class cmd_rm(Command):
+    """Remove files from the working tree and from the index."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -519,6 +529,8 @@ class cmd_rm(Command):
 
 
 class cmd_mv(Command):
+    """Move or rename a file, a directory, or a symlink."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -535,6 +547,8 @@ class cmd_mv(Command):
 
 
 class cmd_fetch_pack(Command):
+    """Receive missing objects from another repository."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("--all", action="store_true")
@@ -554,6 +568,8 @@ class cmd_fetch_pack(Command):
 
 
 class cmd_fetch(Command):
+    """Download objects and refs from another repository."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("location", help="Remote location to fetch from")
@@ -571,6 +587,8 @@ class cmd_fetch(Command):
 
 
 class cmd_for_each_ref(Command):
+    """Output information on each ref."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("pattern", type=str, nargs="?")
@@ -580,6 +598,8 @@ class cmd_for_each_ref(Command):
 
 
 class cmd_fsck(Command):
+    """Verify the connectivity and validity of objects in the database."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -588,6 +608,8 @@ class cmd_fsck(Command):
 
 
 class cmd_log(Command):
+    """Show commit logs."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -616,6 +638,8 @@ class cmd_log(Command):
 
 
 class cmd_diff(Command):
+    """Show changes between commits, commit and working tree, etc."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -718,6 +742,8 @@ class cmd_diff(Command):
 
 
 class cmd_dump_pack(Command):
+    """Dump the contents of a pack file for debugging."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("filename", help="Pack file to dump")
@@ -739,6 +765,8 @@ class cmd_dump_pack(Command):
 
 
 class cmd_dump_index(Command):
+    """Show information about a pack index file."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("filename", help="Index file to dump")
@@ -751,6 +779,8 @@ class cmd_dump_index(Command):
 
 
 class cmd_init(Command):
+    """Create an empty Git repository or reinitialize an existing one."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -765,6 +795,8 @@ class cmd_init(Command):
 
 
 class cmd_clone(Command):
+    """Clone a repository into a new directory."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -864,6 +896,8 @@ def _get_commit_message_with_template(initial_message, repo=None, commit=None):
 
 
 class cmd_commit(Command):
+    """Record changes to the repository."""
+
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument("--message", "-m", help="Commit message")
@@ -914,6 +948,8 @@ class cmd_commit(Command):
 
 
 class cmd_commit_tree(Command):
+    """Create a new commit object from a tree."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("--message", "-m", required=True, help="Commit message")
@@ -923,11 +959,15 @@ class cmd_commit_tree(Command):
 
 
 class cmd_update_server_info(Command):
+    """Update auxiliary info file to help dumb servers."""
+
     def run(self, args) -> None:
         porcelain.update_server_info(".")
 
 
 class cmd_symbolic_ref(Command):
+    """Read, modify and delete symbolic refs."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("name", help="Symbolic reference name")
@@ -958,6 +998,8 @@ class cmd_symbolic_ref(Command):
 
 
 class cmd_pack_refs(Command):
+    """Pack heads and tags for efficient repository access."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("--all", action="store_true")
@@ -970,6 +1012,8 @@ class cmd_pack_refs(Command):
 
 
 class cmd_show(Command):
+    """Show various types of objects."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("objectish", type=str, nargs="*")
@@ -1018,6 +1062,8 @@ class cmd_show(Command):
 
 
 class cmd_diff_tree(Command):
+    """Compare the content and mode of trees."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("old_tree", help="Old tree SHA")
@@ -1027,6 +1073,8 @@ class cmd_diff_tree(Command):
 
 
 class cmd_rev_list(Command):
+    """List commit objects in reverse chronological order."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("commits", nargs="+", help="Commit IDs to list")
@@ -1035,6 +1083,8 @@ class cmd_rev_list(Command):
 
 
 class cmd_tag(Command):
+    """Create, list, delete or verify a tag object."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1054,6 +1104,8 @@ class cmd_tag(Command):
 
 
 class cmd_repack(Command):
+    """Pack unpacked objects in a repository."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1061,6 +1113,8 @@ class cmd_repack(Command):
 
 
 class cmd_reflog(Command):
+    """Manage reflog information."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1098,6 +1152,8 @@ class cmd_reflog(Command):
 
 
 class cmd_reset(Command):
+    """Reset current HEAD to the specified state."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         mode_group = parser.add_mutually_exclusive_group()
@@ -1126,6 +1182,8 @@ class cmd_reset(Command):
 
 
 class cmd_revert(Command):
+    """Revert some existing commits."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1147,6 +1205,8 @@ class cmd_revert(Command):
 
 
 class cmd_daemon(Command):
+    """Run a simple Git protocol server."""
+
     def run(self, args) -> None:
         from dulwich import log_utils
 
@@ -1176,6 +1236,8 @@ class cmd_daemon(Command):
 
 
 class cmd_web_daemon(Command):
+    """Run a simple HTTP server for Git repositories."""
+
     def run(self, args) -> None:
         from dulwich import log_utils
 
@@ -1203,6 +1265,8 @@ class cmd_web_daemon(Command):
 
 
 class cmd_write_tree(Command):
+    """Create a tree object from the current index."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1210,6 +1274,8 @@ class cmd_write_tree(Command):
 
 
 class cmd_receive_pack(Command):
+    """Receive what is pushed into the repository."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("gitdir", nargs="?", default=".", help="Git directory")
@@ -1218,6 +1284,8 @@ class cmd_receive_pack(Command):
 
 
 class cmd_upload_pack(Command):
+    """Send objects packed back to git-fetch-pack."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("gitdir", nargs="?", default=".", help="Git directory")
@@ -1226,6 +1294,8 @@ class cmd_upload_pack(Command):
 
 
 class cmd_status(Command):
+    """Show the working tree status."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("gitdir", nargs="?", default=".", help="Git directory")
@@ -1252,6 +1322,8 @@ class cmd_status(Command):
 
 
 class cmd_ls_remote(Command):
+    """List references in a remote repository."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1272,6 +1344,8 @@ class cmd_ls_remote(Command):
 
 
 class cmd_ls_tree(Command):
+    """List the contents of a tree object."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1298,6 +1372,8 @@ class cmd_ls_tree(Command):
 
 
 class cmd_pack_objects(Command):
+    """Create a packed archive of objects."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1334,6 +1410,8 @@ class cmd_pack_objects(Command):
 
 
 class cmd_unpack_objects(Command):
+    """Unpack objects from a packed archive."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("pack_file", help="Pack file to unpack")
@@ -1344,6 +1422,8 @@ class cmd_unpack_objects(Command):
 
 
 class cmd_prune(Command):
+    """Prune all unreachable objects from the object database."""
+
     def run(self, args) -> Optional[int]:
         import datetime
         import time
@@ -1406,6 +1486,8 @@ class cmd_prune(Command):
 
 
 class cmd_pull(Command):
+    """Fetch from and integrate with another repository or a local branch."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("from_location", type=str)
@@ -1423,6 +1505,8 @@ class cmd_pull(Command):
 
 
 class cmd_push(Command):
+    """Update remote refs along with associated objects."""
+
     def run(self, argv) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument("-f", "--force", action="store_true", help="Force")
@@ -1441,6 +1525,8 @@ class cmd_push(Command):
 
 
 class cmd_remote_add(Command):
+    """Add a remote repository."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("name", help="Name of the remote")
@@ -1450,6 +1536,8 @@ class cmd_remote_add(Command):
 
 
 class SuperCommand(Command):
+    """Base class for commands that have subcommands."""
+
     subcommands: ClassVar[dict[str, type[Command]]] = {}
     default_command: ClassVar[Optional[type[Command]]] = None
 
@@ -1474,12 +1562,16 @@ class SuperCommand(Command):
 
 
 class cmd_remote(SuperCommand):
+    """Manage set of tracked repositories."""
+
     subcommands: ClassVar[dict[str, type[Command]]] = {
         "add": cmd_remote_add,
     }
 
 
 class cmd_submodule_list(Command):
+    """List submodules."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(argv)
@@ -1488,6 +1580,8 @@ class cmd_submodule_list(Command):
 
 
 class cmd_submodule_init(Command):
+    """Initialize submodules."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(argv)
@@ -1495,6 +1589,8 @@ class cmd_submodule_init(Command):
 
 
 class cmd_submodule_add(Command):
+    """Add a submodule."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("url", help="URL of repository to add as submodule")
@@ -1505,6 +1601,8 @@ class cmd_submodule_add(Command):
 
 
 class cmd_submodule_update(Command):
+    """Update submodules."""
+
     def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1524,6 +1622,8 @@ class cmd_submodule_update(Command):
 
 
 class cmd_submodule(SuperCommand):
+    """Initialize, update or inspect submodules."""
+
     subcommands: ClassVar[dict[str, type[Command]]] = {
         "add": cmd_submodule_add,
         "init": cmd_submodule_init,
@@ -1535,6 +1635,8 @@ class cmd_submodule(SuperCommand):
 
 
 class cmd_check_ignore(Command):
+    """Check whether files are excluded by gitignore."""
+
     def run(self, args):
         parser = argparse.ArgumentParser()
         parser.add_argument("paths", nargs="+", help="Paths to check")
@@ -1547,6 +1649,8 @@ class cmd_check_ignore(Command):
 
 
 class cmd_check_mailmap(Command):
+    """Show canonical names and email addresses of contacts."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("identities", nargs="+", help="Identities to check")
@@ -1557,6 +1661,8 @@ class cmd_check_mailmap(Command):
 
 
 class cmd_branch(Command):
+    """List, create, or delete branches."""
+
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1587,6 +1693,8 @@ class cmd_branch(Command):
 
 
 class cmd_checkout(Command):
+    """Switch branches or restore working tree files."""
+
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1622,6 +1730,8 @@ class cmd_checkout(Command):
 
 
 class cmd_stash_list(Command):
+    """List stash entries."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1630,6 +1740,8 @@ class cmd_stash_list(Command):
 
 
 class cmd_stash_push(Command):
+    """Save your local modifications to a new stash."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1638,6 +1750,8 @@ class cmd_stash_push(Command):
 
 
 class cmd_stash_pop(Command):
+    """Apply a stash and remove it from the stash list."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1646,7 +1760,7 @@ class cmd_stash_pop(Command):
 
 
 class cmd_bisect(SuperCommand):
-    """Git bisect command implementation."""
+    """Use binary search to find the commit that introduced a bug."""
 
     subcommands: ClassVar[dict[str, type[Command]]] = {}
 
@@ -1774,6 +1888,8 @@ class cmd_bisect(SuperCommand):
 
 
 class cmd_stash(SuperCommand):
+    """Stash the changes in a dirty working directory away."""
+
     subcommands: ClassVar[dict[str, type[Command]]] = {
         "list": cmd_stash_list,
         "pop": cmd_stash_pop,
@@ -1782,6 +1898,8 @@ class cmd_stash(SuperCommand):
 
 
 class cmd_ls_files(Command):
+    """Show information about files in the index and working tree."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1790,6 +1908,8 @@ class cmd_ls_files(Command):
 
 
 class cmd_describe(Command):
+    """Give an object a human readable name based on an available ref."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(args)
@@ -1797,6 +1917,8 @@ class cmd_describe(Command):
 
 
 class cmd_merge(Command):
+    """Join two or more development histories together."""
+
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument("commit", type=str, help="Commit to merge")
@@ -1841,6 +1963,8 @@ class cmd_merge(Command):
 
 
 class cmd_notes_add(Command):
+    """Add notes to a commit."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("object", help="Object to annotate")
@@ -1854,6 +1978,8 @@ class cmd_notes_add(Command):
 
 
 class cmd_notes_show(Command):
+    """Show notes for a commit."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("object", help="Object to show notes for")
@@ -1870,6 +1996,8 @@ class cmd_notes_show(Command):
 
 
 class cmd_notes_remove(Command):
+    """Remove notes for a commit."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("object", help="Object to remove notes from")
@@ -1886,6 +2014,8 @@ class cmd_notes_remove(Command):
 
 
 class cmd_notes_list(Command):
+    """List all note objects."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -1899,6 +2029,8 @@ class cmd_notes_list(Command):
 
 
 class cmd_notes(SuperCommand):
+    """Add or inspect object notes."""
+
     subcommands: ClassVar[dict[str, type[Command]]] = {
         "add": cmd_notes_add,
         "show": cmd_notes_show,
@@ -1910,6 +2042,8 @@ class cmd_notes(SuperCommand):
 
 
 class cmd_cherry_pick(Command):
+    """Apply the changes introduced by some existing commits."""
+
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser(
             description="Apply the changes introduced by some existing commits"
@@ -1978,6 +2112,8 @@ class cmd_cherry_pick(Command):
 
 
 class cmd_merge_tree(Command):
+    """Show three-way merge without touching index."""
+
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser(
             description="Perform a tree-level merge without touching the working directory"
@@ -2040,6 +2176,8 @@ class cmd_merge_tree(Command):
 
 
 class cmd_gc(Command):
+    """Cleanup unnecessary files and optimize the local repository."""
+
     def run(self, args) -> Optional[int]:
         import datetime
         import time
@@ -2136,6 +2274,8 @@ class cmd_gc(Command):
 
 
 class cmd_count_objects(Command):
+    """Count unpacked number of objects and their disk consumption."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -2164,6 +2304,8 @@ class cmd_count_objects(Command):
 
 
 class cmd_rebase(Command):
+    """Reapply commits on top of another base tip."""
+
     def run(self, args) -> int:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -2233,6 +2375,8 @@ class cmd_rebase(Command):
 
 
 class cmd_filter_branch(Command):
+    """Rewrite branches."""
+
     def run(self, args) -> Optional[int]:
         import subprocess
 
@@ -2467,6 +2611,8 @@ class cmd_filter_branch(Command):
 
 
 class cmd_lfs(Command):
-    """Git LFS management commands."""
+    """Git Large File Storage management."""
 
     def run(self, argv) -> None:
@@ -2641,6 +2787,8 @@ class cmd_lfs(Command):
 
 
 class cmd_help(Command):
+    """Display help information about git."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -2667,6 +2815,8 @@ For a list of supported commands, see 'dulwich help -a'.
 
 
 class cmd_format_patch(Command):
+    """Prepare patches for e-mail submission."""
+
     def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
@@ -2719,6 +2869,8 @@ class cmd_format_patch(Command):
 
 
 class cmd_bundle(Command):
+    """Create, unpack, and manipulate bundle files."""
+
     def run(self, args) -> int:
         if not args:
             print("Usage: bundle <create|verify|list-heads|unbundle> <options>")
@@ -2918,6 +3070,8 @@ class cmd_bundle(Command):
 
 
 class cmd_worktree_add(Command):
-    """Add a new worktree to the repository."""
+    """Create a new worktree."""
 
     def run(self, args) -> Optional[int]:
@@ -2966,6 +3120,8 @@ class cmd_worktree_add(Command):
 
 
 class cmd_worktree_list(Command):
-    """List details of each worktree."""
+    """List worktrees."""
 
     def run(self, args) -> Optional[int]:
@@ -3018,6 +3174,8 @@ class cmd_worktree_list(Command):
 class cmd_worktree_remove(Command):
     """Remove a worktree."""
 
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser(
             description="Remove a worktree", prog="dulwich worktree remove"
@@ -3039,6 +3197,8 @@ class cmd_worktree_remove(Command):
 class cmd_worktree_prune(Command):
     """Prune worktree information."""
 
     def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser(
             description="Prune worktree information", prog="dulwich worktree prune"
@@ -3075,6 +3235,8 @@ class cmd_worktree_prune(Command):
 
 
 class cmd_worktree_lock(Command):
-    """Lock a worktree."""
+    """Lock a worktree to prevent it from being pruned."""
 
     def run(self, args) -> Optional[int]:
@@ -3096,6 +3258,8 @@ class cmd_worktree_lock(Command):
 
 
 class cmd_worktree_unlock(Command):
-    """Unlock a worktree."""
+    """Unlock a locked worktree."""
 
     def run(self, args) -> Optional[int]:
@@ -3114,6 +3278,8 @@ class cmd_worktree_unlock(Command):
 
 
 class cmd_worktree_move(Command):
-    """Move a worktree."""
+    """Move a worktree to a new location."""
 
     def run(self, args) -> Optional[int]:
@@ -3137,6 +3303,8 @@ class cmd_worktree_move(Command):
 class cmd_worktree(SuperCommand):
     """Manage multiple working trees."""
 
     subcommands: ClassVar[dict[str, type[Command]]] = {
         "add": cmd_worktree_add,
         "list": cmd_worktree_list,

+ 160 - 1
dulwich/client.py

@@ -259,6 +259,12 @@ class ReportStatusParser:
 
 
 def negotiate_protocol_version(proto) -> int:
+    """Negotiate the protocol version to use.
+
+    Args:
+      proto: Protocol instance to negotiate with
+    Returns: Protocol version (0, 1, or 2)
+    """
     pkt = proto.read_pkt_line()
     if pkt is not None and pkt.strip() == b"version 2":
         return 2
@@ -267,6 +273,12 @@ def negotiate_protocol_version(proto) -> int:
 
 
 def read_server_capabilities(pkt_seq):
+    """Read server capabilities from a packet sequence.
+
+    Args:
+      pkt_seq: Sequence of packets from server
+    Returns: Set of server capabilities
+    """
     server_capabilities = []
     for pkt in pkt_seq:
         server_capabilities.append(pkt)
@@ -276,6 +288,12 @@ def read_server_capabilities(pkt_seq):
 def read_pkt_refs_v2(
     pkt_seq,
 ) -> tuple[dict[bytes, bytes], dict[bytes, bytes], dict[bytes, bytes]]:
+    """Read packet references in protocol v2 format.
+
+    Args:
+      pkt_seq: Sequence of packets
+    Returns: Tuple of (refs dict, symrefs dict, peeled dict)
+    """
     refs = {}
     symrefs = {}
     peeled = {}
@@ -299,6 +317,12 @@ def read_pkt_refs_v2(
 
 
 def read_pkt_refs_v1(pkt_seq) -> tuple[dict[bytes, bytes], set[bytes]]:
+    """Read packet references in protocol v1 format.
+
+    Args:
+      pkt_seq: Sequence of packets
+    Returns: Tuple of (refs dict, server capabilities set)
+    """
     server_capabilities = None
     refs = {}
     # Receive refs from server
@@ -392,6 +416,7 @@ class FetchPackResult(_DeprecatedDictProxy):
         self.new_unshallow = new_unshallow
 
     def __eq__(self, other):
+        """Check equality with another FetchPackResult."""
         if isinstance(other, dict):
             self._warn_deprecated()
             return self.refs == other
@@ -402,6 +427,7 @@ class FetchPackResult(_DeprecatedDictProxy):
         )
 
     def __repr__(self) -> str:
+        """Return string representation of FetchPackResult."""
         return f"{self.__class__.__name__}({self.refs!r}, {self.symrefs!r}, {self.agent!r})"
 
 
@@ -428,12 +454,14 @@ class LsRemoteResult(_DeprecatedDictProxy):
         )
 
     def __eq__(self, other):
+        """Check equality with another LsRemoteResult."""
         if isinstance(other, dict):
             self._warn_deprecated()
             return self.refs == other
         return self.refs == other.refs and self.symrefs == other.symrefs
 
     def __repr__(self) -> str:
+        """Return string representation of LsRemoteResult."""
         return f"{self.__class__.__name__}({self.refs!r}, {self.symrefs!r})"
 
 
@@ -453,16 +481,24 @@ class SendPackResult(_DeprecatedDictProxy):
         self.ref_status = ref_status
 
     def __eq__(self, other):
+        """Check equality with another SendPackResult."""
         if isinstance(other, dict):
             self._warn_deprecated()
             return self.refs == other
         return self.refs == other.refs and self.agent == other.agent
 
     def __repr__(self) -> str:
+        """Return string representation of SendPackResult."""
         return f"{self.__class__.__name__}({self.refs!r}, {self.agent!r})"
 
 
 def _read_shallow_updates(pkt_seq):
+    """Read shallow/unshallow updates from a packet sequence.
+
+    Args:
+      pkt_seq: Sequence of packets
+    Returns: Tuple of (new_shallow set, new_unshallow set)
+    """
     new_shallow = set()
     new_unshallow = set()
     for pkt in pkt_seq:
@@ -482,6 +518,8 @@ def _read_shallow_updates(pkt_seq):
 
 
 class _v1ReceivePackHeader:
+    """Handler for v1 receive-pack header."""
+
     def __init__(self, capabilities, old_refs, new_refs) -> None:
         self.want: list[bytes] = []
         self.have: list[bytes] = []
@@ -489,6 +527,7 @@ class _v1ReceivePackHeader:
         self.sent_capabilities = False
 
     def __iter__(self):
+        """Iterate over the receive-pack header lines."""
         return self._it
 
     def _handle_receive_pack_head(self, capabilities, old_refs, new_refs):
@@ -554,6 +593,14 @@ def _read_side_band64k_data(pkt_seq: Iterable[bytes]) -> Iterator[tuple[int, byt
 
 
 def find_capability(capabilities, key, value):
+    """Find a capability in the list of capabilities.
+
+    Args:
+      capabilities: List of capabilities
+      key: Capability key to search for
+      value: Optional specific value to match
+    Returns: The matching capability or None
+    """
     for capability in capabilities:
         k, v = parse_capability(capability)
         if k != key:
@@ -1553,6 +1600,13 @@ class TCPGitClient(TraditionalGitClient):
     """A Git Client that works over TCP directly (i.e. git://)."""
 
     def __init__(self, host, port=None, **kwargs) -> None:
+        """Initialize a TCPGitClient.
+
+        Args:
+          host: Hostname or IP address to connect to
+          port: Port number (defaults to TCP_GIT_PORT)
+          **kwargs: Additional arguments for GitClient
+        """
         if port is None:
             port = TCP_GIT_PORT
         self._host = host
@@ -1561,9 +1615,26 @@ class TCPGitClient(TraditionalGitClient):
 
     @classmethod
     def from_parsedurl(cls, parsedurl, **kwargs):
+        """Create an instance of TCPGitClient from a parsed URL.
+
+        Args:
+          parsedurl: Result of urlparse()
+          **kwargs: Additional arguments for the client
+
+        Returns:
+          A TCPGitClient instance
+        """
         return cls(parsedurl.hostname, port=parsedurl.port, **kwargs)
 
     def get_url(self, path):
+        """Get the URL for a TCP git connection.
+
+        Args:
+          path: Repository path
+
+        Returns:
+          git:// URL for the path
+        """
         netloc = self._host
         if self._port is not None and self._port != TCP_GIT_PORT:
             netloc += f":{self._port}"
@@ -1642,15 +1713,25 @@ class SubprocessWrapper:
     """A socket-like object that talks to a subprocess via pipes."""
 
     def __init__(self, proc) -> None:
+        """Initialize a SubprocessWrapper.
+
+        Args:
+          proc: Subprocess.Popen instance to wrap
+        """
         self.proc = proc
         self.read = BufferedReader(proc.stdout).read
         self.write = proc.stdin.write
 
     @property
     def stderr(self):
+        """Return the stderr stream of the subprocess."""
         return self.proc.stderr
 
     def can_read(self):
+        """Check if there is data available to read.
+
+        Returns: True if data is available, False otherwise
+        """
         if sys.platform == "win32":
             from msvcrt import get_osfhandle
 
@@ -1660,6 +1741,14 @@ class SubprocessWrapper:
             return _fileno_can_read(self.proc.stdout.fileno())
 
     def close(self, timeout: Optional[int] = 60) -> None:
+        """Close the subprocess and wait for it to terminate.
+
+        Args:
+          timeout: Maximum time to wait for subprocess to terminate (seconds)
+
+        Raises:
+          GitProtocolError: If subprocess doesn't terminate within timeout
+        """
         self.proc.stdin.close()
         self.proc.stdout.close()
         if self.proc.stderr:
@@ -1697,6 +1786,15 @@ class SubprocessGitClient(TraditionalGitClient):
 
     @classmethod
     def from_parsedurl(cls, parsedurl, **kwargs):
+        """Create an instance of SubprocessGitClient from a parsed URL.
+
+        Args:
+          parsedurl: Result of urlparse()
+          **kwargs: Additional arguments for the client
+
+        Returns:
+          A SubprocessGitClient instance
+        """
         return cls(**kwargs)
 
     git_command: Optional[str] = None
@@ -1754,14 +1852,39 @@ class LocalGitClient(GitClient):
         # Ignore the thin_packs argument
 
     def get_url(self, path):
+        """Get the URL for a local file path.
+
+        Args:
+          path: Local file path
+
+        Returns:
+          file:// URL for the path
+        """
         return urlunsplit(("file", "", path, "", ""))
 
     @classmethod
     def from_parsedurl(cls, parsedurl, **kwargs):
+        """Create an instance of LocalGitClient from a parsed URL.
+
+        Args:
+          parsedurl: Result of urlparse()
+          **kwargs: Additional arguments for the client
+
+        Returns:
+          A LocalGitClient instance
+        """
         return cls(**kwargs)
 
     @classmethod
     def _open_repo(cls, path):
+        """Open a local repository.
+
+        Args:
+          path: Repository path (as bytes or str)
+
+        Returns:
+          Repo instance wrapped in a closing context manager
+        """
         if not isinstance(path, str):
             path = os.fsdecode(path)
         return closing(Repo(path))
@@ -1965,10 +2088,27 @@ class BundleClient(GitClient):
         self._report_activity = report_activity
 
     def get_url(self, path):
+        """Get the URL for a bundle file path.
+
+        Args:
+          path: Bundle file path
+
+        Returns:
+          The path unchanged (bundle files use local paths)
+        """
         return path
 
     @classmethod
     def from_parsedurl(cls, parsedurl, **kwargs):
+        """Create an instance of BundleClient from a parsed URL.
+
+        Args:
+          parsedurl: Result of urlparse()
+          **kwargs: Additional arguments for the client
+
+        Returns:
+          A BundleClient instance
+        """
         return cls(**kwargs)
 
     @classmethod
@@ -1983,6 +2123,17 @@ class BundleClient(GitClient):
 
     @classmethod
     def _open_bundle(cls, path):
+        """Open and parse a bundle file.
+
+        Args:
+          path: Path to the bundle file (bytes or str)
+
+        Returns:
+          Bundle object with parsed metadata
+
+        Raises:
+          AssertionError: If bundle format is unsupported
+        """
         if not isinstance(path, str):
             path = os.fsdecode(path)
         # Read bundle metadata without PackData to avoid file handle issues
@@ -2037,7 +2188,15 @@ class BundleClient(GitClient):
 
     @staticmethod
     def _skip_to_pack_data(f, version):
-        """Skip to the pack data section in a bundle file."""
+        """Skip to the pack data section in a bundle file.
+
+        Args:
+          f: File object positioned at the beginning of the bundle
+          version: Bundle format version (2 or 3)
+
+        Raises:
+          AssertionError: If bundle header is invalid
+        """
         # Skip header
         header = f.readline()
         if header not in (b"# v2 git bundle\n", b"# v3 git bundle\n"):

+ 10 - 0
dulwich/cloud/__init__.py

@@ -0,0 +1,10 @@
+"""Cloud storage backends for dulwich.
+
+This package provides support for storing Git repositories in various
+cloud storage services. It includes implementations for different cloud
+providers that can be used as alternative storage backends to the
+traditional filesystem-based storage.
+
+Available backends:
+- GCS (Google Cloud Storage): Store Git objects in Google Cloud Storage buckets
+"""

+ 13 - 0
dulwich/contrib/__init__.py

@@ -19,3 +19,16 @@
 # License, Version 2.0.
 #
 
+"""Contributed implementations and extensions for dulwich.
+
+This package contains various contributed modules that extend dulwich
+functionality. These modules are maintained as part of dulwich but may
+have additional dependencies or more specialized use cases.
+
+Available modules:
+- diffstat: Generate diff statistics similar to git's --stat option
+- paramiko_vendor: SSH client implementation using paramiko
+- release_robot: Automated release management utilities
+- requests_vendor: HTTP client implementation using requests
+- swift: OpenStack Swift object storage backend
+"""

+ 10 - 0
dulwich/contrib/diffstat.py

@@ -33,6 +33,16 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
 
+"""Generate diff statistics similar to git's --stat option.
+
+This module provides functionality to parse unified diff output and generate
+statistics about changes, including:
+- Number of lines added and removed per file
+- Binary file detection
+- File rename detection
+- Formatted output similar to git diff --stat
+"""
+
 import re
 import sys
 from typing import Optional

+ 13 - 0
dulwich/lfs.py

@@ -19,6 +19,19 @@
 # License, Version 2.0.
 #
 
+"""Git Large File Storage (LFS) support.
+
+This module provides support for Git LFS, which is a Git extension for
+versioning large files. It replaces large files with text pointers inside Git,
+while storing the file contents on a remote server.
+
+Key components:
+- LFS pointer file parsing and creation
+- LFS object storage and retrieval
+- HTTP client for LFS server communication
+- Integration with dulwich repositories
+"""
+
 import hashlib
 import json
 import logging

+ 274 - 1
dulwich/object_store.py

@@ -192,6 +192,8 @@ def get_depth(
 
 
 class PackContainer(Protocol):
+    """Protocol for containers that can accept pack files."""
+
     def add_pack(self) -> tuple[BytesIO, Callable[[], None], Callable[[], None]]:
         """Add a new pack."""
 
@@ -202,6 +204,16 @@ class BaseObjectStore:
     def determine_wants_all(
         self, refs: dict[Ref, ObjectID], depth: Optional[int] = None
     ) -> list[ObjectID]:
+        """Determine all objects that are wanted by the client.
+
+        Args:
+          refs: Dictionary mapping ref names to object IDs
+          depth: Shallow fetch depth (None for full fetch)
+
+        Returns:
+          List of object IDs that are wanted
+        """
+
         def _want_deepen(sha):
             if not depth:
                 return False
@@ -326,6 +338,18 @@ class BaseObjectStore:
     def iterobjects_subset(
         self, shas: Iterable[bytes], *, allow_missing: bool = False
     ) -> Iterator[ShaFile]:
+        """Iterate over a subset of objects in the store.
+
+        Args:
+          shas: Iterable of object SHAs to retrieve
+          allow_missing: If True, skip missing objects; if False, raise KeyError
+
+        Returns:
+          Iterator of ShaFile objects
+
+        Raises:
+          KeyError: If an object is missing and allow_missing is False
+        """
         for sha in shas:
             try:
                 yield self[sha]
@@ -508,6 +532,13 @@ class BaseObjectStore:
 
 
 class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
+    """Object store that uses pack files for storage.
+
+    This class provides a base implementation for object stores that use
+    Git pack files as their primary storage mechanism. It handles caching
+    of open pack files and provides configuration for pack file operations.
+    """
+
     def __init__(
         self,
         pack_compression_level=-1,
@@ -519,6 +550,18 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
         pack_threads=None,
         pack_big_file_threshold=None,
     ) -> None:
+        """Initialize a PackBasedObjectStore.
+
+        Args:
+          pack_compression_level: Compression level for pack files (-1 to 9)
+          pack_index_version: Pack index version to use
+          pack_delta_window_size: Window size for delta compression
+          pack_window_memory: Maximum memory to use for delta window
+          pack_delta_cache_size: Cache size for delta operations
+          pack_depth: Maximum depth for pack deltas
+          pack_threads: Number of threads to use for packing
+          pack_big_file_threshold: Threshold for treating files as "big"
+        """
         self._pack_cache: dict[str, Pack] = {}
         self.pack_compression_level = pack_compression_level
         self.pack_index_version = pack_index_version
@@ -561,6 +604,11 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
 
     @property
     def alternates(self):
+        """Get the list of alternate object stores.
+
+        Returns:
+          List of alternate BaseObjectStore instances
+        """
         return []
 
     def contains_packed(self, sha) -> bool:
@@ -635,6 +683,10 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
         raise NotImplementedError(self._update_pack_cache)
 
     def close(self) -> None:
+        """Close the object store and release resources.
+
+        This method closes all cached pack files and frees associated resources.
+        """
         self._clear_cached_packs()
 
     @property
@@ -804,6 +856,20 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
         allow_missing: bool = False,
         convert_ofs_delta: bool = True,
     ) -> Iterator[UnpackedObject]:
+        """Iterate over a subset of objects, yielding UnpackedObject instances.
+
+        Args:
+          shas: Set of object SHAs to retrieve
+          include_comp: Whether to include compressed data
+          allow_missing: If True, skip missing objects; if False, raise KeyError
+          convert_ofs_delta: Whether to convert OFS_DELTA objects
+
+        Returns:
+          Iterator of UnpackedObject instances
+
+        Raises:
+          KeyError: If an object is missing and allow_missing is False
+        """
         todo: set[bytes] = set(shas)
         for p in self._iter_cached_packs():
             for unpacked in p.iter_unpacked_subset(
@@ -841,6 +907,20 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
     def iterobjects_subset(
         self, shas: Iterable[bytes], *, allow_missing: bool = False
     ) -> Iterator[ShaFile]:
+        """Iterate over a subset of objects in the store.
+
+        This method searches for objects in pack files, alternates, and loose storage.
+
+        Args:
+          shas: Iterable of object SHAs to retrieve
+          allow_missing: If True, skip missing objects; if False, raise KeyError
+
+        Returns:
+          Iterator of ShaFile objects
+
+        Raises:
+          KeyError: If an object is missing and allow_missing is False
+        """
         todo: set[bytes] = set(shas)
         for p in self._iter_cached_packs():
             for o in p.iterobjects_subset(todo, allow_missing=True):
@@ -974,10 +1054,24 @@ class DiskObjectStore(PackBasedObjectStore):
         self._use_commit_graph = True  # Default to true
 
     def __repr__(self) -> str:
+        """Return string representation of DiskObjectStore.
+
+        Returns:
+          String representation including the store path
+        """
         return f"<{self.__class__.__name__}({self.path!r})>"
 
     @classmethod
     def from_config(cls, path: Union[str, os.PathLike], config):
+        """Create a DiskObjectStore from a configuration object.
+
+        Args:
+          path: Path to the object store directory
+          config: Configuration object to read settings from
+
+        Returns:
+          New DiskObjectStore instance configured according to config
+        """
         try:
             default_compression_level = int(
                 config.get((b"core",), b"compression").decode()
@@ -1053,6 +1147,13 @@ class DiskObjectStore(PackBasedObjectStore):
 
     @property
     def alternates(self):
+        """Get the list of alternate object stores.
+
+        Reads from .git/objects/info/alternates if not already cached.
+
+        Returns:
+          List of DiskObjectStore instances for alternate object directories
+        """
         if self._alternates is not None:
             return self._alternates
         self._alternates = []
@@ -1181,6 +1282,14 @@ class DiskObjectStore(PackBasedObjectStore):
             return None
 
     def delete_loose_object(self, sha) -> None:
+        """Delete a loose object from disk.
+
+        Args:
+          sha: SHA1 of the object to delete
+
+        Raises:
+          FileNotFoundError: If the object file doesn't exist
+        """
         os.remove(self._get_shafile_path(sha))
 
     def get_object_mtime(self, sha):
@@ -1383,6 +1492,16 @@ class DiskObjectStore(PackBasedObjectStore):
 
     @classmethod
     def init(cls, path: Union[str, os.PathLike]):
+        """Initialize a new disk object store.
+
+        Creates the necessary directory structure for a Git object store.
+
+        Args:
+          path: Path where the object store should be created
+
+        Returns:
+          New DiskObjectStore instance
+        """
         try:
             os.mkdir(path)
         except FileExistsError:
@@ -1392,6 +1511,14 @@ class DiskObjectStore(PackBasedObjectStore):
         return cls(path)
 
     def iter_prefix(self, prefix):
+        """Iterate over all object SHAs with the given prefix.
+
+        Args:
+          prefix: Hex prefix to search for (as bytes)
+
+        Returns:
+          Iterator of object SHAs (as bytes) matching the prefix
+        """
         if len(prefix) < 2:
             yield from super().iter_prefix(prefix)
             return
@@ -1568,6 +1695,10 @@ class MemoryObjectStore(BaseObjectStore):
     """Object store that keeps all objects in memory."""
 
     def __init__(self) -> None:
+        """Initialize a MemoryObjectStore.
+
+        Creates an empty in-memory object store.
+        """
         super().__init__()
         self._data: dict[str, ShaFile] = {}
         self.pack_compression_level = -1
@@ -1608,6 +1739,17 @@ class MemoryObjectStore(BaseObjectStore):
         return obj.type_num, obj.as_raw_string()
 
     def __getitem__(self, name: ObjectID):
+        """Retrieve an object by SHA.
+
+        Args:
+          name: SHA of the object (as hex string or bytes)
+
+        Returns:
+          Copy of the ShaFile object
+
+        Raises:
+          KeyError: If the object is not found
+        """
         return self._data[self._to_hexsha(name)].copy()
 
     def __delitem__(self, name: ObjectID) -> None:
@@ -1713,6 +1855,11 @@ class ObjectIterator(Protocol):
     """Interface for iterating over objects."""
 
     def iterobjects(self) -> Iterator[ShaFile]:
+        """Iterate over all objects.
+
+        Returns:
+          Iterator of ShaFile objects
+        """
         raise NotImplementedError(self.iterobjects)
 
 
@@ -1818,6 +1965,17 @@ class MissingObjectFinder:
         get_tagged=None,
         get_parents=lambda commit: commit.parents,
     ) -> None:
+        """Initialize a MissingObjectFinder.
+
+        Args:
+          object_store: Object store containing objects
+          haves: SHA1s of objects already present in target
+          wants: SHA1s of objects to send
+          shallow: Set of shallow commit SHA1s
+          progress: Optional progress reporting callback
+          get_tagged: Function returning dict of pointed-to sha -> tag sha
+          get_parents: Function for getting commit parents
+        """
         self.object_store = object_store
         if shallow is None:
             shallow = set()
@@ -1881,14 +2039,32 @@ class MissingObjectFinder:
         self._tagged = (get_tagged and get_tagged()) or {}
 
     def get_remote_has(self):
+        """Get the set of SHAs the remote has.
+
+        Returns:
+          Set of SHA1s that the remote side already has
+        """
         return self.remote_has
 
     def add_todo(
         self, entries: Iterable[tuple[ObjectID, Optional[bytes], Optional[int], bool]]
     ) -> None:
+        """Add objects to the todo list.
+
+        Args:
+          entries: Iterable of tuples (sha, name, type_num, is_leaf)
+        """
         self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done])
 
     def __next__(self) -> tuple[bytes, Optional[PackHint]]:
+        """Get the next object to send.
+
+        Returns:
+          Tuple of (sha, pack_hint)
+
+        Raises:
+          StopIteration: When no more objects to send
+        """
         while True:
             if not self.objects_to_send:
                 self.progress(
@@ -1929,6 +2105,11 @@ class MissingObjectFinder:
         return (sha, pack_hint)
 
     def __iter__(self):
+        """Return iterator over objects to send.
+
+        Returns:
+          Self (this class implements the iterator protocol)
+        """
         return self
 
 
@@ -2061,27 +2242,60 @@ class OverlayObjectStore(BaseObjectStore):
     """Object store that can overlay multiple object stores."""
 
     def __init__(self, bases, add_store=None) -> None:
+        """Initialize an OverlayObjectStore.
+
+        Args:
+          bases: List of base object stores to overlay
+          add_store: Optional store to write new objects to
+        """
         self.bases = bases
         self.add_store = add_store
 
     def add_object(self, object):
+        """Add a single object to the store.
+
+        Args:
+          object: Object to add
+
+        Raises:
+          NotImplementedError: If no add_store was provided
+        """
         if self.add_store is None:
             raise NotImplementedError(self.add_object)
         return self.add_store.add_object(object)
 
     def add_objects(self, objects, progress=None):
+        """Add multiple objects to the store.
+
+        Args:
+          objects: Iterator of objects to add
+          progress: Optional progress reporting callback
+
+        Raises:
+          NotImplementedError: If no add_store was provided
+        """
         if self.add_store is None:
             raise NotImplementedError(self.add_object)
         return self.add_store.add_objects(objects, progress)
 
     @property
     def packs(self):
+        """Get the list of packs from all overlaid stores.
+
+        Returns:
+          Combined list of packs from all base stores
+        """
         ret = []
         for b in self.bases:
             ret.extend(b.packs)
         return ret
 
     def __iter__(self):
+        """Iterate over all object SHAs in the overlaid stores.
+
+        Returns:
+          Iterator of object SHAs (deduped across stores)
+        """
         done = set()
         for b in self.bases:
             for o_id in b:
@@ -2092,6 +2306,18 @@ class OverlayObjectStore(BaseObjectStore):
     def iterobjects_subset(
         self, shas: Iterable[bytes], *, allow_missing: bool = False
     ) -> Iterator[ShaFile]:
+        """Iterate over a subset of objects from the overlaid stores.
+
+        Args:
+          shas: Iterable of object SHAs to retrieve
+          allow_missing: If True, skip missing objects; if False, raise KeyError
+
+        Returns:
+          Iterator of ShaFile objects
+
+        Raises:
+          KeyError: If an object is missing and allow_missing is False
+        """
         todo = set(shas)
         found: set[bytes] = set()
 
@@ -2116,6 +2342,20 @@ class OverlayObjectStore(BaseObjectStore):
         allow_missing: bool = False,
         convert_ofs_delta=True,
     ) -> Iterator[ShaFile]:
+        """Iterate over unpacked objects from the overlaid stores.
+
+        Args:
+          shas: Iterable of object SHAs to retrieve
+          include_comp: Whether to include compressed data
+          allow_missing: If True, skip missing objects; if False, raise KeyError
+          convert_ofs_delta: Whether to convert OFS_DELTA objects
+
+        Returns:
+          Iterator of unpacked objects
+
+        Raises:
+          KeyError: If an object is missing and allow_missing is False
+        """
         todo = set(shas)
         for b in self.bases:
             for o in b.iter_unpacked_subset(
@@ -2130,6 +2370,17 @@ class OverlayObjectStore(BaseObjectStore):
             raise KeyError(o.id)
 
     def get_raw(self, sha_id):
+        """Get the raw object data from the overlaid stores.
+
+        Args:
+          sha_id: SHA of the object
+
+        Returns:
+          Tuple of (type_num, raw_data)
+
+        Raises:
+          KeyError: If object not found in any base store
+        """
         for b in self.bases:
             try:
                 return b.get_raw(sha_id)
@@ -2138,12 +2389,28 @@ class OverlayObjectStore(BaseObjectStore):
         raise KeyError(sha_id)
 
     def contains_packed(self, sha) -> bool:
+        """Check if an object is packed in any base store.
+
+        Args:
+          sha: SHA of the object
+
+        Returns:
+          True if object is packed in any base store
+        """
         for b in self.bases:
             if b.contains_packed(sha):
                 return True
         return False
 
     def contains_loose(self, sha) -> bool:
+        """Check if an object is loose in any base store.
+
+        Args:
+          sha: SHA of the object
+
+        Returns:
+          True if object is loose in any base store
+        """
         for b in self.bases:
             if b.contains_loose(sha):
                 return True
@@ -2172,8 +2439,14 @@ class BucketBasedObjectStore(PackBasedObjectStore):
         return None
 
     def delete_loose_object(self, sha) -> None:
+        """Delete a loose object (no-op for bucket stores).
+
+        Bucket-based stores don't have loose objects, so this is a no-op.
+
+        Args:
+          sha: SHA of the object to delete
+        """
         # Doesn't exist..
-        pass
 
     def _remove_pack(self, name) -> None:
         raise NotImplementedError(self._remove_pack)

+ 94 - 5
dulwich/pack.py

@@ -167,6 +167,8 @@ class PackedObjectContainer(ObjectContainer):
 
 
 class UnpackedObjectStream:
+    """Abstract base class for a stream of unpacked objects."""
+
     def __iter__(self) -> Iterator["UnpackedObject"]:
         raise NotImplementedError(self.__iter__)
 
@@ -192,6 +194,8 @@ def take_msb_bytes(
 
 
 class PackFileDisappeared(Exception):
+    """Raised when a pack file unexpectedly disappears."""
+
     def __init__(self, obj: object) -> None:
         self.obj = obj
 
@@ -294,9 +298,11 @@ class UnpackedObject:
         return True
 
     def __ne__(self, other: object) -> bool:
+        """Check inequality with another UnpackedObject."""
         return not (self == other)
 
     def __repr__(self) -> str:
+        """Return string representation of this UnpackedObject."""
         data = [f"{s}={getattr(self, s)!r}" for s in self.__slots__]
         return "{}({})".format(self.__class__.__name__, ", ".join(data))
 
@@ -399,6 +405,13 @@ def load_pack_index(path: Union[str, os.PathLike]) -> "PackIndex":
 def _load_file_contents(
     f: Union[IO[bytes], _GitFile], size: Optional[int] = None
 ) -> tuple[Union[bytes, Any], int]:
+    """Load contents from a file, preferring mmap when possible.
+
+    Args:
+      f: File-like object to load
+      size: Expected size, or None to determine from file
+    Returns: Tuple of (contents, size)
+    """
     try:
         fd = f.fileno()
     except (UnsupportedOperation, AttributeError):
@@ -494,6 +507,7 @@ class PackIndex:
         return True
 
     def __ne__(self, other: object) -> bool:
+        """Check if this pack index is not equal to another."""
         return not self.__eq__(other)
 
     def __len__(self) -> int:
@@ -560,10 +574,10 @@ class PackIndex:
         raise NotImplementedError(self._itersha)
 
     def close(self) -> None:
-        pass
+        """Close any open files."""
 
     def check(self) -> None:
-        pass
+        """Check the consistency of this pack index."""
 
 
 class MemoryPackIndex(PackIndex):
@@ -589,33 +603,46 @@ class MemoryPackIndex(PackIndex):
         self._pack_checksum = pack_checksum
 
     def get_pack_checksum(self) -> Optional[bytes]:
+        """Return the SHA checksum stored for the corresponding packfile."""
         return self._pack_checksum
 
     def __len__(self) -> int:
+        """Return the number of entries in this pack index."""
         return len(self._entries)
 
     def object_offset(self, sha: bytes) -> int:
+        """Return the offset for the given SHA.
+
+        Args:
+          sha: SHA to look up (binary or hex)
+        Returns: Offset in the pack file
+        """
         if len(sha) == 40:
             sha = hex_to_sha(sha)
         return self._by_sha[sha]
 
     def object_sha1(self, offset: int) -> bytes:
+        """Return the SHA1 for the object at the given offset."""
         return self._by_offset[offset]
 
     def _itersha(self) -> Iterator[bytes]:
+        """Iterate over all SHA1s in the index."""
         return iter(self._by_sha)
 
     def iterentries(self) -> Iterator[PackIndexEntry]:
+        """Iterate over all index entries."""
         return iter(self._entries)
 
     @classmethod
     def for_pack(cls, pack_data: "PackData") -> "MemoryPackIndex":
+        """Create a MemoryPackIndex from a PackData object."""
         return MemoryPackIndex(
             list(pack_data.sorted_entries()), pack_data.get_stored_checksum()
         )
 
     @classmethod
     def clone(cls, other_index: "PackIndex") -> "MemoryPackIndex":
+        """Create a copy of another PackIndex in memory."""
         return cls(list(other_index.iterentries()), other_index.get_pack_checksum())
 
 
@@ -660,6 +687,7 @@ class FilePackIndex(PackIndex):
 
     @property
     def path(self) -> str:
+        """Return the path to this index file."""
         return os.fspath(self._filename)
 
     def __eq__(self, other: object) -> bool:
@@ -673,6 +701,7 @@ class FilePackIndex(PackIndex):
         return super().__eq__(other)
 
     def close(self) -> None:
+        """Close the underlying file and any mmap."""
         self._file.close()
         close_fn = getattr(self._contents, "close", None)
         if close_fn is not None:
@@ -703,6 +732,7 @@ class FilePackIndex(PackIndex):
         raise NotImplementedError(self._unpack_crc32_checksum)
 
     def _itersha(self) -> Iterator[bytes]:
+        """Iterate over all SHA1s in the index."""
         for i in range(len(self)):
             yield self._unpack_name(i)
 
@@ -716,6 +746,15 @@ class FilePackIndex(PackIndex):
             yield self._unpack_entry(i)
 
     def _read_fan_out_table(self, start_offset: int) -> list[int]:
+        """Read the fan-out table from the index.
+
+        The fan-out table contains 256 entries, one per possible first
+        byte; entry i holds the count of objects whose first SHA1 byte
+        is less than or equal to i.
+
+        Args:
+          start_offset: Offset in the file where the fan-out table starts
+        Returns: List of 256 integers
+        """
         ret = []
         for i in range(0x100):
             fanout_entry = self._contents[
@@ -821,18 +860,22 @@ class PackIndex1(FilePackIndex):
         self._fan_out_table = self._read_fan_out_table(0)
 
     def _unpack_entry(self, i):
+        """Unpack the i-th entry from the v1 index."""
         (offset, name) = unpack_from(">L20s", self._contents, (0x100 * 4) + (i * 24))
         return (name, offset, None)
 
     def _unpack_name(self, i):
+        """Unpack the i-th SHA1 from the v1 index."""
         offset = (0x100 * 4) + (i * 24) + 4
         return self._contents[offset : offset + 20]
 
     def _unpack_offset(self, i):
+        """Unpack the i-th offset from the v1 index."""
         offset = (0x100 * 4) + (i * 24)
         return unpack_from(">L", self._contents, offset)[0]
 
     def _unpack_crc32_checksum(self, i) -> None:
+        """Return None as v1 indexes don't store CRC32 checksums."""
         # Not stored in v1 index files
         return None
 
@@ -858,6 +901,7 @@ class PackIndex2(FilePackIndex):
         )
 
     def _unpack_entry(self, i):
+        """Unpack the i-th entry from the v2 index."""
         return (
             self._unpack_name(i),
             self._unpack_offset(i),
@@ -865,10 +909,15 @@ class PackIndex2(FilePackIndex):
         )
 
     def _unpack_name(self, i):
+        """Unpack the i-th SHA1 from the v2 index."""
         offset = self._name_table_offset + i * 20
         return self._contents[offset : offset + 20]
 
     def _unpack_offset(self, i):
+        """Unpack the i-th offset from the v2 index.
+
+        Handles large offsets (>2GB) by reading from the large offset table.
+        """
         offset = self._pack_offset_table_offset + i * 4
         offset = unpack_from(">L", self._contents, offset)[0]
         if offset & (2**31):
@@ -877,6 +926,7 @@ class PackIndex2(FilePackIndex):
         return offset
 
     def _unpack_crc32_checksum(self, i):
+        """Unpack the i-th CRC32 checksum from the v2 index."""
         return unpack_from(">L", self._contents, self._crc32_table_offset + i * 4)[0]
 
 
@@ -963,6 +1013,12 @@ def read_pack_header(read) -> tuple[int, int]:
 
 
 def chunks_length(chunks: Union[bytes, Iterable[bytes]]) -> int:
+    """Get the total length of a sequence of chunks.
+
+    Args:
+      chunks: Either a single bytes object or an iterable of bytes
+    Returns: Total length in bytes
+    """
     if isinstance(chunks, bytes):
         return len(chunks)
     else:
@@ -1047,7 +1103,12 @@ def unpack_object(
 
 
 def _compute_object_size(value):
-    """Compute the size of a unresolved object for use with LRUSizeCache."""
+    """Compute the size of an unresolved object for use with LRUSizeCache.
+
+    Args:
+      value: Tuple of (type_num, object_chunks)
+    Returns: Size in bytes
+    """
     (num, obj) = value
     if num in DELTA_TYPES:
         return chunks_length(obj[1])
@@ -1084,6 +1145,7 @@ class PackStreamReader:
           read: The read callback to read from.
           size: The maximum number of bytes to read; the particular
             behavior is callback-specific.
+        Returns: Bytes read
         """
         data = read(size)
 
@@ -1107,6 +1169,7 @@ class PackStreamReader:
         return data
 
     def _buf_len(self):
+        """Get the number of bytes in the read buffer."""
         buf = self._rbuf
         start = buf.tell()
         buf.seek(0, SEEK_END)
@@ -1116,6 +1179,7 @@ class PackStreamReader:
 
     @property
     def offset(self):
+        """Return the current offset in the pack stream."""
         return self._offset - self._buf_len()
 
     def read(self, size):
@@ -1218,7 +1282,13 @@ class PackStreamCopier(PackStreamReader):
         self._delta_iter = delta_iter
 
     def _read(self, read, size):
-        """Read data from the read callback and write it to the file."""
+        """Read data from the read callback and write it to the file.
+
+        Args:
+          read: Read callback function
+          size: Number of bytes to read
+        Returns: Data read
+        """
         data = super()._read(read, size)
         self.outfile.write(data)
         return data
@@ -1240,7 +1310,13 @@ class PackStreamCopier(PackStreamReader):
 
 
 def obj_sha(type, chunks):
-    """Compute the SHA for a numeric type and object chunks."""
+    """Compute the SHA for a numeric type and object chunks.
+
+    Args:
+      type: Numeric type of the object
+      chunks: Object data as bytes or iterable of bytes
+    Returns: SHA-1 digest (20 bytes)
+    """
     sha = sha1()
     sha.update(object_header(type, chunks_length(chunks)))
     if isinstance(chunks, bytes):
@@ -1365,6 +1441,7 @@ class PackData:
         return cls(filename=path)
 
     def close(self) -> None:
+        """Close the underlying pack file."""
         self._file.close()
 
     def __enter__(self):
@@ -1374,11 +1451,17 @@ class PackData:
         self.close()
 
     def __eq__(self, other):
+        """Check equality based on pack checksum."""
         if isinstance(other, PackData):
             return self.get_stored_checksum() == other.get_stored_checksum()
         return False
 
     def _get_size(self):
+        """Get the size of the pack file.
+
+        Returns: Size in bytes
+        Raises: AssertionError if file is too small to be a pack
+        """
         if self._size is not None:
             return self._size
         self._size = os.path.getsize(self._filename)
@@ -1399,6 +1482,12 @@ class PackData:
         return compute_file_sha(self._file, end_ofs=-20).digest()
 
     def iter_unpacked(self, *, include_comp: bool = False):
+        """Iterate over unpacked objects in the pack.
+
+        Args:
+          include_comp: If True, include compressed object data
+        Yields: UnpackedObject instances
+        """
         self._file.seek(self._header_size)
 
         if self._num_objects is None:

+ 87 - 0
dulwich/porcelain.py

@@ -206,15 +206,44 @@ class NoneStream(RawIOBase):
     """Fallback if stdout or stderr are unavailable, does nothing."""
 
     def read(self, size=-1) -> None:
+        """Read bytes (no-op for NoneStream).
+
+        Args:
+          size: Number of bytes to read
+
+        Returns:
+          None
+        """
         return None
 
     def readall(self) -> bytes:
+        """Read all bytes (returns empty bytes).
+
+        Returns:
+          Empty bytes object
+        """
         return b""
 
     def readinto(self, b) -> None:
+        """Read bytes into buffer (no-op for NoneStream).
+
+        Args:
+          b: Buffer to read into
+
+        Returns:
+          None
+        """
         return None
 
     def write(self, b) -> None:
+        """Write bytes (no-op for NoneStream).
+
+        Args:
+          b: Bytes to write
+
+        Returns:
+          None
+        """
         return None
 
 
@@ -229,6 +258,11 @@ class Error(Exception):
     """Porcelain-based error."""
 
     def __init__(self, msg) -> None:
+        """Initialize an Error.
+
+        Args:
+          msg: Error message
+        """
         super().__init__(msg)
 
 
@@ -412,6 +446,12 @@ class DivergedBranches(Error):
     """Branches have diverged and fast-forward is not possible."""
 
     def __init__(self, current_sha, new_sha) -> None:
+        """Initialize a DivergedBranches error.
+
+        Args:
+          current_sha: SHA of the current branch head
+          new_sha: SHA of the new branch head
+        """
         self.current_sha = current_sha
         self.new_sha = new_sha
 
@@ -497,6 +537,12 @@ def symbolic_ref(repo: RepoPath, ref_name, force=False) -> None:
 
 
 def pack_refs(repo: RepoPath, all=False) -> None:
+    """Pack loose refs into a single file.
+
+    Args:
+      repo: Path to the repository
+      all: If True, pack all refs; if False, only pack already-packed refs
+    """
     with open_repo_closing(repo) as repo_obj:
         repo_obj.refs.pack_refs(all=all)
 
@@ -1094,6 +1140,16 @@ move = mv
 
 
 def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
+    """Decode commit message contents to unicode.
+
+    Args:
+      commit: Commit object
+      contents: Raw commit message bytes
+      default_encoding: Encoding to use if none specified in commit
+
+    Returns:
+      Decoded commit message as unicode string
+    """
     if commit.encoding:
         encoding = commit.encoding.decode("ascii")
     else:
@@ -1102,6 +1158,16 @@ def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
 
 
 def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
+    """Encode commit message contents to bytes.
+
+    Args:
+      commit: Commit object
+      contents: Commit message as unicode string
+      default_encoding: Encoding to use if none specified in commit
+
+    Returns:
+      Encoded commit message as bytes
+    """
     if commit.encoding:
         encoding = commit.encoding.decode("ascii")
     else:
@@ -1246,6 +1312,17 @@ def show_tag(repo: RepoPath, tag, decode, outstream=sys.stdout) -> None:
 
 
 def show_object(repo: RepoPath, obj, decode, outstream):
+    """Display a git object.
+
+    Args:
+      repo: Path to the repository
+      obj: Git object to display (blob, tree, commit, or tag)
+      decode: Function for decoding bytes to unicode string
+      outstream: Stream to write output to
+
+    Returns:
+      Result of the appropriate show_* function
+    """
     return {
         b"tree": show_tree,
         b"blob": show_blob,
@@ -2047,6 +2124,16 @@ def reset(
 def get_remote_repo(
     repo: Repo, remote_location: Optional[Union[str, bytes]] = None
 ) -> tuple[Optional[str], str]:
+    """Get the remote repository information.
+
+    Args:
+      repo: Local repository object
+      remote_location: Optional remote name or URL; defaults to branch remote
+
+    Returns:
+      Tuple of (remote_name, remote_url) where remote_name may be None
+      if remote_location is a URL rather than a configured remote
+    """
     config = repo.get_config()
     if remote_location is None:
         remote_location = get_branch_remote(repo)

+ 157 - 2
dulwich/server.py

@@ -183,6 +183,11 @@ class DictBackend(Backend):
     """Trivial backend that looks up Git repositories in a dictionary."""
 
     def __init__(self, repos) -> None:
+        """Initialize a DictBackend.
+
+        Args:
+          repos: Dictionary mapping repository paths to BackendRepo instances
+        """
         self.repos = repos
 
     def open_repository(self, path: str) -> BackendRepo:
@@ -199,10 +204,23 @@ class FileSystemBackend(Backend):
     """Simple backend looking up Git repositories in the local file system."""
 
     def __init__(self, root=os.sep) -> None:
+        """Initialize a FileSystemBackend.
+
+        Args:
+          root: Root directory to serve repositories from
+        """
         super().__init__()
         self.root = (os.path.abspath(root) + os.sep).replace(os.sep * 2, os.sep)
 
     def open_repository(self, path):
+        """Open a repository from the filesystem.
+
+        Args:
+          path: Path to the repository relative to the root
+        Returns: Repo instance
+        Raises:
+          NotGitRepository: If path is outside the root or not a git repository
+        """
         logger.debug("opening repository at %s", path)
         # Ensure path is a string to avoid TypeError when joining with self.root
         path = os.fspath(path)
@@ -220,11 +238,19 @@ class Handler:
     """Smart protocol command handler base class."""
 
     def __init__(self, backend, proto, stateless_rpc=False) -> None:
+        """Initialize a Handler.
+
+        Args:
+          backend: Backend instance for repository access
+          proto: Protocol instance for communication
+          stateless_rpc: Whether this is a stateless RPC session
+        """
         self.backend = backend
         self.proto = proto
         self.stateless_rpc = stateless_rpc
 
     def handle(self) -> None:
+        """Handle a request."""
         raise NotImplementedError(self.handle)
 
 
@@ -232,6 +258,13 @@ class PackHandler(Handler):
     """Protocol handler for packs."""
 
     def __init__(self, backend, proto, stateless_rpc=False) -> None:
+        """Initialize a PackHandler.
+
+        Args:
+          backend: Backend instance for repository access
+          proto: Protocol instance for communication
+          stateless_rpc: Whether this is a stateless RPC session
+        """
         super().__init__(backend, proto, stateless_rpc)
         self._client_capabilities: Optional[set[bytes]] = None
         # Flags needed for the no-done capability
@@ -239,6 +272,7 @@ class PackHandler(Handler):
 
     @classmethod
     def capabilities(cls) -> Iterable[bytes]:
+        """Return a list of capabilities supported by this handler."""
         raise NotImplementedError(cls.capabilities)
 
     @classmethod
@@ -257,6 +291,13 @@ class PackHandler(Handler):
         return []
 
     def set_client_capabilities(self, caps: Iterable[bytes]) -> None:
+        """Set the client capabilities and validate them.
+
+        Args:
+          caps: List of capabilities requested by the client
+        Raises:
+          GitProtocolError: If client requests unsupported capability or lacks required ones
+        """
         allowable_caps = set(self.innocuous_capabilities())
         allowable_caps.update(self.capabilities())
         for cap in caps:
@@ -275,6 +316,14 @@ class PackHandler(Handler):
         logger.info("Client capabilities: %s", caps)
 
     def has_capability(self, cap: bytes) -> bool:
+        """Check if the client supports a specific capability.
+
+        Args:
+          cap: Capability name to check
+        Returns: True if the client supports the capability
+        Raises:
+          GitProtocolError: If called before client capabilities are set
+        """
         if self._client_capabilities is None:
             raise GitProtocolError(
                 f"Server attempted to access capability {cap!r} before asking client"
@@ -282,6 +331,7 @@ class PackHandler(Handler):
         return cap in self._client_capabilities
 
     def notify_done(self) -> None:
+        """Notify that the 'done' command has been received from the client."""
         self._done_received = True
 
 
@@ -291,6 +341,15 @@ class UploadPackHandler(PackHandler):
     def __init__(
         self, backend, args, proto, stateless_rpc=False, advertise_refs=False
     ) -> None:
+        """Initialize an UploadPackHandler.
+
+        Args:
+          backend: Backend instance for repository access
+          args: Command arguments (first arg is repository path)
+          proto: Protocol instance for communication
+          stateless_rpc: Whether this is a stateless RPC session
+          advertise_refs: Whether to advertise refs
+        """
         super().__init__(backend, proto, stateless_rpc=stateless_rpc)
         self.repo = backend.open_repository(args[0])
         self._graph_walker = None
@@ -302,6 +361,7 @@ class UploadPackHandler(PackHandler):
 
     @classmethod
     def capabilities(cls):
+        """Return the list of capabilities supported by upload-pack."""
         return [
             CAPABILITY_MULTI_ACK_DETAILED,
             CAPABILITY_MULTI_ACK,
@@ -316,6 +376,7 @@ class UploadPackHandler(PackHandler):
 
     @classmethod
     def required_capabilities(cls):
+        """Return the list of capabilities required for upload-pack."""
         return (
             CAPABILITY_SIDE_BAND_64K,
             CAPABILITY_THIN_PACK,
@@ -323,9 +384,14 @@ class UploadPackHandler(PackHandler):
         )
 
     def progress(self, message: bytes) -> None:
-        pass
+        """Send a progress message to the client.
+
+        Args:
+          message: Progress message to send
+        """
 
     def _start_pack_send_phase(self) -> None:
+        """Start the pack sending phase, setting up sideband if supported."""
         if self.has_capability(CAPABILITY_SIDE_BAND_64K):
             # The provided haves are processed, and it is safe to send side-
             # band data now.
@@ -373,6 +439,11 @@ class UploadPackHandler(PackHandler):
         return tagged
 
     def handle(self) -> None:
+        """Handle an upload-pack request.
+
+        This method processes the client's wants and haves, determines which
+        objects to send, and writes the pack data to the client.
+        """
         # Note the fact that client is only processing responses related
         # to the have lines it sent, and any other data (including side-
         # band) will be be considered a fatal error.
@@ -471,6 +542,15 @@ def _split_proto_line(line, allowed):
 
 
 def _want_satisfied(store: ObjectContainer, haves, want, earliest) -> bool:
+    """Check if a specific want is satisfied by a set of haves.
+
+    Args:
+      store: Object store to retrieve objects from
+      haves: Set of commit IDs the client has
+      want: Commit ID the client wants
+      earliest: Earliest commit time to consider
+    Returns: True if the want is satisfied by the haves
+    """
     o = store[want]
     pending = collections.deque([o])
     known = {want}
@@ -517,10 +597,17 @@ def _all_wants_satisfied(store: ObjectContainer, haves, wants) -> bool:
 
 
 class AckGraphWalkerImpl:
+    """Base class for acknowledgment graph walker implementations."""
+
     def __init__(self, graph_walker):
         raise NotImplementedError
 
     def ack(self, have_ref: ObjectID) -> None:
+        """Acknowledge a have reference.
+
+        Args:
+          have_ref: Object ID to acknowledge
+        """
         raise NotImplementedError
 
 
@@ -541,6 +628,14 @@ class _ProtocolGraphWalker:
     def __init__(
         self, handler, object_store: ObjectContainer, get_peeled, get_symrefs
     ) -> None:
+        """Initialize a ProtocolGraphWalker.
+
+        Args:
+          handler: Protocol handler instance
+          object_store: Object store for retrieving objects
+          get_peeled: Function to get peeled refs
+          get_symrefs: Function to get symbolic refs
+        """
         self.handler = handler
         self.store: ObjectContainer = object_store
         self.get_peeled = get_peeled
@@ -640,23 +735,42 @@ class _ProtocolGraphWalker:
         return want_revs
 
     def unread_proto_line(self, command, value) -> None:
+        """Push a command back to be read again.
+
+        Args:
+          command: Command name
+          value: Command value
+        """
         if isinstance(value, int):
             value = str(value).encode("ascii")
         self.proto.unread_pkt_line(command + b" " + value)
 
     def nak(self) -> None:
-        pass
+        """Send a NAK response."""
 
     def ack(self, have_ref):
+        """Acknowledge a have reference.
+
+        Args:
+          have_ref: SHA to acknowledge (40 bytes hex)
+
+        Raises:
+          ValueError: If have_ref is not 40 bytes
+        """
         if len(have_ref) != 40:
             raise ValueError(f"invalid sha {have_ref!r}")
         return self._impl.ack(have_ref)
 
     def reset(self) -> None:
+        """Reset the graph walker cache."""
         self._cached = True
         self._cache_index = 0
 
     def next(self):
+        """Get the next SHA from the graph walker.
+
+        Returns: Next SHA or None if done
+        """
         if not self._cached:
             if not self._impl and self.stateless_rpc:
                 return None
@@ -681,6 +795,11 @@ class _ProtocolGraphWalker:
         return _split_proto_line(self.proto.read_pkt_line(), allowed)
 
     def _handle_shallow_request(self, wants) -> None:
+        """Handle shallow clone requests from the client.
+
+        Args:
+          wants: List of wanted object SHAs
+        """
         while True:
             command, val = self.read_proto_line((COMMAND_DEEPEN, COMMAND_SHALLOW))
             if command == COMMAND_DEEPEN:
@@ -700,6 +819,12 @@ class _ProtocolGraphWalker:
         self.update_shallow(new_shallow, unshallow)
 
     def update_shallow(self, new_shallow, unshallow):
+        """Update shallow/unshallow information to the client.
+
+        Args:
+          new_shallow: Set of newly shallow commits
+          unshallow: Set of commits to unshallow
+        """
         for sha in sorted(new_shallow):
             self.proto.write_pkt_line(format_shallow_line(sha))
         for sha in sorted(unshallow):
@@ -708,20 +833,40 @@ class _ProtocolGraphWalker:
         self.proto.write_pkt_line(None)
 
     def notify_done(self) -> None:
+        """Notify that the client sent 'done'."""
         # relay the message down to the handler.
         self.handler.notify_done()
 
     def send_ack(self, sha, ack_type=b"") -> None:
+        """Send an ACK to the client.
+
+        Args:
+          sha: SHA to acknowledge
+          ack_type: Type of ACK (e.g., b'continue', b'ready')
+        """
         self.proto.write_pkt_line(format_ack_line(sha, ack_type))
 
     def send_nak(self) -> None:
+        """Send a NAK to the client."""
         self.proto.write_pkt_line(NAK_LINE)
 
     def handle_done(self, done_required, done_received):
+        """Handle the 'done' command.
+
+        Args:
+          done_required: Whether done is required
+          done_received: Whether done was received
+        Returns: True if done handling succeeded
+        """
         # Delegate this to the implementation.
         return self._impl.handle_done(done_required, done_received)
 
     def set_wants(self, wants) -> None:
+        """Set the list of wanted objects.
+
+        Args:
+          wants: List of wanted object SHAs
+        """
         self._wants = wants
 
     def all_wants_satisfied(self, haves):
@@ -735,6 +880,11 @@ class _ProtocolGraphWalker:
         return _all_wants_satisfied(self.store, haves, self._wants)
 
     def set_ack_type(self, ack_type) -> None:
+        """Set the acknowledgment type for the graph walker.
+
+        Args:
+          ack_type: One of SINGLE_ACK, MULTI_ACK, or MULTI_ACK_DETAILED
+        """
         impl_classes: dict[int, type[AckGraphWalkerImpl]] = {
             MULTI_ACK: MultiAckGraphWalkerImpl,
             MULTI_ACK_DETAILED: MultiAckDetailedGraphWalkerImpl,
@@ -750,6 +900,11 @@ class SingleAckGraphWalkerImpl(AckGraphWalkerImpl):
     """Graph walker implementation that speaks the single-ack protocol."""
 
     def __init__(self, walker) -> None:
+        """Initialize a SingleAckGraphWalkerImpl.
+
+        Args:
+          walker: Parent ProtocolGraphWalker instance
+        """
         self.walker = walker
         self._common: list[bytes] = []