
Use immutable container type annotations where appropriate (#1903)

Fixes #1894
Jelmer Vernooij, 3 months ago
Parent
Commit 44b30981dc
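
The pattern throughout the diff is the same: parameters that are only read are re-annotated with the read-only ABCs from collections.abc (Sequence, Mapping, Set) instead of the concrete mutable builtins (list, dict, set), while return types stay concrete. A minimal sketch of the effect under a type checker such as mypy; the function names here are illustrative, not dulwich APIs:

from collections.abc import Sequence


def join_refs(refs: Sequence[bytes]) -> bytes:
    # Read-only ABC: any sequence type is accepted and no mutation is implied.
    return b" ".join(refs)


def join_refs_concrete(refs: list[bytes]) -> bytes:
    # Concrete annotation: callers must pass exactly a list.
    return b" ".join(refs)


join_refs([b"refs/heads/main"])            # OK
join_refs((b"refs/heads/main",))           # OK: a tuple satisfies Sequence
join_refs_concrete((b"refs/heads/main",))  # mypy error: expected "list[bytes]"

Callers holding tuples or other read-only views no longer have to copy into a list just to satisfy the annotation.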

+ 3 - 2
dulwich/annotate.py

@@ -27,6 +27,7 @@ Python's difflib.
 """

 import difflib
+from collections.abc import Sequence
 from typing import TYPE_CHECKING, Optional

 from dulwich.objects import Blob
@@ -49,7 +50,7 @@ if TYPE_CHECKING:


 def update_lines(
-    annotated_lines: list[tuple[tuple["Commit", "TreeEntry"], bytes]],
+    annotated_lines: Sequence[tuple[tuple["Commit", "TreeEntry"], bytes]],
     new_history_data: tuple["Commit", "TreeEntry"],
     new_blob: "Blob",
 ) -> list[tuple[tuple["Commit", "TreeEntry"], bytes]]:
@@ -76,7 +77,7 @@ def annotate_lines(
     commit_id: bytes,
     path: bytes,
     order: str = ORDER_DATE,
-    lines: Optional[list[tuple[tuple["Commit", "TreeEntry"], bytes]]] = None,
+    lines: Optional[Sequence[tuple[tuple["Commit", "TreeEntry"], bytes]]] = None,
     follow: bool = True,
 ) -> list[tuple[tuple["Commit", "TreeEntry"], bytes]]:
     """Annotate the lines of a blob.

+ 3 - 3
dulwich/attrs.py

@@ -23,7 +23,7 @@

 import os
 import re
-from collections.abc import Generator, Iterator, Mapping
+from collections.abc import Generator, Iterator, Mapping, Sequence
 from typing import (
     IO,
     Optional,
@@ -199,7 +199,7 @@ class Pattern:


 def match_path(
-    patterns: list[tuple[Pattern, Mapping[bytes, AttributeValue]]], path: bytes
+    patterns: Sequence[tuple[Pattern, Mapping[bytes, AttributeValue]]], path: bytes
 ) -> dict[bytes, AttributeValue]:
     """Get attributes for a path by matching against patterns.

@@ -297,7 +297,7 @@ class GitAttributes:
         return match_path(self._patterns, path)

     def add_patterns(
-        self, patterns: list[tuple[Pattern, Mapping[bytes, AttributeValue]]]
+        self, patterns: Sequence[tuple[Pattern, Mapping[bytes, AttributeValue]]]
     ) -> None:
         """Add patterns to the collection.


+ 5 - 4
dulwich/bisect.py

@@ -21,6 +21,7 @@
 """Git bisect implementation."""

 import os
+from collections.abc import Sequence, Set
 from typing import Optional

 from dulwich.object_store import peel_sha
@@ -48,8 +49,8 @@ class BisectState:
     def start(
         self,
         bad: Optional[bytes] = None,
-        good: Optional[list[bytes]] = None,
-        paths: Optional[list[bytes]] = None,
+        good: Optional[Sequence[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
         no_checkout: bool = False,
         term_bad: str = "bad",
         term_good: str = "good",
@@ -186,7 +187,7 @@ class BisectState:

         return self._find_next_commit()

-    def skip(self, revs: Optional[list[bytes]] = None) -> Optional[bytes]:
+    def skip(self, revs: Optional[Sequence[bytes]] = None) -> Optional[bytes]:
        """Skip one or more commits.

        Args:
@@ -367,7 +368,7 @@ class BisectState:
        return next_commit

    def _find_bisect_candidates(
-        self, bad_sha: bytes, good_shas: list[bytes], skip_shas: set[bytes]
+        self, bad_sha: bytes, good_shas: Sequence[bytes], skip_shas: Set[bytes]
    ) -> list[bytes]:
        """Find all commits between good and bad commits.

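Note that Set here is collections.abc.Set, the read-only set ABC, so both set and frozenset arguments type-check. A minimal sketch under that assumption; drop_skipped is an illustrative helper, not the dulwich function itself:

from collections.abc import Sequence, Set


def drop_skipped(candidates: Sequence[bytes], skip_shas: Set[bytes]) -> list[bytes]:
    # Only membership tests are needed, so the read-only ABC is enough.
    return [sha for sha in candidates if sha not in skip_shas]


drop_skipped([b"a" * 40, b"b" * 40], {b"a" * 40})             # builtin set: OK
drop_skipped([b"a" * 40, b"b" * 40], frozenset([b"a" * 40]))  # frozenset: also OK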

+ 3 - 3
dulwich/bundle.py

@@ -21,7 +21,7 @@

 """Bundle format support."""

-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from typing import (
     TYPE_CHECKING,
     BinaryIO,
@@ -222,8 +222,8 @@ def write_bundle(f: BinaryIO, bundle: Bundle) -> None:

 def create_bundle_from_repo(
     repo: "BaseRepo",
-    refs: Optional[list[bytes]] = None,
-    prerequisites: Optional[list[bytes]] = None,
+    refs: Optional[Sequence[bytes]] = None,
+    prerequisites: Optional[Sequence[bytes]] = None,
     version: Optional[int] = None,
     capabilities: Optional[dict[str, Optional[str]]] = None,
     progress: Optional[Callable[[str], None]] = None,

+ 98 - 94
dulwich/cli.py

@@ -38,7 +38,7 @@ import subprocess
 import sys
 import tempfile
 import types
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping, Sequence
 from pathlib import Path
 from types import TracebackType
 from typing import (
@@ -220,7 +220,9 @@ def detect_terminal_width() -> int:


 def write_columns(
-    items: Union[Iterator[bytes], list[bytes]], out: TextIO, width: Optional[int] = None
+    items: Union[Iterator[bytes], Sequence[bytes]],
+    out: TextIO,
+    width: Optional[int] = None,
 ) -> None:
     """Display items in formatted columns based on terminal width.

@@ -240,7 +242,9 @@ def write_columns(

     item_names = [item.decode() for item in items]

-    def columns(names: list[str], width: int, num_cols: int) -> tuple[bool, list[int]]:
+    def columns(
+        names: Sequence[str], width: int, num_cols: int
+    ) -> tuple[bool, list[int]]:
         if num_cols <= 0:
             return False, []

@@ -742,7 +746,7 @@ def enable_pager() -> None:
 class Command:
     """A Dulwich subcommand."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Run the command."""
         raise NotImplementedError(self.run)

@@ -750,7 +754,7 @@ class Command:
 class cmd_archive(Command):
     """Create an archive of files from a named tree."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the archive command.

         Args:
@@ -796,7 +800,7 @@ class cmd_archive(Command):
 class cmd_add(Command):
     """Add file contents to the index."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the add command.

         Args:
@@ -817,7 +821,7 @@ class cmd_add(Command):
 class cmd_annotate(Command):
     """Annotate each line in a file with commit information."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the annotate command.

         Args:
@@ -841,7 +845,7 @@ class cmd_annotate(Command):
 class cmd_blame(Command):
     """Show what revision and author last modified each line of a file."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the blame command.

         Args:
@@ -854,7 +858,7 @@ class cmd_blame(Command):
 class cmd_rm(Command):
     """Remove files from the working tree and from the index."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the rm command.

         Args:
@@ -873,7 +877,7 @@ class cmd_rm(Command):
 class cmd_mv(Command):
     """Move or rename a file, a directory, or a symlink."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the mv command.

         Args:
@@ -896,7 +900,7 @@ class cmd_mv(Command):
 class cmd_fetch_pack(Command):
     """Receive missing objects from another repository."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the fetch-pack command.

         Args:
@@ -914,7 +918,7 @@ class cmd_fetch_pack(Command):
         else:

             def determine_wants(
-                refs: dict[bytes, bytes], depth: Optional[int] = None
+                refs: Mapping[bytes, bytes], depth: Optional[int] = None
             ) -> list[bytes]:
                 return [y.encode("utf-8") for y in args.refs if y not in r.object_store]

@@ -924,7 +928,7 @@ class cmd_fetch_pack(Command):
 class cmd_fetch(Command):
     """Download objects and refs from another repository."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the fetch command.

         Args:
@@ -949,7 +953,7 @@ class cmd_fetch(Command):
 class cmd_for_each_ref(Command):
     """Output information on each ref."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the for-each-ref command.

         Args:
@@ -965,7 +969,7 @@ class cmd_for_each_ref(Command):
 class cmd_fsck(Command):
     """Verify the connectivity and validity of objects in the database."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the fsck command.

         Args:
@@ -980,7 +984,7 @@ class cmd_fsck(Command):
 class cmd_log(Command):
     """Show commit logs."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the log command.

         Args:
@@ -1015,7 +1019,7 @@ class cmd_log(Command):
 class cmd_diff(Command):
     """Show changes between commits, commit and working tree, etc."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the diff command.

         Args:
@@ -1147,7 +1151,7 @@ class cmd_diff(Command):
 class cmd_dump_pack(Command):
     """Dump the contents of a pack file for debugging."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the dump-pack command.

         Args:
@@ -1183,7 +1187,7 @@ class cmd_dump_pack(Command):
 class cmd_dump_index(Command):
     """Show information about a pack index file."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the dump-index command.

         Args:
@@ -1202,7 +1206,7 @@ class cmd_dump_index(Command):
 class cmd_init(Command):
     """Create an empty Git repository or reinitialize an existing one."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the init command.

         Args:
@@ -1223,7 +1227,7 @@ class cmd_init(Command):
 class cmd_clone(Command):
     """Clone a repository into a new directory."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the clone command.

         Args:
@@ -1333,7 +1337,7 @@ def _get_commit_message_with_template(
 class cmd_commit(Command):
     """Record changes to the repository."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the commit command.

         Args:
@@ -1398,7 +1402,7 @@ class cmd_commit(Command):
 class cmd_commit_tree(Command):
     """Create a new commit object from a tree."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the commit-tree command.

         Args:
@@ -1414,7 +1418,7 @@ class cmd_commit_tree(Command):
 class cmd_update_server_info(Command):
     """Update auxiliary info file to help dumb servers."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the update-server-info command.

         Args:
@@ -1426,7 +1430,7 @@ class cmd_update_server_info(Command):
 class cmd_symbolic_ref(Command):
     """Read, modify and delete symbolic refs."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the symbolic-ref command.

         Args:
@@ -1475,7 +1479,7 @@ class cmd_symbolic_ref(Command):
 class cmd_pack_refs(Command):
     """Pack heads and tags for efficient repository access."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the pack-refs command.

         Args:
@@ -1494,7 +1498,7 @@ class cmd_pack_refs(Command):
 class cmd_show(Command):
     """Show various types of objects."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the show command.

         Args:
@@ -1553,7 +1557,7 @@ class cmd_show(Command):
 class cmd_diff_tree(Command):
     """Compare the content and mode of trees."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the diff-tree command.

         Args:
@@ -1569,7 +1573,7 @@ class cmd_diff_tree(Command):
 class cmd_rev_list(Command):
     """List commit objects in reverse chronological order."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the rev-list command.

         Args:
@@ -1584,7 +1588,7 @@ class cmd_rev_list(Command):
 class cmd_tag(Command):
     """Create, list, delete or verify a tag object."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the tag command.

         Args:
@@ -1613,7 +1617,7 @@ class cmd_tag(Command):
 class cmd_repack(Command):
     """Pack unpacked objects in a repository."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the repack command.

         Args:
@@ -1627,7 +1631,7 @@ class cmd_repack(Command):
 class cmd_reflog(Command):
     """Manage reflog information."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the reflog command.

         Args:
@@ -1679,7 +1683,7 @@ class cmd_reflog(Command):
 class cmd_reset(Command):
     """Reset current HEAD to the specified state."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the reset command.

         Args:
@@ -1714,7 +1718,7 @@ class cmd_reset(Command):
 class cmd_revert(Command):
     """Revert some existing commits."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the revert command.

         Args:
@@ -1745,7 +1749,7 @@ class cmd_revert(Command):
 class cmd_daemon(Command):
     """Run a simple Git protocol server."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the daemon command.

         Args:
@@ -1785,7 +1789,7 @@ class cmd_daemon(Command):
 class cmd_web_daemon(Command):
     """Run a simple HTTP server for Git repositories."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the web-daemon command.

         Args:
@@ -1823,7 +1827,7 @@ class cmd_web_daemon(Command):
 class cmd_write_tree(Command):
     """Create a tree object from the current index."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the write-tree command.

         Args:
@@ -1837,7 +1841,7 @@ class cmd_write_tree(Command):
 class cmd_receive_pack(Command):
     """Receive what is pushed into the repository."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the receive-pack command.

         Args:
@@ -1852,7 +1856,7 @@ class cmd_receive_pack(Command):
 class cmd_upload_pack(Command):
     """Send objects packed back to git-fetch-pack."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the upload-pack command.

         Args:
@@ -1867,7 +1871,7 @@ class cmd_upload_pack(Command):
 class cmd_shortlog(Command):
     """Show a shortlog of commits by author."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the shortlog command with the given CLI arguments.

         Args:
@@ -1903,7 +1907,7 @@ class cmd_shortlog(Command):
 class cmd_status(Command):
     """Show the working tree status."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the status command.

         Args:
@@ -1936,7 +1940,7 @@ class cmd_status(Command):
 class cmd_ls_remote(Command):
     """List references in a remote repository."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the ls-remote command.

         Args:
@@ -1966,7 +1970,7 @@ class cmd_ls_remote(Command):
 class cmd_ls_tree(Command):
     """List the contents of a tree object."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the ls-tree command.

         Args:
@@ -1999,7 +2003,7 @@ class cmd_ls_tree(Command):
 class cmd_pack_objects(Command):
     """Create a packed archive of objects."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the pack-objects command.

         Args:
@@ -2043,7 +2047,7 @@ class cmd_pack_objects(Command):
 class cmd_unpack_objects(Command):
     """Unpack objects from a packed archive."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the unpack-objects command.

         Args:
@@ -2060,7 +2064,7 @@ class cmd_unpack_objects(Command):
 class cmd_prune(Command):
     """Prune all unreachable objects from the object database."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the prune command.

         Args:
@@ -2129,7 +2133,7 @@ class cmd_prune(Command):
 class cmd_pull(Command):
     """Fetch from and integrate with another repository or a local branch."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the pull command.

         Args:
@@ -2153,7 +2157,7 @@ class cmd_pull(Command):
 class cmd_push(Command):
     """Update remote refs along with associated objects."""

-    def run(self, argv: list[str]) -> Optional[int]:
+    def run(self, argv: Sequence[str]) -> Optional[int]:
         """Execute the push command.

         Args:
@@ -2178,7 +2182,7 @@ class cmd_push(Command):
 class cmd_remote_add(Command):
     """Add a remote repository."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the remote-add command.

         Args:
@@ -2197,7 +2201,7 @@ class SuperCommand(Command):
     subcommands: ClassVar[dict[str, type[Command]]] = {}
     default_command: ClassVar[Optional[type[Command]]] = None

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the subcommand command.

         Args:
@@ -2231,7 +2235,7 @@ class cmd_remote(SuperCommand):
 class cmd_submodule_list(Command):
     """List submodules."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-list command.

         Args:
@@ -2246,7 +2250,7 @@ class cmd_submodule_list(Command):
 class cmd_submodule_init(Command):
     """Initialize submodules."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-init command.

         Args:
@@ -2260,7 +2264,7 @@ class cmd_submodule_init(Command):
 class cmd_submodule_add(Command):
     """Add a submodule."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-add command.

         Args:
@@ -2277,7 +2281,7 @@ class cmd_submodule_add(Command):
 class cmd_submodule_update(Command):
     """Update submodules."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-update command.

         Args:
@@ -2316,7 +2320,7 @@ class cmd_submodule(SuperCommand):
 class cmd_check_ignore(Command):
     """Check whether files are excluded by gitignore."""

-    def run(self, args: list[str]) -> int:
+    def run(self, args: Sequence[str]) -> int:
         """Execute the check-ignore command.

         Args:
@@ -2335,7 +2339,7 @@ class cmd_check_ignore(Command):
 class cmd_check_mailmap(Command):
     """Show canonical names and email addresses of contacts."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the check-mailmap command.

         Args:
@@ -2352,7 +2356,7 @@ class cmd_check_mailmap(Command):
 class cmd_branch(Command):
     """List, create, or delete branches."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the branch command.

         Args:
@@ -2401,7 +2405,7 @@ class cmd_branch(Command):
         parsed_args = parser.parse_args(args)

         def print_branches(
-            branches: Union[Iterator[bytes], list[bytes]], use_columns: bool = False
+            branches: Union[Iterator[bytes], Sequence[bytes]], use_columns: bool = False
         ) -> None:
             if use_columns:
                 write_columns(branches, sys.stdout)
@@ -2464,7 +2468,7 @@ class cmd_branch(Command):
 class cmd_checkout(Command):
     """Switch branches or restore working tree files."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the checkout command.

         Args:
@@ -2509,7 +2513,7 @@ class cmd_checkout(Command):
 class cmd_stash_list(Command):
     """List stash entries."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the stash-list command.

         Args:
@@ -2533,7 +2537,7 @@ class cmd_stash_list(Command):
 class cmd_stash_push(Command):
     """Save your local modifications to a new stash."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the stash-push command.

         Args:
@@ -2548,7 +2552,7 @@ class cmd_stash_push(Command):
 class cmd_stash_pop(Command):
     """Apply a stash and remove it from the stash list."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the stash-pop command.

         Args:
@@ -2565,7 +2569,7 @@ class cmd_bisect(SuperCommand):

     subcommands: ClassVar[dict[str, type[Command]]] = {}

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the bisect command.

         Args:
@@ -2717,7 +2721,7 @@ class cmd_stash(SuperCommand):
 class cmd_ls_files(Command):
     """Show information about files in the index and working tree."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the ls-files command.

         Args:
@@ -2732,7 +2736,7 @@ class cmd_ls_files(Command):
 class cmd_describe(Command):
     """Give an object a human readable name based on an available ref."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the describe command.

         Args:
@@ -2746,7 +2750,7 @@ class cmd_describe(Command):
 class cmd_merge(Command):
     """Join two or more development histories together."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the merge command.

         Args:
@@ -2799,7 +2803,7 @@ class cmd_merge(Command):
 class cmd_notes_add(Command):
     """Add notes to a commit."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-add command.

         Args:
@@ -2821,7 +2825,7 @@ class cmd_notes_add(Command):
 class cmd_notes_show(Command):
     """Show notes for a commit."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-show command.

         Args:
@@ -2844,7 +2848,7 @@ class cmd_notes_show(Command):
 class cmd_notes_remove(Command):
     """Remove notes for a commit."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-remove command.

         Args:
@@ -2867,7 +2871,7 @@ class cmd_notes_remove(Command):
 class cmd_notes_list(Command):
     """List all note objects."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-list command.

         Args:
@@ -2900,7 +2904,7 @@ class cmd_notes(SuperCommand):
 class cmd_cherry_pick(Command):
     """Apply the changes introduced by some existing commits."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the cherry-pick command.

         Args:
@@ -2975,7 +2979,7 @@ class cmd_cherry_pick(Command):
 class cmd_merge_tree(Command):
     """Show three-way merge without touching index."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the merge-tree command.

         Args:
@@ -3044,7 +3048,7 @@ class cmd_merge_tree(Command):
 class cmd_gc(Command):
     """Cleanup unnecessary files and optimize the local repository."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the gc command.

         Args:
@@ -3154,7 +3158,7 @@ class cmd_gc(Command):
 class cmd_count_objects(Command):
     """Count unpacked number of objects and their disk consumption."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the count-objects command.

         Args:
@@ -3189,7 +3193,7 @@ class cmd_count_objects(Command):
 class cmd_rebase(Command):
     """Reapply commits on top of another base tip."""

-    def run(self, args: list[str]) -> int:
+    def run(self, args: Sequence[str]) -> int:
         """Execute the rebase command.

         Args:
@@ -3313,7 +3317,7 @@ class cmd_rebase(Command):
 class cmd_filter_branch(Command):
     """Rewrite branches."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the filter-branch command.

         Args:
@@ -3462,13 +3466,13 @@ class cmd_filter_branch(Command):
         parent_filter = None
         if parsed_args.parent_filter:

-            def parent_filter(parents: list[bytes]) -> list[bytes]:
+            def parent_filter(parents: Sequence[bytes]) -> list[bytes]:
                 parent_str = " ".join(p.hex() for p in parents)
                 result = run_filter(
                     parsed_args.parent_filter, input_data=parent_str.encode()
                 )
                 if result is None:
-                    return parents
+                    return list(parents)

                 output = result.decode().strip()
                 if not output:
@@ -3582,7 +3586,7 @@ class cmd_lfs(Command):

     """Git LFS management commands."""

-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the lfs command.

         Args:
@@ -3771,7 +3775,7 @@ class cmd_lfs(Command):
 class cmd_help(Command):
     """Display help information about git."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the help command.

         Args:
@@ -3802,7 +3806,7 @@ class cmd_help(Command):
 class cmd_format_patch(Command):
     """Prepare patches for e-mail submission."""

-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the format-patch command.

         Args:
@@ -3866,7 +3870,7 @@ class cmd_format_patch(Command):
 class cmd_bundle(Command):
     """Create, unpack, and manipulate bundle files."""

-    def run(self, args: list[str]) -> int:
+    def run(self, args: Sequence[str]) -> int:
         """Execute the bundle command.

         Args:
@@ -3891,7 +3895,7 @@ class cmd_bundle(Command):
             logger.error("Unknown bundle subcommand: %s", subcommand)
             return 1

-    def _create(self, args: list[str]) -> int:
+    def _create(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle create")
         parser.add_argument(
             "-q", "--quiet", action="store_true", help="Suppress progress"
@@ -3983,7 +3987,7 @@ class cmd_bundle(Command):

         return 0

-    def _verify(self, args: list[str]) -> int:
+    def _verify(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle verify")
         parser.add_argument(
             "-q", "--quiet", action="store_true", help="Suppress output"
@@ -4023,7 +4027,7 @@ class cmd_bundle(Command):
                 bundle = read_bundle(f)
                 return verify_bundle(bundle)

-    def _list_heads(self, args: list[str]) -> int:
+    def _list_heads(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle list-heads")
         parser.add_argument("file", help="Bundle file (use - for stdin)")
         parser.add_argument("refnames", nargs="*", help="Only show these refs")
@@ -4045,7 +4049,7 @@ class cmd_bundle(Command):

         return 0

-    def _unbundle(self, args: list[str]) -> int:
+    def _unbundle(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle unbundle")
         parser.add_argument("--progress", action="store_true", help="Show progress")
         parser.add_argument("file", help="Bundle file (use - for stdin)")
@@ -4096,7 +4100,7 @@ class cmd_worktree_add(Command):

     """Add a new worktree to the repository."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-add command.

         Args:
@@ -4151,7 +4155,7 @@ class cmd_worktree_list(Command):

     """List details of each worktree."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-list command.

         Args:
@@ -4213,7 +4217,7 @@ class cmd_worktree_remove(Command):

     """Remove a worktree."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-remove command.

         Args:
@@ -4241,7 +4245,7 @@ class cmd_worktree_prune(Command):

     """Prune worktree information."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-prune command.

         Args:
@@ -4286,7 +4290,7 @@ class cmd_worktree_lock(Command):

     """Lock a worktree."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-lock command.

         Args:
@@ -4314,7 +4318,7 @@ class cmd_worktree_unlock(Command):

     """Unlock a worktree."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-unlock command.

         Args:
@@ -4339,7 +4343,7 @@ class cmd_worktree_move(Command):

     """Move a worktree."""

-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-move command.

         Args:
@@ -4451,7 +4455,7 @@ commands = {
 }


-def main(argv: Optional[list[str]] = None) -> Optional[int]:
+def main(argv: Optional[Sequence[str]] = None) -> Optional[int]:
     """Main entry point for the Dulwich CLI.

     Args:
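
One subtlety appears in the parent_filter hunk above: when a parameter is loosened to Sequence but the declared return type stays a concrete list, the value has to be converted before it is returned (hence return parents becoming return list(parents)). A minimal sketch of the same idea, with a hypothetical helper name, not the dulwich function:

from collections.abc import Sequence
from typing import Optional


def filtered_parents(parents: Sequence[bytes], drop: Optional[bytes] = None) -> list[bytes]:
    # The parameter is read-only, but the declared return type is a concrete
    # list, so build (or copy into) a new list before returning.
    if drop is None:
        return list(parents)
    return [p for p in parents if p != drop]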

+ 33 - 33
dulwich/client.py

@@ -47,7 +47,7 @@ import select
 import socket
 import subprocess
 import sys
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping, Sequence, Set
 from contextlib import closing
 from io import BufferedReader, BytesIO
 from typing import (
@@ -87,8 +87,8 @@ if TYPE_CHECKING:

         def __call__(
             self,
-            have: set[bytes],
-            want: set[bytes],
+            have: Set[bytes],
+            want: Set[bytes],
             ofs_delta: bool = False,
         ) -> tuple[int, Iterator[UnpackedObject]]:
             """Generate pack data for the given have and want sets."""
@@ -99,7 +99,7 @@ if TYPE_CHECKING:

         def __call__(
             self,
-            refs: dict[bytes, bytes],
+            refs: Mapping[bytes, bytes],
             depth: Optional[int] = None,
         ) -> list[bytes]:
             """Determine the objects to fetch from the given refs."""
@@ -188,7 +188,7 @@ logger = logging.getLogger(__name__)
 class InvalidWants(Exception):
     """Invalid wants."""

-    def __init__(self, wants: set[bytes]) -> None:
+    def __init__(self, wants: Set[bytes]) -> None:
         """Initialize InvalidWants exception.

         Args:
@@ -214,7 +214,7 @@ class HTTPUnauthorized(Exception):
         self.url = url


-def _to_optional_dict(refs: dict[bytes, bytes]) -> dict[bytes, Optional[bytes]]:
+def _to_optional_dict(refs: Mapping[bytes, bytes]) -> dict[bytes, Optional[bytes]]:
     """Convert a dict[bytes, bytes] to dict[bytes, Optional[bytes]].

     This is needed for compatibility with result types that expect Optional values.
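
For ref dictionaries that are only read, Mapping[bytes, bytes] accepts a plain dict as well as read-only views, and it documents that the callee will not mutate the refs it is handed. A minimal sketch under that assumption; pick_head is illustrative, not a dulwich API:

from collections.abc import Mapping
from types import MappingProxyType
from typing import Optional


def pick_head(refs: Mapping[bytes, bytes]) -> Optional[bytes]:
    # Reads only, so any Mapping implementation is acceptable.
    return refs.get(b"HEAD")


refs = {b"HEAD": b"0" * 40}
pick_head(refs)                    # plain dict: OK
pick_head(MappingProxyType(refs))  # read-only proxy: also OK
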
@@ -614,9 +614,9 @@ def _read_shallow_updates(pkt_seq: Iterable[bytes]) -> tuple[set[bytes], set[byt
 class _v1ReceivePackHeader:
     def __init__(
         self,
-        capabilities: list[bytes],
-        old_refs: dict[bytes, bytes],
-        new_refs: dict[bytes, bytes],
+        capabilities: Sequence[bytes],
+        old_refs: Mapping[bytes, bytes],
+        new_refs: Mapping[bytes, bytes],
     ) -> None:
         self.want: set[bytes] = set()
         self.have: set[bytes] = set()
@@ -628,9 +628,9 @@ class _v1ReceivePackHeader:

     def _handle_receive_pack_head(
         self,
-        capabilities: list[bytes],
-        old_refs: dict[bytes, bytes],
-        new_refs: dict[bytes, bytes],
+        capabilities: Sequence[bytes],
+        old_refs: Mapping[bytes, bytes],
+        new_refs: Mapping[bytes, bytes],
     ) -> Iterator[Optional[bytes]]:
         """Handle the head of a 'git-receive-pack' request.

@@ -798,7 +798,7 @@ def _handle_upload_pack_head(

 def _handle_upload_pack_tail(
     proto: "Protocol",
-    capabilities: set[bytes],
+    capabilities: Set[bytes],
     graph_walker: "GraphWalker",
     pack_data: Callable[[bytes], int],
     progress: Optional[Callable[[bytes], None]] = None,
@@ -996,7 +996,7 @@ class GitClient:
         branch: Optional[str] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> Repo:
@@ -1093,7 +1093,7 @@ class GitClient:
         determine_wants: Optional["DetermineWantsFunc"] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -1171,7 +1171,7 @@ class GitClient:
         *,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -1205,7 +1205,7 @@ class GitClient:
         self,
         path: bytes,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a git smart server.

@@ -1220,7 +1220,7 @@ class GitClient:
         raise NotImplementedError(self.get_refs)

     @staticmethod
-    def _should_send_pack(new_refs: dict[bytes, bytes]) -> bool:
+    def _should_send_pack(new_refs: Mapping[bytes, bytes]) -> bool:
         # The packfile MUST NOT be sent if the only command used is delete.
         return any(sha != ZERO_SHA for sha in new_refs.values())

@@ -1236,7 +1236,7 @@ class GitClient:
     def _handle_receive_pack_tail(
         self,
         proto: Protocol,
-        capabilities: set[bytes],
+        capabilities: Set[bytes],
         progress: Optional[Callable[[bytes], None]] = None,
     ) -> Optional[dict[bytes, Optional[str]]]:
         """Handle the tail of a 'git-receive-pack' request.
@@ -1317,7 +1317,7 @@ class GitClient:
         progress: Optional[Callable[[bytes], None]] = None,
         write_error: Optional[Callable[[bytes], None]] = None,
         format: Optional[bytes] = None,
-        subdirs: Optional[list[bytes]] = None,
+        subdirs: Optional[Sequence[bytes]] = None,
         prefix: Optional[bytes] = None,
     ) -> None:
         """Retrieve an archive of the specified tree."""
@@ -1333,7 +1333,7 @@ class GitClient:
         )


-def check_wants(wants: set[bytes], refs: dict[bytes, bytes]) -> None:
+def check_wants(wants: Set[bytes], refs: Mapping[bytes, bytes]) -> None:
     """Check that a set of wants is valid.

     Args:
@@ -1527,7 +1527,7 @@ class TraditionalGitClient(GitClient):
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -1675,7 +1675,7 @@ class TraditionalGitClient(GitClient):
         self,
         path: bytes,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a git smart server."""
         # stock `git ls-remote` uses upload-pack
@@ -1739,7 +1739,7 @@ class TraditionalGitClient(GitClient):
         progress: Optional[Callable[[bytes], None]] = None,
         write_error: Optional[Callable[[bytes], None]] = None,
         format: Optional[bytes] = None,
-        subdirs: Optional[list[bytes]] = None,
+        subdirs: Optional[Sequence[bytes]] = None,
         prefix: Optional[bytes] = None,
     ) -> None:
         """Request an archive of a specific commit.
@@ -2266,7 +2266,7 @@ class LocalGitClient(GitClient):
         determine_wants: Optional["DetermineWantsFunc"] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[bytes]] = None,
+        ref_prefix: Optional[Sequence[bytes]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -2311,7 +2311,7 @@ class LocalGitClient(GitClient):
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -2368,7 +2368,7 @@ class LocalGitClient(GitClient):
         self,
         path: Union[str, bytes],
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a local on-disk repository."""
         with self._open_repo(path) as target:
         with self._open_repo(path) as target:
@@ -2586,7 +2586,7 @@ class BundleClient(GitClient):
         determine_wants: Optional["DetermineWantsFunc"] = None,
         determine_wants: Optional["DetermineWantsFunc"] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
     ) -> FetchPackResult:
@@ -2638,7 +2638,7 @@ class BundleClient(GitClient):
         pack_data: Callable[[bytes], int],
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
     ) -> FetchPackResult:
@@ -2681,7 +2681,7 @@ class BundleClient(GitClient):
         self,
         self,
         path: Union[str, bytes],
         path: Union[str, bytes],
         protocol_version: Optional[int] = None,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
     ) -> LsRemoteResult:
         """Retrieve the current refs from a bundle file."""
         """Retrieve the current refs from a bundle file."""
         bundle = self._open_bundle(path)
         bundle = self._open_bundle(path)
@@ -3358,7 +3358,7 @@ class AbstractHttpGitClient(GitClient):
         service: bytes,
         service: bytes,
         base_url: str,
         base_url: str,
         protocol_version: Optional[int] = None,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> tuple[
     ) -> tuple[
         dict[Ref, Optional[ObjectID]],
         dict[Ref, Optional[ObjectID]],
         set[bytes],
         set[bytes],
@@ -3632,7 +3632,7 @@ class AbstractHttpGitClient(GitClient):
         pack_data: Callable[[bytes], int],
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
     ) -> FetchPackResult:
@@ -3772,7 +3772,7 @@ class AbstractHttpGitClient(GitClient):
         self,
         self,
         path: Union[str, bytes],
         path: Union[str, bytes],
         protocol_version: Optional[int] = None,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
     ) -> LsRemoteResult:
         """Retrieve the current refs from a git smart server."""
         """Retrieve the current refs from a git smart server."""
         url = self._get_url(path)
         url = self._get_url(path)
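
Note on the client.py hunks above: moving the parameter annotations to collections.abc Sequence/Mapping/Set widens what callers may pass (tuples, dict views, frozensets) while documenting that the client will not mutate the argument. The following is an illustrative sketch with hypothetical helper names, not dulwich code; ZERO_SHA is assumed here to be the all-zeros object id that marks a ref deletion.

    from collections.abc import Mapping, Sequence
    from collections.abc import Set as AbstractSet

    ZERO_SHA = b"0" * 40  # assumption: all-zeros SHA marks a ref deletion


    def should_send_pack(new_refs: Mapping[bytes, bytes]) -> bool:
        # Same rule as _should_send_pack above: deletions only -> no packfile.
        return any(sha != ZERO_SHA for sha in new_refs.values())


    def select_refs(refs: Mapping[bytes, bytes], prefixes: Sequence[bytes]) -> dict[bytes, bytes]:
        # Read-only access is enough; callers may pass a tuple of prefixes.
        return {name: sha for name, sha in refs.items() if name.startswith(tuple(prefixes))}


    def require_capabilities(offered: AbstractSet[bytes], required: AbstractSet[bytes]) -> None:
        # Set algebra works on set, frozenset and dict key views alike.
        missing = required - offered
        if missing:
            raise ValueError(f"missing capabilities: {missing!r}")


    refs = {b"refs/heads/main": b"a" * 40, b"refs/tags/gone": ZERO_SHA}
    assert should_send_pack(refs)
    assert select_refs(refs, (b"refs/heads/",)) == {b"refs/heads/main": b"a" * 40}
    require_capabilities(frozenset({b"report-status", b"side-band-64k"}), {b"report-status"})

Helpers that do mutate their argument (popping entries, appending results) keep the concrete dict/list/set annotations, which is why only the read-only parameters change in this diff.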

+ 5 - 5
dulwich/commit_graph.py

@@ -18,7 +18,7 @@ https://git-scm.com/docs/gitformat-commit-graph
 
 
 import os
 import os
 import struct
 import struct
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from typing import TYPE_CHECKING, BinaryIO, Optional, Union
 from typing import TYPE_CHECKING, BinaryIO, Optional, Union
 
 
 from .file import _GitFile
 from .file import _GitFile
@@ -246,7 +246,7 @@ class CommitGraph:
             )
             )
             self.entries.append(entry)
             self.entries.append(entry)
 
 
-    def _parse_extra_edges(self, offset: int, oids: list[bytes]) -> list[bytes]:
+    def _parse_extra_edges(self, offset: int, oids: Sequence[bytes]) -> list[bytes]:
         """Parse extra parent edges for commits with 3+ parents."""
         """Parse extra parent edges for commits with 3+ parents."""
         if CHUNK_EXTRA_EDGE_LIST not in self.chunks:
         if CHUNK_EXTRA_EDGE_LIST not in self.chunks:
             return []
             return []
@@ -425,7 +425,7 @@ def find_commit_graph_file(git_dir: Union[str, bytes]) -> Optional[bytes]:
 
 
 
 
 def generate_commit_graph(
 def generate_commit_graph(
-    object_store: "BaseObjectStore", commit_ids: list[ObjectID]
+    object_store: "BaseObjectStore", commit_ids: Sequence[ObjectID]
 ) -> CommitGraph:
 ) -> CommitGraph:
     """Generate a commit graph from a set of commits.
     """Generate a commit graph from a set of commits.
 
 
@@ -540,7 +540,7 @@ def generate_commit_graph(
 def write_commit_graph(
 def write_commit_graph(
     git_dir: Union[str, bytes],
     git_dir: Union[str, bytes],
     object_store: "BaseObjectStore",
     object_store: "BaseObjectStore",
-    commit_ids: list[ObjectID],
+    commit_ids: Sequence[ObjectID],
 ) -> None:
 ) -> None:
     """Write a commit graph file for the given commits.
     """Write a commit graph file for the given commits.
 
 
@@ -571,7 +571,7 @@ def write_commit_graph(
 
 
 
 
 def get_reachable_commits(
 def get_reachable_commits(
-    object_store: "BaseObjectStore", start_commits: list[ObjectID]
+    object_store: "BaseObjectStore", start_commits: Sequence[ObjectID]
 ) -> list[ObjectID]:
 ) -> list[ObjectID]:
     """Get all commits reachable from the given starting commits.
     """Get all commits reachable from the given starting commits.
 
 

+ 6 - 5
dulwich/config.py

@@ -34,6 +34,7 @@ from collections.abc import (
     Iterable,
     Iterable,
     Iterator,
     Iterator,
     KeysView,
     KeysView,
+    Mapping,
     MutableMapping,
     MutableMapping,
     ValuesView,
     ValuesView,
 )
 )
@@ -932,7 +933,7 @@ class ConfigFile(ConfigDict):
         include_depth: int = 0,
         include_depth: int = 0,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
         file_opener: Optional[FileOpener] = None,
         file_opener: Optional[FileOpener] = None,
-        condition_matchers: Optional[dict[str, ConditionMatcher]] = None,
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
     ) -> "ConfigFile":
     ) -> "ConfigFile":
         """Read configuration from a file-like object.
         """Read configuration from a file-like object.
 
 
@@ -1038,7 +1039,7 @@ class ConfigFile(ConfigDict):
         include_depth: int,
         include_depth: int,
         max_include_depth: int,
         max_include_depth: int,
         file_opener: Optional[FileOpener],
         file_opener: Optional[FileOpener],
-        condition_matchers: Optional[dict[str, ConditionMatcher]],
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]],
     ) -> None:
     ) -> None:
         """Handle include/includeIf directives during config parsing."""
         """Handle include/includeIf directives during config parsing."""
         if (
         if (
@@ -1068,7 +1069,7 @@ class ConfigFile(ConfigDict):
         include_depth: int,
         include_depth: int,
         max_include_depth: int,
         max_include_depth: int,
         file_opener: Optional[FileOpener],
         file_opener: Optional[FileOpener],
-        condition_matchers: Optional[dict[str, ConditionMatcher]],
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]],
     ) -> None:
     ) -> None:
         """Process an include or includeIf directive."""
         """Process an include or includeIf directive."""
         path_str = path_value.decode(self.encoding, errors="replace")
         path_str = path_value.decode(self.encoding, errors="replace")
@@ -1156,7 +1157,7 @@ class ConfigFile(ConfigDict):
         self,
         self,
         condition: str,
         condition: str,
         config_dir: Optional[str] = None,
         config_dir: Optional[str] = None,
-        condition_matchers: Optional[dict[str, ConditionMatcher]] = None,
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
     ) -> bool:
     ) -> bool:
         """Evaluate an includeIf condition."""
         """Evaluate an includeIf condition."""
         # Try custom matchers first if provided
         # Try custom matchers first if provided
@@ -1246,7 +1247,7 @@ class ConfigFile(ConfigDict):
         *,
         *,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
         file_opener: Optional[FileOpener] = None,
         file_opener: Optional[FileOpener] = None,
-        condition_matchers: Optional[dict[str, ConditionMatcher]] = None,
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
     ) -> "ConfigFile":
     ) -> "ConfigFile":
         """Read configuration from a file on disk.
         """Read configuration from a file on disk.
 
 

+ 3 - 2
dulwich/contrib/diffstat.py

@@ -45,6 +45,7 @@ statistics about changes, including:
 
 
 import re
 import re
 import sys
 import sys
+from collections.abc import Sequence
 from typing import Optional
 from typing import Optional
 
 
 # only needs to detect git style diffs as this is for
 # only needs to detect git style diffs as this is for
@@ -67,7 +68,7 @@ _GIT_UNCHANGED_START = b" "
 
 
 
 
 def _parse_patch(
 def _parse_patch(
-    lines: list[bytes],
+    lines: Sequence[bytes],
 ) -> tuple[list[bytes], list[bool], list[tuple[int, int]]]:
 ) -> tuple[list[bytes], list[bool], list[tuple[int, int]]]:
     """Parse a git style diff or patch to generate diff stats.
     """Parse a git style diff or patch to generate diff stats.
 
 
@@ -121,7 +122,7 @@ def _parse_patch(
 
 
 # note must all done using bytes not string because on linux filenames
 # note must all done using bytes not string because on linux filenames
 # may not be encodable even to utf-8
 # may not be encodable even to utf-8
-def diffstat(lines: list[bytes], max_width: int = 80) -> bytes:
+def diffstat(lines: Sequence[bytes], max_width: int = 80) -> bytes:
     """Generate summary statistics from a git style diff ala (git diff tag1 tag2 --stat).
     """Generate summary statistics from a git style diff ala (git diff tag1 tag2 --stat).
 
 
     Args:
     Args:

+ 6 - 8
dulwich/contrib/swift.py

@@ -36,7 +36,7 @@ import sys
 import tempfile
 import tempfile
 import urllib.parse as urlparse
 import urllib.parse as urlparse
 import zlib
 import zlib
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping
 from configparser import ConfigParser
 from configparser import ConfigParser
 from io import BytesIO
 from io import BytesIO
 from typing import Any, BinaryIO, Callable, Optional, Union, cast
 from typing import Any, BinaryIO, Callable, Optional, Union, cast
@@ -1004,7 +1004,7 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
                 return False
                 return False
         return refs
         return refs
 
 
-    def _write_refs(self, refs: dict[bytes, bytes]) -> None:
+    def _write_refs(self, refs: Mapping[bytes, bytes]) -> None:
         f = BytesIO()
         f = BytesIO()
         f.writelines(write_info_refs(refs, cast("ObjectContainer", self.store)))
         f.writelines(write_info_refs(refs, cast("ObjectContainer", self.store)))
         self.scon.put_object(self.filename, f)
         self.scon.put_object(self.filename, f)
@@ -1250,17 +1250,15 @@ def main(argv: list[str] = sys.argv) -> None:
         "daemon": cmd_daemon,
         "daemon": cmd_daemon,
     }
     }
 
 
-    if len(sys.argv) < 2:
-        print(
-            "Usage: {} <{}> [OPTIONS...]".format(sys.argv[0], "|".join(commands.keys()))
-        )
+    if len(argv) < 2:
+        print("Usage: {} <{}> [OPTIONS...]".format(argv[0], "|".join(commands.keys())))
         sys.exit(1)
         sys.exit(1)
 
 
-    cmd = sys.argv[1]
+    cmd = argv[1]
     if cmd not in commands:
     if cmd not in commands:
         print(f"No such subcommand: {cmd}")
         print(f"No such subcommand: {cmd}")
         sys.exit(1)
         sys.exit(1)
-    commands[cmd](sys.argv[2:])
+    commands[cmd](argv[2:])
 
 
 
 
 if __name__ == "__main__":
 if __name__ == "__main__":
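
The swift.py change above is a behaviour fix as well as a typing one: main() previously ignored its argv parameter and read sys.argv directly, so it could not be driven programmatically. A minimal sketch of the corrected pattern, with a hypothetical subcommand table rather than the Swift backend's real one (the sketch returns exit codes instead of calling sys.exit so it can be asserted against):

    import sys
    from collections.abc import Sequence


    def main(argv: Sequence[str] = sys.argv) -> int:
        commands = {"init": lambda args: print("init", list(args))}
        if len(argv) < 2:
            print("Usage: {} <{}> [OPTIONS...]".format(argv[0], "|".join(commands)))
            return 1
        cmd = argv[1]
        if cmd not in commands:
            print(f"No such subcommand: {cmd}")
            return 1
        commands[cmd](argv[2:])
        return 0


    # Because argv is honoured, tests can call main() without touching sys.argv:
    assert main(["prog", "init", "--bare"]) == 0
    assert main(["prog"]) == 1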

+ 5 - 5
dulwich/diff.py

@@ -49,7 +49,7 @@ import logging
 import os
 import os
 import stat
 import stat
 import sys
 import sys
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from typing import BinaryIO, Optional, Union
 from typing import BinaryIO, Optional, Union
 
 
 if sys.version_info >= (3, 12):
 if sys.version_info >= (3, 12):
@@ -66,7 +66,7 @@ from .repo import Repo
 logger = logging.getLogger(__name__)
 logger = logging.getLogger(__name__)
 
 
 
 
-def should_include_path(path: bytes, paths: Optional[list[bytes]]) -> bool:
+def should_include_path(path: bytes, paths: Optional[Sequence[bytes]]) -> bool:
     """Check if a path should be included based on path filters.
     """Check if a path should be included based on path filters.
 
 
     Args:
     Args:
@@ -85,7 +85,7 @@ def diff_index_to_tree(
     repo: Repo,
     repo: Repo,
     outstream: BinaryIO,
     outstream: BinaryIO,
     commit_sha: Optional[bytes] = None,
     commit_sha: Optional[bytes] = None,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     diff_algorithm: Optional[str] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
 ) -> None:
     """Show staged changes (index vs commit).
     """Show staged changes (index vs commit).
@@ -130,7 +130,7 @@ def diff_working_tree_to_tree(
     repo: Repo,
     repo: Repo,
     outstream: BinaryIO,
     outstream: BinaryIO,
     commit_sha: bytes,
     commit_sha: bytes,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     diff_algorithm: Optional[str] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
 ) -> None:
     """Compare working tree to a specific commit.
     """Compare working tree to a specific commit.
@@ -375,7 +375,7 @@ def diff_working_tree_to_tree(
 def diff_working_tree_to_index(
 def diff_working_tree_to_index(
     repo: Repo,
     repo: Repo,
     outstream: BinaryIO,
     outstream: BinaryIO,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     diff_algorithm: Optional[str] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
 ) -> None:
     """Compare working tree to index.
     """Compare working tree to index.

+ 11 - 8
dulwich/diff_tree.py

@@ -23,7 +23,8 @@
 
 
 import stat
 import stat
 from collections import defaultdict
 from collections import defaultdict
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Set as AbstractSet
 from io import BytesIO
 from io import BytesIO
 from itertools import chain
 from itertools import chain
 from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, TypeVar
 from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, TypeVar
@@ -143,7 +144,7 @@ def walk_trees(
     tree1_id: Optional[ObjectID],
     tree1_id: Optional[ObjectID],
     tree2_id: Optional[ObjectID],
     tree2_id: Optional[ObjectID],
     prune_identical: bool = False,
     prune_identical: bool = False,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
 ) -> Iterator[tuple[Optional[TreeEntry], Optional[TreeEntry]]]:
 ) -> Iterator[tuple[Optional[TreeEntry], Optional[TreeEntry]]]:
     """Recursively walk all the entries of two trees.
     """Recursively walk all the entries of two trees.
 
 
@@ -262,7 +263,7 @@ def tree_changes(
     rename_detector: Optional["RenameDetector"] = None,
     rename_detector: Optional["RenameDetector"] = None,
     include_trees: bool = False,
     include_trees: bool = False,
     change_type_same: bool = False,
     change_type_same: bool = False,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
 ) -> Iterator[TreeChange]:
 ) -> Iterator[TreeChange]:
     """Find the differences between the contents of two trees.
     """Find the differences between the contents of two trees.
 
 
@@ -331,20 +332,20 @@ T = TypeVar("T")
 U = TypeVar("U")
 U = TypeVar("U")
 
 
 
 
-def _all_eq(seq: list[T], key: Callable[[T], U], value: U) -> bool:
+def _all_eq(seq: Sequence[T], key: Callable[[T], U], value: U) -> bool:
     for e in seq:
     for e in seq:
         if key(e) != value:
         if key(e) != value:
             return False
             return False
     return True
     return True
 
 
 
 
-def _all_same(seq: list[Any], key: Callable[[Any], Any]) -> bool:
+def _all_same(seq: Sequence[Any], key: Callable[[Any], Any]) -> bool:
     return _all_eq(seq[1:], key, key(seq[0]))
     return _all_eq(seq[1:], key, key(seq[0]))
 
 
 
 
 def tree_changes_for_merge(
 def tree_changes_for_merge(
     store: BaseObjectStore,
     store: BaseObjectStore,
-    parent_tree_ids: list[ObjectID],
+    parent_tree_ids: Sequence[ObjectID],
     tree_id: ObjectID,
     tree_id: ObjectID,
     rename_detector: Optional["RenameDetector"] = None,
     rename_detector: Optional["RenameDetector"] = None,
 ) -> Iterator[list[Optional[TreeChange]]]:
 ) -> Iterator[list[Optional[TreeChange]]]:
@@ -451,7 +452,7 @@ def _count_blocks(obj: ShaFile) -> dict[int, int]:
     return block_counts
     return block_counts
 
 
 
 
-def _common_bytes(blocks1: dict[int, int], blocks2: dict[int, int]) -> int:
+def _common_bytes(blocks1: Mapping[int, int], blocks2: Mapping[int, int]) -> int:
     """Count the number of common bytes in two block count dicts.
     """Count the number of common bytes in two block count dicts.
 
 
     Args:
     Args:
@@ -608,7 +609,9 @@ class RenameDetector:
         ):
         ):
             self._add_change(change)
             self._add_change(change)
 
 
-    def _prune(self, add_paths: set[bytes], delete_paths: set[bytes]) -> None:
+    def _prune(
+        self, add_paths: AbstractSet[bytes], delete_paths: AbstractSet[bytes]
+    ) -> None:
         def check_add(a: TreeChange) -> bool:
         def check_add(a: TreeChange) -> bool:
             assert a.new is not None
             assert a.new is not None
             return a.new.path not in add_paths
             return a.new.path not in add_paths
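
Importing collections.abc.Set as AbstractSet keeps the builtin set() name free while letting _prune accept set, frozenset or dict-key views, since it only performs membership tests; _common_bytes likewise needs only read access to its Mapping arguments. A small illustrative sketch with hypothetical helper names:

    from collections.abc import Mapping
    from collections.abc import Set as AbstractSet


    def surviving_adds(adds: list[bytes], pruned: AbstractSet[bytes]) -> list[bytes]:
        # Only `in` is needed, which every Set implementation provides.
        return [path for path in adds if path not in pruned]


    def common_bytes(blocks1: Mapping[int, int], blocks2: Mapping[int, int]) -> int:
        # Read-only access: iteration and .get() are all that is required.
        return sum(min(count, blocks2.get(block, 0)) for block, count in blocks1.items())


    adds = [b"a.txt", b"b.txt", b"c.txt"]
    print(surviving_adds(adds, frozenset({b"b.txt"})))   # [b'a.txt', b'c.txt']
    print(surviving_adds(adds, {b"c.txt": 1}.keys()))    # [b'a.txt', b'b.txt']
    print(common_bytes({1: 4, 2: 8}, {2: 6, 3: 1}))      # 6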

+ 4 - 2
dulwich/dumb.py

@@ -24,7 +24,7 @@
 import os
 import os
 import tempfile
 import tempfile
 import zlib
 import zlib
-from collections.abc import Iterator, Sequence
+from collections.abc import Iterator, Mapping, Sequence
 from io import BytesIO
 from io import BytesIO
 from typing import Any, Callable, Optional
 from typing import Any, Callable, Optional
 from urllib.parse import urljoin
 from urllib.parse import urljoin
@@ -434,7 +434,9 @@ class DumbRemoteHTTPRepo:
 
 
     def fetch_pack_data(
     def fetch_pack_data(
         self,
         self,
-        determine_wants: Callable[[dict[Ref, ObjectID], Optional[int]], list[ObjectID]],
+        determine_wants: Callable[
+            [Mapping[Ref, ObjectID], Optional[int]], list[ObjectID]
+        ],
         graph_walker: object,
         graph_walker: object,
         progress: Optional[Callable[[bytes], None]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         *,
         *,

+ 2 - 1
dulwich/errors.py

@@ -27,6 +27,7 @@
 # that raises the error.
 # that raises the error.
 
 
 import binascii
 import binascii
+from collections.abc import Sequence
 from typing import Optional, Union
 from typing import Optional, Union
 
 
 
 
@@ -197,7 +198,7 @@ class SendPackError(GitProtocolError):
 class HangupException(GitProtocolError):
 class HangupException(GitProtocolError):
     """Hangup exception."""
     """Hangup exception."""
 
 
-    def __init__(self, stderr_lines: Optional[list[bytes]] = None) -> None:
+    def __init__(self, stderr_lines: Optional[Sequence[bytes]] = None) -> None:
         """Initialize a HangupException.
         """Initialize a HangupException.
 
 
         Args:
         Args:

+ 3 - 2
dulwich/filter_branch.py

@@ -24,6 +24,7 @@
 import os
 import os
 import tempfile
 import tempfile
 import warnings
 import warnings
+from collections.abc import Sequence
 from typing import Callable, Optional, TypedDict
 from typing import Callable, Optional, TypedDict
 
 
 from .index import Index, build_index_from_tree
 from .index import Index, build_index_from_tree
@@ -58,7 +59,7 @@ class CommitFilter:
         filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
         filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
         tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
         tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
         index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
         index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
-        parent_filter: Optional[Callable[[list[bytes]], list[bytes]]] = None,
+        parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
         commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
         commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
         subdirectory_filter: Optional[bytes] = None,
         subdirectory_filter: Optional[bytes] = None,
         prune_empty: bool = False,
         prune_empty: bool = False,
@@ -377,7 +378,7 @@ class CommitFilter:
 def filter_refs(
 def filter_refs(
     refs: RefsContainer,
     refs: RefsContainer,
     object_store: BaseObjectStore,
     object_store: BaseObjectStore,
-    ref_names: list[bytes],
+    ref_names: Sequence[bytes],
     commit_filter: CommitFilter,
     commit_filter: CommitFilter,
     *,
     *,
     keep_original: bool = True,
     keep_original: bool = True,

+ 10 - 6
dulwich/graph.py

@@ -20,7 +20,7 @@
 
 
 """Implementation of merge-base following the approach of git."""
 """Implementation of merge-base following the approach of git."""
 
 
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping, Sequence
 from heapq import heappop, heappush
 from heapq import heappop, heappush
 from typing import TYPE_CHECKING, Callable, Generic, Optional, TypeVar
 from typing import TYPE_CHECKING, Callable, Generic, Optional, TypeVar
 
 
@@ -77,7 +77,7 @@ class WorkList(Generic[T]):
 def _find_lcas(
 def _find_lcas(
     lookup_parents: Callable[[ObjectID], list[ObjectID]],
     lookup_parents: Callable[[ObjectID], list[ObjectID]],
     c1: ObjectID,
     c1: ObjectID,
-    c2s: list[ObjectID],
+    c2s: Sequence[ObjectID],
     lookup_stamp: Callable[[ObjectID], int],
     lookup_stamp: Callable[[ObjectID], int],
     min_stamp: int = 0,
     min_stamp: int = 0,
     shallows: Optional[set[ObjectID]] = None,
     shallows: Optional[set[ObjectID]] = None,
@@ -104,7 +104,9 @@ def _find_lcas(
     _DNC = 4  # Do Not Consider
     _DNC = 4  # Do Not Consider
     _LCA = 8  # potential LCA (Lowest Common Ancestor)
     _LCA = 8  # potential LCA (Lowest Common Ancestor)
 
 
-    def _has_candidates(wlst: WorkList[ObjectID], cstates: dict[ObjectID, int]) -> bool:
+    def _has_candidates(
+        wlst: WorkList[ObjectID], cstates: Mapping[ObjectID, int]
+    ) -> bool:
         """Check if there are any candidate commits in the work list.
         """Check if there are any candidate commits in the work list.
 
 
         Args:
         Args:
@@ -203,7 +205,7 @@ def _find_lcas(
 
 
 
 
 # actual git sorts these based on commit times
 # actual git sorts these based on commit times
-def find_merge_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[ObjectID]:
+def find_merge_base(repo: "BaseRepo", commit_ids: Sequence[ObjectID]) -> list[ObjectID]:
     """Find lowest common ancestors of commit_ids[0] and *any* of commits_ids[1:].
     """Find lowest common ancestors of commit_ids[0] and *any* of commits_ids[1:].
 
 
     Args:
     Args:
@@ -236,7 +238,7 @@ def find_merge_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[Object
     c1 = commit_ids[0]
     c1 = commit_ids[0]
     if not len(commit_ids) > 1:
     if not len(commit_ids) > 1:
         return [c1]
         return [c1]
-    c2s = commit_ids[1:]
+    c2s = list(commit_ids[1:])
     if c1 in c2s:
     if c1 in c2s:
         return [c1]
         return [c1]
     lcas = _find_lcas(
     lcas = _find_lcas(
@@ -245,7 +247,9 @@ def find_merge_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[Object
     return lcas
     return lcas
 
 
 
 
-def find_octopus_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[ObjectID]:
+def find_octopus_base(
+    repo: "BaseRepo", commit_ids: Sequence[ObjectID]
+) -> list[ObjectID]:
     """Find lowest common ancestors of *all* provided commit_ids.
     """Find lowest common ancestors of *all* provided commit_ids.
 
 
     Args:
     Args:
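
The explicit list(commit_ids[1:]) in find_merge_base is the flip side of accepting Sequence[ObjectID]: slicing preserves the concrete sequence type, so a tuple argument would otherwise flow a tuple into code written around lists. A tiny demonstration with a hypothetical function name:

    from collections.abc import Sequence


    def split_heads(commit_ids: Sequence[bytes]) -> tuple[bytes, list[bytes]]:
        c1 = commit_ids[0]
        c2s = list(commit_ids[1:])  # tuple[1:] is a tuple, list[1:] is a list
        return c1, c2s


    print(split_heads((b"c1", b"c2", b"c3")))  # (b'c1', [b'c2', b'c3'])
    print(split_heads([b"c1", b"c2"]))         # (b'c1', [b'c2'])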

+ 4 - 3
dulwich/greenthreads.py

@@ -23,6 +23,7 @@
 
 
 """Utility module for querying an ObjectStore with gevent."""
 """Utility module for querying an ObjectStore with gevent."""
 
 
+from collections.abc import Sequence
 from typing import Callable, Optional
 from typing import Callable, Optional
 
 
 import gevent
 import gevent
@@ -39,7 +40,7 @@ from .objects import Commit, ObjectID, Tag
 
 
 def _split_commits_and_tags(
 def _split_commits_and_tags(
     obj_store: BaseObjectStore,
     obj_store: BaseObjectStore,
-    lst: list[ObjectID],
+    lst: Sequence[ObjectID],
     *,
     *,
     ignore_unknown: bool = False,
     ignore_unknown: bool = False,
     pool: pool.Pool,
     pool: pool.Pool,
@@ -82,8 +83,8 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
     def __init__(
     def __init__(
         self,
         self,
         object_store: BaseObjectStore,
         object_store: BaseObjectStore,
-        haves: list[ObjectID],
-        wants: list[ObjectID],
+        haves: Sequence[ObjectID],
+        wants: Sequence[ObjectID],
         progress: Optional[Callable[[bytes], None]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         get_tagged: Optional[Callable[[], dict[ObjectID, ObjectID]]] = None,
         get_tagged: Optional[Callable[[], dict[ObjectID, ObjectID]]] = None,
         concurrency: int = 1,
         concurrency: int = 1,

+ 4 - 1
dulwich/hooks.py

@@ -23,6 +23,7 @@
 
 
 import os
 import os
 import subprocess
 import subprocess
+from collections.abc import Sequence
 from typing import Any, Callable, Optional
 from typing import Any, Callable, Optional
 
 
 from .errors import HookError
 from .errors import HookError
@@ -188,7 +189,9 @@ class PostReceiveShellHook(ShellHook):
         filepath = os.path.join(controldir, "hooks", "post-receive")
         filepath = os.path.join(controldir, "hooks", "post-receive")
         ShellHook.__init__(self, "post-receive", path=filepath, numparam=0)
         ShellHook.__init__(self, "post-receive", path=filepath, numparam=0)
 
 
-    def execute(self, client_refs: list[tuple[bytes, bytes, bytes]]) -> Optional[bytes]:
+    def execute(
+        self, client_refs: Sequence[tuple[bytes, bytes, bytes]]
+    ) -> Optional[bytes]:
         """Execute the post-receive hook.
         """Execute the post-receive hook.
 
 
         Args:
         Args:

+ 3 - 3
dulwich/ignore.py

@@ -28,7 +28,7 @@ For example, use "dir/" instead of "dir" to check if a directory is ignored.
 
 
 import os.path
 import os.path
 import re
 import re
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from contextlib import suppress
 from contextlib import suppress
 from typing import TYPE_CHECKING, BinaryIO, Optional, Union
 from typing import TYPE_CHECKING, BinaryIO, Optional, Union
 
 
@@ -47,7 +47,7 @@ def _pattern_to_str(pattern: Union["Pattern", bytes, str]) -> str:
     return pattern_data.decode() if isinstance(pattern_data, bytes) else pattern_data
     return pattern_data.decode() if isinstance(pattern_data, bytes) else pattern_data
 
 
 
 
-def _check_parent_exclusion(path: str, matching_patterns: list["Pattern"]) -> bool:
+def _check_parent_exclusion(path: str, matching_patterns: Sequence["Pattern"]) -> bool:
     """Check if a parent directory exclusion prevents negation patterns from taking effect.
     """Check if a parent directory exclusion prevents negation patterns from taking effect.
 
 
     Args:
     Args:
@@ -163,7 +163,7 @@ def _translate_segment(segment: bytes) -> bytes:
     return res
     return res
 
 
 
 
-def _handle_double_asterisk(segments: list[bytes], i: int) -> tuple[bytes, bool]:
+def _handle_double_asterisk(segments: Sequence[bytes], i: int) -> tuple[bytes, bool]:
     """Handle ** segment processing, returns (regex_part, skip_next)."""
     """Handle ** segment processing, returns (regex_part, skip_next)."""
     # Check if ** is at end
     # Check if ** is at end
     remaining = segments[i + 1 :]
     remaining = segments[i + 1 :]

+ 6 - 6
dulwich/index.py

@@ -28,7 +28,7 @@ import stat
 import struct
 import struct
 import sys
 import sys
 import types
 import types
-from collections.abc import Generator, Iterable, Iterator
+from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
 from dataclasses import dataclass
 from dataclasses import dataclass
 from enum import Enum
 from enum import Enum
 from typing import (
 from typing import (
@@ -876,9 +876,9 @@ def read_index_dict(
 
 
 def write_index(
 def write_index(
     f: IO[bytes],
     f: IO[bytes],
-    entries: list[SerializedIndexEntry],
+    entries: Sequence[SerializedIndexEntry],
     version: Optional[int] = None,
     version: Optional[int] = None,
-    extensions: Optional[list[IndexExtension]] = None,
+    extensions: Optional[Sequence[IndexExtension]] = None,
 ) -> None:
 ) -> None:
     """Write an index file.
     """Write an index file.
 
 
@@ -917,9 +917,9 @@ def write_index(
 
 
 def write_index_dict(
 def write_index_dict(
     f: IO[bytes],
     f: IO[bytes],
-    entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]],
+    entries: Mapping[bytes, Union[IndexEntry, ConflictedIndexEntry]],
     version: Optional[int] = None,
     version: Optional[int] = None,
-    extensions: Optional[list[IndexExtension]] = None,
+    extensions: Optional[Sequence[IndexExtension]] = None,
 ) -> None:
 ) -> None:
     """Write an index file based on the contents of a dictionary.
     """Write an index file based on the contents of a dictionary.
 
 
@@ -2102,7 +2102,7 @@ def _transition_to_absent(
 
 
 
 
 def detect_case_only_renames(
 def detect_case_only_renames(
-    changes: list["TreeChange"],
+    changes: Sequence["TreeChange"],
     config: "Config",
     config: "Config",
 ) -> list["TreeChange"]:
 ) -> list["TreeChange"]:
     """Detect and transform case-only renames in a list of tree changes.
     """Detect and transform case-only renames in a list of tree changes.

+ 2 - 2
dulwich/lfs.py

@@ -37,7 +37,7 @@ import json
 import logging
 import logging
 import os
 import os
 import tempfile
 import tempfile
-from collections.abc import Iterable
+from collections.abc import Iterable, Mapping
 from dataclasses import dataclass
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, BinaryIO, Optional, Union
 from typing import TYPE_CHECKING, Any, BinaryIO, Optional, Union
 from urllib.parse import urljoin, urlparse
 from urllib.parse import urljoin, urlparse
@@ -478,7 +478,7 @@ class LFSClient:
         response_data = json.loads(response)
         response_data = json.loads(response)
         return self._parse_batch_response(response_data)
         return self._parse_batch_response(response_data)
 
 
-    def _parse_batch_response(self, data: dict[str, Any]) -> LFSBatchResponse:
+    def _parse_batch_response(self, data: Mapping[str, Any]) -> LFSBatchResponse:
         """Parse JSON response into LFSBatchResponse dataclass."""
         """Parse JSON response into LFSBatchResponse dataclass."""
         objects = []
         objects = []
         for obj_data in data.get("objects", []):
         for obj_data in data.get("objects", []):

+ 4 - 1
dulwich/lfs_server.py

@@ -25,6 +25,7 @@ import hashlib
 import json
 import json
 import tempfile
 import tempfile
 import typing
 import typing
+from collections.abc import Mapping
 from http.server import BaseHTTPRequestHandler, HTTPServer
 from http.server import BaseHTTPRequestHandler, HTTPServer
 from typing import Optional
 from typing import Optional
 
 
@@ -36,7 +37,9 @@ class LFSRequestHandler(BaseHTTPRequestHandler):
 
 
     server: "LFSServer"  # Type annotation for the server attribute
     server: "LFSServer"  # Type annotation for the server attribute
 
 
-    def send_json_response(self, status_code: int, data: dict[str, typing.Any]) -> None:
+    def send_json_response(
+        self, status_code: int, data: Mapping[str, typing.Any]
+    ) -> None:
         """Send a JSON response."""
         """Send a JSON response."""
         response = json.dumps(data).encode("utf-8")
         response = json.dumps(data).encode("utf-8")
         self.send_response(status_code)
         self.send_response(status_code)

+ 5 - 4
dulwich/line_ending.py

@@ -138,6 +138,7 @@ Sources:
 """
 """
 
 
 import logging
 import logging
+from collections.abc import Mapping
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
@@ -424,7 +425,7 @@ def get_clean_filter_autocrlf(
 # Backwards compatibility wrappers
 # Backwards compatibility wrappers
 @replace_me(since="0.23.1", remove_in="0.25.0")
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkout_filter(
 def get_checkout_filter(
-    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: dict[str, Any]
+    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: Mapping[str, Any]
 ) -> Optional[Callable[[bytes], bytes]]:
 ) -> Optional[Callable[[bytes], bytes]]:
     """Deprecated: Use get_smudge_filter instead."""
     """Deprecated: Use get_smudge_filter instead."""
     # Convert core_autocrlf to bytes for compatibility
     # Convert core_autocrlf to bytes for compatibility
@@ -441,7 +442,7 @@ def get_checkout_filter(
 
 
 @replace_me(since="0.23.1", remove_in="0.25.0")
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkin_filter(
 def get_checkin_filter(
-    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: dict[str, Any]
+    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: Mapping[str, Any]
 ) -> Optional[Callable[[bytes], bytes]]:
 ) -> Optional[Callable[[bytes], bytes]]:
     """Deprecated: Use get_clean_filter instead."""
     """Deprecated: Use get_clean_filter instead."""
     # Convert core_autocrlf to bytes for compatibility
     # Convert core_autocrlf to bytes for compatibility
@@ -481,7 +482,7 @@ class BlobNormalizer(FilterBlobNormalizer):
     def __init__(
     def __init__(
         self,
         self,
         config_stack: "StackedConfig",
         config_stack: "StackedConfig",
-        gitattributes: dict[str, Any],
+        gitattributes: Mapping[str, Any],
         core_eol: str = "native",
         core_eol: str = "native",
         autocrlf: bytes = b"false",
         autocrlf: bytes = b"false",
         safecrlf: bytes = b"false",
         safecrlf: bytes = b"false",
@@ -634,7 +635,7 @@ class TreeBlobNormalizer(BlobNormalizer):
     def __init__(
     def __init__(
         self,
         self,
         config_stack: "StackedConfig",
         config_stack: "StackedConfig",
-        git_attributes: dict[str, Any],
+        git_attributes: Mapping[str, Any],
         object_store: "BaseObjectStore",
         object_store: "BaseObjectStore",
         tree: Optional[ObjectID] = None,
         tree: Optional[ObjectID] = None,
         core_eol: str = "native",
         core_eol: str = "native",

+ 5 - 4
dulwich/merge.py

@@ -1,5 +1,6 @@
 """Git merge implementation."""
 """Git merge implementation."""
 
 
+from collections.abc import Sequence
 from difflib import SequenceMatcher
 from difflib import SequenceMatcher
 from typing import TYPE_CHECKING, Optional
 from typing import TYPE_CHECKING, Optional
 
 
@@ -19,9 +20,9 @@ from dulwich.objects import S_ISGITLINK, Blob, Commit, Tree, is_blob, is_tree
 
 
 
 
 def make_merge3(
 def make_merge3(
-    base: list[bytes],
-    a: list[bytes],
-    b: list[bytes],
+    base: Sequence[bytes],
+    a: Sequence[bytes],
+    b: Sequence[bytes],
     is_cherrypick: bool = False,
     is_cherrypick: bool = False,
     sequence_matcher: Optional[type[SequenceMatcher[bytes]]] = None,
     sequence_matcher: Optional[type[SequenceMatcher[bytes]]] = None,
 ) -> "merge3.Merge3":
 ) -> "merge3.Merge3":
@@ -49,7 +50,7 @@ class MergeConflict(Exception):
 
 
 
 
 def _can_merge_lines(
 def _can_merge_lines(
-    base_lines: list[bytes], a_lines: list[bytes], b_lines: list[bytes]
+    base_lines: Sequence[bytes], a_lines: Sequence[bytes], b_lines: Sequence[bytes]
 ) -> bool:
 ) -> bool:
     """Check if lines can be merged without conflict."""
     """Check if lines can be merged without conflict."""
     # If one side is unchanged, we can take the other side
     # If one side is unchanged, we can take the other side
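
make_merge3 and _can_merge_lines now take Sequence[bytes], so line blocks sliced out of tuples work as well as lists. The context comment above summarises the fast path; here is a minimal sketch of that idea only, with a hypothetical helper that is not the dulwich merge code:

    from collections.abc import Sequence


    def merge_region(
        base: Sequence[bytes], a: Sequence[bytes], b: Sequence[bytes]
    ) -> list[bytes]:
        # Compare as lists so tuple and list inputs are treated alike.
        if list(a) == list(base):
            return list(b)          # only b changed (or nothing did)
        if list(b) == list(base) or list(a) == list(b):
            return list(a)          # only a changed, or both made the same change
        raise ValueError("conflict: both sides changed the region differently")


    print(merge_region((b"x\n",), (b"x\n",), (b"y\n",)))  # [b'y\n']
    print(merge_region((b"x\n",), (b"y\n",), (b"x\n",)))  # [b'y\n']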

+ 6 - 4
dulwich/notes.py

@@ -21,7 +21,7 @@
 """Git notes handling."""
 """Git notes handling."""
 
 
 import stat
 import stat
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from typing import TYPE_CHECKING, Optional
 from typing import TYPE_CHECKING, Optional
 
 
 from .objects import Blob, Tree
 from .objects import Blob, Tree
@@ -240,7 +240,7 @@ class NotesTree:
 
 
             # Build new tree structure
             # Build new tree structure
             def update_tree(
             def update_tree(
-                tree: Tree, components: list[bytes], blob_sha: bytes
+                tree: Tree, components: Sequence[bytes], blob_sha: bytes
             ) -> Tree:
             ) -> Tree:
                 """Update tree with new note entry.
                 """Update tree with new note entry.
 
 
@@ -411,7 +411,9 @@ class NotesTree:
         components = path.split(b"/")
         components = path.split(b"/")
 
 
         # Build new tree structure
         # Build new tree structure
-        def update_tree(tree: Tree, components: list[bytes], blob_sha: bytes) -> Tree:
+        def update_tree(
+            tree: Tree, components: Sequence[bytes], blob_sha: bytes
+        ) -> Tree:
             """Update tree with new note entry.
             """Update tree with new note entry.
 
 
             Args:
             Args:
@@ -485,7 +487,7 @@ class NotesTree:
         components = path.split(b"/")
         components = path.split(b"/")
 
 
         # Build new tree structure without the note
         # Build new tree structure without the note
-        def remove_from_tree(tree: Tree, components: list[bytes]) -> Optional[Tree]:
+        def remove_from_tree(tree: Tree, components: Sequence[bytes]) -> Optional[Tree]:
             """Remove note entry from tree.
             """Remove note entry from tree.
 
 
             Args:
             Args:

+ 13 - 11
dulwich/object_store.py

@@ -29,7 +29,7 @@ import stat
 import sys
 import sys
 import time
 import time
 import warnings
 import warnings
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Iterable, Iterator, Mapping, Sequence, Set
 from contextlib import suppress
 from contextlib import suppress
 from io import BytesIO
 from io import BytesIO
 from pathlib import Path
 from pathlib import Path
@@ -232,7 +232,7 @@ class BaseObjectStore:
     """Object store interface."""
     """Object store interface."""
 
 
     def determine_wants_all(
     def determine_wants_all(
-        self, refs: dict[Ref, ObjectID], depth: Optional[int] = None
+        self, refs: Mapping[Ref, ObjectID], depth: Optional[int] = None
     ) -> list[ObjectID]:
     ) -> list[ObjectID]:
         """Determine which objects are wanted based on refs."""
         """Determine which objects are wanted based on refs."""
 
 
@@ -314,7 +314,7 @@ class BaseObjectStore:
         include_trees: bool = False,
         include_trees: bool = False,
         change_type_same: bool = False,
         change_type_same: bool = False,
         rename_detector: Optional["RenameDetector"] = None,
         rename_detector: Optional["RenameDetector"] = None,
-        paths: Optional[list[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
     ) -> Iterator[
     ) -> Iterator[
         tuple[
         tuple[
             tuple[Optional[bytes], Optional[bytes]],
             tuple[Optional[bytes], Optional[bytes]],
@@ -444,7 +444,7 @@ class BaseObjectStore:
         self,
         self,
         haves: Iterable[bytes],
         haves: Iterable[bytes],
         wants: Iterable[bytes],
         wants: Iterable[bytes],
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[..., None]] = None,
         progress: Optional[Callable[..., None]] = None,
         get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
         get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
         get_parents: Callable[..., list[bytes]] = lambda commit: commit.parents,
         get_parents: Callable[..., list[bytes]] = lambda commit: commit.parents,
@@ -495,7 +495,7 @@ class BaseObjectStore:
         self,
         self,
         have: Iterable[bytes],
         have: Iterable[bytes],
         want: Iterable[bytes],
         want: Iterable[bytes],
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[..., None]] = None,
         progress: Optional[Callable[..., None]] = None,
         ofs_delta: bool = True,
         ofs_delta: bool = True,
     ) -> tuple[int, Iterator[UnpackedObject]]:
     ) -> tuple[int, Iterator[UnpackedObject]]:
@@ -590,7 +590,7 @@ class BaseObjectStore:
         return None
         return None
 
 
     def write_commit_graph(
     def write_commit_graph(
-        self, refs: Optional[list[bytes]] = None, reachable: bool = True
+        self, refs: Optional[Sequence[bytes]] = None, reachable: bool = True
     ) -> None:
     ) -> None:
         """Write a commit graph file for this object store.
         """Write a commit graph file for this object store.
 
 
@@ -740,7 +740,7 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
         self,
         self,
         have: Iterable[bytes],
         have: Iterable[bytes],
         want: Iterable[bytes],
         want: Iterable[bytes],
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[..., None]] = None,
         progress: Optional[Callable[..., None]] = None,
         ofs_delta: bool = True,
         ofs_delta: bool = True,
     ) -> tuple[int, Iterator[UnpackedObject]]:
     ) -> tuple[int, Iterator[UnpackedObject]]:
@@ -850,7 +850,7 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
 
 
     def repack(
     def repack(
         self,
         self,
-        exclude: Optional[set[bytes]] = None,
+        exclude: Optional[Set[bytes]] = None,
         progress: Optional[Callable[[str], None]] = None,
         progress: Optional[Callable[[str], None]] = None,
     ) -> int:
     ) -> int:
         """Repack the packs in this repository.
         """Repack the packs in this repository.
@@ -2126,7 +2126,7 @@ class MissingObjectFinder:
         haves: Iterable[bytes],
         haves: Iterable[bytes],
         wants: Iterable[bytes],
         wants: Iterable[bytes],
         *,
         *,
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
         get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
         get_parents: Callable[[Commit], list[bytes]] = lambda commit: commit.parents,
         get_parents: Callable[[Commit], list[bytes]] = lambda commit: commit.parents,
@@ -2370,7 +2370,7 @@ class ObjectStoreGraphWalker:
 def commit_tree_changes(
 def commit_tree_changes(
     object_store: BaseObjectStore,
     object_store: BaseObjectStore,
     tree: Union[ObjectID, Tree],
     tree: Union[ObjectID, Tree],
-    changes: list[tuple[bytes, Optional[int], Optional[bytes]]],
+    changes: Sequence[tuple[bytes, Optional[int], Optional[bytes]]],
 ) -> ObjectID:
 ) -> ObjectID:
     """Commit a specified set of changes to a tree structure.
     """Commit a specified set of changes to a tree structure.
 
 
@@ -2434,7 +2434,9 @@ class OverlayObjectStore(BaseObjectStore):
     """Object store that can overlay multiple object stores."""
     """Object store that can overlay multiple object stores."""
 
 
     def __init__(
     def __init__(
-        self, bases: list[BaseObjectStore], add_store: Optional[BaseObjectStore] = None
+        self,
+        bases: list[BaseObjectStore],
+        add_store: Optional[BaseObjectStore] = None,
     ) -> None:
     ) -> None:
         """Initialize an OverlayObjectStore.
         """Initialize an OverlayObjectStore.
 
 

+ 2 - 2
dulwich/objects.py

@@ -28,7 +28,7 @@ import posixpath
 import stat
 import stat
 import sys
 import sys
 import zlib
 import zlib
-from collections.abc import Callable, Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from hashlib import sha1
 from hashlib import sha1
 from io import BufferedIOBase, BytesIO
 from io import BufferedIOBase, BytesIO
 from typing import (
 from typing import (
@@ -832,7 +832,7 @@ def _parse_message(
 
 
 
 
 def _format_message(
 def _format_message(
-    headers: list[tuple[bytes, bytes]], body: Optional[bytes]
+    headers: Sequence[tuple[bytes, bytes]], body: Optional[bytes]
 ) -> Iterator[bytes]:
 ) -> Iterator[bytes]:
     for field, value in headers:
     for field, value in headers:
         lines = value.split(b"\n")
         lines = value.split(b"\n")

+ 5 - 4
dulwich/objectspec.py

@@ -21,6 +21,7 @@
 
 
 """Object specification."""
 """Object specification."""
 
 
+from collections.abc import Sequence
 from typing import TYPE_CHECKING, Optional, Union
 from typing import TYPE_CHECKING, Optional, Union
 
 
 from .objects import Commit, ShaFile, Tag, Tree
 from .objects import Commit, ShaFile, Tag, Tree
@@ -288,7 +289,7 @@ def parse_reftuple(
 def parse_reftuples(
 def parse_reftuples(
     lh_container: Union["Repo", "RefsContainer"],
     lh_container: Union["Repo", "RefsContainer"],
     rh_container: Union["Repo", "RefsContainer"],
     rh_container: Union["Repo", "RefsContainer"],
-    refspecs: Union[bytes, list[bytes]],
+    refspecs: Union[bytes, Sequence[bytes]],
     force: bool = False,
     force: bool = False,
 ) -> list[tuple[Optional["Ref"], Optional["Ref"], bool]]:
 ) -> list[tuple[Optional["Ref"], Optional["Ref"], bool]]:
     """Parse a list of reftuple specs to a list of reftuples.
     """Parse a list of reftuple specs to a list of reftuples.
@@ -302,7 +303,7 @@ def parse_reftuples(
     Raises:
     Raises:
       KeyError: If one of the refs can not be found
       KeyError: If one of the refs can not be found
     """
     """
-    if not isinstance(refspecs, list):
+    if isinstance(refspecs, bytes):
         refspecs = [refspecs]
         refspecs = [refspecs]
     ret = []
     ret = []
     # TODO: Support * in refspecs
     # TODO: Support * in refspecs
@@ -313,7 +314,7 @@ def parse_reftuples(
 
 
 def parse_refs(
 def parse_refs(
     container: Union["Repo", "RefsContainer"],
     container: Union["Repo", "RefsContainer"],
-    refspecs: Union[bytes, str, list[Union[bytes, str]]],
+    refspecs: Union[bytes, str, Sequence[Union[bytes, str]]],
 ) -> list["Ref"]:
 ) -> list["Ref"]:
     """Parse a list of refspecs to a list of refs.
     """Parse a list of refspecs to a list of refs.
 
 
@@ -325,7 +326,7 @@ def parse_refs(
       KeyError: If one of the refs can not be found
       KeyError: If one of the refs can not be found
     """
     """
     # TODO: Support * in refspecs
     # TODO: Support * in refspecs
-    if not isinstance(refspecs, list):
+    if isinstance(refspecs, (bytes, str)):
         refspecs = [refspecs]
         refspecs = [refspecs]
     ret = []
     ret = []
     for refspec in refspecs:
     for refspec in refspecs:
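
The two isinstance changes in objectspec.py are forced by the widened annotations: once Sequence is accepted, "not a list" is no longer a safe test for "a single refspec", because a tuple would get wrapped into a one-element list and iterated as a single bogus item. Testing for the scalar types instead keeps both spellings working. Illustrative sketch with a hypothetical function name:

    from collections.abc import Sequence
    from typing import Union


    def normalise_refspecs(refspecs: Union[bytes, Sequence[bytes]]) -> list[bytes]:
        if isinstance(refspecs, bytes):   # only wrap true scalars
            return [refspecs]
        return list(refspecs)


    print(normalise_refspecs(b"refs/heads/main"))                     # [b'refs/heads/main']
    print(normalise_refspecs((b"refs/heads/main", b"refs/tags/v1")))  # two entries, as intended
    # The old check, `not isinstance(refspecs, list)`, would have produced
    # [(b'refs/heads/main', b'refs/tags/v1')] for the tuple argument.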

+ 4 - 4
dulwich/pack.py

@@ -48,7 +48,7 @@ import struct
 import sys
 import sys
 import warnings
 import warnings
 import zlib
 import zlib
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Iterable, Iterator, Sequence, Set
 from hashlib import sha1
 from hashlib import sha1
 from itertools import chain
 from itertools import chain
 from os import SEEK_CUR, SEEK_END
 from os import SEEK_CUR, SEEK_END
@@ -2601,9 +2601,9 @@ def write_pack_header(
 
 
 def find_reusable_deltas(
 def find_reusable_deltas(
     container: PackedObjectContainer,
     container: PackedObjectContainer,
-    object_ids: set[bytes],
+    object_ids: Set[bytes],
     *,
     *,
-    other_haves: Optional[set[bytes]] = None,
+    other_haves: Optional[Set[bytes]] = None,
     progress: Optional[Callable[..., None]] = None,
     progress: Optional[Callable[..., None]] = None,
 ) -> Iterator[UnpackedObject]:
 ) -> Iterator[UnpackedObject]:
     """Find deltas in a pack that can be reused.
     """Find deltas in a pack that can be reused.
@@ -3834,7 +3834,7 @@ class Pack:
 
 
 def extend_pack(
 def extend_pack(
     f: BinaryIO,
     f: BinaryIO,
-    object_ids: set[ObjectID],
+    object_ids: Set[ObjectID],
     get_raw: Callable[[ObjectID], tuple[int, bytes]],
     get_raw: Callable[[ObjectID], tuple[int, bytes]],
     *,
     *,
     compression_level: int = -1,
     compression_level: int = -1,
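
collections.abc.Set in find_reusable_deltas and extend_pack is satisfied by set, frozenset and dict key views alike, so a caller that already holds object ids as the keys of an index-like dict does not have to copy them into a set. Sketch with hypothetical names:

    from collections.abc import Set


    def count_available(object_ids: Set[bytes], available: Set[bytes]) -> int:
        # Membership tests and iteration are all that Set guarantees - and all
        # that is needed here.
        return sum(1 for oid in object_ids if oid in available)


    offsets = {b"a" * 40: 0, b"b" * 40: 12}         # e.g. an offset table keyed by SHA
    wanted = frozenset({b"a" * 40, b"c" * 40})
    print(count_available(wanted, offsets.keys()))  # 1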

+ 6 - 6
dulwich/patch.py

@@ -27,7 +27,7 @@ on.
 
 
 import email.parser
 import email.parser
 import time
 import time
-from collections.abc import Generator
+from collections.abc import Generator, Sequence
 from difflib import SequenceMatcher
 from difflib import SequenceMatcher
 from typing import (
 from typing import (
     IO,
     IO,
@@ -165,8 +165,8 @@ def _format_range_unified(start: int, stop: int) -> str:
 
 
 
 
 def unified_diff(
 def unified_diff(
-    a: list[bytes],
-    b: list[bytes],
+    a: Sequence[bytes],
+    b: Sequence[bytes],
     fromfile: bytes = b"",
     fromfile: bytes = b"",
     tofile: bytes = b"",
     tofile: bytes = b"",
     fromfiledate: str = "",
     fromfiledate: str = "",
@@ -216,7 +216,7 @@ def unified_diff(
 
 
 
 
 def _get_sequence_matcher(
 def _get_sequence_matcher(
-    algorithm: str, a: list[bytes], b: list[bytes]
+    algorithm: str, a: Sequence[bytes], b: Sequence[bytes]
 ) -> SequenceMatcher[bytes]:
 ) -> SequenceMatcher[bytes]:
     """Get appropriate sequence matcher for the given algorithm.
     """Get appropriate sequence matcher for the given algorithm.
 
 
@@ -245,8 +245,8 @@ def _get_sequence_matcher(
 
 
 
 
 def unified_diff_with_algorithm(
 def unified_diff_with_algorithm(
-    a: list[bytes],
-    b: list[bytes],
+    a: Sequence[bytes],
+    b: Sequence[bytes],
     fromfile: bytes = b"",
     fromfile: bytes = b"",
     tofile: bytes = b"",
     tofile: bytes = b"",
     fromfiledate: str = "",
     fromfiledate: str = "",
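
With a and b typed as Sequence[bytes], unified_diff and unified_diff_with_algorithm accept tuples of lines directly. A small usage sketch, assuming the lines already carry their trailing newlines and that the generator yields bytes lines in the usual unified-diff shape:

    from dulwich.patch import unified_diff

    old = (b"hello\n", b"world\n")
    new = (b"hello\n", b"there\n", b"world\n")
    for line in unified_diff(old, new, fromfile=b"a/greeting", tofile=b"b/greeting"):
        print(line.decode(), end="")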

+ 45 - 36
dulwich/porcelain.py

@@ -87,7 +87,8 @@ import stat
 import sys
 import sys
 import time
 import time
 from collections import namedtuple
 from collections import namedtuple
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Set as AbstractSet
 from contextlib import AbstractContextManager, closing, contextmanager
 from contextlib import AbstractContextManager, closing, contextmanager
 from dataclasses import dataclass
 from dataclasses import dataclass
 from io import BytesIO, RawIOBase
 from io import BytesIO, RawIOBase
@@ -757,7 +758,7 @@ def clone(
     filter_spec: Optional[str] = None,
     filter_spec: Optional[str] = None,
     protocol_version: Optional[int] = None,
     protocol_version: Optional[int] = None,
     recurse_submodules: bool = False,
     recurse_submodules: bool = False,
-    **kwargs: Union[Union[str, bytes], list[Union[str, bytes]]],
+    **kwargs: Union[Union[str, bytes], Sequence[Union[str, bytes]]],
 ) -> Repo:
 ) -> Repo:
     """Clone a local or remote git repository.
     """Clone a local or remote git repository.
 
 
@@ -865,7 +866,9 @@ def clone(
 def add(
 def add(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     repo: Union[str, os.PathLike[str], Repo] = ".",
     paths: Optional[
     paths: Optional[
-        Union[list[Union[str, bytes, os.PathLike[str]]], str, bytes, os.PathLike[str]]
+        Union[
+            Sequence[Union[str, bytes, os.PathLike[str]]], str, bytes, os.PathLike[str]
+        ]
     ] = None,
     ] = None,
 ) -> tuple[list[str], set[str]]:
 ) -> tuple[list[str], set[str]]:
     """Add files to the staging area.
     """Add files to the staging area.
@@ -916,7 +919,7 @@ def add(
             # When no paths specified, add all untracked and modified files from repo root
             # When no paths specified, add all untracked and modified files from repo root
             paths = [str(repo_path)]
             paths = [str(repo_path)]
         relpaths = []
         relpaths = []
-        if not isinstance(paths, list):
+        if isinstance(paths, (str, bytes, os.PathLike)):
             paths = [paths]
             paths = [paths]
         for p in paths:
         for p in paths:
             # Handle bytes paths by decoding them
             # Handle bytes paths by decoding them
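
Once paths may be any Sequence, wrapping everything that is not a list would incorrectly turn a tuple of paths into a single-element list, so the hunk above inverts the test and wraps only the scalar path types. The same normalisation in isolation, with a hypothetical helper name:

    import os
    from collections.abc import Sequence
    from typing import Union

    OnePath = Union[str, bytes, os.PathLike]

    def normalize_paths(paths: Union[OnePath, Sequence[OnePath]]) -> list[OnePath]:
        # Wrap a lone str/bytes/PathLike; copy any other sequence as-is.
        if isinstance(paths, (str, bytes, os.PathLike)):
            return [paths]
        return list(paths)

    assert normalize_paths("setup.py") == ["setup.py"]
    assert normalize_paths(("a.txt", "b.txt")) == ["a.txt", "b.txt"]

The same scalar-type check reappears below in show(), bisect_start() and bisect_skip().
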
@@ -1067,7 +1070,7 @@ def clean(
 
 
 def remove(
 def remove(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     repo: Union[str, os.PathLike[str], Repo] = ".",
-    paths: list[Union[str, bytes, os.PathLike[str]]] = [],
+    paths: Sequence[Union[str, bytes, os.PathLike[str]]] = [],
     cached: bool = False,
     cached: bool = False,
 ) -> None:
 ) -> None:
     """Remove files from the staging area.
     """Remove files from the staging area.
@@ -1498,7 +1501,7 @@ def print_name_status(changes: Iterator[TreeChange]) -> Iterator[str]:
 
 
 def log(
 def log(
     repo: RepoPath = ".",
     repo: RepoPath = ".",
-    paths: Optional[list[Union[str, bytes]]] = None,
+    paths: Optional[Sequence[Union[str, bytes]]] = None,
     outstream: TextIO = sys.stdout,
     outstream: TextIO = sys.stdout,
     max_entries: Optional[int] = None,
     max_entries: Optional[int] = None,
     reverse: bool = False,
     reverse: bool = False,
@@ -1547,7 +1550,7 @@ def log(
 # TODO(jelmer): better default for encoding?
 # TODO(jelmer): better default for encoding?
 def show(
 def show(
     repo: RepoPath = ".",
     repo: RepoPath = ".",
-    objects: Optional[list[Union[str, bytes]]] = None,
+    objects: Optional[Sequence[Union[str, bytes]]] = None,
     outstream: TextIO = sys.stdout,
     outstream: TextIO = sys.stdout,
     default_encoding: str = DEFAULT_ENCODING,
     default_encoding: str = DEFAULT_ENCODING,
 ) -> None:
 ) -> None:
@@ -1562,7 +1565,7 @@ def show(
     """
     """
     if objects is None:
     if objects is None:
         objects = ["HEAD"]
         objects = ["HEAD"]
-    if not isinstance(objects, list):
+    if isinstance(objects, (str, bytes)):
         objects = [objects]
         objects = [objects]
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         for objectish in objects:
         for objectish in objects:
@@ -1618,7 +1621,7 @@ def diff(
     commit: Optional[Union[str, bytes, Commit]] = None,
     commit: Optional[Union[str, bytes, Commit]] = None,
     commit2: Optional[Union[str, bytes, Commit]] = None,
     commit2: Optional[Union[str, bytes, Commit]] = None,
     staged: bool = False,
     staged: bool = False,
-    paths: Optional[list[Union[str, bytes]]] = None,
+    paths: Optional[Sequence[Union[str, bytes]]] = None,
     outstream: BinaryIO = default_bytes_out_stream,
     outstream: BinaryIO = default_bytes_out_stream,
     diff_algorithm: Optional[str] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
 ) -> None:
@@ -1724,7 +1727,7 @@ def diff(
 
 
 def rev_list(
 def rev_list(
     repo: RepoPath,
     repo: RepoPath,
-    commits: list[Union[str, bytes]],
+    commits: Sequence[Union[str, bytes]],
     outstream: BinaryIO = default_bytes_out_stream,
     outstream: BinaryIO = default_bytes_out_stream,
 ) -> None:
 ) -> None:
     """Lists commit objects in reverse chronological order.
     """Lists commit objects in reverse chronological order.
@@ -1815,7 +1818,7 @@ def submodule_list(repo: RepoPath) -> Iterator[tuple[str, str]]:
 
 
 def submodule_update(
 def submodule_update(
     repo: Union[str, os.PathLike[str], Repo],
     repo: Union[str, os.PathLike[str], Repo],
-    paths: Optional[list[Union[str, bytes, os.PathLike[str]]]] = None,
+    paths: Optional[Sequence[Union[str, bytes, os.PathLike[str]]]] = None,
     init: bool = False,
     init: bool = False,
     force: bool = False,
     force: bool = False,
     errstream: Optional[BinaryIO] = None,
     errstream: Optional[BinaryIO] = None,
@@ -2378,7 +2381,7 @@ def get_remote_repo(
 def push(
 def push(
     repo: RepoPath,
     repo: RepoPath,
     remote_location: Optional[Union[str, bytes]] = None,
     remote_location: Optional[Union[str, bytes]] = None,
-    refspecs: Optional[Union[Union[str, bytes], list[Union[str, bytes]]]] = None,
+    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
     outstream: BinaryIO = default_bytes_out_stream,
     outstream: BinaryIO = default_bytes_out_stream,
     errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
     errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
     force: bool = False,
     force: bool = False,
@@ -2483,11 +2486,14 @@ def push(
         try:
         try:
 
 
             def generate_pack_data_wrapper(
             def generate_pack_data_wrapper(
-                have: set[bytes], want: set[bytes], ofs_delta: bool = False
+                have: AbstractSet[bytes],
+                want: AbstractSet[bytes],
+                ofs_delta: bool = False,
             ) -> tuple[int, Iterator[UnpackedObject]]:
             ) -> tuple[int, Iterator[UnpackedObject]]:
                 # Wrap to match the expected signature
                 # Wrap to match the expected signature
+                # Convert AbstractSet to set since generate_pack_data expects set
                 return r.generate_pack_data(
                 return r.generate_pack_data(
-                    have, want, progress=None, ofs_delta=ofs_delta
+                    set(have), set(want), progress=None, ofs_delta=ofs_delta
                 )
                 )
 
 
             result = client.send_pack(
             result = client.send_pack(
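
generate_pack_data still expects concrete sets, so the wrapper above narrows its AbstractSet arguments with set() at the boundary instead of widening that API as well. The shape of that adapter on its own, with invented names:

    from collections.abc import Set as AbstractSet

    def narrow(have: AbstractSet[bytes], want: AbstractSet[bytes]) -> tuple[set[bytes], set[bytes]]:
        # set() copies any abstract set (frozenset, keys view, ...) into the concrete type.
        return set(have), set(want)

    haves, wants = narrow(frozenset({b"1" * 40}), {b"2" * 40: None}.keys())
    assert haves == {b"1" * 40} and wants == {b"2" * 40}
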
@@ -2528,7 +2534,7 @@ def push(
 def pull(
 def pull(
     repo: RepoPath,
     repo: RepoPath,
     remote_location: Optional[Union[str, bytes]] = None,
     remote_location: Optional[Union[str, bytes]] = None,
-    refspecs: Optional[Union[Union[str, bytes], list[Union[str, bytes]]]] = None,
+    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
     outstream: BinaryIO = default_bytes_out_stream,
     outstream: BinaryIO = default_bytes_out_stream,
     errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
     errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
     fast_forward: bool = True,
     fast_forward: bool = True,
@@ -3154,7 +3160,7 @@ def _make_tag_ref(name: Union[str, bytes]) -> Ref:
 
 
 
 
 def branch_delete(
 def branch_delete(
-    repo: RepoPath, name: Union[str, bytes, list[Union[str, bytes]]]
+    repo: RepoPath, name: Union[str, bytes, Sequence[Union[str, bytes]]]
 ) -> None:
 ) -> None:
     """Delete a branch.
     """Delete a branch.
 
 
@@ -3163,12 +3169,12 @@ def branch_delete(
       name: Name of the branch
       name: Name of the branch
     """
     """
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
-        if isinstance(name, list):
+        if isinstance(name, (list, tuple)):
             names = name
             names = name
         else:
         else:
             names = [name]
             names = [name]
-        for name in names:
-            del r.refs[_make_branch_ref(name)]
+        for branch_name in names:
+            del r.refs[_make_branch_ref(branch_name)]
 
 
 
 
 def branch_create(
 def branch_create(
@@ -3753,7 +3759,7 @@ def repack(repo: RepoPath) -> None:
 
 
 def pack_objects(
 def pack_objects(
     repo: RepoPath,
     repo: RepoPath,
-    object_ids: list[bytes],
+    object_ids: Sequence[bytes],
     packf: BinaryIO,
     packf: BinaryIO,
     idxf: Optional[BinaryIO],
     idxf: Optional[BinaryIO],
     delta_window_size: Optional[int] = None,
     delta_window_size: Optional[int] = None,
@@ -3913,7 +3919,7 @@ def _quote_path(path: str) -> str:
 
 
 def check_ignore(
 def check_ignore(
     repo: RepoPath,
     repo: RepoPath,
-    paths: list[Union[str, bytes, os.PathLike[str]]],
+    paths: Sequence[Union[str, bytes, os.PathLike[str]]],
     no_index: bool = False,
     no_index: bool = False,
     quote_path: bool = True,
     quote_path: bool = True,
 ) -> Iterator[str]:
 ) -> Iterator[str]:
@@ -4400,7 +4406,7 @@ def cone_mode_init(repo: Union[str, os.PathLike[str], Repo]) -> None:
 
 
 
 
 def cone_mode_set(
 def cone_mode_set(
-    repo: Union[str, os.PathLike[str], Repo], dirs: list[str], force: bool = False
+    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
 ) -> None:
 ) -> None:
     """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
     """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
 
 
@@ -4425,7 +4431,7 @@ def cone_mode_set(
 
 
 
 
 def cone_mode_add(
 def cone_mode_add(
-    repo: Union[str, os.PathLike[str], Repo], dirs: list[str], force: bool = False
+    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
 ) -> None:
 ) -> None:
     """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
     """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
 
 
@@ -4450,7 +4456,7 @@ def cone_mode_add(
             for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
             for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
             if pat not in base_patterns
             if pat not in base_patterns
         ]
         ]
-        added_dirs = existing_dirs + (dirs or [])
+        added_dirs = existing_dirs + list(dirs or [])
         repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
         repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
         new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
         new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
         sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
         sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
@@ -5135,7 +5141,7 @@ def cherry_pick(  # noqa: D417
 
 
 def revert(
 def revert(
     repo: Union[str, os.PathLike[str], Repo],
     repo: Union[str, os.PathLike[str], Repo],
-    commits: Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]],
+    commits: Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]],
     no_commit: bool = False,
     no_commit: bool = False,
     message: Optional[Union[str, bytes]] = None,
     message: Optional[Union[str, bytes]] = None,
     author: Optional[bytes] = None,
     author: Optional[bytes] = None,
@@ -5614,7 +5620,7 @@ def filter_branch(
     filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
     filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
     tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
     tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
     index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
     index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
-    parent_filter: Optional[Callable[[list[bytes]], list[bytes]]] = None,
+    parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
     commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
     commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
     subdirectory_filter: Optional[Union[str, bytes]] = None,
     subdirectory_filter: Optional[Union[str, bytes]] = None,
     prune_empty: bool = False,
     prune_empty: bool = False,
@@ -5872,9 +5878,9 @@ def bisect_start(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     repo: Union[str, os.PathLike[str], Repo] = ".",
     bad: Optional[Union[str, bytes, Commit, Tag]] = None,
     bad: Optional[Union[str, bytes, Commit, Tag]] = None,
     good: Optional[
     good: Optional[
-        Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
+        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
     ] = None,
     ] = None,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     no_checkout: bool = False,
     no_checkout: bool = False,
     term_bad: str = "bad",
     term_bad: str = "bad",
     term_good: str = "good",
     term_good: str = "good",
@@ -5893,8 +5899,8 @@ def bisect_start(
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         state = BisectState(r)
         state = BisectState(r)
 
 
-        # Convert single good commit to list
-        if good is not None and not isinstance(good, list):
+        # Convert single good commit to sequence
+        if good is not None and isinstance(good, (str, bytes, Commit, Tag)):
             good = [good]
             good = [good]
 
 
         # Parse commits
         # Parse commits
@@ -5987,7 +5993,7 @@ def bisect_good(
 def bisect_skip(
 def bisect_skip(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     repo: Union[str, os.PathLike[str], Repo] = ".",
     revs: Optional[
     revs: Optional[
-        Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
+        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
     ] = None,
     ] = None,
 ) -> Optional[bytes]:
 ) -> Optional[bytes]:
     """Skip one or more commits.
     """Skip one or more commits.
@@ -6005,8 +6011,8 @@ def bisect_skip(
         if revs is None:
         if revs is None:
             rev_shas = None
             rev_shas = None
         else:
         else:
-            # Convert single rev to list
-            if not isinstance(revs, list):
+            # Convert single rev to sequence
+            if isinstance(revs, (str, bytes, Commit, Tag)):
                 revs = [revs]
                 revs = [revs]
             rev_shas = [parse_commit(r, rev).id for rev in revs]
             rev_shas = [parse_commit(r, rev).id for rev in revs]
 
 
@@ -6135,7 +6141,8 @@ def reflog(
 
 
 
 
 def lfs_track(
 def lfs_track(
-    repo: Union[str, os.PathLike[str], Repo] = ".", patterns: Optional[list[str]] = None
+    repo: Union[str, os.PathLike[str], Repo] = ".",
+    patterns: Optional[Sequence[str]] = None,
 ) -> list[str]:
 ) -> list[str]:
     """Track file patterns with Git LFS.
     """Track file patterns with Git LFS.
 
 
@@ -6187,7 +6194,8 @@ def lfs_track(
 
 
 
 
 def lfs_untrack(
 def lfs_untrack(
-    repo: Union[str, os.PathLike[str], Repo] = ".", patterns: Optional[list[str]] = None
+    repo: Union[str, os.PathLike[str], Repo] = ".",
+    patterns: Optional[Sequence[str]] = None,
 ) -> list[str]:
 ) -> list[str]:
     """Untrack file patterns from Git LFS.
     """Untrack file patterns from Git LFS.
 
 
@@ -6472,7 +6480,8 @@ def lfs_migrate(
 
 
 
 
 def lfs_pointer_check(
 def lfs_pointer_check(
-    repo: Union[str, os.PathLike[str], Repo] = ".", paths: Optional[list[str]] = None
+    repo: Union[str, os.PathLike[str], Repo] = ".",
+    paths: Optional[Sequence[str]] = None,
 ) -> dict[str, Optional[Any]]:
 ) -> dict[str, Optional[Any]]:
     """Check if files are valid LFS pointers.
     """Check if files are valid LFS pointers.
 
 

+ 2 - 2
dulwich/protocol.py

@@ -23,7 +23,7 @@
 """Generic functions for talking the git smart server protocol."""
 """Generic functions for talking the git smart server protocol."""
 
 
 import types
 import types
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from io import BytesIO
 from io import BytesIO
 from os import SEEK_END
 from os import SEEK_END
 from typing import Callable, Optional
 from typing import Callable, Optional
@@ -714,7 +714,7 @@ def format_capability_line(capabilities: Iterable[bytes]) -> bytes:
 
 
 
 
 def format_ref_line(
 def format_ref_line(
-    ref: bytes, sha: bytes, capabilities: Optional[list[bytes]] = None
+    ref: bytes, sha: bytes, capabilities: Optional[Sequence[bytes]] = None
 ) -> bytes:
 ) -> bytes:
     """Format a ref advertisement line.
     """Format a ref advertisement line.
 
 

+ 2 - 1
dulwich/rebase.py

@@ -24,6 +24,7 @@
 import os
 import os
 import shutil
 import shutil
 import subprocess
 import subprocess
+from collections.abc import Sequence
 from dataclasses import dataclass
 from dataclasses import dataclass
 from enum import Enum
 from enum import Enum
 from typing import Callable, Optional, Protocol, TypedDict
 from typing import Callable, Optional, Protocol, TypedDict
@@ -341,7 +342,7 @@ class RebaseTodo:
         return cls(entries)
         return cls(entries)
 
 
     @classmethod
     @classmethod
-    def from_commits(cls, commits: list[Commit]) -> "RebaseTodo":
+    def from_commits(cls, commits: Sequence[Commit]) -> "RebaseTodo":
         """Create a todo list from a list of commits.
         """Create a todo list from a list of commits.
 
 
         Args:
         Args:

+ 10 - 10
dulwich/refs.py

@@ -25,7 +25,7 @@
 import os
 import os
 import types
 import types
 import warnings
 import warnings
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping
 from contextlib import suppress
 from contextlib import suppress
 from typing import (
 from typing import (
     IO,
     IO,
@@ -218,7 +218,7 @@ class RefsContainer:
         """
         """
         raise NotImplementedError(self.get_packed_refs)
         raise NotImplementedError(self.get_packed_refs)
 
 
-    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
         """Add the given refs as packed refs.
         """Add the given refs as packed refs.
 
 
         Args:
         Args:
@@ -241,7 +241,7 @@ class RefsContainer:
     def import_refs(
     def import_refs(
         self,
         self,
         base: Ref,
         base: Ref,
-        other: dict[Ref, ObjectID],
+        other: Mapping[Ref, ObjectID],
         committer: Optional[bytes] = None,
         committer: Optional[bytes] = None,
         timestamp: Optional[bytes] = None,
         timestamp: Optional[bytes] = None,
         timezone: Optional[bytes] = None,
         timezone: Optional[bytes] = None,
@@ -756,14 +756,14 @@ class DictRefsContainer(RefsContainer):
         """Get peeled version of a reference."""
         """Get peeled version of a reference."""
         return self._peeled.get(name)
         return self._peeled.get(name)
 
 
-    def _update(self, refs: dict[bytes, bytes]) -> None:
+    def _update(self, refs: Mapping[bytes, bytes]) -> None:
         """Update multiple refs; intended only for testing."""
         """Update multiple refs; intended only for testing."""
         # TODO(dborowitz): replace this with a public function that uses
         # TODO(dborowitz): replace this with a public function that uses
         # set_if_equal.
         # set_if_equal.
         for ref, sha in refs.items():
         for ref, sha in refs.items():
             self.set_if_equals(ref, None, sha)
             self.set_if_equals(ref, None, sha)
 
 
-    def _update_peeled(self, peeled: dict[bytes, bytes]) -> None:
+    def _update_peeled(self, peeled: Mapping[bytes, bytes]) -> None:
         """Update cached peeled refs; intended only for testing."""
         """Update cached peeled refs; intended only for testing."""
         self._peeled.update(peeled)
         self._peeled.update(peeled)
 
 
@@ -940,7 +940,7 @@ class DiskRefsContainer(RefsContainer):
                         self._packed_refs[name] = sha
                         self._packed_refs[name] = sha
         return self._packed_refs
         return self._packed_refs
 
 
-    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
         """Add the given refs as packed refs.
         """Add the given refs as packed refs.
 
 
         Args:
         Args:
@@ -1405,8 +1405,8 @@ def read_packed_refs_with_peeled(
 
 
 def write_packed_refs(
 def write_packed_refs(
     f: IO[bytes],
     f: IO[bytes],
-    packed_refs: dict[bytes, bytes],
-    peeled_refs: Optional[dict[bytes, bytes]] = None,
+    packed_refs: Mapping[bytes, bytes],
+    peeled_refs: Optional[Mapping[bytes, bytes]] = None,
 ) -> None:
 ) -> None:
     """Write a packed refs file.
     """Write a packed refs file.
 
 
@@ -1442,7 +1442,7 @@ def read_info_refs(f: BinaryIO) -> dict[bytes, bytes]:
 
 
 
 
 def write_info_refs(
 def write_info_refs(
-    refs: dict[bytes, bytes], store: ObjectContainer
+    refs: Mapping[bytes, bytes], store: ObjectContainer
 ) -> Iterator[bytes]:
 ) -> Iterator[bytes]:
     """Generate info refs."""
     """Generate info refs."""
     # TODO: Avoid recursive import :(
     # TODO: Avoid recursive import :(
@@ -1593,7 +1593,7 @@ def _import_remote_refs(
 
 
 
 
 def serialize_refs(
 def serialize_refs(
-    store: ObjectContainer, refs: dict[bytes, bytes]
+    store: ObjectContainer, refs: Mapping[bytes, bytes]
 ) -> dict[bytes, bytes]:
 ) -> dict[bytes, bytes]:
     """Serialize refs with peeled refs.
     """Serialize refs with peeled refs.
 
 

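Typing the ref dictionaries as Mapping, as in add_packed_refs, write_packed_refs and serialize_refs above, records that the refs are only read, never mutated, so any read-only mapping view satisfies the annotation. A small illustration with an invented consumer, not a dulwich function:

    from collections.abc import Mapping
    from types import MappingProxyType

    def count_branches(refs: Mapping[bytes, bytes]) -> int:
        # Lookups and iteration only; the mapping is never modified here.
        return sum(1 for name in refs if name.startswith(b"refs/heads/"))

    frozen = MappingProxyType({b"refs/heads/main": b"0" * 40, b"refs/tags/v1": b"1" * 40})
    assert count_branches(frozen) == 1
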
+ 3 - 2
dulwich/reftable.py

@@ -13,6 +13,7 @@ import shutil
 import struct
 import struct
 import time
 import time
 import zlib
 import zlib
+from collections.abc import Mapping
 from dataclasses import dataclass
 from dataclasses import dataclass
 from io import BytesIO
 from io import BytesIO
 from types import TracebackType
 from types import TracebackType
@@ -1082,14 +1083,14 @@ class ReftableRefsContainer(RefsContainer):
         table_name = f"0x{min_idx:016x}-0x{max_idx:016x}-{hash_part:08x}.ref"
         table_name = f"0x{min_idx:016x}-0x{max_idx:016x}-{hash_part:08x}.ref"
         return os.path.join(self.reftable_dir, table_name)
         return os.path.join(self.reftable_dir, table_name)
 
 
-    def add_packed_refs(self, new_refs: dict[bytes, Optional[bytes]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[bytes, Optional[bytes]]) -> None:
         """Add packed refs. Creates a new reftable file with all refs consolidated."""
         """Add packed refs. Creates a new reftable file with all refs consolidated."""
         if not new_refs:
         if not new_refs:
             return
             return
 
 
         self._write_batch_updates(new_refs)
         self._write_batch_updates(new_refs)
 
 
-    def _write_batch_updates(self, updates: dict[bytes, Optional[bytes]]) -> None:
+    def _write_batch_updates(self, updates: Mapping[bytes, Optional[bytes]]) -> None:
         """Write multiple ref updates to a single reftable file."""
         """Write multiple ref updates to a single reftable file."""
         if not updates:
         if not updates:
             return
             return

+ 13 - 13
dulwich/repo.py

@@ -34,7 +34,7 @@ import stat
 import sys
 import sys
 import time
 import time
 import warnings
 import warnings
-from collections.abc import Generator, Iterable, Iterator
+from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
 from io import BytesIO
 from io import BytesIO
 from types import TracebackType
 from types import TracebackType
 from typing import (
 from typing import (
@@ -300,7 +300,7 @@ def parse_graftpoints(
     return grafts
     return grafts
 
 
 
 
-def serialize_graftpoints(graftpoints: dict[bytes, list[bytes]]) -> bytes:
+def serialize_graftpoints(graftpoints: Mapping[bytes, Sequence[bytes]]) -> bytes:
     """Convert a dictionary of grafts into string.
     """Convert a dictionary of grafts into string.
 
 
     The graft dictionary is:
     The graft dictionary is:
@@ -508,7 +508,7 @@ class BaseRepo:
         self,
         self,
         target: "BaseRepo",
         target: "BaseRepo",
         determine_wants: Optional[
         determine_wants: Optional[
-            Callable[[dict[bytes, bytes], Optional[int]], list[bytes]]
+            Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]]
         ] = None,
         ] = None,
         progress: Optional[Callable[..., None]] = None,
         progress: Optional[Callable[..., None]] = None,
         depth: Optional[int] = None,
         depth: Optional[int] = None,
@@ -536,7 +536,7 @@ class BaseRepo:
 
 
     def fetch_pack_data(
     def fetch_pack_data(
         self,
         self,
-        determine_wants: Callable[[dict[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
         graph_walker: "GraphWalker",
         graph_walker: "GraphWalker",
         progress: Optional[Callable[[bytes], None]],
         progress: Optional[Callable[[bytes], None]],
         *,
         *,
@@ -571,7 +571,7 @@ class BaseRepo:
 
 
     def find_missing_objects(
     def find_missing_objects(
         self,
         self,
-        determine_wants: Callable[[dict[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
         graph_walker: "GraphWalker",
         graph_walker: "GraphWalker",
         progress: Optional[Callable[[bytes], None]],
         progress: Optional[Callable[[bytes], None]],
         *,
         *,
@@ -917,12 +917,12 @@ class BaseRepo:
 
 
     def get_walker(
     def get_walker(
         self,
         self,
-        include: Optional[list[bytes]] = None,
-        exclude: Optional[list[bytes]] = None,
+        include: Optional[Sequence[bytes]] = None,
+        exclude: Optional[Sequence[bytes]] = None,
         order: str = "date",
         order: str = "date",
         reverse: bool = False,
         reverse: bool = False,
         max_entries: Optional[int] = None,
         max_entries: Optional[int] = None,
-        paths: Optional[list[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
         rename_detector: Optional["RenameDetector"] = None,
         rename_detector: Optional["RenameDetector"] = None,
         follow: bool = False,
         follow: bool = False,
         since: Optional[int] = None,
         since: Optional[int] = None,
@@ -1060,7 +1060,7 @@ class BaseRepo:
 
 
         self._graftpoints.update(updated_graftpoints)
         self._graftpoints.update(updated_graftpoints)
 
 
-    def _remove_graftpoints(self, to_remove: list[bytes] = []) -> None:
+    def _remove_graftpoints(self, to_remove: Sequence[bytes] = ()) -> None:
         """Remove graftpoints.
         """Remove graftpoints.
 
 
         Args:
         Args:
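
Because Sequence carries no promise of mutability, the default for to_remove can change from [] to an empty tuple, which also avoids the shared-mutable-default pitfall. In miniature, with an invented function:

    from collections.abc import Sequence

    def drop_comments(lines: Sequence[bytes] = ()) -> list[bytes]:
        # () is immutable, so the default can never be mutated and shared between calls.
        return [line for line in lines if not line.startswith(b"#")]

    assert drop_comments() == []
    assert drop_comments((b"# note", b"keep me")) == [b"keep me"]
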
@@ -1583,7 +1583,7 @@ class Repo(BaseRepo):
         return self.get_worktree().stage(fs_paths)
         return self.get_worktree().stage(fs_paths)
 
 
     @replace_me(remove_in="0.26.0")
     @replace_me(remove_in="0.26.0")
-    def unstage(self, fs_paths: list[str]) -> None:
+    def unstage(self, fs_paths: Sequence[str]) -> None:
         """Unstage specific file in the index.
         """Unstage specific file in the index.
 
 
         Args:
         Args:
@@ -2192,7 +2192,7 @@ class Repo(BaseRepo):
         return self.get_worktree().get_sparse_checkout_patterns()
         return self.get_worktree().get_sparse_checkout_patterns()
 
 
     @replace_me(remove_in="0.26.0")
     @replace_me(remove_in="0.26.0")
-    def set_sparse_checkout_patterns(self, patterns: list[str]) -> None:
+    def set_sparse_checkout_patterns(self, patterns: Sequence[str]) -> None:
         """Write the given sparse-checkout patterns into info/sparse-checkout.
         """Write the given sparse-checkout patterns into info/sparse-checkout.
 
 
         Creates the info/ directory if it does not exist.
         Creates the info/ directory if it does not exist.
@@ -2203,7 +2203,7 @@ class Repo(BaseRepo):
         return self.get_worktree().set_sparse_checkout_patterns(patterns)
         return self.get_worktree().set_sparse_checkout_patterns(patterns)
 
 
     @replace_me(remove_in="0.26.0")
     @replace_me(remove_in="0.26.0")
-    def set_cone_mode_patterns(self, dirs: Union[list[str], None] = None) -> None:
+    def set_cone_mode_patterns(self, dirs: Union[Sequence[str], None] = None) -> None:
         """Write the given cone-mode directory patterns into info/sparse-checkout.
         """Write the given cone-mode directory patterns into info/sparse-checkout.
 
 
         For each directory to include, add an inclusion line that "undoes" the prior
         For each directory to include, add an inclusion line that "undoes" the prior
@@ -2510,7 +2510,7 @@ class MemoryRepo(BaseRepo):
     def init_bare(
     def init_bare(
         cls,
         cls,
         objects: Iterable[ShaFile],
         objects: Iterable[ShaFile],
-        refs: dict[bytes, bytes],
+        refs: Mapping[bytes, bytes],
         format: Optional[int] = None,
         format: Optional[int] = None,
     ) -> "MemoryRepo":
     ) -> "MemoryRepo":
         """Create a new bare repository in memory.
         """Create a new bare repository in memory.

+ 16 - 13
dulwich/server.py

@@ -50,7 +50,8 @@ import socketserver
 import sys
 import sys
 import time
 import time
 import zlib
 import zlib
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping, Sequence
+from collections.abc import Set as AbstractSet
 from functools import partial
 from functools import partial
 from typing import IO, TYPE_CHECKING, Callable, Optional, Union
 from typing import IO, TYPE_CHECKING, Callable, Optional, Union
 from typing import Protocol as TypingProtocol
 from typing import Protocol as TypingProtocol
@@ -176,7 +177,7 @@ class BackendRepo(TypingProtocol):
 
 
     def find_missing_objects(
     def find_missing_objects(
         self,
         self,
-        determine_wants: Callable[[dict[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
         graph_walker: "_ProtocolGraphWalker",
         graph_walker: "_ProtocolGraphWalker",
         progress: Optional[Callable[[bytes], None]],
         progress: Optional[Callable[[bytes], None]],
         *,
         *,
@@ -395,7 +396,7 @@ class UploadPackHandler(PackHandler):
     def __init__(
     def __init__(
         self,
         self,
         backend: Backend,
         backend: Backend,
-        args: list[str],
+        args: Sequence[str],
         proto: Protocol,
         proto: Protocol,
         stateless_rpc: bool = False,
         stateless_rpc: bool = False,
         advertise_refs: bool = False,
         advertise_refs: bool = False,
@@ -472,7 +473,7 @@ class UploadPackHandler(PackHandler):
 
 
     def get_tagged(
     def get_tagged(
         self,
         self,
-        refs: Optional[dict[bytes, bytes]] = None,
+        refs: Optional[Mapping[bytes, bytes]] = None,
         repo: Optional[BackendRepo] = None,
         repo: Optional[BackendRepo] = None,
     ) -> dict[ObjectID, ObjectID]:
     ) -> dict[ObjectID, ObjectID]:
         """Get a dict of peeled values of tags to their original tag shas.
         """Get a dict of peeled values of tags to their original tag shas.
@@ -526,7 +527,7 @@ class UploadPackHandler(PackHandler):
         wants = []
         wants = []
 
 
         def wants_wrapper(
         def wants_wrapper(
-            refs: dict[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: Optional[int] = None
         ) -> list[bytes]:
         ) -> list[bytes]:
             wants.extend(graph_walker.determine_wants(refs, depth))
             wants.extend(graph_walker.determine_wants(refs, depth))
             return wants
             return wants
@@ -652,7 +653,7 @@ def _want_satisfied(
 
 
 
 
 def _all_wants_satisfied(
 def _all_wants_satisfied(
-    store: ObjectContainer, haves: set[bytes], wants: set[bytes]
+    store: ObjectContainer, haves: AbstractSet[bytes], wants: set[bytes]
 ) -> bool:
 ) -> bool:
     """Check whether all the current wants are satisfied by a set of haves.
     """Check whether all the current wants are satisfied by a set of haves.
 
 
@@ -746,7 +747,7 @@ class _ProtocolGraphWalker:
         self._impl: Optional[AckGraphWalkerImpl] = None
         self._impl: Optional[AckGraphWalkerImpl] = None
 
 
     def determine_wants(
     def determine_wants(
-        self, heads: dict[bytes, bytes], depth: Optional[int] = None
+        self, heads: Mapping[bytes, bytes], depth: Optional[int] = None
     ) -> list[bytes]:
     ) -> list[bytes]:
         """Determine the wants for a set of heads.
         """Determine the wants for a set of heads.
 
 
@@ -897,7 +898,7 @@ class _ProtocolGraphWalker:
         """
         """
         return _split_proto_line(self.proto.read_pkt_line(), allowed)
         return _split_proto_line(self.proto.read_pkt_line(), allowed)
 
 
-    def _handle_shallow_request(self, wants: list[bytes]) -> None:
+    def _handle_shallow_request(self, wants: Sequence[bytes]) -> None:
         """Handle shallow clone requests from the client.
         """Handle shallow clone requests from the client.
 
 
         Args:
         Args:
@@ -923,7 +924,9 @@ class _ProtocolGraphWalker:
 
 
         self.update_shallow(new_shallow, unshallow)
         self.update_shallow(new_shallow, unshallow)
 
 
-    def update_shallow(self, new_shallow: set[bytes], unshallow: set[bytes]) -> None:
+    def update_shallow(
+        self, new_shallow: AbstractSet[bytes], unshallow: AbstractSet[bytes]
+    ) -> None:
         """Update shallow/unshallow information to the client.
         """Update shallow/unshallow information to the client.
 
 
         Args:
         Args:
@@ -975,7 +978,7 @@ class _ProtocolGraphWalker:
         """
         """
         self._wants = wants
         self._wants = wants
 
 
-    def all_wants_satisfied(self, haves: set[bytes]) -> bool:
+    def all_wants_satisfied(self, haves: AbstractSet[bytes]) -> bool:
         """Check whether all the current wants are satisfied by a set of haves.
         """Check whether all the current wants are satisfied by a set of haves.
 
 
         Args:
         Args:
@@ -1253,7 +1256,7 @@ class ReceivePackHandler(PackHandler):
     def __init__(
     def __init__(
         self,
         self,
         backend: Backend,
         backend: Backend,
-        args: list[str],
+        args: Sequence[str],
         proto: Protocol,
         proto: Protocol,
         stateless_rpc: bool = False,
         stateless_rpc: bool = False,
         advertise_refs: bool = False,
         advertise_refs: bool = False,
@@ -1351,7 +1354,7 @@ class ReceivePackHandler(PackHandler):
                 ref_status = b"bad ref"
                 ref_status = b"bad ref"
             yield (ref, ref_status)
             yield (ref, ref_status)
 
 
-    def _report_status(self, status: list[tuple[bytes, bytes]]) -> None:
+    def _report_status(self, status: Sequence[tuple[bytes, bytes]]) -> None:
         """Report status to client.
         """Report status to client.
 
 
         Args:
         Args:
@@ -1456,7 +1459,7 @@ class UploadArchiveHandler(Handler):
     def __init__(
     def __init__(
         self,
         self,
         backend: Backend,
         backend: Backend,
-        args: list[str],
+        args: Sequence[str],
         proto: Protocol,
         proto: Protocol,
         stateless_rpc: bool = False,
         stateless_rpc: bool = False,
     ) -> None:
     ) -> None:
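
The determine_wants callbacks above now receive the advertised refs as a Mapping[bytes, bytes], which is all a typical implementation reads anyway; server.py also aliases collections.abc.Set as AbstractSet on import so the builtin set stays available for building concrete sets. A sketch of a conforming callback (the selection logic is only an example):

    from collections.abc import Mapping
    from typing import Optional

    def wants_heads_only(refs: Mapping[bytes, bytes], depth: Optional[int] = None) -> list[bytes]:
        # Read-only access to the refs advertisement; pick just the branch tips.
        return [sha for ref, sha in refs.items() if ref.startswith(b"refs/heads/")]

    advertised = {b"refs/heads/main": b"0" * 40, b"refs/tags/v1.0": b"1" * 40}
    assert wants_heads_only(advertised) == [b"0" * 40]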

+ 9 - 6
dulwich/sparse_patterns.py

@@ -22,6 +22,7 @@
 """Sparse checkout pattern handling."""
 """Sparse checkout pattern handling."""
 
 
 import os
 import os
+from collections.abc import Sequence, Set
 from fnmatch import fnmatch
 from fnmatch import fnmatch
 
 
 from .file import ensure_dir_exists
 from .file import ensure_dir_exists
@@ -38,7 +39,9 @@ class BlobNotFoundError(Exception):
     """Raised when a requested blob is not found in the repository's object store."""
     """Raised when a requested blob is not found in the repository's object store."""
 
 
 
 
-def determine_included_paths(index: Index, lines: list[str], cone: bool) -> set[str]:
+def determine_included_paths(
+    index: Index, lines: Sequence[str], cone: bool
+) -> set[str]:
     """Determine which paths in the index should be included based on either a full-pattern match or a cone-mode approach.
     """Determine which paths in the index should be included based on either a full-pattern match or a cone-mode approach.
 
 
     Args:
     Args:
@@ -55,7 +58,7 @@ def determine_included_paths(index: Index, lines: list[str], cone: bool) -> set[
         return compute_included_paths_full(index, lines)
         return compute_included_paths_full(index, lines)
 
 
 
 
-def compute_included_paths_full(index: Index, lines: list[str]) -> set[str]:
+def compute_included_paths_full(index: Index, lines: Sequence[str]) -> set[str]:
     """Use .gitignore-style parsing and matching to determine included paths.
     """Use .gitignore-style parsing and matching to determine included paths.
 
 
     Each file path in the index is tested against the parsed sparse patterns.
     Each file path in the index is tested against the parsed sparse patterns.
@@ -78,7 +81,7 @@ def compute_included_paths_full(index: Index, lines: list[str]) -> set[str]:
     return included
     return included
 
 
 
 
-def compute_included_paths_cone(index: Index, lines: list[str]) -> set[str]:
+def compute_included_paths_cone(index: Index, lines: Sequence[str]) -> set[str]:
     """Implement a simplified 'cone' approach for sparse-checkout.
     """Implement a simplified 'cone' approach for sparse-checkout.
 
 
     By default, this can include top-level files, exclude all subdirectories,
     By default, this can include top-level files, exclude all subdirectories,
@@ -135,7 +138,7 @@ def compute_included_paths_cone(index: Index, lines: list[str]) -> set[str]:
 
 
 
 
 def apply_included_paths(
 def apply_included_paths(
-    repo: Repo, included_paths: set[str], force: bool = False
+    repo: Repo, included_paths: Set[str], force: bool = False
 ) -> None:
 ) -> None:
     """Apply the sparse-checkout inclusion set to the index and working tree.
     """Apply the sparse-checkout inclusion set to the index and working tree.
 
 
@@ -226,7 +229,7 @@ def apply_included_paths(
                         f.write(blob.data)
                         f.write(blob.data)
 
 
 
 
-def parse_sparse_patterns(lines: list[str]) -> list[tuple[str, bool, bool, bool]]:
+def parse_sparse_patterns(lines: Sequence[str]) -> list[tuple[str, bool, bool, bool]]:
     """Parse pattern lines from a sparse-checkout file (.git/info/sparse-checkout).
     """Parse pattern lines from a sparse-checkout file (.git/info/sparse-checkout).
 
 
     This simplified parser:
     This simplified parser:
@@ -277,7 +280,7 @@ def parse_sparse_patterns(lines: list[str]) -> list[tuple[str, bool, bool, bool]
 
 
 def match_gitignore_patterns(
 def match_gitignore_patterns(
     path_str: str,
     path_str: str,
-    parsed_patterns: list[tuple[str, bool, bool, bool]],
+    parsed_patterns: Sequence[tuple[str, bool, bool, bool]],
     path_is_dir: bool = False,
     path_is_dir: bool = False,
 ) -> bool:
 ) -> bool:
     """Check whether a path is included based on .gitignore-style patterns.
     """Check whether a path is included based on .gitignore-style patterns.

+ 4 - 4
dulwich/walk.py

@@ -23,7 +23,7 @@
 
 
 import collections
 import collections
 import heapq
 import heapq
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from itertools import chain
 from itertools import chain
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union, cast
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union, cast
 
 
@@ -274,12 +274,12 @@ class Walker:
     def __init__(
     def __init__(
         self,
         self,
         store: "BaseObjectStore",
         store: "BaseObjectStore",
-        include: list[bytes],
-        exclude: Optional[list[bytes]] = None,
+        include: Sequence[bytes],
+        exclude: Optional[Sequence[bytes]] = None,
         order: str = "date",
         order: str = "date",
         reverse: bool = False,
         reverse: bool = False,
         max_entries: Optional[int] = None,
         max_entries: Optional[int] = None,
-        paths: Optional[list[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
         rename_detector: Optional[RenameDetector] = None,
         rename_detector: Optional[RenameDetector] = None,
         follow: bool = False,
         follow: bool = False,
         since: Optional[int] = None,
         since: Optional[int] = None,
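
With include, exclude and paths widened to Sequence[bytes] here and in Repo.get_walker above, the walker can be handed tuples directly. A usage sketch, assuming the current directory is a repository with at least one commit; the path filter and entry limit are arbitrary:

    from dulwich.repo import Repo

    with Repo(".") as r:
        head = r.head()
        # Tuples now satisfy the Sequence[bytes] parameters.
        for entry in r.get_walker(include=(head,), paths=(b"docs",), max_entries=5):
            print(entry.commit.id.decode("ascii"))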

+ 2 - 2
dulwich/web.py

@@ -51,7 +51,7 @@ import os
 import re
 import re
 import sys
 import sys
 import time
 import time
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
 from io import BytesIO
 from io import BytesIO
 from types import TracebackType
 from types import TracebackType
 from typing import (
 from typing import (
@@ -613,7 +613,7 @@ class HTTPGitRequest:
         self,
         self,
         status: str = HTTP_OK,
         status: str = HTTP_OK,
         content_type: Optional[str] = None,
         content_type: Optional[str] = None,
-        headers: Optional[list[tuple[str, str]]] = None,
+        headers: Optional[Sequence[tuple[str, str]]] = None,
     ) -> Callable[[bytes], object]:
     ) -> Callable[[bytes], object]:
         """Begin a response with the given status and other headers."""
         """Begin a response with the given status and other headers."""
         if headers:
         if headers:

+ 4 - 1
dulwich/whitespace.py

@@ -24,6 +24,7 @@ This module implements Git's core.whitespace configuration and related
 whitespace error detection capabilities.
 whitespace error detection capabilities.
 """
 """
 
 
+from collections.abc import Sequence, Set
 from typing import Optional
 from typing import Optional
 
 
 # Default whitespace errors Git checks for
 # Default whitespace errors Git checks for
@@ -217,7 +218,9 @@ class WhitespaceChecker:
 
 
 
 
 def fix_whitespace_errors(
 def fix_whitespace_errors(
-    content: bytes, errors: list[tuple[str, int]], fix_types: Optional[set[str]] = None
+    content: bytes,
+    errors: Sequence[tuple[str, int]],
+    fix_types: Optional[Set[str]] = None,
 ) -> bytes:
 ) -> bytes:
     """Fix whitespace errors in content.
     """Fix whitespace errors in content.
 
 

+ 5 - 5
dulwich/worktree.py

@@ -31,7 +31,7 @@ import sys
 import tempfile
 import tempfile
 import time
 import time
 import warnings
 import warnings
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
 from contextlib import contextmanager
 from contextlib import contextmanager
 from pathlib import Path
 from pathlib import Path
 from typing import Any, Callable, Union
 from typing import Any, Callable, Union
@@ -332,7 +332,7 @@ class WorkTree:
                     index[tree_path] = index_entry_from_stat(st, blob.id)
                     index[tree_path] = index_entry_from_stat(st, blob.id)
         index.write()
         index.write()
 
 
-    def unstage(self, fs_paths: list[str]) -> None:
+    def unstage(self, fs_paths: Sequence[str]) -> None:
         """Unstage specific file in the index.
         """Unstage specific file in the index.
 
 
         Args:
         Args:
@@ -411,7 +411,7 @@ class WorkTree:
         tree: ObjectID | None = None,
         tree: ObjectID | None = None,
         encoding: bytes | None = None,
         encoding: bytes | None = None,
         ref: Ref | None = b"HEAD",
         ref: Ref | None = b"HEAD",
-        merge_heads: list[ObjectID] | None = None,
+        merge_heads: Sequence[ObjectID] | None = None,
         no_verify: bool = False,
         no_verify: bool = False,
         sign: bool = False,
         sign: bool = False,
     ) -> ObjectID:
     ) -> ObjectID:
@@ -700,7 +700,7 @@ class WorkTree:
         except FileNotFoundError:
         except FileNotFoundError:
             return []
             return []
 
 
-    def set_sparse_checkout_patterns(self, patterns: list[str]) -> None:
+    def set_sparse_checkout_patterns(self, patterns: Sequence[str]) -> None:
         """Write the given sparse-checkout patterns into info/sparse-checkout.
         """Write the given sparse-checkout patterns into info/sparse-checkout.
 
 
         Creates the info/ directory if it does not exist.
         Creates the info/ directory if it does not exist.
@@ -716,7 +716,7 @@ class WorkTree:
             for pat in patterns:
             for pat in patterns:
                 f.write(pat + "\n")
                 f.write(pat + "\n")
 
 
-    def set_cone_mode_patterns(self, dirs: list[str] | None = None) -> None:
+    def set_cone_mode_patterns(self, dirs: Sequence[str] | None = None) -> None:
         """Write the given cone-mode directory patterns into info/sparse-checkout.
         """Write the given cone-mode directory patterns into info/sparse-checkout.
 
 
         For each directory to include, add an inclusion line that "undoes" the prior
         For each directory to include, add an inclusion line that "undoes" the prior

+ 2 - 1
tests/__init__.py

@@ -38,6 +38,7 @@ import tempfile
 
 
 # If Python itself provides an exception, use that
 # If Python itself provides an exception, use that
 import unittest
 import unittest
+from collections.abc import Sequence
 from typing import ClassVar, Optional
 from typing import ClassVar, Optional
 from unittest import SkipTest, expectedFailure, skipIf
 from unittest import SkipTest, expectedFailure, skipIf
 from unittest import TestCase as _TestCase
 from unittest import TestCase as _TestCase
@@ -88,7 +89,7 @@ class BlackboxTestCase(TestCase):
         else:
         else:
             raise SkipTest(f"Unable to find binary {name}")
             raise SkipTest(f"Unable to find binary {name}")
 
 
-    def run_command(self, name: str, args: list[str]) -> subprocess.Popen[bytes]:
+    def run_command(self, name: str, args: Sequence[str]) -> subprocess.Popen[bytes]:
         """Run a Dulwich command.
         """Run a Dulwich command.
 
 
         Args:
         Args:

+ 6 - 5
tests/compat/test_check_ignore.py

@@ -23,6 +23,7 @@
 
 
 import os
 import os
 import tempfile
 import tempfile
+from collections.abc import Sequence
 
 
 from dulwich import porcelain
 from dulwich import porcelain
 from dulwich.repo import Repo
 from dulwich.repo import Repo
@@ -65,7 +66,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
         full_path = os.path.join(self.test_dir, path)
         full_path = os.path.join(self.test_dir, path)
         os.makedirs(full_path, exist_ok=True)
         os.makedirs(full_path, exist_ok=True)
 
 
-    def _git_check_ignore(self, paths: list[str]) -> set[str]:
+    def _git_check_ignore(self, paths: Sequence[str]) -> set[str]:
         """Run git check-ignore and return set of ignored paths."""
         """Run git check-ignore and return set of ignored paths."""
         try:
         try:
             output = run_git_or_fail(
             output = run_git_or_fail(
@@ -80,7 +81,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
             # git check-ignore returns non-zero when no paths are ignored
             # git check-ignore returns non-zero when no paths are ignored
             return set()
             return set()
 
 
-    def _dulwich_check_ignore(self, paths: list[str]) -> set[str]:
+    def _dulwich_check_ignore(self, paths: Sequence[str]) -> set[str]:
         """Run dulwich check_ignore and return set of ignored paths."""
         """Run dulwich check_ignore and return set of ignored paths."""
         # Convert to absolute paths relative to the test directory
         # Convert to absolute paths relative to the test directory
         abs_paths = [os.path.join(self.test_dir, path) for path in paths]
         abs_paths = [os.path.join(self.test_dir, path) for path in paths]
@@ -112,7 +113,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
                 result.add(path.replace("\\", "/"))
                 result.add(path.replace("\\", "/"))
         return result
         return result
 
 
-    def _assert_ignore_match(self, paths: list[str]) -> None:
+    def _assert_ignore_match(self, paths: Sequence[str]) -> None:
         """Assert that dulwich and git return the same ignored paths."""
         """Assert that dulwich and git return the same ignored paths."""
         git_ignored = self._git_check_ignore(paths)
         git_ignored = self._git_check_ignore(paths)
         dulwich_ignored = self._dulwich_check_ignore(paths)
         dulwich_ignored = self._dulwich_check_ignore(paths)
@@ -1158,7 +1159,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
         ]
         ]
         self._assert_ignore_match(paths)
         self._assert_ignore_match(paths)
 
 
-    def _git_check_ignore_quoted(self, paths: list[str]) -> set[str]:
+    def _git_check_ignore_quoted(self, paths: Sequence[str]) -> set[str]:
         """Run git check-ignore with default quoting and return set of ignored paths."""
         """Run git check-ignore with default quoting and return set of ignored paths."""
         try:
         try:
             # Use default git settings (core.quotePath=true by default)
             # Use default git settings (core.quotePath=true by default)
@@ -1174,7 +1175,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
             # git check-ignore returns non-zero when no paths are ignored
             # git check-ignore returns non-zero when no paths are ignored
             return set()
             return set()
 
 
-    def _dulwich_check_ignore_quoted(self, paths: list[str]) -> set[str]:
+    def _dulwich_check_ignore_quoted(self, paths: Sequence[str]) -> set[str]:
         """Run dulwich check_ignore with quote_path=True and return set of ignored paths."""
         """Run dulwich check_ignore with quote_path=True and return set of ignored paths."""
         # Convert to absolute paths relative to the test directory
         # Convert to absolute paths relative to the test directory
         abs_paths = [os.path.join(self.test_dir, path) for path in paths]
         abs_paths = [os.path.join(self.test_dir, path) for path in paths]

+ 3 - 2
tests/test_dumb.py

@@ -22,6 +22,7 @@
 """Tests for dumb HTTP git repositories."""
 """Tests for dumb HTTP git repositories."""
 
 
 import zlib
 import zlib
+from collections.abc import Mapping
 from typing import Callable, Optional, Union
 from typing import Callable, Optional, Union
 from unittest import TestCase
 from unittest import TestCase
 from unittest.mock import Mock
 from unittest.mock import Mock
@@ -264,7 +265,7 @@ fedcba9876543210fedcba9876543210fedcba98\trefs/tags/v1.0
         graph_walker = Mock()
         graph_walker = Mock()
 
 
         def determine_wants(
         def determine_wants(
-            refs: dict[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: Optional[int] = None
         ) -> list[bytes]:
         ) -> list[bytes]:
             return []
             return []
 
 
@@ -291,7 +292,7 @@ fedcba9876543210fedcba9876543210fedcba98\trefs/tags/v1.0
         graph_walker.ack.return_value = []  # No existing objects
         graph_walker.ack.return_value = []  # No existing objects
 
 
         def determine_wants(
         def determine_wants(
-            refs: dict[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: Optional[int] = None
         ) -> list[bytes]:
         ) -> list[bytes]:
             return [blob_sha]
             return [blob_sha]