Use immutable container type annotations where appropriate

Fixes #1894
Jelmer Vernooij, 3 months ago
commit 266a37e7c5
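
The pattern applied throughout this commit: parameters that are only read are annotated with the abstract container types from collections.abc (Sequence, Mapping, Set) rather than the concrete list, dict and set, so callers may pass tuples, frozensets or read-only views, and a type checker will flag accidental mutation inside the function. A minimal sketch of the idea; the function and names below are illustrative, not taken from the commit:

    from collections.abc import Mapping, Sequence

    def summarize_refs(refs: Mapping[bytes, bytes], order: Sequence[bytes]) -> list[bytes]:
        # Only read access is promised by the annotations; a type checker would
        # reject refs[b"HEAD"] = b"..." or order.append(...) in this body.
        return [refs[name] for name in order if name in refs]

    # Callers are now free to pass immutable containers:
    summarize_refs({b"refs/heads/main": b"abc123"}, (b"refs/heads/main",))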

Cargo.lock (+3 -3)

@@ -10,7 +10,7 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
 
 [[package]]
 name = "diff-tree-py"
-version = "0.24.1"
+version = "0.24.2"
 dependencies = [
  "pyo3",
 ]
@@ -50,7 +50,7 @@ dependencies = [
 
 [[package]]
 name = "objects-py"
-version = "0.24.1"
+version = "0.24.2"
 dependencies = [
  "memchr",
  "pyo3",
@@ -64,7 +64,7 @@ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
 
 [[package]]
 name = "pack-py"
-version = "0.24.1"
+version = "0.24.2"
 dependencies = [
  "memchr",
  "pyo3",

dulwich/annotate.py (+3 -2)

@@ -27,6 +27,7 @@ Python's difflib.
 """
 
 import difflib
+from collections.abc import Sequence
 from typing import TYPE_CHECKING, Optional
 
 from dulwich.objects import Blob
@@ -49,7 +50,7 @@ if TYPE_CHECKING:
 
 
 def update_lines(
-    annotated_lines: list[tuple[tuple["Commit", "TreeEntry"], bytes]],
+    annotated_lines: Sequence[tuple[tuple["Commit", "TreeEntry"], bytes]],
     new_history_data: tuple["Commit", "TreeEntry"],
     new_blob: "Blob",
 ) -> list[tuple[tuple["Commit", "TreeEntry"], bytes]]:
@@ -76,7 +77,7 @@ def annotate_lines(
     commit_id: bytes,
     path: bytes,
     order: str = ORDER_DATE,
-    lines: Optional[list[tuple[tuple["Commit", "TreeEntry"], bytes]]] = None,
+    lines: Optional[Sequence[tuple[tuple["Commit", "TreeEntry"], bytes]]] = None,
     follow: bool = True,
 ) -> list[tuple[tuple["Commit", "TreeEntry"], bytes]]:
     """Annotate the lines of a blob.

dulwich/attrs.py (+3 -3)

@@ -23,7 +23,7 @@
 
 import os
 import re
-from collections.abc import Generator, Iterator, Mapping
+from collections.abc import Generator, Iterator, Mapping, Sequence
 from typing import (
     IO,
     Optional,
@@ -199,7 +199,7 @@ class Pattern:
 
 
 def match_path(
-    patterns: list[tuple[Pattern, Mapping[bytes, AttributeValue]]], path: bytes
+    patterns: Sequence[tuple[Pattern, Mapping[bytes, AttributeValue]]], path: bytes
 ) -> dict[bytes, AttributeValue]:
     """Get attributes for a path by matching against patterns.
 
@@ -297,7 +297,7 @@ class GitAttributes:
         return match_path(self._patterns, path)
 
     def add_patterns(
-        self, patterns: list[tuple[Pattern, Mapping[bytes, AttributeValue]]]
+        self, patterns: Sequence[tuple[Pattern, Mapping[bytes, AttributeValue]]]
     ) -> None:
         """Add patterns to the collection.
 

dulwich/bisect.py (+5 -4)

@@ -21,6 +21,7 @@
 """Git bisect implementation."""
 
 import os
+from collections.abc import Sequence, Set
 from typing import Optional
 
 from dulwich.object_store import peel_sha
@@ -48,8 +49,8 @@ class BisectState:
     def start(
         self,
         bad: Optional[bytes] = None,
-        good: Optional[list[bytes]] = None,
-        paths: Optional[list[bytes]] = None,
+        good: Optional[Sequence[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
         no_checkout: bool = False,
         term_bad: str = "bad",
         term_good: str = "good",
@@ -186,7 +187,7 @@ class BisectState:
 
         return self._find_next_commit()
 
-    def skip(self, revs: Optional[list[bytes]] = None) -> Optional[bytes]:
+    def skip(self, revs: Optional[Sequence[bytes]] = None) -> Optional[bytes]:
         """Skip one or more commits.
 
         Args:
@@ -367,7 +368,7 @@ class BisectState:
         return next_commit
 
     def _find_bisect_candidates(
-        self, bad_sha: bytes, good_shas: list[bytes], skip_shas: set[bytes]
+        self, bad_sha: bytes, good_shas: Sequence[bytes], skip_shas: Set[bytes]
     ) -> list[bytes]:
         """Find all commits between good and bad commits.
 

dulwich/bundle.py (+3 -3)

@@ -21,7 +21,7 @@
 
 """Bundle format support."""
 
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from typing import (
     TYPE_CHECKING,
     BinaryIO,
@@ -222,8 +222,8 @@ def write_bundle(f: BinaryIO, bundle: Bundle) -> None:
 
 def create_bundle_from_repo(
     repo: "BaseRepo",
-    refs: Optional[list[bytes]] = None,
-    prerequisites: Optional[list[bytes]] = None,
+    refs: Optional[Sequence[bytes]] = None,
+    prerequisites: Optional[Sequence[bytes]] = None,
     version: Optional[int] = None,
     capabilities: Optional[dict[str, Optional[str]]] = None,
     progress: Optional[Callable[[str], None]] = None,

dulwich/cli.py (+98 -94)

@@ -38,7 +38,7 @@ import subprocess
 import sys
 import tempfile
 import types
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping, Sequence
 from pathlib import Path
 from types import TracebackType
 from typing import (
@@ -220,7 +220,9 @@ def detect_terminal_width() -> int:
 
 
 def write_columns(
-    items: Union[Iterator[bytes], list[bytes]], out: TextIO, width: Optional[int] = None
+    items: Union[Iterator[bytes], Sequence[bytes]],
+    out: TextIO,
+    width: Optional[int] = None,
 ) -> None:
     """Display items in formatted columns based on terminal width.
 
@@ -240,7 +242,9 @@ def write_columns(
 
     item_names = [item.decode() for item in items]
 
-    def columns(names: list[str], width: int, num_cols: int) -> tuple[bool, list[int]]:
+    def columns(
+        names: Sequence[str], width: int, num_cols: int
+    ) -> tuple[bool, list[int]]:
         if num_cols <= 0:
             return False, []
 
@@ -742,7 +746,7 @@ def enable_pager() -> None:
 class Command:
     """A Dulwich subcommand."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Run the command."""
         raise NotImplementedError(self.run)
 
@@ -750,7 +754,7 @@ class Command:
 class cmd_archive(Command):
     """Create an archive of files from a named tree."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the archive command.
 
         Args:
@@ -796,7 +800,7 @@ class cmd_archive(Command):
 class cmd_add(Command):
     """Add file contents to the index."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the add command.
 
         Args:
@@ -817,7 +821,7 @@ class cmd_add(Command):
 class cmd_annotate(Command):
     """Annotate each line in a file with commit information."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the annotate command.
 
         Args:
@@ -841,7 +845,7 @@ class cmd_annotate(Command):
 class cmd_blame(Command):
     """Show what revision and author last modified each line of a file."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the blame command.
 
         Args:
@@ -854,7 +858,7 @@ class cmd_blame(Command):
 class cmd_rm(Command):
     """Remove files from the working tree and from the index."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the rm command.
 
         Args:
@@ -873,7 +877,7 @@ class cmd_rm(Command):
 class cmd_mv(Command):
     """Move or rename a file, a directory, or a symlink."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the mv command.
 
         Args:
@@ -896,7 +900,7 @@ class cmd_mv(Command):
 class cmd_fetch_pack(Command):
     """Receive missing objects from another repository."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the fetch-pack command.
 
         Args:
@@ -914,7 +918,7 @@ class cmd_fetch_pack(Command):
         else:
 
             def determine_wants(
-                refs: dict[bytes, bytes], depth: Optional[int] = None
+                refs: Mapping[bytes, bytes], depth: Optional[int] = None
             ) -> list[bytes]:
                 return [y.encode("utf-8") for y in args.refs if y not in r.object_store]
 
@@ -924,7 +928,7 @@ class cmd_fetch_pack(Command):
 class cmd_fetch(Command):
     """Download objects and refs from another repository."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the fetch command.
 
         Args:
@@ -949,7 +953,7 @@ class cmd_fetch(Command):
 class cmd_for_each_ref(Command):
     """Output information on each ref."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the for-each-ref command.
 
         Args:
@@ -965,7 +969,7 @@ class cmd_for_each_ref(Command):
 class cmd_fsck(Command):
     """Verify the connectivity and validity of objects in the database."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the fsck command.
 
         Args:
@@ -980,7 +984,7 @@ class cmd_fsck(Command):
 class cmd_log(Command):
     """Show commit logs."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the log command.
 
         Args:
@@ -1015,7 +1019,7 @@ class cmd_log(Command):
 class cmd_diff(Command):
     """Show changes between commits, commit and working tree, etc."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the diff command.
 
         Args:
@@ -1147,7 +1151,7 @@ class cmd_diff(Command):
 class cmd_dump_pack(Command):
     """Dump the contents of a pack file for debugging."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the dump-pack command.
 
         Args:
@@ -1183,7 +1187,7 @@ class cmd_dump_pack(Command):
 class cmd_dump_index(Command):
     """Show information about a pack index file."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the dump-index command.
 
         Args:
@@ -1202,7 +1206,7 @@ class cmd_dump_index(Command):
 class cmd_init(Command):
     """Create an empty Git repository or reinitialize an existing one."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the init command.
 
         Args:
@@ -1223,7 +1227,7 @@ class cmd_init(Command):
 class cmd_clone(Command):
     """Clone a repository into a new directory."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the clone command.
 
         Args:
@@ -1333,7 +1337,7 @@ def _get_commit_message_with_template(
 class cmd_commit(Command):
     """Record changes to the repository."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the commit command.
 
         Args:
@@ -1398,7 +1402,7 @@ class cmd_commit(Command):
 class cmd_commit_tree(Command):
     """Create a new commit object from a tree."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the commit-tree command.
 
         Args:
@@ -1414,7 +1418,7 @@ class cmd_commit_tree(Command):
 class cmd_update_server_info(Command):
     """Update auxiliary info file to help dumb servers."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the update-server-info command.
 
         Args:
@@ -1426,7 +1430,7 @@ class cmd_update_server_info(Command):
 class cmd_symbolic_ref(Command):
     """Read, modify and delete symbolic refs."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the symbolic-ref command.
 
         Args:
@@ -1475,7 +1479,7 @@ class cmd_symbolic_ref(Command):
 class cmd_pack_refs(Command):
     """Pack heads and tags for efficient repository access."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the pack-refs command.
 
         Args:
@@ -1494,7 +1498,7 @@ class cmd_pack_refs(Command):
 class cmd_show(Command):
     """Show various types of objects."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the show command.
 
         Args:
@@ -1553,7 +1557,7 @@ class cmd_show(Command):
 class cmd_diff_tree(Command):
     """Compare the content and mode of trees."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the diff-tree command.
 
         Args:
@@ -1569,7 +1573,7 @@ class cmd_diff_tree(Command):
 class cmd_rev_list(Command):
     """List commit objects in reverse chronological order."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the rev-list command.
 
         Args:
@@ -1584,7 +1588,7 @@ class cmd_rev_list(Command):
 class cmd_tag(Command):
     """Create, list, delete or verify a tag object."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the tag command.
 
         Args:
@@ -1613,7 +1617,7 @@ class cmd_tag(Command):
 class cmd_repack(Command):
     """Pack unpacked objects in a repository."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the repack command.
 
         Args:
@@ -1627,7 +1631,7 @@ class cmd_repack(Command):
 class cmd_reflog(Command):
     """Manage reflog information."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the reflog command.
 
         Args:
@@ -1679,7 +1683,7 @@ class cmd_reflog(Command):
 class cmd_reset(Command):
     """Reset current HEAD to the specified state."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the reset command.
 
         Args:
@@ -1714,7 +1718,7 @@ class cmd_reset(Command):
 class cmd_revert(Command):
     """Revert some existing commits."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the revert command.
 
         Args:
@@ -1745,7 +1749,7 @@ class cmd_revert(Command):
 class cmd_daemon(Command):
     """Run a simple Git protocol server."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the daemon command.
 
         Args:
@@ -1785,7 +1789,7 @@ class cmd_daemon(Command):
 class cmd_web_daemon(Command):
     """Run a simple HTTP server for Git repositories."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the web-daemon command.
 
         Args:
@@ -1823,7 +1827,7 @@ class cmd_web_daemon(Command):
 class cmd_write_tree(Command):
     """Create a tree object from the current index."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the write-tree command.
 
         Args:
@@ -1837,7 +1841,7 @@ class cmd_write_tree(Command):
 class cmd_receive_pack(Command):
     """Receive what is pushed into the repository."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the receive-pack command.
 
         Args:
@@ -1852,7 +1856,7 @@ class cmd_receive_pack(Command):
 class cmd_upload_pack(Command):
     """Send objects packed back to git-fetch-pack."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the upload-pack command.
 
         Args:
@@ -1867,7 +1871,7 @@ class cmd_upload_pack(Command):
 class cmd_shortlog(Command):
     """Show a shortlog of commits by author."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the shortlog command with the given CLI arguments.
 
         Args:
@@ -1903,7 +1907,7 @@ class cmd_shortlog(Command):
 class cmd_status(Command):
     """Show the working tree status."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the status command.
 
         Args:
@@ -1936,7 +1940,7 @@ class cmd_status(Command):
 class cmd_ls_remote(Command):
     """List references in a remote repository."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the ls-remote command.
 
         Args:
@@ -1966,7 +1970,7 @@ class cmd_ls_remote(Command):
 class cmd_ls_tree(Command):
     """List the contents of a tree object."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the ls-tree command.
 
         Args:
@@ -1999,7 +2003,7 @@ class cmd_ls_tree(Command):
 class cmd_pack_objects(Command):
     """Create a packed archive of objects."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the pack-objects command.
 
         Args:
@@ -2043,7 +2047,7 @@ class cmd_pack_objects(Command):
 class cmd_unpack_objects(Command):
     """Unpack objects from a packed archive."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the unpack-objects command.
 
         Args:
@@ -2060,7 +2064,7 @@ class cmd_unpack_objects(Command):
 class cmd_prune(Command):
     """Prune all unreachable objects from the object database."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the prune command.
 
         Args:
@@ -2129,7 +2133,7 @@ class cmd_prune(Command):
 class cmd_pull(Command):
     """Fetch from and integrate with another repository or a local branch."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the pull command.
 
         Args:
@@ -2153,7 +2157,7 @@ class cmd_pull(Command):
 class cmd_push(Command):
     """Update remote refs along with associated objects."""
 
-    def run(self, argv: list[str]) -> Optional[int]:
+    def run(self, argv: Sequence[str]) -> Optional[int]:
         """Execute the push command.
 
         Args:
@@ -2178,7 +2182,7 @@ class cmd_push(Command):
 class cmd_remote_add(Command):
     """Add a remote repository."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the remote-add command.
 
         Args:
@@ -2197,7 +2201,7 @@ class SuperCommand(Command):
     subcommands: ClassVar[dict[str, type[Command]]] = {}
     default_command: ClassVar[Optional[type[Command]]] = None
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the subcommand command.
 
         Args:
@@ -2231,7 +2235,7 @@ class cmd_remote(SuperCommand):
 class cmd_submodule_list(Command):
     """List submodules."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-list command.
 
         Args:
@@ -2246,7 +2250,7 @@ class cmd_submodule_list(Command):
 class cmd_submodule_init(Command):
     """Initialize submodules."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-init command.
 
         Args:
@@ -2260,7 +2264,7 @@ class cmd_submodule_init(Command):
 class cmd_submodule_add(Command):
     """Add a submodule."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-add command.
 
         Args:
@@ -2277,7 +2281,7 @@ class cmd_submodule_add(Command):
 class cmd_submodule_update(Command):
     """Update submodules."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the submodule-update command.
 
         Args:
@@ -2316,7 +2320,7 @@ class cmd_submodule(SuperCommand):
 class cmd_check_ignore(Command):
     """Check whether files are excluded by gitignore."""
 
-    def run(self, args: list[str]) -> int:
+    def run(self, args: Sequence[str]) -> int:
         """Execute the check-ignore command.
 
         Args:
@@ -2335,7 +2339,7 @@ class cmd_check_ignore(Command):
 class cmd_check_mailmap(Command):
     """Show canonical names and email addresses of contacts."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the check-mailmap command.
 
         Args:
@@ -2352,7 +2356,7 @@ class cmd_check_mailmap(Command):
 class cmd_branch(Command):
     """List, create, or delete branches."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the branch command.
 
         Args:
@@ -2401,7 +2405,7 @@ class cmd_branch(Command):
         parsed_args = parser.parse_args(args)
 
         def print_branches(
-            branches: Union[Iterator[bytes], list[bytes]], use_columns: bool = False
+            branches: Union[Iterator[bytes], Sequence[bytes]], use_columns: bool = False
         ) -> None:
             if use_columns:
                 write_columns(branches, sys.stdout)
@@ -2464,7 +2468,7 @@ class cmd_branch(Command):
 class cmd_checkout(Command):
     """Switch branches or restore working tree files."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the checkout command.
 
         Args:
@@ -2509,7 +2513,7 @@ class cmd_checkout(Command):
 class cmd_stash_list(Command):
     """List stash entries."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the stash-list command.
 
         Args:
@@ -2533,7 +2537,7 @@ class cmd_stash_list(Command):
 class cmd_stash_push(Command):
     """Save your local modifications to a new stash."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the stash-push command.
 
         Args:
@@ -2548,7 +2552,7 @@ class cmd_stash_push(Command):
 class cmd_stash_pop(Command):
     """Apply a stash and remove it from the stash list."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the stash-pop command.
 
         Args:
@@ -2565,7 +2569,7 @@ class cmd_bisect(SuperCommand):
 
     subcommands: ClassVar[dict[str, type[Command]]] = {}
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the bisect command.
 
         Args:
@@ -2717,7 +2721,7 @@ class cmd_stash(SuperCommand):
 class cmd_ls_files(Command):
     """Show information about files in the index and working tree."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the ls-files command.
 
         Args:
@@ -2732,7 +2736,7 @@ class cmd_ls_files(Command):
 class cmd_describe(Command):
     """Give an object a human readable name based on an available ref."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the describe command.
 
         Args:
@@ -2746,7 +2750,7 @@ class cmd_describe(Command):
 class cmd_merge(Command):
     """Join two or more development histories together."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the merge command.
 
         Args:
@@ -2799,7 +2803,7 @@ class cmd_merge(Command):
 class cmd_notes_add(Command):
     """Add notes to a commit."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-add command.
 
         Args:
@@ -2821,7 +2825,7 @@ class cmd_notes_add(Command):
 class cmd_notes_show(Command):
     """Show notes for a commit."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-show command.
 
         Args:
@@ -2844,7 +2848,7 @@ class cmd_notes_show(Command):
 class cmd_notes_remove(Command):
     """Remove notes for a commit."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-remove command.
 
         Args:
@@ -2867,7 +2871,7 @@ class cmd_notes_remove(Command):
 class cmd_notes_list(Command):
     """List all note objects."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the notes-list command.
 
         Args:
@@ -2900,7 +2904,7 @@ class cmd_notes(SuperCommand):
 class cmd_cherry_pick(Command):
     """Apply the changes introduced by some existing commits."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the cherry-pick command.
 
         Args:
@@ -2975,7 +2979,7 @@ class cmd_cherry_pick(Command):
 class cmd_merge_tree(Command):
     """Show three-way merge without touching index."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the merge-tree command.
 
         Args:
@@ -3044,7 +3048,7 @@ class cmd_merge_tree(Command):
 class cmd_gc(Command):
     """Cleanup unnecessary files and optimize the local repository."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the gc command.
 
         Args:
@@ -3154,7 +3158,7 @@ class cmd_gc(Command):
 class cmd_count_objects(Command):
     """Count unpacked number of objects and their disk consumption."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the count-objects command.
 
         Args:
@@ -3189,7 +3193,7 @@ class cmd_count_objects(Command):
 class cmd_rebase(Command):
     """Reapply commits on top of another base tip."""
 
-    def run(self, args: list[str]) -> int:
+    def run(self, args: Sequence[str]) -> int:
         """Execute the rebase command.
 
         Args:
@@ -3313,7 +3317,7 @@ class cmd_rebase(Command):
 class cmd_filter_branch(Command):
     """Rewrite branches."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the filter-branch command.
 
         Args:
@@ -3462,13 +3466,13 @@ class cmd_filter_branch(Command):
         parent_filter = None
         if parsed_args.parent_filter:
 
-            def parent_filter(parents: list[bytes]) -> list[bytes]:
+            def parent_filter(parents: Sequence[bytes]) -> list[bytes]:
                 parent_str = " ".join(p.hex() for p in parents)
                 result = run_filter(
                     parsed_args.parent_filter, input_data=parent_str.encode()
                 )
                 if result is None:
-                    return parents
+                    return list(parents)
 
                 output = result.decode().strip()
                 if not output:
@@ -3582,7 +3586,7 @@ class cmd_lfs(Command):
 
     """Git LFS management commands."""
 
-    def run(self, argv: list[str]) -> None:
+    def run(self, argv: Sequence[str]) -> None:
         """Execute the lfs command.
 
         Args:
@@ -3771,7 +3775,7 @@ class cmd_lfs(Command):
 class cmd_help(Command):
     """Display help information about git."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the help command.
 
         Args:
@@ -3802,7 +3806,7 @@ class cmd_help(Command):
 class cmd_format_patch(Command):
     """Prepare patches for e-mail submission."""
 
-    def run(self, args: list[str]) -> None:
+    def run(self, args: Sequence[str]) -> None:
         """Execute the format-patch command.
 
         Args:
@@ -3866,7 +3870,7 @@ class cmd_format_patch(Command):
 class cmd_bundle(Command):
     """Create, unpack, and manipulate bundle files."""
 
-    def run(self, args: list[str]) -> int:
+    def run(self, args: Sequence[str]) -> int:
         """Execute the bundle command.
 
         Args:
@@ -3891,7 +3895,7 @@ class cmd_bundle(Command):
             logger.error("Unknown bundle subcommand: %s", subcommand)
             return 1
 
-    def _create(self, args: list[str]) -> int:
+    def _create(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle create")
         parser.add_argument(
             "-q", "--quiet", action="store_true", help="Suppress progress"
@@ -3983,7 +3987,7 @@ class cmd_bundle(Command):
 
         return 0
 
-    def _verify(self, args: list[str]) -> int:
+    def _verify(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle verify")
         parser.add_argument(
             "-q", "--quiet", action="store_true", help="Suppress output"
@@ -4023,7 +4027,7 @@ class cmd_bundle(Command):
                 bundle = read_bundle(f)
                 return verify_bundle(bundle)
 
-    def _list_heads(self, args: list[str]) -> int:
+    def _list_heads(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle list-heads")
         parser.add_argument("file", help="Bundle file (use - for stdin)")
         parser.add_argument("refnames", nargs="*", help="Only show these refs")
@@ -4045,7 +4049,7 @@ class cmd_bundle(Command):
 
         return 0
 
-    def _unbundle(self, args: list[str]) -> int:
+    def _unbundle(self, args: Sequence[str]) -> int:
         parser = argparse.ArgumentParser(prog="bundle unbundle")
         parser.add_argument("--progress", action="store_true", help="Show progress")
         parser.add_argument("file", help="Bundle file (use - for stdin)")
@@ -4096,7 +4100,7 @@ class cmd_worktree_add(Command):
 
     """Add a new worktree to the repository."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-add command.
 
         Args:
@@ -4151,7 +4155,7 @@ class cmd_worktree_list(Command):
 
     """List details of each worktree."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-list command.
 
         Args:
@@ -4213,7 +4217,7 @@ class cmd_worktree_remove(Command):
 
     """Remove a worktree."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-remove command.
 
         Args:
@@ -4241,7 +4245,7 @@ class cmd_worktree_prune(Command):
 
     """Prune worktree information."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-prune command.
 
         Args:
@@ -4286,7 +4290,7 @@ class cmd_worktree_lock(Command):
 
     """Lock a worktree."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-lock command.
 
         Args:
@@ -4314,7 +4318,7 @@ class cmd_worktree_unlock(Command):
 
     """Unlock a worktree."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-unlock command.
 
         Args:
@@ -4339,7 +4343,7 @@ class cmd_worktree_move(Command):
 
     """Move a worktree."""
 
-    def run(self, args: list[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> Optional[int]:
         """Execute the worktree-move command.
 
         Args:
@@ -4451,7 +4455,7 @@ commands = {
 }
 
 
-def main(argv: Optional[list[str]] = None) -> Optional[int]:
+def main(argv: Optional[Sequence[str]] = None) -> Optional[int]:
     """Main entry point for the Dulwich CLI.
 
     Args:
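
With run() taking Sequence[str], a command can be driven by any argv-like value: the list the dispatcher builds, a slice of sys.argv, or a tuple in a test. Existing list[str] callers keep type-checking unchanged, since list is itself a Sequence. A toy stand-in following the same signature (cmd_example is not a dulwich class):

    from collections.abc import Sequence
    from typing import Optional

    class cmd_example:
        def run(self, args: Sequence[str]) -> Optional[int]:
            # The arguments are only read, never mutated.
            print("args:", list(args))
            return 0

    cmd_example().run(["--verbose", "file.txt"])   # a list still works
    cmd_example().run(("--verbose", "file.txt"))   # and so does a tuple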

dulwich/client.py (+33 -33)

@@ -47,7 +47,7 @@ import select
 import socket
 import subprocess
 import sys
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping, Sequence, Set
 from contextlib import closing
 from io import BufferedReader, BytesIO
 from typing import (
@@ -87,8 +87,8 @@ if TYPE_CHECKING:
 
         def __call__(
             self,
-            have: set[bytes],
-            want: set[bytes],
+            have: Set[bytes],
+            want: Set[bytes],
             ofs_delta: bool = False,
         ) -> tuple[int, Iterator[UnpackedObject]]:
             """Generate pack data for the given have and want sets."""
@@ -99,7 +99,7 @@ if TYPE_CHECKING:
 
         def __call__(
             self,
-            refs: dict[bytes, bytes],
+            refs: Mapping[bytes, bytes],
             depth: Optional[int] = None,
         ) -> list[bytes]:
             """Determine the objects to fetch from the given refs."""
@@ -188,7 +188,7 @@ logger = logging.getLogger(__name__)
 class InvalidWants(Exception):
     """Invalid wants."""
 
-    def __init__(self, wants: set[bytes]) -> None:
+    def __init__(self, wants: Set[bytes]) -> None:
         """Initialize InvalidWants exception.
 
         Args:
@@ -214,7 +214,7 @@ class HTTPUnauthorized(Exception):
         self.url = url
 
 
-def _to_optional_dict(refs: dict[bytes, bytes]) -> dict[bytes, Optional[bytes]]:
+def _to_optional_dict(refs: Mapping[bytes, bytes]) -> dict[bytes, Optional[bytes]]:
     """Convert a dict[bytes, bytes] to dict[bytes, Optional[bytes]].
 
     This is needed for compatibility with result types that expect Optional values.
@@ -614,9 +614,9 @@ def _read_shallow_updates(pkt_seq: Iterable[bytes]) -> tuple[set[bytes], set[byt
 class _v1ReceivePackHeader:
     def __init__(
         self,
-        capabilities: list[bytes],
-        old_refs: dict[bytes, bytes],
-        new_refs: dict[bytes, bytes],
+        capabilities: Sequence[bytes],
+        old_refs: Mapping[bytes, bytes],
+        new_refs: Mapping[bytes, bytes],
     ) -> None:
         self.want: set[bytes] = set()
         self.have: set[bytes] = set()
@@ -628,9 +628,9 @@ class _v1ReceivePackHeader:
 
     def _handle_receive_pack_head(
         self,
-        capabilities: list[bytes],
-        old_refs: dict[bytes, bytes],
-        new_refs: dict[bytes, bytes],
+        capabilities: Sequence[bytes],
+        old_refs: Mapping[bytes, bytes],
+        new_refs: Mapping[bytes, bytes],
     ) -> Iterator[Optional[bytes]]:
         """Handle the head of a 'git-receive-pack' request.
 
@@ -798,7 +798,7 @@ def _handle_upload_pack_head(
 
 def _handle_upload_pack_tail(
     proto: "Protocol",
-    capabilities: set[bytes],
+    capabilities: Set[bytes],
     graph_walker: "GraphWalker",
     pack_data: Callable[[bytes], int],
     progress: Optional[Callable[[bytes], None]] = None,
@@ -996,7 +996,7 @@ class GitClient:
         branch: Optional[str] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> Repo:
@@ -1093,7 +1093,7 @@ class GitClient:
         determine_wants: Optional["DetermineWantsFunc"] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -1171,7 +1171,7 @@ class GitClient:
         *,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -1205,7 +1205,7 @@ class GitClient:
         self,
         path: bytes,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a git smart server.
 
@@ -1220,7 +1220,7 @@ class GitClient:
         raise NotImplementedError(self.get_refs)
 
     @staticmethod
-    def _should_send_pack(new_refs: dict[bytes, bytes]) -> bool:
+    def _should_send_pack(new_refs: Mapping[bytes, bytes]) -> bool:
         # The packfile MUST NOT be sent if the only command used is delete.
         return any(sha != ZERO_SHA for sha in new_refs.values())
 
@@ -1236,7 +1236,7 @@ class GitClient:
     def _handle_receive_pack_tail(
         self,
         proto: Protocol,
-        capabilities: set[bytes],
+        capabilities: Set[bytes],
         progress: Optional[Callable[[bytes], None]] = None,
     ) -> Optional[dict[bytes, Optional[str]]]:
         """Handle the tail of a 'git-receive-pack' request.
@@ -1317,7 +1317,7 @@ class GitClient:
         progress: Optional[Callable[[bytes], None]] = None,
         write_error: Optional[Callable[[bytes], None]] = None,
         format: Optional[bytes] = None,
-        subdirs: Optional[list[bytes]] = None,
+        subdirs: Optional[Sequence[bytes]] = None,
         prefix: Optional[bytes] = None,
     ) -> None:
         """Retrieve an archive of the specified tree."""
@@ -1333,7 +1333,7 @@ class GitClient:
         )
 
 
-def check_wants(wants: set[bytes], refs: dict[bytes, bytes]) -> None:
+def check_wants(wants: Set[bytes], refs: Mapping[bytes, bytes]) -> None:
     """Check that a set of wants is valid.
 
     Args:
@@ -1527,7 +1527,7 @@ class TraditionalGitClient(GitClient):
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -1675,7 +1675,7 @@ class TraditionalGitClient(GitClient):
         self,
         path: bytes,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a git smart server."""
         # stock `git ls-remote` uses upload-pack
@@ -1739,7 +1739,7 @@ class TraditionalGitClient(GitClient):
         progress: Optional[Callable[[bytes], None]] = None,
         write_error: Optional[Callable[[bytes], None]] = None,
         format: Optional[bytes] = None,
-        subdirs: Optional[list[bytes]] = None,
+        subdirs: Optional[Sequence[bytes]] = None,
         prefix: Optional[bytes] = None,
     ) -> None:
         """Request an archive of a specific commit.
@@ -2266,7 +2266,7 @@ class LocalGitClient(GitClient):
         determine_wants: Optional["DetermineWantsFunc"] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[bytes]] = None,
+        ref_prefix: Optional[Sequence[bytes]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -2311,7 +2311,7 @@ class LocalGitClient(GitClient):
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -2368,7 +2368,7 @@ class LocalGitClient(GitClient):
         self,
         path: Union[str, bytes],
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a local on-disk repository."""
         with self._open_repo(path) as target:
@@ -2586,7 +2586,7 @@ class BundleClient(GitClient):
         determine_wants: Optional["DetermineWantsFunc"] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -2638,7 +2638,7 @@ class BundleClient(GitClient):
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -2681,7 +2681,7 @@ class BundleClient(GitClient):
         self,
         path: Union[str, bytes],
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a bundle file."""
         bundle = self._open_bundle(path)
@@ -3358,7 +3358,7 @@ class AbstractHttpGitClient(GitClient):
         service: bytes,
         base_url: str,
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> tuple[
         dict[Ref, Optional[ObjectID]],
         set[bytes],
@@ -3632,7 +3632,7 @@ class AbstractHttpGitClient(GitClient):
         pack_data: Callable[[bytes], int],
         progress: Optional[Callable[[bytes], None]] = None,
         depth: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
         filter_spec: Optional[bytes] = None,
         protocol_version: Optional[int] = None,
     ) -> FetchPackResult:
@@ -3772,7 +3772,7 @@ class AbstractHttpGitClient(GitClient):
         self,
         path: Union[str, bytes],
         protocol_version: Optional[int] = None,
-        ref_prefix: Optional[list[Ref]] = None,
+        ref_prefix: Optional[Sequence[Ref]] = None,
     ) -> LsRemoteResult:
         """Retrieve the current refs from a git smart server."""
         url = self._get_url(path)
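
For the client callbacks the widening works through the callable type: the determine_wants callable (DetermineWantsFunc) now declares its refs parameter as Mapping[bytes, bytes], so a conforming callback must accept any mapping it is handed and, in return, the client can pass plain dicts or read-only ref views without a cast. A hedged sketch of a callback that satisfies that shape; the ref names are invented:

    from collections.abc import Mapping
    from typing import Optional

    def determine_wants(
        refs: Mapping[bytes, bytes], depth: Optional[int] = None
    ) -> list[bytes]:
        # Read-only iteration over the advertised refs; no mutation is needed
        # or allowed under the Mapping annotation.
        return [sha for ref, sha in refs.items() if ref.startswith(b"refs/heads/")]

    determine_wants({b"refs/heads/main": b"a" * 40, b"refs/tags/v1.0": b"b" * 40})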

dulwich/commit_graph.py (+5 -5)

@@ -18,7 +18,7 @@ https://git-scm.com/docs/gitformat-commit-graph
 
 import os
 import struct
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from typing import TYPE_CHECKING, BinaryIO, Optional, Union
 
 from .file import _GitFile
@@ -246,7 +246,7 @@ class CommitGraph:
             )
             self.entries.append(entry)
 
-    def _parse_extra_edges(self, offset: int, oids: list[bytes]) -> list[bytes]:
+    def _parse_extra_edges(self, offset: int, oids: Sequence[bytes]) -> list[bytes]:
         """Parse extra parent edges for commits with 3+ parents."""
         if CHUNK_EXTRA_EDGE_LIST not in self.chunks:
             return []
@@ -425,7 +425,7 @@ def find_commit_graph_file(git_dir: Union[str, bytes]) -> Optional[bytes]:
 
 
 def generate_commit_graph(
-    object_store: "BaseObjectStore", commit_ids: list[ObjectID]
+    object_store: "BaseObjectStore", commit_ids: Sequence[ObjectID]
 ) -> CommitGraph:
     """Generate a commit graph from a set of commits.
 
@@ -540,7 +540,7 @@ def generate_commit_graph(
 def write_commit_graph(
     git_dir: Union[str, bytes],
     object_store: "BaseObjectStore",
-    commit_ids: list[ObjectID],
+    commit_ids: Sequence[ObjectID],
 ) -> None:
     """Write a commit graph file for the given commits.
 
@@ -571,7 +571,7 @@ def write_commit_graph(
 
 
 def get_reachable_commits(
-    object_store: "BaseObjectStore", start_commits: list[ObjectID]
+    object_store: "BaseObjectStore", start_commits: Sequence[ObjectID]
 ) -> list[ObjectID]:
     """Get all commits reachable from the given starting commits.
 

dulwich/config.py (+6 -5)

@@ -34,6 +34,7 @@ from collections.abc import (
     Iterable,
     Iterator,
     KeysView,
+    Mapping,
     MutableMapping,
     ValuesView,
 )
@@ -932,7 +933,7 @@ class ConfigFile(ConfigDict):
         include_depth: int = 0,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
         file_opener: Optional[FileOpener] = None,
-        condition_matchers: Optional[dict[str, ConditionMatcher]] = None,
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
     ) -> "ConfigFile":
         """Read configuration from a file-like object.
 
@@ -1038,7 +1039,7 @@ class ConfigFile(ConfigDict):
         include_depth: int,
         max_include_depth: int,
         file_opener: Optional[FileOpener],
-        condition_matchers: Optional[dict[str, ConditionMatcher]],
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]],
     ) -> None:
         """Handle include/includeIf directives during config parsing."""
         if (
@@ -1068,7 +1069,7 @@ class ConfigFile(ConfigDict):
         include_depth: int,
         max_include_depth: int,
         file_opener: Optional[FileOpener],
-        condition_matchers: Optional[dict[str, ConditionMatcher]],
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]],
     ) -> None:
         """Process an include or includeIf directive."""
         path_str = path_value.decode(self.encoding, errors="replace")
@@ -1156,7 +1157,7 @@ class ConfigFile(ConfigDict):
         self,
         condition: str,
         config_dir: Optional[str] = None,
-        condition_matchers: Optional[dict[str, ConditionMatcher]] = None,
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
     ) -> bool:
         """Evaluate an includeIf condition."""
         # Try custom matchers first if provided
@@ -1246,7 +1247,7 @@ class ConfigFile(ConfigDict):
         *,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
         file_opener: Optional[FileOpener] = None,
-        condition_matchers: Optional[dict[str, ConditionMatcher]] = None,
+        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
     ) -> "ConfigFile":
         """Read configuration from a file on disk.
 

dulwich/contrib/diffstat.py (+3 -2)

@@ -45,6 +45,7 @@ statistics about changes, including:
 
 import re
 import sys
+from collections.abc import Sequence
 from typing import Optional
 
 # only needs to detect git style diffs as this is for
@@ -67,7 +68,7 @@ _GIT_UNCHANGED_START = b" "
 
 
 def _parse_patch(
-    lines: list[bytes],
+    lines: Sequence[bytes],
 ) -> tuple[list[bytes], list[bool], list[tuple[int, int]]]:
     """Parse a git style diff or patch to generate diff stats.
 
@@ -121,7 +122,7 @@ def _parse_patch(
 
 # note must all done using bytes not string because on linux filenames
 # may not be encodable even to utf-8
-def diffstat(lines: list[bytes], max_width: int = 80) -> bytes:
+def diffstat(lines: Sequence[bytes], max_width: int = 80) -> bytes:
     """Generate summary statistics from a git style diff ala (git diff tag1 tag2 --stat).
 
     Args:

dulwich/contrib/swift.py (+6 -8)

@@ -36,7 +36,7 @@ import sys
 import tempfile
 import urllib.parse as urlparse
 import zlib
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping
 from configparser import ConfigParser
 from io import BytesIO
 from typing import Any, BinaryIO, Callable, Optional, Union, cast
@@ -1004,7 +1004,7 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
                 return False
         return refs
 
-    def _write_refs(self, refs: dict[bytes, bytes]) -> None:
+    def _write_refs(self, refs: Mapping[bytes, bytes]) -> None:
         f = BytesIO()
         f.writelines(write_info_refs(refs, cast("ObjectContainer", self.store)))
         self.scon.put_object(self.filename, f)
@@ -1250,17 +1250,15 @@ def main(argv: list[str] = sys.argv) -> None:
         "daemon": cmd_daemon,
     }
 
-    if len(sys.argv) < 2:
-        print(
-            "Usage: {} <{}> [OPTIONS...]".format(sys.argv[0], "|".join(commands.keys()))
-        )
+    if len(argv) < 2:
+        print("Usage: {} <{}> [OPTIONS...]".format(argv[0], "|".join(commands.keys())))
         sys.exit(1)
 
-    cmd = sys.argv[1]
+    cmd = argv[1]
     if cmd not in commands:
         print(f"No such subcommand: {cmd}")
         sys.exit(1)
-    commands[cmd](sys.argv[2:])
+    commands[cmd](argv[2:])
 
 
 if __name__ == "__main__":

dulwich/diff.py (+5 -5)

@@ -49,7 +49,7 @@ import logging
 import os
 import stat
 import sys
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from typing import BinaryIO, Optional, Union
 
 if sys.version_info >= (3, 12):
@@ -66,7 +66,7 @@ from .repo import Repo
 logger = logging.getLogger(__name__)
 
 
-def should_include_path(path: bytes, paths: Optional[list[bytes]]) -> bool:
+def should_include_path(path: bytes, paths: Optional[Sequence[bytes]]) -> bool:
     """Check if a path should be included based on path filters.
 
     Args:
@@ -85,7 +85,7 @@ def diff_index_to_tree(
     repo: Repo,
     outstream: BinaryIO,
     commit_sha: Optional[bytes] = None,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
     """Show staged changes (index vs commit).
@@ -130,7 +130,7 @@ def diff_working_tree_to_tree(
     repo: Repo,
     outstream: BinaryIO,
     commit_sha: bytes,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
     """Compare working tree to a specific commit.
@@ -375,7 +375,7 @@ def diff_working_tree_to_tree(
 def diff_working_tree_to_index(
     repo: Repo,
     outstream: BinaryIO,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     diff_algorithm: Optional[str] = None,
 ) -> None:
     """Compare working tree to index.

dulwich/diff_tree.py (+11 -8)

@@ -23,7 +23,8 @@
 
 import stat
 from collections import defaultdict
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Set as AbstractSet
 from io import BytesIO
 from itertools import chain
 from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, TypeVar
@@ -143,7 +144,7 @@ def walk_trees(
     tree1_id: Optional[ObjectID],
     tree2_id: Optional[ObjectID],
     prune_identical: bool = False,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
 ) -> Iterator[tuple[Optional[TreeEntry], Optional[TreeEntry]]]:
     """Recursively walk all the entries of two trees.
 
@@ -262,7 +263,7 @@ def tree_changes(
     rename_detector: Optional["RenameDetector"] = None,
     include_trees: bool = False,
     change_type_same: bool = False,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
 ) -> Iterator[TreeChange]:
     """Find the differences between the contents of two trees.
 
@@ -331,20 +332,20 @@ T = TypeVar("T")
 U = TypeVar("U")
 
 
-def _all_eq(seq: list[T], key: Callable[[T], U], value: U) -> bool:
+def _all_eq(seq: Sequence[T], key: Callable[[T], U], value: U) -> bool:
     for e in seq:
         if key(e) != value:
             return False
     return True
 
 
-def _all_same(seq: list[Any], key: Callable[[Any], Any]) -> bool:
+def _all_same(seq: Sequence[Any], key: Callable[[Any], Any]) -> bool:
     return _all_eq(seq[1:], key, key(seq[0]))
 
 
 def tree_changes_for_merge(
     store: BaseObjectStore,
-    parent_tree_ids: list[ObjectID],
+    parent_tree_ids: Sequence[ObjectID],
     tree_id: ObjectID,
     rename_detector: Optional["RenameDetector"] = None,
 ) -> Iterator[list[Optional[TreeChange]]]:
@@ -451,7 +452,7 @@ def _count_blocks(obj: ShaFile) -> dict[int, int]:
     return block_counts
 
 
-def _common_bytes(blocks1: dict[int, int], blocks2: dict[int, int]) -> int:
+def _common_bytes(blocks1: Mapping[int, int], blocks2: Mapping[int, int]) -> int:
     """Count the number of common bytes in two block count dicts.
 
     Args:
@@ -608,7 +609,9 @@ class RenameDetector:
         ):
             self._add_change(change)
 
-    def _prune(self, add_paths: set[bytes], delete_paths: set[bytes]) -> None:
+    def _prune(
+        self, add_paths: AbstractSet[bytes], delete_paths: AbstractSet[bytes]
+    ) -> None:
         def check_add(a: TreeChange) -> bool:
             assert a.new is not None
             return a.new.path not in add_paths
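
In diff_tree.py the abstract protocol is imported under an alias, Set as AbstractSet, a common convention that keeps it visually distinct from the builtin set the module constructs elsewhere. A minimal sketch of the same naming trick, not taken from the module:

    from collections.abc import Set as AbstractSet

    def untouched(paths: AbstractSet[bytes], changed: AbstractSet[bytes]) -> set[bytes]:
        # Parameters stay abstract and read-only; the return value is a fresh,
        # concrete set built here.
        return {p for p in paths if p not in changed}

    untouched(frozenset({b"a", b"b"}), {b"b"})   # -> {b"a"}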

dulwich/dumb.py (+4 -2)

@@ -24,7 +24,7 @@
 import os
 import tempfile
 import zlib
-from collections.abc import Iterator, Sequence
+from collections.abc import Iterator, Mapping, Sequence
 from io import BytesIO
 from typing import Any, Callable, Optional
 from urllib.parse import urljoin
@@ -434,7 +434,9 @@ class DumbRemoteHTTPRepo:
 
     def fetch_pack_data(
         self,
-        determine_wants: Callable[[dict[Ref, ObjectID], Optional[int]], list[ObjectID]],
+        determine_wants: Callable[
+            [Mapping[Ref, ObjectID], Optional[int]], list[ObjectID]
+        ],
         graph_walker: object,
         progress: Optional[Callable[[bytes], None]] = None,
         *,

dulwich/errors.py (+2 -1)

@@ -27,6 +27,7 @@
 # that raises the error.
 
 import binascii
+from collections.abc import Sequence
 from typing import Optional, Union
 
 
@@ -197,7 +198,7 @@ class SendPackError(GitProtocolError):
 class HangupException(GitProtocolError):
     """Hangup exception."""
 
-    def __init__(self, stderr_lines: Optional[list[bytes]] = None) -> None:
+    def __init__(self, stderr_lines: Optional[Sequence[bytes]] = None) -> None:
         """Initialize a HangupException.
 
         Args:

dulwich/filter_branch.py (+3 -2)

@@ -24,6 +24,7 @@
 import os
 import tempfile
 import warnings
+from collections.abc import Sequence
 from typing import Callable, Optional, TypedDict
 
 from .index import Index, build_index_from_tree
@@ -58,7 +59,7 @@ class CommitFilter:
         filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
         tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
         index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
-        parent_filter: Optional[Callable[[list[bytes]], list[bytes]]] = None,
+        parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
         commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
         subdirectory_filter: Optional[bytes] = None,
         prune_empty: bool = False,
@@ -377,7 +378,7 @@ class CommitFilter:
 def filter_refs(
     refs: RefsContainer,
     object_store: BaseObjectStore,
-    ref_names: list[bytes],
+    ref_names: Sequence[bytes],
     commit_filter: CommitFilter,
     *,
     keep_original: bool = True,
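
The callback types keep a concrete return: parent_filter accepts a Sequence[bytes] but must still produce a list[bytes], which is why the identity case in the cli.py hunk earlier in this commit now returns list(parents) instead of the parameter itself. A hedged sketch of a conforming filter; the drop-first behaviour is invented for illustration:

    from collections.abc import Sequence

    def drop_first_parent(parents: Sequence[bytes]) -> list[bytes]:
        # The input may be a tuple or any other sequence, so build a new list
        # rather than returning the parameter unchanged.
        return list(parents[1:])

    drop_first_parent((b"c0ffee", b"deadbeef"))   # -> [b"deadbeef"]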

dulwich/graph.py (+10 -6)

@@ -20,7 +20,7 @@
 
 """Implementation of merge-base following the approach of git."""
 
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping, Sequence
 from heapq import heappop, heappush
 from typing import TYPE_CHECKING, Callable, Generic, Optional, TypeVar
 
@@ -77,7 +77,7 @@ class WorkList(Generic[T]):
 def _find_lcas(
     lookup_parents: Callable[[ObjectID], list[ObjectID]],
     c1: ObjectID,
-    c2s: list[ObjectID],
+    c2s: Sequence[ObjectID],
     lookup_stamp: Callable[[ObjectID], int],
     min_stamp: int = 0,
     shallows: Optional[set[ObjectID]] = None,
@@ -104,7 +104,9 @@ def _find_lcas(
     _DNC = 4  # Do Not Consider
     _LCA = 8  # potential LCA (Lowest Common Ancestor)
 
-    def _has_candidates(wlst: WorkList[ObjectID], cstates: dict[ObjectID, int]) -> bool:
+    def _has_candidates(
+        wlst: WorkList[ObjectID], cstates: Mapping[ObjectID, int]
+    ) -> bool:
         """Check if there are any candidate commits in the work list.
 
         Args:
@@ -203,7 +205,7 @@ def _find_lcas(
 
 
 # actual git sorts these based on commit times
-def find_merge_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[ObjectID]:
+def find_merge_base(repo: "BaseRepo", commit_ids: Sequence[ObjectID]) -> list[ObjectID]:
     """Find lowest common ancestors of commit_ids[0] and *any* of commits_ids[1:].
 
     Args:
@@ -236,7 +238,7 @@ def find_merge_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[Object
     c1 = commit_ids[0]
     if not len(commit_ids) > 1:
         return [c1]
-    c2s = commit_ids[1:]
+    c2s = list(commit_ids[1:])
     if c1 in c2s:
         return [c1]
     lcas = _find_lcas(
@@ -245,7 +247,9 @@ def find_merge_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[Object
     return lcas
 
 
-def find_octopus_base(repo: "BaseRepo", commit_ids: list[ObjectID]) -> list[ObjectID]:
+def find_octopus_base(
+    repo: "BaseRepo", commit_ids: Sequence[ObjectID]
+) -> list[ObjectID]:
     """Find lowest common ancestors of *all* provided commit_ids.
 
     Args:

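Note on the c2s change above: once commit_ids is a Sequence, a slice keeps the argument's concrete type (a tuple slice is a tuple), so the code materializes a list explicitly before handing it to _find_lcas. A small standalone illustration of the same point, with the helper name tail made up for this note:

    from collections.abc import Sequence

    def tail(ids: Sequence[bytes]) -> list[bytes]:
        # Slicing a Sequence preserves its concrete type, so convert
        # explicitly when downstream code expects a real list.
        return list(ids[1:])

    assert tail((b"a", b"b", b"c")) == [b"b", b"c"]
    assert tail([b"a", b"b"]) == [b"b"]
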
+ 4 - 3
dulwich/greenthreads.py

@@ -23,6 +23,7 @@
 
 """Utility module for querying an ObjectStore with gevent."""
 
+from collections.abc import Sequence
 from typing import Callable, Optional
 
 import gevent
@@ -39,7 +40,7 @@ from .objects import Commit, ObjectID, Tag
 
 def _split_commits_and_tags(
     obj_store: BaseObjectStore,
-    lst: list[ObjectID],
+    lst: Sequence[ObjectID],
     *,
     ignore_unknown: bool = False,
     pool: pool.Pool,
@@ -82,8 +83,8 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
     def __init__(
         self,
         object_store: BaseObjectStore,
-        haves: list[ObjectID],
-        wants: list[ObjectID],
+        haves: Sequence[ObjectID],
+        wants: Sequence[ObjectID],
         progress: Optional[Callable[[bytes], None]] = None,
         get_tagged: Optional[Callable[[], dict[ObjectID, ObjectID]]] = None,
         concurrency: int = 1,

+ 4 - 1
dulwich/hooks.py

@@ -23,6 +23,7 @@
 
 import os
 import subprocess
+from collections.abc import Sequence
 from typing import Any, Callable, Optional
 
 from .errors import HookError
@@ -188,7 +189,9 @@ class PostReceiveShellHook(ShellHook):
         filepath = os.path.join(controldir, "hooks", "post-receive")
         ShellHook.__init__(self, "post-receive", path=filepath, numparam=0)
 
-    def execute(self, client_refs: list[tuple[bytes, bytes, bytes]]) -> Optional[bytes]:
+    def execute(
+        self, client_refs: Sequence[tuple[bytes, bytes, bytes]]
+    ) -> Optional[bytes]:
         """Execute the post-receive hook.
 
         Args:

+ 3 - 3
dulwich/ignore.py

@@ -28,7 +28,7 @@ For example, use "dir/" instead of "dir" to check if a directory is ignored.
 
 import os.path
 import re
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from contextlib import suppress
 from typing import TYPE_CHECKING, BinaryIO, Optional, Union
 
@@ -47,7 +47,7 @@ def _pattern_to_str(pattern: Union["Pattern", bytes, str]) -> str:
     return pattern_data.decode() if isinstance(pattern_data, bytes) else pattern_data
 
 
-def _check_parent_exclusion(path: str, matching_patterns: list["Pattern"]) -> bool:
+def _check_parent_exclusion(path: str, matching_patterns: Sequence["Pattern"]) -> bool:
     """Check if a parent directory exclusion prevents negation patterns from taking effect.
 
     Args:
@@ -163,7 +163,7 @@ def _translate_segment(segment: bytes) -> bytes:
     return res
 
 
-def _handle_double_asterisk(segments: list[bytes], i: int) -> tuple[bytes, bool]:
+def _handle_double_asterisk(segments: Sequence[bytes], i: int) -> tuple[bytes, bool]:
     """Handle ** segment processing, returns (regex_part, skip_next)."""
     # Check if ** is at end
     remaining = segments[i + 1 :]

+ 6 - 6
dulwich/index.py

@@ -28,7 +28,7 @@ import stat
 import struct
 import sys
 import types
-from collections.abc import Generator, Iterable, Iterator
+from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
 from dataclasses import dataclass
 from enum import Enum
 from typing import (
@@ -876,9 +876,9 @@ def read_index_dict(
 
 def write_index(
     f: IO[bytes],
-    entries: list[SerializedIndexEntry],
+    entries: Sequence[SerializedIndexEntry],
     version: Optional[int] = None,
-    extensions: Optional[list[IndexExtension]] = None,
+    extensions: Optional[Sequence[IndexExtension]] = None,
 ) -> None:
     """Write an index file.
 
@@ -917,9 +917,9 @@ def write_index(
 
 def write_index_dict(
     f: IO[bytes],
-    entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]],
+    entries: Mapping[bytes, Union[IndexEntry, ConflictedIndexEntry]],
     version: Optional[int] = None,
-    extensions: Optional[list[IndexExtension]] = None,
+    extensions: Optional[Sequence[IndexExtension]] = None,
 ) -> None:
     """Write an index file based on the contents of a dictionary.
 
@@ -2102,7 +2102,7 @@ def _transition_to_absent(
 
 
 def detect_case_only_renames(
-    changes: list["TreeChange"],
+    changes: Sequence["TreeChange"],
     config: "Config",
 ) -> list["TreeChange"]:
     """Detect and transform case-only renames in a list of tree changes.

+ 2 - 2
dulwich/lfs.py

@@ -37,7 +37,7 @@ import json
 import logging
 import os
 import tempfile
-from collections.abc import Iterable
+from collections.abc import Iterable, Mapping
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, BinaryIO, Optional, Union
 from urllib.parse import urljoin, urlparse
@@ -478,7 +478,7 @@ class LFSClient:
         response_data = json.loads(response)
         return self._parse_batch_response(response_data)
 
-    def _parse_batch_response(self, data: dict[str, Any]) -> LFSBatchResponse:
+    def _parse_batch_response(self, data: Mapping[str, Any]) -> LFSBatchResponse:
         """Parse JSON response into LFSBatchResponse dataclass."""
         objects = []
         for obj_data in data.get("objects", []):

+ 4 - 1
dulwich/lfs_server.py

@@ -25,6 +25,7 @@ import hashlib
 import json
 import tempfile
 import typing
+from collections.abc import Mapping
 from http.server import BaseHTTPRequestHandler, HTTPServer
 from typing import Optional
 
@@ -36,7 +37,9 @@ class LFSRequestHandler(BaseHTTPRequestHandler):
 
     server: "LFSServer"  # Type annotation for the server attribute
 
-    def send_json_response(self, status_code: int, data: dict[str, typing.Any]) -> None:
+    def send_json_response(
+        self, status_code: int, data: Mapping[str, typing.Any]
+    ) -> None:
         """Send a JSON response."""
         response = json.dumps(data).encode("utf-8")
         self.send_response(status_code)

+ 5 - 4
dulwich/line_ending.py

@@ -138,6 +138,7 @@ Sources:
 """
 
 import logging
+from collections.abc import Mapping
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union
 
 if TYPE_CHECKING:
@@ -424,7 +425,7 @@ def get_clean_filter_autocrlf(
 # Backwards compatibility wrappers
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkout_filter(
-    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: dict[str, Any]
+    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: Mapping[str, Any]
 ) -> Optional[Callable[[bytes], bytes]]:
     """Deprecated: Use get_smudge_filter instead."""
     # Convert core_autocrlf to bytes for compatibility
@@ -441,7 +442,7 @@ def get_checkout_filter(
 
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkin_filter(
-    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: dict[str, Any]
+    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: Mapping[str, Any]
 ) -> Optional[Callable[[bytes], bytes]]:
     """Deprecated: Use get_clean_filter instead."""
     # Convert core_autocrlf to bytes for compatibility
@@ -481,7 +482,7 @@ class BlobNormalizer(FilterBlobNormalizer):
     def __init__(
         self,
         config_stack: "StackedConfig",
-        gitattributes: dict[str, Any],
+        gitattributes: Mapping[str, Any],
         core_eol: str = "native",
         autocrlf: bytes = b"false",
         safecrlf: bytes = b"false",
@@ -634,7 +635,7 @@ class TreeBlobNormalizer(BlobNormalizer):
     def __init__(
         self,
         config_stack: "StackedConfig",
-        git_attributes: dict[str, Any],
+        git_attributes: Mapping[str, Any],
         object_store: "BaseObjectStore",
         tree: Optional[ObjectID] = None,
         core_eol: str = "native",

+ 5 - 4
dulwich/merge.py

@@ -1,5 +1,6 @@
 """Git merge implementation."""
 
+from collections.abc import Sequence
 from difflib import SequenceMatcher
 from typing import TYPE_CHECKING, Optional
 
@@ -19,9 +20,9 @@ from dulwich.objects import S_ISGITLINK, Blob, Commit, Tree, is_blob, is_tree
 
 
 def make_merge3(
-    base: list[bytes],
-    a: list[bytes],
-    b: list[bytes],
+    base: Sequence[bytes],
+    a: Sequence[bytes],
+    b: Sequence[bytes],
     is_cherrypick: bool = False,
     sequence_matcher: Optional[type[SequenceMatcher[bytes]]] = None,
 ) -> "merge3.Merge3":
@@ -49,7 +50,7 @@ class MergeConflict(Exception):
 
 
 def _can_merge_lines(
-    base_lines: list[bytes], a_lines: list[bytes], b_lines: list[bytes]
+    base_lines: Sequence[bytes], a_lines: Sequence[bytes], b_lines: Sequence[bytes]
 ) -> bool:
     """Check if lines can be merged without conflict."""
     # If one side is unchanged, we can take the other side

+ 6 - 4
dulwich/notes.py

@@ -21,7 +21,7 @@
 """Git notes handling."""
 
 import stat
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from typing import TYPE_CHECKING, Optional
 
 from .objects import Blob, Tree
@@ -240,7 +240,7 @@ class NotesTree:
 
             # Build new tree structure
             def update_tree(
-                tree: Tree, components: list[bytes], blob_sha: bytes
+                tree: Tree, components: Sequence[bytes], blob_sha: bytes
             ) -> Tree:
                 """Update tree with new note entry.
 
@@ -411,7 +411,9 @@ class NotesTree:
         components = path.split(b"/")
 
         # Build new tree structure
-        def update_tree(tree: Tree, components: list[bytes], blob_sha: bytes) -> Tree:
+        def update_tree(
+            tree: Tree, components: Sequence[bytes], blob_sha: bytes
+        ) -> Tree:
             """Update tree with new note entry.
 
             Args:
@@ -485,7 +487,7 @@ class NotesTree:
         components = path.split(b"/")
 
         # Build new tree structure without the note
-        def remove_from_tree(tree: Tree, components: list[bytes]) -> Optional[Tree]:
+        def remove_from_tree(tree: Tree, components: Sequence[bytes]) -> Optional[Tree]:
             """Remove note entry from tree.
 
             Args:

+ 13 - 11
dulwich/object_store.py

@@ -29,7 +29,7 @@ import stat
 import sys
 import time
 import warnings
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Iterable, Iterator, Mapping, Sequence, Set
 from contextlib import suppress
 from io import BytesIO
 from pathlib import Path
@@ -232,7 +232,7 @@ class BaseObjectStore:
     """Object store interface."""
 
     def determine_wants_all(
-        self, refs: dict[Ref, ObjectID], depth: Optional[int] = None
+        self, refs: Mapping[Ref, ObjectID], depth: Optional[int] = None
     ) -> list[ObjectID]:
         """Determine which objects are wanted based on refs."""
 
@@ -314,7 +314,7 @@ class BaseObjectStore:
         include_trees: bool = False,
         change_type_same: bool = False,
         rename_detector: Optional["RenameDetector"] = None,
-        paths: Optional[list[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
     ) -> Iterator[
         tuple[
             tuple[Optional[bytes], Optional[bytes]],
@@ -444,7 +444,7 @@ class BaseObjectStore:
         self,
         haves: Iterable[bytes],
         wants: Iterable[bytes],
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[..., None]] = None,
         get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
         get_parents: Callable[..., list[bytes]] = lambda commit: commit.parents,
@@ -495,7 +495,7 @@ class BaseObjectStore:
         self,
         have: Iterable[bytes],
         want: Iterable[bytes],
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[..., None]] = None,
         ofs_delta: bool = True,
     ) -> tuple[int, Iterator[UnpackedObject]]:
@@ -590,7 +590,7 @@ class BaseObjectStore:
         return None
 
     def write_commit_graph(
-        self, refs: Optional[list[bytes]] = None, reachable: bool = True
+        self, refs: Optional[Sequence[bytes]] = None, reachable: bool = True
     ) -> None:
         """Write a commit graph file for this object store.
 
@@ -740,7 +740,7 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
         self,
         have: Iterable[bytes],
         want: Iterable[bytes],
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[..., None]] = None,
         ofs_delta: bool = True,
     ) -> tuple[int, Iterator[UnpackedObject]]:
@@ -850,7 +850,7 @@ class PackBasedObjectStore(BaseObjectStore, PackedObjectContainer):
 
     def repack(
         self,
-        exclude: Optional[set[bytes]] = None,
+        exclude: Optional[Set[bytes]] = None,
         progress: Optional[Callable[[str], None]] = None,
     ) -> int:
         """Repack the packs in this repository.
@@ -2126,7 +2126,7 @@ class MissingObjectFinder:
         haves: Iterable[bytes],
         wants: Iterable[bytes],
         *,
-        shallow: Optional[set[bytes]] = None,
+        shallow: Optional[Set[bytes]] = None,
         progress: Optional[Callable[[bytes], None]] = None,
         get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
         get_parents: Callable[[Commit], list[bytes]] = lambda commit: commit.parents,
@@ -2370,7 +2370,7 @@ class ObjectStoreGraphWalker:
 def commit_tree_changes(
     object_store: BaseObjectStore,
     tree: Union[ObjectID, Tree],
-    changes: list[tuple[bytes, Optional[int], Optional[bytes]]],
+    changes: Sequence[tuple[bytes, Optional[int], Optional[bytes]]],
 ) -> ObjectID:
     """Commit a specified set of changes to a tree structure.
 
@@ -2434,7 +2434,9 @@ class OverlayObjectStore(BaseObjectStore):
     """Object store that can overlay multiple object stores."""
 
     def __init__(
-        self, bases: list[BaseObjectStore], add_store: Optional[BaseObjectStore] = None
+        self,
+        bases: list[BaseObjectStore],
+        add_store: Optional[BaseObjectStore] = None,
     ) -> None:
         """Initialize an OverlayObjectStore.
 

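The shallow and exclude parameters above switch from the builtin set to collections.abc.Set, so callers may pass a frozenset or a dict keys view as well as a plain set. A minimal sketch under that assumption, with count_shallow invented for illustration:

    from collections.abc import Set

    def count_shallow(shallow: Set[bytes]) -> int:
        # Any read-only set-like object qualifies: set, frozenset,
        # or a dict keys view.
        return len(shallow)

    assert count_shallow({b"a", b"b"}) == 2
    assert count_shallow(frozenset({b"a"})) == 1
    assert count_shallow({b"a": 1}.keys()) == 1
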
+ 2 - 2
dulwich/objects.py

@@ -28,7 +28,7 @@ import posixpath
 import stat
 import sys
 import zlib
-from collections.abc import Callable, Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from hashlib import sha1
 from io import BufferedIOBase, BytesIO
 from typing import (
@@ -832,7 +832,7 @@ def _parse_message(
 
 
 def _format_message(
-    headers: list[tuple[bytes, bytes]], body: Optional[bytes]
+    headers: Sequence[tuple[bytes, bytes]], body: Optional[bytes]
 ) -> Iterator[bytes]:
     for field, value in headers:
         lines = value.split(b"\n")

+ 5 - 4
dulwich/objectspec.py

@@ -21,6 +21,7 @@
 
 """Object specification."""
 
+from collections.abc import Sequence
 from typing import TYPE_CHECKING, Optional, Union
 
 from .objects import Commit, ShaFile, Tag, Tree
@@ -288,7 +289,7 @@ def parse_reftuple(
 def parse_reftuples(
     lh_container: Union["Repo", "RefsContainer"],
     rh_container: Union["Repo", "RefsContainer"],
-    refspecs: Union[bytes, list[bytes]],
+    refspecs: Union[bytes, Sequence[bytes]],
     force: bool = False,
 ) -> list[tuple[Optional["Ref"], Optional["Ref"], bool]]:
     """Parse a list of reftuple specs to a list of reftuples.
@@ -302,7 +303,7 @@ def parse_reftuples(
     Raises:
       KeyError: If one of the refs can not be found
     """
-    if not isinstance(refspecs, list):
+    if isinstance(refspecs, bytes):
         refspecs = [refspecs]
     ret = []
     # TODO: Support * in refspecs
@@ -313,7 +314,7 @@ def parse_reftuples(
 
 def parse_refs(
     container: Union["Repo", "RefsContainer"],
-    refspecs: Union[bytes, str, list[Union[bytes, str]]],
+    refspecs: Union[bytes, str, Sequence[Union[bytes, str]]],
 ) -> list["Ref"]:
     """Parse a list of refspecs to a list of refs.
 
@@ -325,7 +326,7 @@ def parse_refs(
       KeyError: If one of the refs can not be found
     """
     # TODO: Support * in refspecs
-    if not isinstance(refspecs, list):
+    if isinstance(refspecs, (bytes, str)):
         refspecs = [refspecs]
     ret = []
     for refspec in refspecs:

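The isinstance changes above follow from the wider annotation: with refspecs typed as a Sequence, "not isinstance(refspecs, list)" would wrongly wrap a tuple, so the code now tests for the scalar case instead. A standalone sketch of the pattern, with as_list invented for illustration:

    from collections.abc import Sequence
    from typing import Union

    def as_list(refspecs: Union[bytes, Sequence[bytes]]) -> list[bytes]:
        # Test for the single-value case rather than "not a list", so tuples
        # and other sequences are not wrapped a second time.
        if isinstance(refspecs, bytes):
            return [refspecs]
        return list(refspecs)

    assert as_list(b"refs/heads/main") == [b"refs/heads/main"]
    assert as_list((b"a", b"b")) == [b"a", b"b"]
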
+ 4 - 4
dulwich/pack.py

@@ -48,7 +48,7 @@ import struct
 import sys
 import warnings
 import zlib
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Iterable, Iterator, Sequence, Set
 from hashlib import sha1
 from itertools import chain
 from os import SEEK_CUR, SEEK_END
@@ -2601,9 +2601,9 @@ def write_pack_header(
 
 def find_reusable_deltas(
     container: PackedObjectContainer,
-    object_ids: set[bytes],
+    object_ids: Set[bytes],
     *,
-    other_haves: Optional[set[bytes]] = None,
+    other_haves: Optional[Set[bytes]] = None,
     progress: Optional[Callable[..., None]] = None,
 ) -> Iterator[UnpackedObject]:
     """Find deltas in a pack that can be reused.
@@ -3834,7 +3834,7 @@ class Pack:
 
 def extend_pack(
     f: BinaryIO,
-    object_ids: set[ObjectID],
+    object_ids: Set[ObjectID],
     get_raw: Callable[[ObjectID], tuple[int, bytes]],
     *,
     compression_level: int = -1,

+ 6 - 6
dulwich/patch.py

@@ -27,7 +27,7 @@ on.
 
 import email.parser
 import time
-from collections.abc import Generator
+from collections.abc import Generator, Sequence
 from difflib import SequenceMatcher
 from typing import (
     IO,
@@ -165,8 +165,8 @@ def _format_range_unified(start: int, stop: int) -> str:
 
 
 def unified_diff(
-    a: list[bytes],
-    b: list[bytes],
+    a: Sequence[bytes],
+    b: Sequence[bytes],
     fromfile: bytes = b"",
     tofile: bytes = b"",
     fromfiledate: str = "",
@@ -216,7 +216,7 @@ def unified_diff(
 
 
 def _get_sequence_matcher(
-    algorithm: str, a: list[bytes], b: list[bytes]
+    algorithm: str, a: Sequence[bytes], b: Sequence[bytes]
 ) -> SequenceMatcher[bytes]:
     """Get appropriate sequence matcher for the given algorithm.
 
@@ -245,8 +245,8 @@ def _get_sequence_matcher(
 
 
 def unified_diff_with_algorithm(
-    a: list[bytes],
-    b: list[bytes],
+    a: Sequence[bytes],
+    b: Sequence[bytes],
     fromfile: bytes = b"",
     tofile: bytes = b"",
     fromfiledate: str = "",

+ 45 - 36
dulwich/porcelain.py

@@ -87,7 +87,8 @@ import stat
 import sys
 import time
 from collections import namedtuple
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Set as AbstractSet
 from contextlib import AbstractContextManager, closing, contextmanager
 from dataclasses import dataclass
 from io import BytesIO, RawIOBase
@@ -757,7 +758,7 @@ def clone(
     filter_spec: Optional[str] = None,
     protocol_version: Optional[int] = None,
     recurse_submodules: bool = False,
-    **kwargs: Union[Union[str, bytes], list[Union[str, bytes]]],
+    **kwargs: Union[Union[str, bytes], Sequence[Union[str, bytes]]],
 ) -> Repo:
     """Clone a local or remote git repository.
 
@@ -865,7 +866,9 @@ def clone(
 def add(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     paths: Optional[
-        Union[list[Union[str, bytes, os.PathLike[str]]], str, bytes, os.PathLike[str]]
+        Union[
+            Sequence[Union[str, bytes, os.PathLike[str]]], str, bytes, os.PathLike[str]
+        ]
     ] = None,
 ) -> tuple[list[str], set[str]]:
     """Add files to the staging area.
@@ -916,7 +919,7 @@ def add(
             # When no paths specified, add all untracked and modified files from repo root
             paths = [str(repo_path)]
         relpaths = []
-        if not isinstance(paths, list):
+        if isinstance(paths, (str, bytes, os.PathLike)):
             paths = [paths]
         for p in paths:
             # Handle bytes paths by decoding them
@@ -1067,7 +1070,7 @@ def clean(
 
 def remove(
     repo: Union[str, os.PathLike[str], Repo] = ".",
-    paths: list[Union[str, bytes, os.PathLike[str]]] = [],
+    paths: Sequence[Union[str, bytes, os.PathLike[str]]] = [],
     cached: bool = False,
 ) -> None:
     """Remove files from the staging area.
@@ -1498,7 +1501,7 @@ def print_name_status(changes: Iterator[TreeChange]) -> Iterator[str]:
 
 def log(
     repo: RepoPath = ".",
-    paths: Optional[list[Union[str, bytes]]] = None,
+    paths: Optional[Sequence[Union[str, bytes]]] = None,
     outstream: TextIO = sys.stdout,
     max_entries: Optional[int] = None,
     reverse: bool = False,
@@ -1547,7 +1550,7 @@ def log(
 # TODO(jelmer): better default for encoding?
 def show(
     repo: RepoPath = ".",
-    objects: Optional[list[Union[str, bytes]]] = None,
+    objects: Optional[Sequence[Union[str, bytes]]] = None,
     outstream: TextIO = sys.stdout,
     default_encoding: str = DEFAULT_ENCODING,
 ) -> None:
@@ -1562,7 +1565,7 @@ def show(
     """
     if objects is None:
         objects = ["HEAD"]
-    if not isinstance(objects, list):
+    if isinstance(objects, (str, bytes)):
         objects = [objects]
     with open_repo_closing(repo) as r:
         for objectish in objects:
@@ -1618,7 +1621,7 @@ def diff(
     commit: Optional[Union[str, bytes, Commit]] = None,
     commit2: Optional[Union[str, bytes, Commit]] = None,
     staged: bool = False,
-    paths: Optional[list[Union[str, bytes]]] = None,
+    paths: Optional[Sequence[Union[str, bytes]]] = None,
     outstream: BinaryIO = default_bytes_out_stream,
     diff_algorithm: Optional[str] = None,
 ) -> None:
@@ -1724,7 +1727,7 @@ def diff(
 
 def rev_list(
     repo: RepoPath,
-    commits: list[Union[str, bytes]],
+    commits: Sequence[Union[str, bytes]],
     outstream: BinaryIO = default_bytes_out_stream,
 ) -> None:
     """Lists commit objects in reverse chronological order.
@@ -1815,7 +1818,7 @@ def submodule_list(repo: RepoPath) -> Iterator[tuple[str, str]]:
 
 def submodule_update(
     repo: Union[str, os.PathLike[str], Repo],
-    paths: Optional[list[Union[str, bytes, os.PathLike[str]]]] = None,
+    paths: Optional[Sequence[Union[str, bytes, os.PathLike[str]]]] = None,
     init: bool = False,
     force: bool = False,
     errstream: Optional[BinaryIO] = None,
@@ -2378,7 +2381,7 @@ def get_remote_repo(
 def push(
     repo: RepoPath,
     remote_location: Optional[Union[str, bytes]] = None,
-    refspecs: Optional[Union[Union[str, bytes], list[Union[str, bytes]]]] = None,
+    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
     outstream: BinaryIO = default_bytes_out_stream,
     errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
     force: bool = False,
@@ -2483,11 +2486,14 @@ def push(
         try:
 
             def generate_pack_data_wrapper(
-                have: set[bytes], want: set[bytes], ofs_delta: bool = False
+                have: AbstractSet[bytes],
+                want: AbstractSet[bytes],
+                ofs_delta: bool = False,
             ) -> tuple[int, Iterator[UnpackedObject]]:
                 # Wrap to match the expected signature
+                # Convert AbstractSet to set since generate_pack_data expects set
                 return r.generate_pack_data(
-                    have, want, progress=None, ofs_delta=ofs_delta
+                    set(have), set(want), progress=None, ofs_delta=ofs_delta
                 )
 
             result = client.send_pack(
@@ -2528,7 +2534,7 @@ def push(
 def pull(
     repo: RepoPath,
     remote_location: Optional[Union[str, bytes]] = None,
-    refspecs: Optional[Union[Union[str, bytes], list[Union[str, bytes]]]] = None,
+    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
     outstream: BinaryIO = default_bytes_out_stream,
     errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
     fast_forward: bool = True,
@@ -3154,7 +3160,7 @@ def _make_tag_ref(name: Union[str, bytes]) -> Ref:
 
 
 def branch_delete(
-    repo: RepoPath, name: Union[str, bytes, list[Union[str, bytes]]]
+    repo: RepoPath, name: Union[str, bytes, Sequence[Union[str, bytes]]]
 ) -> None:
     """Delete a branch.
 
@@ -3163,12 +3169,12 @@ def branch_delete(
       name: Name of the branch
     """
     with open_repo_closing(repo) as r:
-        if isinstance(name, list):
+        if isinstance(name, (list, tuple)):
             names = name
         else:
             names = [name]
-        for name in names:
-            del r.refs[_make_branch_ref(name)]
+        for branch_name in names:
+            del r.refs[_make_branch_ref(branch_name)]
 
 
 def branch_create(
@@ -3753,7 +3759,7 @@ def repack(repo: RepoPath) -> None:
 
 def pack_objects(
     repo: RepoPath,
-    object_ids: list[bytes],
+    object_ids: Sequence[bytes],
     packf: BinaryIO,
     idxf: Optional[BinaryIO],
     delta_window_size: Optional[int] = None,
@@ -3913,7 +3919,7 @@ def _quote_path(path: str) -> str:
 
 def check_ignore(
     repo: RepoPath,
-    paths: list[Union[str, bytes, os.PathLike[str]]],
+    paths: Sequence[Union[str, bytes, os.PathLike[str]]],
     no_index: bool = False,
     quote_path: bool = True,
 ) -> Iterator[str]:
@@ -4400,7 +4406,7 @@ def cone_mode_init(repo: Union[str, os.PathLike[str], Repo]) -> None:
 
 
 def cone_mode_set(
-    repo: Union[str, os.PathLike[str], Repo], dirs: list[str], force: bool = False
+    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
 ) -> None:
     """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
 
@@ -4425,7 +4431,7 @@ def cone_mode_set(
 
 
 def cone_mode_add(
-    repo: Union[str, os.PathLike[str], Repo], dirs: list[str], force: bool = False
+    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
 ) -> None:
     """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
 
@@ -4450,7 +4456,7 @@ def cone_mode_add(
             for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
             if pat not in base_patterns
         ]
-        added_dirs = existing_dirs + (dirs or [])
+        added_dirs = existing_dirs + list(dirs or [])
         repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
         new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
         sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
@@ -5135,7 +5141,7 @@ def cherry_pick(  # noqa: D417
 
 def revert(
     repo: Union[str, os.PathLike[str], Repo],
-    commits: Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]],
+    commits: Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]],
     no_commit: bool = False,
     message: Optional[Union[str, bytes]] = None,
     author: Optional[bytes] = None,
@@ -5614,7 +5620,7 @@ def filter_branch(
     filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
     tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
     index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
-    parent_filter: Optional[Callable[[list[bytes]], list[bytes]]] = None,
+    parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
     commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
     subdirectory_filter: Optional[Union[str, bytes]] = None,
     prune_empty: bool = False,
@@ -5872,9 +5878,9 @@ def bisect_start(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     bad: Optional[Union[str, bytes, Commit, Tag]] = None,
     good: Optional[
-        Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
+        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
     ] = None,
-    paths: Optional[list[bytes]] = None,
+    paths: Optional[Sequence[bytes]] = None,
     no_checkout: bool = False,
     term_bad: str = "bad",
     term_good: str = "good",
@@ -5893,8 +5899,8 @@ def bisect_start(
     with open_repo_closing(repo) as r:
         state = BisectState(r)
 
-        # Convert single good commit to list
-        if good is not None and not isinstance(good, list):
+        # Convert single good commit to sequence
+        if good is not None and isinstance(good, (str, bytes, Commit, Tag)):
             good = [good]
 
         # Parse commits
@@ -5987,7 +5993,7 @@ def bisect_good(
 def bisect_skip(
     repo: Union[str, os.PathLike[str], Repo] = ".",
     revs: Optional[
-        Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
+        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
     ] = None,
 ) -> Optional[bytes]:
     """Skip one or more commits.
@@ -6005,8 +6011,8 @@ def bisect_skip(
         if revs is None:
             rev_shas = None
         else:
-            # Convert single rev to list
-            if not isinstance(revs, list):
+            # Convert single rev to sequence
+            if isinstance(revs, (str, bytes, Commit, Tag)):
                 revs = [revs]
             rev_shas = [parse_commit(r, rev).id for rev in revs]
 
@@ -6135,7 +6141,8 @@ def reflog(
 
 
 def lfs_track(
-    repo: Union[str, os.PathLike[str], Repo] = ".", patterns: Optional[list[str]] = None
+    repo: Union[str, os.PathLike[str], Repo] = ".",
+    patterns: Optional[Sequence[str]] = None,
 ) -> list[str]:
     """Track file patterns with Git LFS.
 
@@ -6187,7 +6194,8 @@ def lfs_track(
 
 
 def lfs_untrack(
-    repo: Union[str, os.PathLike[str], Repo] = ".", patterns: Optional[list[str]] = None
+    repo: Union[str, os.PathLike[str], Repo] = ".",
+    patterns: Optional[Sequence[str]] = None,
 ) -> list[str]:
     """Untrack file patterns from Git LFS.
 
@@ -6472,7 +6480,8 @@ def lfs_migrate(
 
 
 def lfs_pointer_check(
-    repo: Union[str, os.PathLike[str], Repo] = ".", paths: Optional[list[str]] = None
+    repo: Union[str, os.PathLike[str], Repo] = ".",
+    paths: Optional[Sequence[str]] = None,
 ) -> dict[str, Optional[Any]]:
     """Check if files are valid LFS pointers.
 

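The generate_pack_data_wrapper hunk above bridges the two styles: the callback advertises read-only AbstractSet arguments, then copies them into builtin sets for an API that still requires concrete sets. A simplified, self-contained sketch of that idea (generate_pack_data here is a stand-in, not the real Repo method, which also takes a progress callback):

    from collections.abc import Iterator
    from collections.abc import Set as AbstractSet

    def generate_pack_data(
        have: set[bytes], want: set[bytes], *, ofs_delta: bool = False
    ) -> tuple[int, Iterator[bytes]]:
        # Stand-in for an API that still expects concrete sets.
        return 0, iter(())

    def pack_data_wrapper(
        have: AbstractSet[bytes], want: AbstractSet[bytes], ofs_delta: bool = False
    ) -> tuple[int, Iterator[bytes]]:
        # Accept any set-like view, copy once, then delegate.
        return generate_pack_data(set(have), set(want), ofs_delta=ofs_delta)

    assert pack_data_wrapper(frozenset({b"a"}), {b"b"})[0] == 0
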
+ 2 - 2
dulwich/protocol.py

@@ -23,7 +23,7 @@
 """Generic functions for talking the git smart server protocol."""
 
 import types
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from io import BytesIO
 from os import SEEK_END
 from typing import Callable, Optional
@@ -714,7 +714,7 @@ def format_capability_line(capabilities: Iterable[bytes]) -> bytes:
 
 
 def format_ref_line(
-    ref: bytes, sha: bytes, capabilities: Optional[list[bytes]] = None
+    ref: bytes, sha: bytes, capabilities: Optional[Sequence[bytes]] = None
 ) -> bytes:
     """Format a ref advertisement line.
 

+ 2 - 1
dulwich/rebase.py

@@ -24,6 +24,7 @@
 import os
 import shutil
 import subprocess
+from collections.abc import Sequence
 from dataclasses import dataclass
 from enum import Enum
 from typing import Callable, Optional, Protocol, TypedDict
@@ -341,7 +342,7 @@ class RebaseTodo:
         return cls(entries)
 
     @classmethod
-    def from_commits(cls, commits: list[Commit]) -> "RebaseTodo":
+    def from_commits(cls, commits: Sequence[Commit]) -> "RebaseTodo":
         """Create a todo list from a list of commits.
 
         Args:

+ 10 - 10
dulwich/refs.py

@@ -25,7 +25,7 @@
 import os
 import types
 import warnings
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping
 from contextlib import suppress
 from typing import (
     IO,
@@ -218,7 +218,7 @@ class RefsContainer:
         """
         raise NotImplementedError(self.get_packed_refs)
 
-    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
         """Add the given refs as packed refs.
 
         Args:
@@ -241,7 +241,7 @@ class RefsContainer:
     def import_refs(
         self,
         base: Ref,
-        other: dict[Ref, ObjectID],
+        other: Mapping[Ref, ObjectID],
         committer: Optional[bytes] = None,
         timestamp: Optional[bytes] = None,
         timezone: Optional[bytes] = None,
@@ -756,14 +756,14 @@ class DictRefsContainer(RefsContainer):
         """Get peeled version of a reference."""
         return self._peeled.get(name)
 
-    def _update(self, refs: dict[bytes, bytes]) -> None:
+    def _update(self, refs: Mapping[bytes, bytes]) -> None:
         """Update multiple refs; intended only for testing."""
         # TODO(dborowitz): replace this with a public function that uses
         # set_if_equal.
         for ref, sha in refs.items():
             self.set_if_equals(ref, None, sha)
 
-    def _update_peeled(self, peeled: dict[bytes, bytes]) -> None:
+    def _update_peeled(self, peeled: Mapping[bytes, bytes]) -> None:
         """Update cached peeled refs; intended only for testing."""
         self._peeled.update(peeled)
 
@@ -940,7 +940,7 @@ class DiskRefsContainer(RefsContainer):
                         self._packed_refs[name] = sha
         return self._packed_refs
 
-    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
         """Add the given refs as packed refs.
 
         Args:
@@ -1405,8 +1405,8 @@ def read_packed_refs_with_peeled(
 
 def write_packed_refs(
     f: IO[bytes],
-    packed_refs: dict[bytes, bytes],
-    peeled_refs: Optional[dict[bytes, bytes]] = None,
+    packed_refs: Mapping[bytes, bytes],
+    peeled_refs: Optional[Mapping[bytes, bytes]] = None,
 ) -> None:
     """Write a packed refs file.
 
@@ -1442,7 +1442,7 @@ def read_info_refs(f: BinaryIO) -> dict[bytes, bytes]:
 
 
 def write_info_refs(
-    refs: dict[bytes, bytes], store: ObjectContainer
+    refs: Mapping[bytes, bytes], store: ObjectContainer
 ) -> Iterator[bytes]:
     """Generate info refs."""
     # TODO: Avoid recursive import :(
@@ -1593,7 +1593,7 @@ def _import_remote_refs(
 
 
 def serialize_refs(
-    store: ObjectContainer, refs: dict[bytes, bytes]
+    store: ObjectContainer, refs: Mapping[bytes, bytes]
 ) -> dict[bytes, bytes]:
     """Serialize refs with peeled refs.
 

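With add_packed_refs and write_packed_refs taking Mapping, any read-only mapping is acceptable, including a MappingProxyType view. A simplified sketch of the idea (render_packed_refs is invented for this note and is not dulwich's write_packed_refs):

    from collections.abc import Mapping
    from types import MappingProxyType

    def render_packed_refs(packed_refs: Mapping[bytes, bytes]) -> bytes:
        # The Mapping parameter promises read-only access, so a proxy view
        # works just as well as a plain dict.
        return b"".join(
            sha + b" " + ref + b"\n" for ref, sha in sorted(packed_refs.items())
        )

    refs = {b"refs/heads/main": b"a" * 40}
    assert render_packed_refs(MappingProxyType(refs)).startswith(b"a" * 40)
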
+ 3 - 2
dulwich/reftable.py

@@ -13,6 +13,7 @@ import shutil
 import struct
 import time
 import zlib
+from collections.abc import Mapping
 from dataclasses import dataclass
 from io import BytesIO
 from types import TracebackType
@@ -1082,14 +1083,14 @@ class ReftableRefsContainer(RefsContainer):
         table_name = f"0x{min_idx:016x}-0x{max_idx:016x}-{hash_part:08x}.ref"
         return os.path.join(self.reftable_dir, table_name)
 
-    def add_packed_refs(self, new_refs: dict[bytes, Optional[bytes]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[bytes, Optional[bytes]]) -> None:
         """Add packed refs. Creates a new reftable file with all refs consolidated."""
         if not new_refs:
             return
 
         self._write_batch_updates(new_refs)
 
-    def _write_batch_updates(self, updates: dict[bytes, Optional[bytes]]) -> None:
+    def _write_batch_updates(self, updates: Mapping[bytes, Optional[bytes]]) -> None:
         """Write multiple ref updates to a single reftable file."""
         if not updates:
             return

+ 13 - 13
dulwich/repo.py

@@ -34,7 +34,7 @@ import stat
 import sys
 import time
 import warnings
-from collections.abc import Generator, Iterable, Iterator
+from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
 from io import BytesIO
 from types import TracebackType
 from typing import (
@@ -300,7 +300,7 @@ def parse_graftpoints(
     return grafts
 
 
-def serialize_graftpoints(graftpoints: dict[bytes, list[bytes]]) -> bytes:
+def serialize_graftpoints(graftpoints: Mapping[bytes, Sequence[bytes]]) -> bytes:
     """Convert a dictionary of grafts into string.
 
     The graft dictionary is:
@@ -508,7 +508,7 @@ class BaseRepo:
         self,
         target: "BaseRepo",
         determine_wants: Optional[
-            Callable[[dict[bytes, bytes], Optional[int]], list[bytes]]
+            Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]]
         ] = None,
         progress: Optional[Callable[..., None]] = None,
         depth: Optional[int] = None,
@@ -536,7 +536,7 @@ class BaseRepo:
 
     def fetch_pack_data(
         self,
-        determine_wants: Callable[[dict[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
         graph_walker: "GraphWalker",
         progress: Optional[Callable[[bytes], None]],
         *,
@@ -571,7 +571,7 @@ class BaseRepo:
 
     def find_missing_objects(
         self,
-        determine_wants: Callable[[dict[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
         graph_walker: "GraphWalker",
         progress: Optional[Callable[[bytes], None]],
         *,
@@ -917,12 +917,12 @@ class BaseRepo:
 
     def get_walker(
         self,
-        include: Optional[list[bytes]] = None,
-        exclude: Optional[list[bytes]] = None,
+        include: Optional[Sequence[bytes]] = None,
+        exclude: Optional[Sequence[bytes]] = None,
         order: str = "date",
         reverse: bool = False,
         max_entries: Optional[int] = None,
-        paths: Optional[list[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
         rename_detector: Optional["RenameDetector"] = None,
         follow: bool = False,
         since: Optional[int] = None,
@@ -1060,7 +1060,7 @@ class BaseRepo:
 
         self._graftpoints.update(updated_graftpoints)
 
-    def _remove_graftpoints(self, to_remove: list[bytes] = []) -> None:
+    def _remove_graftpoints(self, to_remove: Sequence[bytes] = ()) -> None:
         """Remove graftpoints.
 
         Args:
@@ -1583,7 +1583,7 @@ class Repo(BaseRepo):
         return self.get_worktree().stage(fs_paths)
 
     @replace_me(remove_in="0.26.0")
-    def unstage(self, fs_paths: list[str]) -> None:
+    def unstage(self, fs_paths: Sequence[str]) -> None:
         """Unstage specific file in the index.
 
         Args:
@@ -2192,7 +2192,7 @@ class Repo(BaseRepo):
         return self.get_worktree().get_sparse_checkout_patterns()
 
     @replace_me(remove_in="0.26.0")
-    def set_sparse_checkout_patterns(self, patterns: list[str]) -> None:
+    def set_sparse_checkout_patterns(self, patterns: Sequence[str]) -> None:
         """Write the given sparse-checkout patterns into info/sparse-checkout.
 
         Creates the info/ directory if it does not exist.
@@ -2203,7 +2203,7 @@ class Repo(BaseRepo):
         return self.get_worktree().set_sparse_checkout_patterns(patterns)
 
     @replace_me(remove_in="0.26.0")
-    def set_cone_mode_patterns(self, dirs: Union[list[str], None] = None) -> None:
+    def set_cone_mode_patterns(self, dirs: Union[Sequence[str], None] = None) -> None:
         """Write the given cone-mode directory patterns into info/sparse-checkout.
 
         For each directory to include, add an inclusion line that "undoes" the prior
@@ -2510,7 +2510,7 @@ class MemoryRepo(BaseRepo):
     def init_bare(
         cls,
         objects: Iterable[ShaFile],
-        refs: dict[bytes, bytes],
+        refs: Mapping[bytes, bytes],
         format: Optional[int] = None,
     ) -> "MemoryRepo":
         """Create a new bare repository in memory.

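The _remove_graftpoints hunk above also swaps the mutable default [] for an empty tuple, which fits the Sequence annotation and avoids sharing one list object across calls. A minimal illustration with an invented helper:

    from collections.abc import Sequence

    def remove_items(to_remove: Sequence[bytes] = ()) -> list[bytes]:
        # An empty tuple is an immutable default; a [] default is shared
        # between calls and invites accidental mutation.
        return list(to_remove)

    assert remove_items() == []
    assert remove_items((b"a",)) == [b"a"]
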
+ 16 - 13
dulwich/server.py

@@ -50,7 +50,8 @@ import socketserver
 import sys
 import time
 import zlib
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Mapping, Sequence
+from collections.abc import Set as AbstractSet
 from functools import partial
 from typing import IO, TYPE_CHECKING, Callable, Optional, Union
 from typing import Protocol as TypingProtocol
@@ -176,7 +177,7 @@ class BackendRepo(TypingProtocol):
 
     def find_missing_objects(
         self,
-        determine_wants: Callable[[dict[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
         graph_walker: "_ProtocolGraphWalker",
         progress: Optional[Callable[[bytes], None]],
         *,
@@ -395,7 +396,7 @@ class UploadPackHandler(PackHandler):
     def __init__(
         self,
         backend: Backend,
-        args: list[str],
+        args: Sequence[str],
         proto: Protocol,
         stateless_rpc: bool = False,
         advertise_refs: bool = False,
@@ -472,7 +473,7 @@ class UploadPackHandler(PackHandler):
 
     def get_tagged(
         self,
-        refs: Optional[dict[bytes, bytes]] = None,
+        refs: Optional[Mapping[bytes, bytes]] = None,
         repo: Optional[BackendRepo] = None,
     ) -> dict[ObjectID, ObjectID]:
         """Get a dict of peeled values of tags to their original tag shas.
@@ -526,7 +527,7 @@ class UploadPackHandler(PackHandler):
         wants = []
 
         def wants_wrapper(
-            refs: dict[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: Optional[int] = None
         ) -> list[bytes]:
             wants.extend(graph_walker.determine_wants(refs, depth))
             return wants
@@ -652,7 +653,7 @@ def _want_satisfied(
 
 
 def _all_wants_satisfied(
-    store: ObjectContainer, haves: set[bytes], wants: set[bytes]
+    store: ObjectContainer, haves: AbstractSet[bytes], wants: set[bytes]
 ) -> bool:
     """Check whether all the current wants are satisfied by a set of haves.
 
@@ -746,7 +747,7 @@ class _ProtocolGraphWalker:
         self._impl: Optional[AckGraphWalkerImpl] = None
 
     def determine_wants(
-        self, heads: dict[bytes, bytes], depth: Optional[int] = None
+        self, heads: Mapping[bytes, bytes], depth: Optional[int] = None
     ) -> list[bytes]:
         """Determine the wants for a set of heads.
 
@@ -897,7 +898,7 @@ class _ProtocolGraphWalker:
         """
         return _split_proto_line(self.proto.read_pkt_line(), allowed)
 
-    def _handle_shallow_request(self, wants: list[bytes]) -> None:
+    def _handle_shallow_request(self, wants: Sequence[bytes]) -> None:
         """Handle shallow clone requests from the client.
 
         Args:
@@ -923,7 +924,9 @@ class _ProtocolGraphWalker:
 
         self.update_shallow(new_shallow, unshallow)
 
-    def update_shallow(self, new_shallow: set[bytes], unshallow: set[bytes]) -> None:
+    def update_shallow(
+        self, new_shallow: AbstractSet[bytes], unshallow: AbstractSet[bytes]
+    ) -> None:
         """Update shallow/unshallow information to the client.
 
         Args:
@@ -975,7 +978,7 @@ class _ProtocolGraphWalker:
         """
         self._wants = wants
 
-    def all_wants_satisfied(self, haves: set[bytes]) -> bool:
+    def all_wants_satisfied(self, haves: AbstractSet[bytes]) -> bool:
         """Check whether all the current wants are satisfied by a set of haves.
 
         Args:
@@ -1253,7 +1256,7 @@ class ReceivePackHandler(PackHandler):
     def __init__(
         self,
         backend: Backend,
-        args: list[str],
+        args: Sequence[str],
         proto: Protocol,
         stateless_rpc: bool = False,
         advertise_refs: bool = False,
@@ -1351,7 +1354,7 @@ class ReceivePackHandler(PackHandler):
                 ref_status = b"bad ref"
             yield (ref, ref_status)
 
-    def _report_status(self, status: list[tuple[bytes, bytes]]) -> None:
+    def _report_status(self, status: Sequence[tuple[bytes, bytes]]) -> None:
         """Report status to client.
 
         Args:
@@ -1456,7 +1459,7 @@ class UploadArchiveHandler(Handler):
     def __init__(
         self,
         backend: Backend,
-        args: list[str],
+        args: Sequence[str],
         proto: Protocol,
         stateless_rpc: bool = False,
     ) -> None:

+ 9 - 6
dulwich/sparse_patterns.py

@@ -22,6 +22,7 @@
 """Sparse checkout pattern handling."""
 
 import os
+from collections.abc import Sequence, Set
 from fnmatch import fnmatch
 
 from .file import ensure_dir_exists
@@ -38,7 +39,9 @@ class BlobNotFoundError(Exception):
     """Raised when a requested blob is not found in the repository's object store."""
 
 
-def determine_included_paths(index: Index, lines: list[str], cone: bool) -> set[str]:
+def determine_included_paths(
+    index: Index, lines: Sequence[str], cone: bool
+) -> set[str]:
     """Determine which paths in the index should be included based on either a full-pattern match or a cone-mode approach.
 
     Args:
@@ -55,7 +58,7 @@ def determine_included_paths(index: Index, lines: list[str], cone: bool) -> set[
         return compute_included_paths_full(index, lines)
 
 
-def compute_included_paths_full(index: Index, lines: list[str]) -> set[str]:
+def compute_included_paths_full(index: Index, lines: Sequence[str]) -> set[str]:
     """Use .gitignore-style parsing and matching to determine included paths.
 
     Each file path in the index is tested against the parsed sparse patterns.
@@ -78,7 +81,7 @@ def compute_included_paths_full(index: Index, lines: list[str]) -> set[str]:
     return included
 
 
-def compute_included_paths_cone(index: Index, lines: list[str]) -> set[str]:
+def compute_included_paths_cone(index: Index, lines: Sequence[str]) -> set[str]:
     """Implement a simplified 'cone' approach for sparse-checkout.
 
     By default, this can include top-level files, exclude all subdirectories,
@@ -135,7 +138,7 @@ def compute_included_paths_cone(index: Index, lines: list[str]) -> set[str]:
 
 
 def apply_included_paths(
-    repo: Repo, included_paths: set[str], force: bool = False
+    repo: Repo, included_paths: Set[str], force: bool = False
 ) -> None:
     """Apply the sparse-checkout inclusion set to the index and working tree.
 
@@ -226,7 +229,7 @@ def apply_included_paths(
                         f.write(blob.data)
 
 
-def parse_sparse_patterns(lines: list[str]) -> list[tuple[str, bool, bool, bool]]:
+def parse_sparse_patterns(lines: Sequence[str]) -> list[tuple[str, bool, bool, bool]]:
     """Parse pattern lines from a sparse-checkout file (.git/info/sparse-checkout).
 
     This simplified parser:
@@ -277,7 +280,7 @@ def parse_sparse_patterns(lines: list[str]) -> list[tuple[str, bool, bool, bool]
 
 def match_gitignore_patterns(
     path_str: str,
-    parsed_patterns: list[tuple[str, bool, bool, bool]],
+    parsed_patterns: Sequence[tuple[str, bool, bool, bool]],
     path_is_dir: bool = False,
 ) -> bool:
     """Check whether a path is included based on .gitignore-style patterns.

+ 4 - 4
dulwich/walk.py

@@ -23,7 +23,7 @@
 
 import collections
 import heapq
-from collections.abc import Iterator
+from collections.abc import Iterator, Sequence
 from itertools import chain
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union, cast
 
@@ -274,12 +274,12 @@ class Walker:
     def __init__(
         self,
         store: "BaseObjectStore",
-        include: list[bytes],
-        exclude: Optional[list[bytes]] = None,
+        include: Sequence[bytes],
+        exclude: Optional[Sequence[bytes]] = None,
         order: str = "date",
         reverse: bool = False,
         max_entries: Optional[int] = None,
-        paths: Optional[list[bytes]] = None,
+        paths: Optional[Sequence[bytes]] = None,
         rename_detector: Optional[RenameDetector] = None,
         follow: bool = False,
         since: Optional[int] = None,

+ 2 - 2
dulwich/web.py

@@ -51,7 +51,7 @@ import os
 import re
 import sys
 import time
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
 from io import BytesIO
 from types import TracebackType
 from typing import (
@@ -613,7 +613,7 @@ class HTTPGitRequest:
         self,
         status: str = HTTP_OK,
         content_type: Optional[str] = None,
-        headers: Optional[list[tuple[str, str]]] = None,
+        headers: Optional[Sequence[tuple[str, str]]] = None,
     ) -> Callable[[bytes], object]:
         """Begin a response with the given status and other headers."""
         if headers:

+ 4 - 1
dulwich/whitespace.py

@@ -24,6 +24,7 @@ This module implements Git's core.whitespace configuration and related
 whitespace error detection capabilities.
 """
 
+from collections.abc import Sequence, Set
 from typing import Optional
 
 # Default whitespace errors Git checks for
@@ -217,7 +218,9 @@ class WhitespaceChecker:
 
 
 def fix_whitespace_errors(
-    content: bytes, errors: list[tuple[str, int]], fix_types: Optional[set[str]] = None
+    content: bytes,
+    errors: Sequence[tuple[str, int]],
+    fix_types: Optional[Set[str]] = None,
 ) -> bytes:
     """Fix whitespace errors in content.
 

+ 5 - 5
dulwich/worktree.py

@@ -31,7 +31,7 @@ import sys
 import tempfile
 import time
 import warnings
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
 from contextlib import contextmanager
 from pathlib import Path
 from typing import Any, Callable, Union
@@ -332,7 +332,7 @@ class WorkTree:
                     index[tree_path] = index_entry_from_stat(st, blob.id)
         index.write()
 
-    def unstage(self, fs_paths: list[str]) -> None:
+    def unstage(self, fs_paths: Sequence[str]) -> None:
         """Unstage specific file in the index.
 
         Args:
@@ -411,7 +411,7 @@ class WorkTree:
         tree: ObjectID | None = None,
         encoding: bytes | None = None,
         ref: Ref | None = b"HEAD",
-        merge_heads: list[ObjectID] | None = None,
+        merge_heads: Sequence[ObjectID] | None = None,
         no_verify: bool = False,
         sign: bool = False,
     ) -> ObjectID:
@@ -700,7 +700,7 @@ class WorkTree:
         except FileNotFoundError:
             return []
 
-    def set_sparse_checkout_patterns(self, patterns: list[str]) -> None:
+    def set_sparse_checkout_patterns(self, patterns: Sequence[str]) -> None:
         """Write the given sparse-checkout patterns into info/sparse-checkout.
 
         Creates the info/ directory if it does not exist.
@@ -716,7 +716,7 @@ class WorkTree:
             for pat in patterns:
                 f.write(pat + "\n")
 
-    def set_cone_mode_patterns(self, dirs: list[str] | None = None) -> None:
+    def set_cone_mode_patterns(self, dirs: Sequence[str] | None = None) -> None:
         """Write the given cone-mode directory patterns into info/sparse-checkout.
 
         For each directory to include, add an inclusion line that "undoes" the prior

+ 2 - 1
tests/__init__.py

@@ -38,6 +38,7 @@ import tempfile
 
 # If Python itself provides an exception, use that
 import unittest
+from collections.abc import Sequence
 from typing import ClassVar, Optional
 from unittest import SkipTest, expectedFailure, skipIf
 from unittest import TestCase as _TestCase
@@ -88,7 +89,7 @@ class BlackboxTestCase(TestCase):
         else:
             raise SkipTest(f"Unable to find binary {name}")
 
-    def run_command(self, name: str, args: list[str]) -> subprocess.Popen[bytes]:
+    def run_command(self, name: str, args: Sequence[str]) -> subprocess.Popen[bytes]:
         """Run a Dulwich command.
 
         Args:

+ 6 - 5
tests/compat/test_check_ignore.py

@@ -23,6 +23,7 @@
 
 import os
 import tempfile
+from collections.abc import Sequence
 
 from dulwich import porcelain
 from dulwich.repo import Repo
@@ -65,7 +66,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
         full_path = os.path.join(self.test_dir, path)
         os.makedirs(full_path, exist_ok=True)
 
-    def _git_check_ignore(self, paths: list[str]) -> set[str]:
+    def _git_check_ignore(self, paths: Sequence[str]) -> set[str]:
         """Run git check-ignore and return set of ignored paths."""
         try:
             output = run_git_or_fail(
@@ -80,7 +81,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
             # git check-ignore returns non-zero when no paths are ignored
             return set()
 
-    def _dulwich_check_ignore(self, paths: list[str]) -> set[str]:
+    def _dulwich_check_ignore(self, paths: Sequence[str]) -> set[str]:
         """Run dulwich check_ignore and return set of ignored paths."""
         # Convert to absolute paths relative to the test directory
         abs_paths = [os.path.join(self.test_dir, path) for path in paths]
@@ -112,7 +113,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
                 result.add(path.replace("\\", "/"))
         return result
 
-    def _assert_ignore_match(self, paths: list[str]) -> None:
+    def _assert_ignore_match(self, paths: Sequence[str]) -> None:
         """Assert that dulwich and git return the same ignored paths."""
         git_ignored = self._git_check_ignore(paths)
         dulwich_ignored = self._dulwich_check_ignore(paths)
@@ -1158,7 +1159,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
         ]
         self._assert_ignore_match(paths)
 
-    def _git_check_ignore_quoted(self, paths: list[str]) -> set[str]:
+    def _git_check_ignore_quoted(self, paths: Sequence[str]) -> set[str]:
         """Run git check-ignore with default quoting and return set of ignored paths."""
         try:
             # Use default git settings (core.quotePath=true by default)
@@ -1174,7 +1175,7 @@ class CheckIgnoreCompatTestCase(CompatTestCase):
             # git check-ignore returns non-zero when no paths are ignored
             return set()
 
-    def _dulwich_check_ignore_quoted(self, paths: list[str]) -> set[str]:
+    def _dulwich_check_ignore_quoted(self, paths: Sequence[str]) -> set[str]:
         """Run dulwich check_ignore with quote_path=True and return set of ignored paths."""
         # Convert to absolute paths relative to the test directory
         abs_paths = [os.path.join(self.test_dir, path) for path in paths]

+ 3 - 2
tests/test_dumb.py

@@ -22,6 +22,7 @@
 """Tests for dumb HTTP git repositories."""
 
 import zlib
+from collections.abc import Mapping
 from typing import Callable, Optional, Union
 from unittest import TestCase
 from unittest.mock import Mock
@@ -264,7 +265,7 @@ fedcba9876543210fedcba9876543210fedcba98\trefs/tags/v1.0
         graph_walker = Mock()
 
         def determine_wants(
-            refs: dict[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: Optional[int] = None
         ) -> list[bytes]:
             return []
 
@@ -291,7 +292,7 @@ fedcba9876543210fedcba9876543210fedcba98\trefs/tags/v1.0
         graph_walker.ack.return_value = []  # No existing objects
 
         def determine_wants(
-            refs: dict[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: Optional[int] = None
         ) -> list[bytes]:
             return [blob_sha]