Drop support for Python 3.9

Jelmer Vernooij, 2 months ago
commit c593e5afa3

83 files changed, 1947 additions and 2082 deletions
  1. .github/workflows/pythontest.yml (+1 -1)
  2. NEWS (+4 -0)
  3. README.rst (+1 -1)
  4. dulwich/__init__.py (+4 -9)
  5. dulwich/annotate.py (+2 -2)
  6. dulwich/approxidate.py (+1 -2)
  7. dulwich/archive.py (+2 -2)
  8. dulwich/attrs.py (+10 -14)
  9. dulwich/bisect.py (+8 -9)
 10. dulwich/bitmap.py (+8 -8)
 11. dulwich/bundle.py (+10 -12)
 12. dulwich/cli.py (+88 -95)
 13. dulwich/client.py (+220 -221)
 14. dulwich/commit_graph.py (+8 -8)
 15. dulwich/config.py (+53 -61)
 16. dulwich/contrib/diffstat.py (+1 -2)
 17. dulwich/contrib/paramiko_vendor.py (+8 -8)
 18. dulwich/contrib/release_robot.py (+3 -3)
 19. dulwich/contrib/requests_vendor.py (+12 -12)
 20. dulwich/contrib/swift.py (+26 -30)
 21. dulwich/credentials.py (+1 -2)
 22. dulwich/diff.py (+16 -16)
 23. dulwich/diff_tree.py (+26 -26)
 24. dulwich/dumb.py (+12 -14)
 25. dulwich/errors.py (+5 -6)
 26. dulwich/fastexport.py (+6 -6)
 27. dulwich/file.py (+24 -24)
 28. dulwich/filter_branch.py (+15 -15)
 29. dulwich/filters.py (+20 -19)
 30. dulwich/gc.py (+9 -8)
 31. dulwich/graph.py (+4 -4)
 32. dulwich/greenthreads.py (+7 -8)
 33. dulwich/hooks.py (+7 -7)
 34. dulwich/ignore.py (+10 -10)
 35. dulwich/index.py (+98 -106)
 36. dulwich/lfs.py (+19 -19)
 37. dulwich/lfs_server.py (+1 -2)
 38. dulwich/line_ending.py (+16 -16)
 39. dulwich/log_utils.py (+1 -2)
 40. dulwich/lru_cache.py (+17 -17)
 41. dulwich/mailmap.py (+18 -19)
 42. dulwich/maintenance.py (+6 -5)
 43. dulwich/mbox.py (+5 -5)
 44. dulwich/merge.py (+23 -23)
 45. dulwich/merge_drivers.py (+9 -8)
 46. dulwich/notes.py (+17 -17)
 47. dulwich/object_store.py (+91 -98)
 48. dulwich/objects.py (+74 -72)
 49. dulwich/objectspec.py (+12 -18)
 50. dulwich/pack.py (+184 -192)
 51. dulwich/patch.py (+23 -24)
 52. dulwich/porcelain.py (+183 -189)
 53. dulwich/protocol.py (+20 -21)
 54. dulwich/rebase.py (+52 -52)
 55. dulwich/reflog.py (+8 -10)
 56. dulwich/refs.py (+149 -173)
 57. dulwich/reftable.py (+40 -50)
 58. dulwich/repo.py (+116 -118)
 59. dulwich/server.py (+34 -36)
 60. dulwich/stash.py (+7 -7)
 61. dulwich/submodule.py (+2 -2)
 62. dulwich/tests/test_object_store.py (+2 -2)
 63. dulwich/tests/utils.py (+7 -7)
 64. dulwich/trailers.py (+1 -3)
 65. dulwich/walk.py (+17 -17)
 66. dulwich/web.py (+24 -34)
 67. dulwich/whitespace.py (+2 -3)
 68. dulwich/worktree.py (+5 -5)
 69. examples/merge_driver.py (+1 -2)
 70. fuzzing/fuzz-targets/fuzz_bundle.py (+1 -2)
 71. fuzzing/fuzz-targets/fuzz_configfile.py (+1 -2)
 72. fuzzing/fuzz-targets/fuzz_object_store.py (+1 -2)
 73. fuzzing/fuzz-targets/fuzz_repo.py (+1 -2)
 74. pyproject.toml (+1 -4)
 75. tests/__init__.py (+3 -3)
 76. tests/compat/utils.py (+1 -1)
 77. tests/contrib/test_paramiko_vendor.py (+3 -4)
 78. tests/contrib/test_release_robot.py (+2 -2)
 79. tests/test_annotate.py (+4 -4)
 80. tests/test_archive.py (+1 -2)
 81. tests/test_dumb.py (+10 -11)
 82. tests/test_log_utils.py (+1 -2)
 83. tests/test_merge_drivers.py (+1 -2)

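Nearly all of the churn below is mechanical: with 3.9 gone the minimum interpreter is 3.10, so Optional[X] and Union[X, Y] annotations can be rewritten in PEP 604 form (X | None, X | Y), and Callable imports move from typing to collections.abc. A minimal before/after sketch of the pattern applied across these files (resolve_ref is a hypothetical function, not taken from this commit):

    # Before (Python 3.9 compatible):
    from typing import Callable, Optional, Union

    def resolve_ref(name: Union[str, bytes],
                    default: Optional[bytes] = None,
                    progress: Optional[Callable[[str], None]] = None) -> Optional[bytes]:
        ...

    # After (Python 3.10+, as applied throughout this commit):
    from collections.abc import Callable

    def resolve_ref(name: str | bytes,
                    default: bytes | None = None,
                    progress: Callable[[str], None] | None = None) -> bytes | None:
        ...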
+ 1 - 1
.github/workflows/pythontest.yml

@@ -17,7 +17,7 @@ jobs:
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
         python-version:
-          ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
+          ["3.10", "3.11", "3.12", "3.13", "3.14"]
       fail-fast: false
 
     steps:

+ 4 - 0
NEWS

@@ -1,3 +1,7 @@
+0.25.0	UNRELEASED
+
+ * Drop support for Python 3.9. (Jelmer Vernooij)
+
 0.24.8	2025-10-29
 
  * Add Rust implementation of pack delta creation (create_delta). The

+ 1 - 1
README.rst

@@ -103,5 +103,5 @@ file and `list of open issues <https://github.com/dulwich/dulwich/issues>`_.
 Supported versions of Python
 ----------------------------
 
-At the moment, Dulwich supports (and is tested on) CPython 3.9 and later and
+At the moment, Dulwich supports (and is tested on) CPython 3.10 and later and
 Pypy.

+ 4 - 9
dulwich/__init__.py

@@ -23,13 +23,8 @@
 
 """Python implementation of the Git file formats and protocols."""
 
-import sys
-from typing import Any, Callable, Optional, TypeVar, Union
-
-if sys.version_info >= (3, 10):
-    from typing import ParamSpec
-else:
-    from typing_extensions import ParamSpec
+from collections.abc import Callable
+from typing import Any, ParamSpec, TypeVar
 
 __version__ = (0, 24, 8)
 
@@ -45,8 +40,8 @@ except ImportError:
     # if dissolve is not installed, then just provide a basic implementation
     # of its replace_me decorator
     def replace_me(
-        since: Optional[Union[tuple[int, ...], str]] = None,
-        remove_in: Optional[Union[tuple[int, ...], str]] = None,
+        since: tuple[int, ...] | str | None = None,
+        remove_in: tuple[int, ...] | str | None = None,
     ) -> Callable[[F], F]:
         """Decorator to mark functions as deprecated.
 

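ParamSpec only landed in typing in Python 3.10, which is why dulwich/__init__.py previously needed the typing_extensions fallback behind a sys.version_info check; with 3.10 as the floor, the direct import suffices. A generic sketch of the ParamSpec pattern on 3.10+ (logged is an illustrative decorator, not dulwich's replace_me):

    import functools
    from collections.abc import Callable
    from typing import ParamSpec, TypeVar

    P = ParamSpec("P")   # importable from typing itself since Python 3.10
    R = TypeVar("R")

    def logged(func: Callable[P, R]) -> Callable[P, R]:
        """Hypothetical decorator; ParamSpec preserves the wrapped signature."""
        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            print(f"calling {func.__name__}")
            return func(*args, **kwargs)
        return wrapper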
+ 2 - 2
dulwich/annotate.py

@@ -28,7 +28,7 @@ Python's difflib.
 
 import difflib
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 from dulwich.objects import Blob
 from dulwich.walk import (
@@ -77,7 +77,7 @@ def annotate_lines(
     commit_id: bytes,
     path: bytes,
     order: str = ORDER_DATE,
-    lines: Optional[Sequence[tuple[tuple["Commit", "TreeEntry"], bytes]]] = None,
+    lines: Sequence[tuple[tuple["Commit", "TreeEntry"], bytes]] | None = None,
     follow: bool = True,
 ) -> list[tuple[tuple["Commit", "TreeEntry"], bytes]]:
     """Annotate the lines of a blob.

+ 1 - 2
dulwich/approxidate.py

@@ -31,10 +31,9 @@ formats for specifying dates and times, including:
 
 import time
 from datetime import datetime
-from typing import Union
 
 
-def parse_approxidate(time_spec: Union[str, bytes]) -> int:
+def parse_approxidate(time_spec: str | bytes) -> int:
     """Parse a Git approxidate specification and return a Unix timestamp.
 
     Args:

+ 2 - 2
dulwich/archive.py

@@ -30,7 +30,7 @@ from collections.abc import Generator
 from contextlib import closing
 from io import BytesIO
 from os import SEEK_END
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     from .object_store import BaseObjectStore
@@ -59,7 +59,7 @@ class ChunkedBytesIO:
         self.contents = contents
         self.pos = (0, 0)
 
-    def read(self, maxbytes: Optional[int] = None) -> bytes:
+    def read(self, maxbytes: int | None = None) -> bytes:
         """Read bytes from the chunked stream.
 
         Args:

+ 10 - 14
dulwich/attrs.py

@@ -24,13 +24,9 @@
 import os
 import re
 from collections.abc import Generator, Iterator, Mapping, Sequence
-from typing import (
-    IO,
-    Optional,
-    Union,
-)
+from typing import IO
 
-AttributeValue = Union[bytes, bool, None]
+AttributeValue = bytes | bool | None
 
 
 def _parse_attr(attr: bytes) -> tuple[bytes, AttributeValue]:
@@ -170,7 +166,7 @@ class Pattern:
             pattern: Attribute pattern as bytes
         """
         self.pattern = pattern
-        self._regex: Optional[re.Pattern[bytes]] = None
+        self._regex: re.Pattern[bytes] | None = None
         self._compile()
 
     def _compile(self) -> None:
@@ -227,7 +223,7 @@ def match_path(
 
 
 def parse_gitattributes_file(
-    filename: Union[str, bytes],
+    filename: str | bytes,
 ) -> list[tuple[Pattern, Mapping[bytes, AttributeValue]]]:
     """Parse a gitattributes file and return compiled patterns.
 
@@ -251,7 +247,7 @@ def parse_gitattributes_file(
 
 
 def read_gitattributes(
-    path: Union[str, bytes],
+    path: str | bytes,
 ) -> list[tuple[Pattern, Mapping[bytes, AttributeValue]]]:
     """Read .gitattributes from a directory.
 
@@ -276,7 +272,7 @@ class GitAttributes:
 
     def __init__(
         self,
-        patterns: Optional[list[tuple[Pattern, Mapping[bytes, AttributeValue]]]] = None,
+        patterns: list[tuple[Pattern, Mapping[bytes, AttributeValue]]] | None = None,
     ):
         """Initialize GitAttributes.
 
@@ -315,7 +311,7 @@ class GitAttributes:
         return iter(self._patterns)
 
     @classmethod
-    def from_file(cls, filename: Union[str, bytes]) -> "GitAttributes":
+    def from_file(cls, filename: str | bytes) -> "GitAttributes":
         """Create GitAttributes from a gitattributes file.
 
         Args:
@@ -328,7 +324,7 @@ class GitAttributes:
         return cls(patterns)
 
     @classmethod
-    def from_path(cls, path: Union[str, bytes]) -> "GitAttributes":
+    def from_path(cls, path: str | bytes) -> "GitAttributes":
         """Create GitAttributes from .gitattributes in a directory.
 
         Args:
@@ -350,7 +346,7 @@ class GitAttributes:
         """
         # Find existing pattern
         pattern_obj = None
-        attrs_dict: Optional[dict[bytes, AttributeValue]] = None
+        attrs_dict: dict[bytes, AttributeValue] | None = None
         pattern_index = -1
 
         for i, (p, attrs) in enumerate(self._patterns):
@@ -415,7 +411,7 @@ class GitAttributes:
 
         return b"\n".join(lines) + b"\n" if lines else b""
 
-    def write_to_file(self, filename: Union[str, bytes]) -> None:
+    def write_to_file(self, filename: str | bytes) -> None:
         """Write GitAttributes to a file.
 
         Args:

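One detail worth noting about aliases such as AttributeValue: unlike annotations, which 3.9 code could already write in | form under "from __future__ import annotations", a module-level alias is evaluated at import time, so bytes | bool | None is a hard 3.10 requirement (PEP 604 gave types an __or__ that builds types.UnionType). A small illustration:

    import types

    AttributeValue = bytes | bool | None   # evaluated eagerly at import time

    # On 3.10+ this builds a types.UnionType; on 3.9 the same line raises
    # TypeError (the syntax parses, but type.__or__ does not exist yet).
    assert isinstance(AttributeValue, types.UnionType)
    assert isinstance(b"text", AttributeValue)   # PEP 604 unions work with isinstance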
+ 8 - 9
dulwich/bisect.py

@@ -22,7 +22,6 @@
 
 import os
 from collections.abc import Sequence, Set
-from typing import Optional
 
 from dulwich.object_store import peel_sha
 from dulwich.objects import Commit
@@ -48,9 +47,9 @@ class BisectState:
 
     def start(
         self,
-        bad: Optional[bytes] = None,
-        good: Optional[Sequence[bytes]] = None,
-        paths: Optional[Sequence[bytes]] = None,
+        bad: bytes | None = None,
+        good: Sequence[bytes] | None = None,
+        paths: Sequence[bytes] | None = None,
         no_checkout: bool = False,
         term_bad: str = "bad",
         term_good: str = "good",
@@ -125,7 +124,7 @@ class BisectState:
             for g in good:
                 self.mark_good(g)
 
-    def mark_bad(self, rev: Optional[bytes] = None) -> Optional[bytes]:
+    def mark_bad(self, rev: bytes | None = None) -> bytes | None:
         """Mark a commit as bad.
 
         Args:
@@ -155,7 +154,7 @@ class BisectState:
 
         return self._find_next_commit()
 
-    def mark_good(self, rev: Optional[bytes] = None) -> Optional[bytes]:
+    def mark_good(self, rev: bytes | None = None) -> bytes | None:
         """Mark a commit as good.
 
         Args:
@@ -187,7 +186,7 @@ class BisectState:
 
         return self._find_next_commit()
 
-    def skip(self, revs: Optional[Sequence[bytes]] = None) -> Optional[bytes]:
+    def skip(self, revs: Sequence[bytes] | None = None) -> bytes | None:
         """Skip one or more commits.
 
         Args:
@@ -214,7 +213,7 @@ class BisectState:
 
         return self._find_next_commit()
 
-    def reset(self, commit: Optional[bytes] = None) -> None:
+    def reset(self, commit: bytes | None = None) -> None:
         """Reset bisect state and return to original branch/commit.
 
         Args:
@@ -299,7 +298,7 @@ class BisectState:
                 revs = [arg.encode("ascii") for arg in args] if args else None
                 self.skip(revs)
 
-    def _find_next_commit(self) -> Optional[bytes]:
+    def _find_next_commit(self) -> bytes | None:
         """Find the next commit to test using binary search.
 
         Returns:

+ 8 - 8
dulwich/bitmap.py

@@ -32,7 +32,7 @@ import os
 import struct
 from collections.abc import Iterator
 from io import BytesIO
-from typing import IO, TYPE_CHECKING, Optional, Union
+from typing import IO, TYPE_CHECKING, Optional
 
 from .file import GitFile
 
@@ -122,7 +122,7 @@ class EWAHBitmap:
     - Bits 33-63: literal_words (31 bits) - count of literal words following this RLW
     """
 
-    def __init__(self, data: Optional[bytes] = None) -> None:
+    def __init__(self, data: bytes | None = None) -> None:
         """Initialize EWAH bitmap.
 
         Args:
@@ -357,7 +357,7 @@ class PackBitmap:
         """
         self.version = version
         self.flags = flags
-        self.pack_checksum: Optional[bytes] = None
+        self.pack_checksum: bytes | None = None
 
         # Type bitmaps for commits, trees, blobs, tags
         self.commit_bitmap = EWAHBitmap()
@@ -372,12 +372,12 @@ class PackBitmap:
         self.entries_list: list[tuple[bytes, BitmapEntry]] = []
 
         # Optional lookup table for random access
-        self.lookup_table: Optional[list[tuple[int, int, int]]] = None
+        self.lookup_table: list[tuple[int, int, int]] | None = None
 
         # Optional name-hash cache
-        self.name_hash_cache: Optional[list[int]] = None
+        self.name_hash_cache: list[int] | None = None
 
-    def get_bitmap(self, commit_sha: bytes) -> Optional[EWAHBitmap]:
+    def get_bitmap(self, commit_sha: bytes) -> EWAHBitmap | None:
         """Get the bitmap for a commit.
 
         Args:
@@ -439,7 +439,7 @@ class PackBitmap:
 
 
 def read_bitmap(
-    filename: Union[str, os.PathLike[str]],
+    filename: str | os.PathLike[str],
     pack_index: Optional["PackIndex"] = None,
 ) -> PackBitmap:
     """Read a bitmap index file.
@@ -665,7 +665,7 @@ def read_bitmap_file(
 
 
 def write_bitmap(
-    filename: Union[str, os.PathLike[str]],
+    filename: str | os.PathLike[str],
     bitmap: PackBitmap,
 ) -> None:
     """Write a bitmap index file.

+ 10 - 12
dulwich/bundle.py

@@ -21,12 +21,10 @@
 
 """Bundle format support."""
 
-from collections.abc import Iterator, Sequence
+from collections.abc import Callable, Iterator, Sequence
 from typing import (
     TYPE_CHECKING,
     BinaryIO,
-    Callable,
-    Optional,
     Protocol,
     cast,
     runtime_checkable,
@@ -56,12 +54,12 @@ if TYPE_CHECKING:
 class Bundle:
     """Git bundle object representation."""
 
-    version: Optional[int]
+    version: int | None
 
-    capabilities: dict[str, Optional[str]]
+    capabilities: dict[str, str | None]
     prerequisites: list[tuple[bytes, bytes]]
     references: dict[bytes, bytes]
-    pack_data: Optional[PackDataLike]
+    pack_data: PackDataLike | None
 
     def __repr__(self) -> str:
         """Return string representation of Bundle."""
@@ -91,7 +89,7 @@ class Bundle:
     def store_objects(
         self,
         object_store: "BaseObjectStore",
-        progress: Optional[Callable[[str], None]] = None,
+        progress: Callable[[str], None] | None = None,
     ) -> None:
         """Store all objects from this bundle into an object store.
 
@@ -222,11 +220,11 @@ def write_bundle(f: BinaryIO, bundle: Bundle) -> None:
 
 def create_bundle_from_repo(
     repo: "BaseRepo",
-    refs: Optional[Sequence[bytes]] = None,
-    prerequisites: Optional[Sequence[bytes]] = None,
-    version: Optional[int] = None,
-    capabilities: Optional[dict[str, Optional[str]]] = None,
-    progress: Optional[Callable[[str], None]] = None,
+    refs: Sequence[bytes] | None = None,
+    prerequisites: Sequence[bytes] | None = None,
+    version: int | None = None,
+    capabilities: dict[str, str | None] | None = None,
+    progress: Callable[[str], None] | None = None,
 ) -> Bundle:
     """Create a bundle from a repository.
 

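The Callable import moving into the collections.abc line, as here, recurs throughout the commit: typing.Callable has been a deprecated alias of collections.abc.Callable since Python 3.9 (PEP 585), and the abc class is directly subscriptable in annotations. A minimal sketch with a hypothetical stand-in for the store_objects signature above:

    from collections.abc import Callable

    ProgressFn = Callable[[str], None]   # subscriptable since Python 3.9

    def store_objects(progress: ProgressFn | None = None) -> None:
        """Hypothetical helper mirroring Bundle.store_objects's callback."""
        if progress is not None:
            progress("storing objects")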
+ 88 - 95
dulwich/cli.py

@@ -42,22 +42,19 @@ import subprocess
 import sys
 import tempfile
 import types
-from collections.abc import Iterable, Iterator, Mapping, Sequence
+from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
 from pathlib import Path
 from types import TracebackType
 from typing import (
     BinaryIO,
-    Callable,
     ClassVar,
-    Optional,
     TextIO,
-    Union,
 )
 
 if sys.version_info >= (3, 12):
     from collections.abc import Buffer
 else:
-    Buffer = Union[bytes, bytearray, memoryview]
+    Buffer = bytes | bytearray | memoryview
 
 from dulwich import porcelain
 
@@ -81,7 +78,7 @@ from .repo import Repo
 logger = logging.getLogger(__name__)
 
 
-def to_display_str(value: Union[bytes, str]) -> str:
+def to_display_str(value: bytes | str) -> str:
     """Convert a bytes or string value to a display string.
 
     Args:
@@ -96,7 +93,7 @@ def to_display_str(value: Union[bytes, str]) -> str:
 
 
 def _should_auto_flush(
-    stream: Union[TextIO, BinaryIO], env: Optional[Mapping[str, str]] = None
+    stream: TextIO | BinaryIO, env: Mapping[str, str] | None = None
 ) -> bool:
     """Determine if output should be auto-flushed based on GIT_FLUSH environment variable.
 
@@ -136,7 +133,7 @@ class AutoFlushTextIOWrapper:
 
     @classmethod
     def env(
-        cls, stream: TextIO, env: Optional[Mapping[str, str]] = None
+        cls, stream: TextIO, env: Mapping[str, str] | None = None
     ) -> "AutoFlushTextIOWrapper | TextIO":
         """Create wrapper respecting the GIT_FLUSH environment variable.
 
@@ -193,9 +190,9 @@ class AutoFlushTextIOWrapper:
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """Support context manager protocol."""
         if hasattr(self._stream, "__exit__"):
@@ -219,7 +216,7 @@ class AutoFlushBinaryIOWrapper:
 
     @classmethod
     def env(
-        cls, stream: BinaryIO, env: Optional[Mapping[str, str]] = None
+        cls, stream: BinaryIO, env: Mapping[str, str] | None = None
     ) -> "AutoFlushBinaryIOWrapper | BinaryIO":
         """Create wrapper respecting the GIT_FLUSH environment variable.
 
@@ -276,9 +273,9 @@ class AutoFlushBinaryIOWrapper:
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """Support context manager protocol."""
         if hasattr(self._stream, "__exit__"):
@@ -289,7 +286,7 @@ class CommitMessageError(Exception):
     """Raised when there's an issue with the commit message."""
 
 
-def signal_int(signal: int, frame: Optional[types.FrameType]) -> None:
+def signal_int(signal: int, frame: types.FrameType | None) -> None:
     """Handle interrupt signal by exiting.
 
     Args:
@@ -299,7 +296,7 @@ def signal_int(signal: int, frame: Optional[types.FrameType]) -> None:
     sys.exit(1)
 
 
-def signal_quit(signal: int, frame: Optional[types.FrameType]) -> None:
+def signal_quit(signal: int, frame: types.FrameType | None) -> None:
     """Handle quit signal by entering debugger.
 
     Args:
@@ -404,9 +401,9 @@ def detect_terminal_width() -> int:
 
 
 def write_columns(
-    items: Union[Iterator[bytes], Sequence[bytes]],
+    items: Iterator[bytes] | Sequence[bytes],
     out: TextIO,
-    width: Optional[int] = None,
+    width: int | None = None,
 ) -> None:
     """Display items in formatted columns based on terminal width.
 
@@ -490,7 +487,7 @@ class PagerBuffer(BinaryIO):
         """
         self.pager = pager
 
-    def write(self, data: Union[bytes, bytearray, memoryview]) -> int:  # type: ignore[override]
+    def write(self, data: bytes | bytearray | memoryview) -> int:  # type: ignore[override]
         """Write bytes to pager."""
         # Convert to bytes and decode to string for the pager
         text = bytes(data).decode("utf-8", errors="replace")
@@ -500,7 +497,7 @@ class PagerBuffer(BinaryIO):
         """Flush the pager."""
         return self.pager.flush()
 
-    def writelines(self, lines: Iterable[Union[bytes, bytearray, memoryview]]) -> None:  # type: ignore[override]
+    def writelines(self, lines: Iterable[bytes | bytearray | memoryview]) -> None:  # type: ignore[override]
         """Write multiple lines to pager."""
         for line in lines:
             self.write(line)
@@ -576,7 +573,7 @@ class PagerBuffer(BinaryIO):
         """Return the current position (not supported)."""
         raise io.UnsupportedOperation("PagerBuffer does not support tell()")
 
-    def truncate(self, size: Optional[int] = None) -> int:
+    def truncate(self, size: int | None = None) -> int:
         """Truncate the buffer (not supported)."""
         raise io.UnsupportedOperation("PagerBuffer does not support truncation")
 
@@ -594,9 +591,9 @@ class PagerBuffer(BinaryIO):
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """Exit context manager."""
         self.close()
@@ -611,7 +608,7 @@ class Pager(TextIO):
         Args:
             pager_cmd: Command to use for paging (default: "cat")
         """
-        self.pager_process: Optional[subprocess.Popen[str]] = None
+        self.pager_process: subprocess.Popen[str] | None = None
         self._buffer = PagerBuffer(self)
         self._closed = False
         self.pager_cmd = pager_cmd
@@ -696,9 +693,9 @@ class Pager(TextIO):
 
     def __exit__(
         self,
-        exc_type: Optional[type],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[types.TracebackType],
+        exc_type: type | None,
+        exc_val: BaseException | None,
+        exc_tb: types.TracebackType | None,
     ) -> None:
         """Context manager exit."""
         self.close()
@@ -739,7 +736,7 @@ class Pager(TextIO):
         return "utf-8"
 
     @property
-    def errors(self) -> Optional[str]:
+    def errors(self) -> str | None:
         """Return the error handling scheme."""
         return "replace"
 
@@ -767,7 +764,7 @@ class Pager(TextIO):
         return "<pager>"
 
     @property
-    def newlines(self) -> Optional[Union[str, tuple[str, ...]]]:
+    def newlines(self) -> str | tuple[str, ...] | None:
         """Return the newlines mode."""
         return None
 
@@ -791,7 +788,7 @@ class Pager(TextIO):
         """Return the current position (not supported)."""
         raise io.UnsupportedOperation("Pager does not support tell()")
 
-    def truncate(self, size: Optional[int] = None) -> int:
+    def truncate(self, size: int | None = None) -> int:
         """Truncate the pager (not supported)."""
         raise io.UnsupportedOperation("Pager does not support truncation")
 
@@ -807,7 +804,7 @@ class Pager(TextIO):
 class _StreamContextAdapter:
     """Adapter to make streams work with context manager protocol."""
 
-    def __init__(self, stream: Union[TextIO, BinaryIO]) -> None:
+    def __init__(self, stream: TextIO | BinaryIO) -> None:
         self.stream = stream
         # Expose buffer if it exists
         if hasattr(stream, "buffer"):
@@ -821,9 +818,9 @@ class _StreamContextAdapter:
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         # For stdout/stderr, we don't close them
         pass
@@ -833,8 +830,8 @@ class _StreamContextAdapter:
 
 
 def get_pager(
-    config: Optional[Config] = None, cmd_name: Optional[str] = None
-) -> Union[_StreamContextAdapter, "Pager"]:
+    config: Config | None = None, cmd_name: str | None = None
+) -> "_StreamContextAdapter | Pager":
     """Get a pager instance if paging should be used, otherwise return sys.stdout.
 
     Args:
@@ -930,7 +927,7 @@ def enable_pager() -> None:
 class Command:
     """A Dulwich subcommand."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Run the command."""
         raise NotImplementedError(self.run)
 
@@ -1102,7 +1099,7 @@ class cmd_fetch_pack(Command):
         else:
 
             def determine_wants(
-                refs: Mapping[bytes, bytes], depth: Optional[int] = None
+                refs: Mapping[bytes, bytes], depth: int | None = None
             ) -> list[bytes]:
                 return [y.encode("utf-8") for y in args.refs if y not in r.object_store]
 
@@ -1705,9 +1702,9 @@ class cmd_clone(Command):
 
 
 def _get_commit_message_with_template(
-    initial_message: Optional[bytes],
-    repo: Optional[Repo] = None,
-    commit: Optional[Commit] = None,
+    initial_message: bytes | None,
+    repo: Repo | None = None,
+    commit: Commit | None = None,
 ) -> bytes:
     """Get commit message with an initial message template."""
     # Start with the initial message
@@ -1753,7 +1750,7 @@ def _get_commit_message_with_template(
 class cmd_config(Command):
     """Get and set repository or global options."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the config command.
 
         Args:
@@ -1969,7 +1966,7 @@ class cmd_config(Command):
 class cmd_commit(Command):
     """Record changes to the repository."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the commit command.
 
         Args:
@@ -1990,15 +1987,13 @@ class cmd_commit(Command):
         )
         parsed_args = parser.parse_args(args)
 
-        message: Union[bytes, str, Callable[[Optional[Repo], Optional[Commit]], bytes]]
+        message: bytes | str | Callable[[Repo | None, Commit | None], bytes]
 
         if parsed_args.message:
             message = parsed_args.message
         elif parsed_args.amend:
             # For amend, create a callable that opens editor with original message pre-populated
-            def get_amend_message(
-                repo: Optional[Repo], commit: Optional[Commit]
-            ) -> bytes:
+            def get_amend_message(repo: Repo | None, commit: Commit | None) -> bytes:
                 # Get the original commit message from current HEAD
                 assert repo is not None
                 try:
@@ -2014,9 +2009,7 @@ class cmd_commit(Command):
             message = get_amend_message
         else:
             # For regular commits, use empty template
-            def get_regular_message(
-                repo: Optional[Repo], commit: Optional[Commit]
-            ) -> bytes:
+            def get_regular_message(repo: Repo | None, commit: Commit | None) -> bytes:
                 return _get_commit_message_with_template(b"", repo, commit)
 
             message = get_regular_message
@@ -2062,7 +2055,7 @@ class cmd_update_server_info(Command):
 class cmd_symbolic_ref(Command):
     """Read, modify and delete symbolic refs."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the symbolic-ref command.
 
         Args:
@@ -2130,7 +2123,7 @@ class cmd_pack_refs(Command):
 class cmd_var(Command):
     """Display Git logical variables."""
 
-    def run(self, argv: Sequence[str]) -> Optional[int]:
+    def run(self, argv: Sequence[str]) -> int | None:
         """Execute the var command.
 
         Args:
@@ -2234,7 +2227,7 @@ class cmd_show(Command):
 class cmd_show_ref(Command):
     """List references in a local repository."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the show-ref command.
 
         Args:
@@ -2366,7 +2359,7 @@ class cmd_show_ref(Command):
 class cmd_show_branch(Command):
     """Show branches and their commits."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the show-branch command.
 
         Args:
@@ -2522,7 +2515,7 @@ class cmd_tag(Command):
 class cmd_verify_commit(Command):
     """Check the GPG signature of commits."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the verify-commit command.
 
         Args:
@@ -2578,7 +2571,7 @@ class cmd_verify_commit(Command):
 class cmd_verify_tag(Command):
     """Check the GPG signature of tags."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the verify-tag command.
 
         Args:
@@ -3179,7 +3172,7 @@ class cmd_unpack_objects(Command):
 class cmd_prune(Command):
     """Prune all unreachable objects from the object database."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the prune command.
 
         Args:
@@ -3274,7 +3267,7 @@ class cmd_pull(Command):
 class cmd_push(Command):
     """Update remote refs along with associated objects."""
 
-    def run(self, argv: Sequence[str]) -> Optional[int]:
+    def run(self, argv: Sequence[str]) -> int | None:
         """Execute the push command.
 
         Args:
@@ -3316,9 +3309,9 @@ class SuperCommand(Command):
     """Base class for commands that have subcommands."""
 
     subcommands: ClassVar[dict[str, type[Command]]] = {}
-    default_command: ClassVar[Optional[type[Command]]] = None
+    default_command: ClassVar[type[Command] | None] = None
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the subcommand command.
 
         Args:
@@ -3480,7 +3473,7 @@ class cmd_check_mailmap(Command):
 class cmd_branch(Command):
     """List, create, or delete branches."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the branch command.
 
         Args:
@@ -3529,7 +3522,7 @@ class cmd_branch(Command):
         parsed_args = parser.parse_args(args)
 
         def print_branches(
-            branches: Union[Iterator[bytes], Sequence[bytes]], use_columns: bool = False
+            branches: Iterator[bytes] | Sequence[bytes], use_columns: bool = False
         ) -> None:
             if use_columns:
                 write_columns(branches, sys.stdout)
@@ -3537,7 +3530,7 @@ class cmd_branch(Command):
                 for branch in branches:
                     sys.stdout.write(f"{branch.decode()}\n")
 
-        branches: Union[Iterator[bytes], list[bytes], None] = None
+        branches: Iterator[bytes] | list[bytes] | None = None
 
         try:
             if parsed_args.all:
@@ -3592,7 +3585,7 @@ class cmd_branch(Command):
 class cmd_checkout(Command):
     """Switch branches or restore working tree files."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the checkout command.
 
         Args:
@@ -3693,7 +3686,7 @@ class cmd_bisect(SuperCommand):
 
     subcommands: ClassVar[dict[str, type[Command]]] = {}
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the bisect command.
 
         Args:
@@ -3874,7 +3867,7 @@ class cmd_describe(Command):
 class cmd_merge(Command):
     """Join two or more development histories together."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the merge command.
 
         Args:
@@ -3945,7 +3938,7 @@ class cmd_merge(Command):
 class cmd_merge_base(Command):
     """Find the best common ancestor between commits."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the merge-base command.
 
         Args:
@@ -4131,7 +4124,7 @@ class cmd_replace_list(Command):
 class cmd_replace_delete(Command):
     """Delete a replacement ref."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the replace-delete command.
 
         Args:
@@ -4163,7 +4156,7 @@ class cmd_replace(SuperCommand):
 
     default_command = cmd_replace_list
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the replace command.
 
         Args:
@@ -4195,7 +4188,7 @@ class cmd_replace(SuperCommand):
 class cmd_cherry(Command):
     """Find commits not merged upstream."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the cherry command.
 
         Args:
@@ -4260,7 +4253,7 @@ class cmd_cherry(Command):
 class cmd_cherry_pick(Command):
     """Apply the changes introduced by some existing commits."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the cherry-pick command.
 
         Args:
@@ -4335,7 +4328,7 @@ class cmd_cherry_pick(Command):
 class cmd_merge_tree(Command):
     """Show three-way merge without touching index."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the merge-tree command.
 
         Args:
@@ -4404,7 +4397,7 @@ class cmd_merge_tree(Command):
 class cmd_gc(Command):
     """Cleanup unnecessary files and optimize the local repository."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the gc command.
 
         Args:
@@ -4516,7 +4509,7 @@ class cmd_gc(Command):
 class cmd_maintenance(Command):
     """Run tasks to optimize Git repository data."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the maintenance command.
 
         Args:
@@ -4864,7 +4857,7 @@ class cmd_rebase(Command):
 class cmd_filter_branch(Command):
     """Rewrite branches."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the filter-branch command.
 
         Args:
@@ -4921,10 +4914,10 @@ class cmd_filter_branch(Command):
         # Helper function to run shell commands
         def run_filter(
             cmd: str,
-            input_data: Optional[bytes] = None,
-            cwd: Optional[str] = None,
-            extra_env: Optional[dict[str, str]] = None,
-        ) -> Optional[bytes]:
+            input_data: bytes | None = None,
+            cwd: str | None = None,
+            extra_env: dict[str, str] | None = None,
+        ) -> bytes | None:
             nonlocal filter_error
             filter_env = env.copy()
             if extra_env:
@@ -5004,7 +4997,7 @@ class cmd_filter_branch(Command):
         index_filter = None
         if parsed_args.index_filter:
 
-            def index_filter(tree_sha: bytes, index_path: str) -> Optional[bytes]:
+            def index_filter(tree_sha: bytes, index_path: str) -> bytes | None:
                 run_filter(
                     parsed_args.index_filter, extra_env={"GIT_INDEX_FILE": index_path}
                 )
@@ -5034,7 +5027,7 @@ class cmd_filter_branch(Command):
         commit_filter = None
         if parsed_args.commit_filter:
 
-            def commit_filter(commit_obj: Commit, tree_sha: bytes) -> Optional[bytes]:
+            def commit_filter(commit_obj: Commit, tree_sha: bytes) -> bytes | None:
                 # The filter receives: tree parent1 parent2...
                 cmd_input = tree_sha.hex()
                 for parent in commit_obj.parents:
@@ -5386,7 +5379,7 @@ class cmd_format_patch(Command):
         parsed_args = parser.parse_args(args)
 
         # Parse committish using the new function
-        committish: Optional[Union[bytes, tuple[bytes, bytes]]] = None
+        committish: bytes | tuple[bytes, bytes] | None = None
         if parsed_args.committish:
             with Repo(".") as r:
                 range_result = parse_commit_range(r, parsed_args.committish)
@@ -5549,7 +5542,7 @@ class cmd_bundle(Command):
         progress = None
         if parsed_args.progress and not parsed_args.quiet:
 
-            def progress(*args: Union[str, int]) -> None:
+            def progress(*args: str | int) -> None:
                 # Handle both progress(msg) and progress(count, msg) signatures
                 if len(args) == 1:
                     msg = args[0]
@@ -5695,7 +5688,7 @@ class cmd_bundle(Command):
         progress = None
         if parsed_args.progress:
 
-            def progress(*args: Union[str, int, bytes]) -> None:
+            def progress(*args: str | int | bytes) -> None:
                 # Handle both progress(msg) and progress(count, msg) signatures
                 if len(args) == 1:
                     msg = args[0]
@@ -5733,7 +5726,7 @@ class cmd_worktree_add(Command):
 
     """Add a new worktree to the repository."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-add command.
 
         Args:
@@ -5788,7 +5781,7 @@ class cmd_worktree_list(Command):
 
     """List details of each worktree."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-list command.
 
         Args:
@@ -5850,7 +5843,7 @@ class cmd_worktree_remove(Command):
 
     """Remove a worktree."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-remove command.
 
         Args:
@@ -5878,7 +5871,7 @@ class cmd_worktree_prune(Command):
 
     """Prune worktree information."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-prune command.
 
         Args:
@@ -5923,7 +5916,7 @@ class cmd_worktree_lock(Command):
 
     """Lock a worktree."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-lock command.
 
         Args:
@@ -5951,7 +5944,7 @@ class cmd_worktree_unlock(Command):
 
     """Unlock a worktree."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-unlock command.
 
         Args:
@@ -5976,7 +5969,7 @@ class cmd_worktree_move(Command):
 
     """Move a worktree."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-move command.
 
         Args:
@@ -6006,7 +5999,7 @@ class cmd_worktree_repair(Command):
 
     """Repair worktree administrative files."""
 
-    def run(self, args: Sequence[str]) -> Optional[int]:
+    def run(self, args: Sequence[str]) -> int | None:
         """Execute the worktree-repair command.
 
         Args:
@@ -6140,7 +6133,7 @@ commands = {
 }
 
 
-def main(argv: Optional[Sequence[str]] = None) -> Optional[int]:
+def main(argv: Sequence[str] | None = None) -> int | None:
     """Main entry point for the Dulwich CLI.
 
     Args:

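cli.py keeps one sys.version_info gate even after the cleanup: collections.abc.Buffer only exists from Python 3.12 (PEP 688), so on 3.10/3.11 the fallback alias is still needed, and only its spelling changes to a PEP 604 union. A sketch of how the alias behaves on both sides of the gate (consume is a hypothetical helper, not from the diff):

    import sys

    if sys.version_info >= (3, 12):
        from collections.abc import Buffer       # PEP 688, new in 3.12
    else:
        Buffer = bytes | bytearray | memoryview  # runtime union, valid on 3.10+

    def consume(data: Buffer) -> int:
        """Hypothetical helper: accept any bytes-like object."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("expected a bytes-like object")
        return len(bytes(data))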
File diff suppressed because it is too large
+ 220 - 221
dulwich/client.py


+ 8 - 8
dulwich/commit_graph.py

@@ -19,7 +19,7 @@ https://git-scm.com/docs/gitformat-commit-graph
 import os
 import struct
 from collections.abc import Iterator, Sequence
-from typing import TYPE_CHECKING, BinaryIO, Optional, Union
+from typing import TYPE_CHECKING, BinaryIO
 
 from .file import _GitFile
 
@@ -269,7 +269,7 @@ class CommitGraph:
 
         return parents
 
-    def get_entry_by_oid(self, oid: ObjectID) -> Optional[CommitGraphEntry]:
+    def get_entry_by_oid(self, oid: ObjectID) -> CommitGraphEntry | None:
         """Get commit graph entry by commit OID."""
         # Convert hex ObjectID to binary if needed for lookup
         if isinstance(oid, bytes) and len(oid) == 40:
@@ -283,17 +283,17 @@ class CommitGraph:
             return self.entries[index]
         return None
 
-    def get_generation_number(self, oid: ObjectID) -> Optional[int]:
+    def get_generation_number(self, oid: ObjectID) -> int | None:
         """Get generation number for a commit."""
         entry = self.get_entry_by_oid(oid)
         return entry.generation if entry else None
 
-    def get_parents(self, oid: ObjectID) -> Optional[list[bytes]]:
+    def get_parents(self, oid: ObjectID) -> list[bytes] | None:
         """Get parent commit IDs for a commit."""
         entry = self.get_entry_by_oid(oid)
         return entry.parents if entry else None
 
-    def write_to_file(self, f: Union[BinaryIO, _GitFile]) -> None:
+    def write_to_file(self, f: BinaryIO | _GitFile) -> None:
         """Write commit graph to file."""
         if not self.entries:
             raise ValueError("Cannot write empty commit graph")
@@ -391,7 +391,7 @@ class CommitGraph:
         return iter(self.entries)
 
 
-def read_commit_graph(path: Union[str, bytes]) -> Optional[CommitGraph]:
+def read_commit_graph(path: str | bytes) -> CommitGraph | None:
     """Read commit graph from file path."""
     if isinstance(path, str):
         path = path.encode()
@@ -403,7 +403,7 @@ def read_commit_graph(path: Union[str, bytes]) -> Optional[CommitGraph]:
         return CommitGraph.from_file(f)
 
 
-def find_commit_graph_file(git_dir: Union[str, bytes]) -> Optional[bytes]:
+def find_commit_graph_file(git_dir: str | bytes) -> bytes | None:
     """Find commit graph file in a Git repository."""
     if isinstance(git_dir, str):
         git_dir = git_dir.encode()
@@ -538,7 +538,7 @@ def generate_commit_graph(
 
 
 def write_commit_graph(
-    git_dir: Union[str, bytes],
+    git_dir: str | bytes,
     object_store: "BaseObjectStore",
     commit_ids: Sequence[ObjectID],
 ) -> None:

+ 53 - 61
dulwich/config.py

@@ -30,6 +30,7 @@ import os
 import re
 import sys
 from collections.abc import (
+    Callable,
     ItemsView,
     Iterable,
     Iterator,
@@ -42,9 +43,7 @@ from contextlib import suppress
 from pathlib import Path
 from typing import (
     IO,
-    Callable,
     Generic,
-    Optional,
     TypeVar,
     Union,
     overload,
@@ -52,13 +51,13 @@ from typing import (
 
 from .file import GitFile, _GitFile
 
-ConfigKey = Union[str, bytes, tuple[Union[str, bytes], ...]]
-ConfigValue = Union[str, bytes, bool, int]
+ConfigKey = str | bytes | tuple[str | bytes, ...]
+ConfigValue = str | bytes | bool | int
 
 logger = logging.getLogger(__name__)
 
 # Type for file opener callback
-FileOpener = Callable[[Union[str, os.PathLike[str]]], IO[bytes]]
+FileOpener = Callable[[str | os.PathLike[str]], IO[bytes]]
 
 # Type for includeIf condition matcher
 # Takes the condition value (e.g., "main" for onbranch:main) and returns bool
@@ -185,7 +184,7 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
     for the same key. Keys are compared case-insensitively.
     """
 
-    def __init__(self, default_factory: Optional[Callable[[], V]] = None) -> None:
+    def __init__(self, default_factory: Callable[[], V] | None = None) -> None:
         """Initialize a CaseInsensitiveOrderedMultiDict.
 
         Args:
@@ -198,10 +197,9 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
     @classmethod
     def make(
         cls,
-        dict_in: Optional[
-            Union[MutableMapping[K, V], "CaseInsensitiveOrderedMultiDict[K, V]"]
-        ] = None,
-        default_factory: Optional[Callable[[], V]] = None,
+        dict_in: Union[MutableMapping[K, V], "CaseInsensitiveOrderedMultiDict[K, V]"]
+        | None = None,
+        default_factory: Callable[[], V] | None = None,
     ) -> "CaseInsensitiveOrderedMultiDict[K, V]":
         """Create a CaseInsensitiveOrderedMultiDict from an existing mapping.
 
@@ -339,7 +337,7 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
         """
         return self._keyed[lower_key(item)]
 
-    def get(self, key: K, /, default: Union[V, _T, None] = None) -> Union[V, _T, None]:  # type: ignore[override]
+    def get(self, key: K, /, default: V | _T | None = None) -> V | _T | None:  # type: ignore[override]
         """Get the last value for a key, or a default if not found.
 
         Args:
@@ -373,7 +371,7 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
             if lower_key(actual) == lowered_key:
                 yield value
 
-    def setdefault(self, key: K, default: Optional[V] = None) -> V:
+    def setdefault(self, key: K, default: V | None = None) -> V:
         """Get value for key, setting it to default if not present.
 
         Args:
@@ -401,11 +399,11 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
 
 
 Name = bytes
-NameLike = Union[bytes, str]
+NameLike = bytes | str
 Section = tuple[bytes, ...]
-SectionLike = Union[bytes, str, tuple[Union[bytes, str], ...]]
+SectionLike = bytes | str | tuple[bytes | str, ...]
 Value = bytes
-ValueLike = Union[bytes, str]
+ValueLike = bytes | str
 
 
 class Config:
@@ -443,11 +441,11 @@ class Config:
     ) -> bool: ...
 
     @overload
-    def get_boolean(self, section: SectionLike, name: NameLike) -> Optional[bool]: ...
+    def get_boolean(self, section: SectionLike, name: NameLike) -> bool | None: ...
 
     def get_boolean(
-        self, section: SectionLike, name: NameLike, default: Optional[bool] = None
-    ) -> Optional[bool]:
+        self, section: SectionLike, name: NameLike, default: bool | None = None
+    ) -> bool | None:
         """Retrieve a configuration setting as boolean.
 
         Args:
@@ -470,7 +468,7 @@ class Config:
         raise ValueError(f"not a valid boolean string: {value!r}")
 
     def set(
-        self, section: SectionLike, name: NameLike, value: Union[ValueLike, bool]
+        self, section: SectionLike, name: NameLike, value: ValueLike | bool
     ) -> None:
         """Set a configuration value.
 
@@ -515,10 +513,9 @@ class ConfigDict(Config):
 
     def __init__(
         self,
-        values: Union[
-            MutableMapping[Section, CaseInsensitiveOrderedMultiDict[Name, Value]], None
-        ] = None,
-        encoding: Union[str, None] = None,
+        values: MutableMapping[Section, CaseInsensitiveOrderedMultiDict[Name, Value]]
+        | None = None,
+        encoding: str | None = None,
     ) -> None:
         """Create a new ConfigDict."""
         if encoding is None:
@@ -573,7 +570,7 @@ class ConfigDict(Config):
         return self._values.keys()
 
     @classmethod
-    def _parse_setting(cls, name: str) -> tuple[str, Optional[str], str]:
+    def _parse_setting(cls, name: str) -> tuple[str, str | None, str]:
         parts = name.split(".")
         if len(parts) == 3:
             return (parts[0], parts[1], parts[2])
@@ -651,7 +648,7 @@ class ConfigDict(Config):
         self,
         section: SectionLike,
         name: NameLike,
-        value: Union[ValueLike, bool],
+        value: ValueLike | bool,
     ) -> None:
         """Set a configuration value.
 
@@ -678,7 +675,7 @@ class ConfigDict(Config):
         self,
         section: SectionLike,
         name: NameLike,
-        value: Union[ValueLike, bool],
+        value: ValueLike | bool,
     ) -> None:
         """Add a value to a configuration setting, creating a multivar if needed."""
         section, name = self._check_section_and_name(section, name)
@@ -908,10 +905,9 @@ class ConfigFile(ConfigDict):
 
     def __init__(
         self,
-        values: Union[
-            MutableMapping[Section, CaseInsensitiveOrderedMultiDict[Name, Value]], None
-        ] = None,
-        encoding: Union[str, None] = None,
+        values: MutableMapping[Section, CaseInsensitiveOrderedMultiDict[Name, Value]]
+        | None = None,
+        encoding: str | None = None,
     ) -> None:
         """Initialize a ConfigFile.
 
@@ -920,7 +916,7 @@ class ConfigFile(ConfigDict):
           encoding: Optional encoding for the file (defaults to system encoding)
         """
         super().__init__(values=values, encoding=encoding)
-        self.path: Optional[str] = None
+        self.path: str | None = None
         self._included_paths: set[str] = set()  # Track included files to prevent cycles
 
     @classmethod
@@ -928,12 +924,12 @@ class ConfigFile(ConfigDict):
         cls,
         f: IO[bytes],
         *,
-        config_dir: Optional[str] = None,
-        included_paths: Optional[set[str]] = None,
+        config_dir: str | None = None,
+        included_paths: set[str] | None = None,
         include_depth: int = 0,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
-        file_opener: Optional[FileOpener] = None,
-        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
+        file_opener: FileOpener | None = None,
+        condition_matchers: Mapping[str, ConditionMatcher] | None = None,
     ) -> "ConfigFile":
         """Read configuration from a file-like object.
 
@@ -954,7 +950,7 @@ class ConfigFile(ConfigDict):
         if included_paths is not None:
             ret._included_paths = included_paths.copy()
 
-        section: Optional[Section] = None
+        section: Section | None = None
         setting = None
         continuation = None
         for lineno, line in enumerate(f.readlines()):
@@ -1031,15 +1027,15 @@ class ConfigFile(ConfigDict):
 
     def _handle_include_directive(
         self,
-        section: Optional[Section],
+        section: Section | None,
         setting: bytes,
         value: bytes,
         *,
-        config_dir: Optional[str],
+        config_dir: str | None,
         include_depth: int,
         max_include_depth: int,
-        file_opener: Optional[FileOpener],
-        condition_matchers: Optional[Mapping[str, ConditionMatcher]],
+        file_opener: FileOpener | None,
+        condition_matchers: Mapping[str, ConditionMatcher] | None,
     ) -> None:
         """Handle include/includeIf directives during config parsing."""
         if (
@@ -1065,11 +1061,11 @@ class ConfigFile(ConfigDict):
         section: Section,
         path_value: bytes,
         *,
-        config_dir: Optional[str],
+        config_dir: str | None,
         include_depth: int,
         max_include_depth: int,
-        file_opener: Optional[FileOpener],
-        condition_matchers: Optional[Mapping[str, ConditionMatcher]],
+        file_opener: FileOpener | None,
+        condition_matchers: Mapping[str, ConditionMatcher] | None,
     ) -> None:
         """Process an include or includeIf directive."""
         path_str = path_value.decode(self.encoding, errors="replace")
@@ -1103,7 +1099,7 @@ class ConfigFile(ConfigDict):
             opener: FileOpener
             if file_opener is None:
 
-                def opener(path: Union[str, os.PathLike[str]]) -> IO[bytes]:
+                def opener(path: str | os.PathLike[str]) -> IO[bytes]:
                     return GitFile(path, "rb")
             else:
                 opener = file_opener
@@ -1140,9 +1136,7 @@ class ConfigFile(ConfigDict):
             for key, value in values.items():
                 self._values[section][key] = value
 
-    def _resolve_include_path(
-        self, path: str, config_dir: Optional[str]
-    ) -> Optional[str]:
+    def _resolve_include_path(self, path: str, config_dir: str | None) -> str | None:
         """Resolve an include path to an absolute path."""
         # Expand ~ to home directory
         path = os.path.expanduser(path)
@@ -1156,8 +1150,8 @@ class ConfigFile(ConfigDict):
     def _evaluate_includeif_condition(
         self,
         condition: str,
-        config_dir: Optional[str] = None,
-        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
+        config_dir: str | None = None,
+        condition_matchers: Mapping[str, ConditionMatcher] | None = None,
     ) -> bool:
         """Evaluate an includeIf condition."""
         # Try custom matchers first if provided
@@ -1243,11 +1237,11 @@ class ConfigFile(ConfigDict):
     @classmethod
     def from_path(
         cls,
-        path: Union[str, os.PathLike[str]],
+        path: str | os.PathLike[str],
         *,
         max_include_depth: int = DEFAULT_MAX_INCLUDE_DEPTH,
-        file_opener: Optional[FileOpener] = None,
-        condition_matchers: Optional[Mapping[str, ConditionMatcher]] = None,
+        file_opener: FileOpener | None = None,
+        condition_matchers: Mapping[str, ConditionMatcher] | None = None,
     ) -> "ConfigFile":
         """Read configuration from a file on disk.
 
@@ -1264,7 +1258,7 @@ class ConfigFile(ConfigDict):
         opener: FileOpener
         if file_opener is None:
 
-            def opener(p: Union[str, os.PathLike[str]]) -> IO[bytes]:
+            def opener(p: str | os.PathLike[str]) -> IO[bytes]:
                 return GitFile(p, "rb")
         else:
             opener = file_opener
@@ -1280,20 +1274,18 @@ class ConfigFile(ConfigDict):
             ret.path = abs_path
             return ret
 
-    def write_to_path(
-        self, path: Optional[Union[str, os.PathLike[str]]] = None
-    ) -> None:
+    def write_to_path(self, path: str | os.PathLike[str] | None = None) -> None:
         """Write configuration to a file on disk."""
         if path is None:
             if self.path is None:
                 raise ValueError("No path specified and no default path available")
-            path_to_use: Union[str, os.PathLike[str]] = self.path
+            path_to_use: str | os.PathLike[str] = self.path
         else:
             path_to_use = path
         with GitFile(path_to_use, "wb") as f:
             self.write_to_file(f)
 
-    def write_to_file(self, f: Union[IO[bytes], _GitFile]) -> None:
+    def write_to_file(self, f: IO[bytes] | _GitFile) -> None:
         """Write configuration to a file-like object."""
         for section, values in self._values.items():
             try:
@@ -1404,7 +1396,7 @@ class StackedConfig(Config):
     """Configuration which reads from multiple config files.."""
 
     def __init__(
-        self, backends: list[ConfigFile], writable: Optional[ConfigFile] = None
+        self, backends: list[ConfigFile], writable: ConfigFile | None = None
     ) -> None:
         """Initialize a StackedConfig.
 
@@ -1487,7 +1479,7 @@ class StackedConfig(Config):
                 pass
 
     def set(
-        self, section: SectionLike, name: NameLike, value: Union[ValueLike, bool]
+        self, section: SectionLike, name: NameLike, value: ValueLike | bool
     ) -> None:
         """Set value in configuration."""
         if self.writable is None:
@@ -1505,7 +1497,7 @@ class StackedConfig(Config):
 
 
 def read_submodules(
-    path: Union[str, os.PathLike[str]],
+    path: str | os.PathLike[str],
 ) -> Iterator[tuple[bytes, bytes, bytes]]:
     """Read a .gitmodules file."""
     cfg = ConfigFile.from_path(path)

+ 1 - 2
dulwich/contrib/diffstat.py

@@ -46,7 +46,6 @@ statistics about changes, including:
 import re
 import sys
 from collections.abc import Sequence
-from typing import Optional
 
 # only needs to detect git style diffs as this is for
 # use with dulwich
@@ -80,7 +79,7 @@ def _parse_patch(
     nametypes = []
     counts = []
     in_patch_chunk = in_git_header = binaryfile = False
-    currentfile: Optional[bytes] = None
+    currentfile: bytes | None = None
     added = deleted = 0
     for line in lines:
         if line.startswith(_GIT_HEADER_START):

+ 8 - 8
dulwich/contrib/paramiko_vendor.py

@@ -33,7 +33,7 @@ This implementation has comprehensive tests in tests/contrib/test_paramiko_vendo
 
 import os
 import warnings
-from typing import Any, BinaryIO, Optional, cast
+from typing import Any, BinaryIO, cast
 
 import paramiko
 import paramiko.client
@@ -81,7 +81,7 @@ class _ParamikoWrapper:
         """
         return self.channel.sendall(data)
 
-    def read(self, n: Optional[int] = None) -> bytes:
+    def read(self, n: int | None = None) -> bytes:
         """Read data from the channel.
 
         Args:
@@ -141,12 +141,12 @@ class ParamikoSSHVendor:
         self,
         host: str,
         command: str,
-        username: Optional[str] = None,
-        port: Optional[int] = None,
-        password: Optional[str] = None,
-        pkey: Optional[paramiko.PKey] = None,
-        key_filename: Optional[str] = None,
-        protocol_version: Optional[int] = None,
+        username: str | None = None,
+        port: int | None = None,
+        password: str | None = None,
+        pkey: paramiko.PKey | None = None,
+        key_filename: str | None = None,
+        protocol_version: int | None = None,
         **kwargs: object,
     ) -> _ParamikoWrapper:
         """Run a command on a remote host via SSH.

+ 3 - 3
dulwich/contrib/release_robot.py

@@ -50,7 +50,7 @@ import logging
 import re
 import sys
 import time
-from typing import Any, Optional, cast
+from typing import Any, cast
 
 from ..repo import Repo
 
@@ -118,8 +118,8 @@ def get_recent_tags(projdir: str = PROJDIR) -> list[tuple[str, list[Any]]]:
 def get_current_version(
     projdir: str = PROJDIR,
     pattern: str = PATTERN,
-    logger: Optional[logging.Logger] = None,
-) -> Optional[str]:
+    logger: logging.Logger | None = None,
+) -> str | None:
     """Return the most recent tag, using an options regular expression pattern.
 
     The default pattern will strip any characters preceding the first semantic

+ 12 - 12
dulwich/contrib/requests_vendor.py

@@ -31,9 +31,9 @@ the dulwich.client.HttpGitClient attribute:
 This implementation is experimental and does not have any tests.
 """
 
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
 from io import BytesIO
-from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional
 
 if TYPE_CHECKING:
     from ..config import ConfigFile
@@ -55,12 +55,12 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
     def __init__(
         self,
         base_url: str,
-        dumb: Optional[bool] = None,
+        dumb: bool | None = None,
         config: Optional["ConfigFile"] = None,
-        username: Optional[str] = None,
-        password: Optional[str] = None,
+        username: str | None = None,
+        password: str | None = None,
         thin_packs: bool = True,
-        report_activity: Optional[Callable[[int, str], None]] = None,
+        report_activity: Callable[[int, str], None] | None = None,
         quiet: bool = False,
         include_tags: bool = False,
     ) -> None:
@@ -97,8 +97,8 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
     def _http_request(
         self,
         url: str,
-        headers: Optional[dict[str, str]] = None,
-        data: Optional[Union[bytes, Iterator[bytes]]] = None,
+        headers: dict[str, str] | None = None,
+        data: bytes | Iterator[bytes] | None = None,
         raise_for_status: bool = True,
     ) -> tuple[Any, Callable[[int], bytes]]:
         req_headers = self.session.headers.copy()  # type: ignore[attr-defined]
@@ -145,10 +145,10 @@ def get_session(config: Optional["ConfigFile"]) -> Session:
     session = Session()
     session.headers.update({"Pragma": "no-cache"})
 
-    proxy_server: Optional[str] = None
-    user_agent: Optional[str] = None
-    ca_certs: Optional[str] = None
-    ssl_verify: Optional[bool] = None
+    proxy_server: str | None = None
+    user_agent: str | None = None
+    ca_certs: str | None = None
+    ssl_verify: bool | None = None
 
     if config is not None:
         try:
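
The import hunk above shows the commit's second recurring change: `Callable` now comes from `collections.abc` instead of `typing`, whose alias was deprecated by PEP 585; the ABC has accepted `[...]` subscription since Python 3.9. A small sketch with hypothetical names, not the dulwich client API:

```python
from collections.abc import Callable, Iterator

# collections.abc.Callable is subscriptable since 3.9 (PEP 585),
# so the typing.Callable import is no longer needed.
ReportActivity = Callable[[int, str], None]

def transfer(
    data: bytes | Iterator[bytes] | None = None,
    report_activity: ReportActivity | None = None,
) -> None:
    if report_activity is not None:
        report_activity(0 if data is None else 1, "rx")

transfer(b"pkt", report_activity=lambda n, direction: None)
```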

+ 26 - 30
dulwich/contrib/swift.py

@@ -36,10 +36,10 @@ import sys
 import tempfile
 import urllib.parse as urlparse
 import zlib
-from collections.abc import Iterator, Mapping
+from collections.abc import Callable, Iterator, Mapping
 from configparser import ConfigParser
 from io import BytesIO
-from typing import Any, BinaryIO, Callable, Optional, Union, cast
+from typing import Any, BinaryIO, Optional, cast
 
 from geventhttpclient import HTTPClient
 
@@ -101,7 +101,7 @@ cache_length = 20
 class PackInfoMissingObjectFinder(GreenThreadsMissingObjectFinder):
     """Find missing objects required for pack generation."""
 
-    def next(self) -> Optional[tuple[bytes, int, Union[bytes, None]]]:
+    def next(self) -> tuple[bytes, int, bytes | None] | None:
         """Get the next missing object.
 
         Returns:
@@ -149,7 +149,7 @@ class PackInfoMissingObjectFinder(GreenThreadsMissingObjectFinder):
         )
 
 
-def load_conf(path: Optional[str] = None, file: Optional[str] = None) -> ConfigParser:
+def load_conf(path: str | None = None, file: str | None = None) -> ConfigParser:
     """Load configuration in global var CONF.
 
     Args:
@@ -232,8 +232,8 @@ def pack_info_create(pack_data: "PackData", pack_index: "PackIndex") -> bytes:
 def load_pack_info(
     filename: str,
     scon: Optional["SwiftConnector"] = None,
-    file: Optional[BinaryIO] = None,
-) -> Optional[dict[str, Any]]:
+    file: BinaryIO | None = None,
+) -> dict[str, Any] | None:
     """Load pack info from Swift or file.
 
     Args:
@@ -389,7 +389,7 @@ class SwiftConnector:
         )
         return endpoint[self.endpoint_type], token
 
-    def test_root_exists(self) -> Optional[bool]:
+    def test_root_exists(self) -> bool | None:
         """Check that Swift container exist.
 
         Returns: True if exist or None it not
@@ -416,7 +416,7 @@ class SwiftConnector:
                     f"PUT request failed with error code {ret.status_code}"
                 )
 
-    def get_container_objects(self) -> Optional[list[dict[str, Any]]]:
+    def get_container_objects(self) -> list[dict[str, Any]] | None:
         """Retrieve objects list in a container.
 
         Returns: A list of dicts describing the objects
@@ -434,7 +434,7 @@ class SwiftConnector:
         content = ret.read()
         return cast(list[dict[str, Any]], json.loads(content))
 
-    def get_object_stat(self, name: str) -> Optional[dict[str, Any]]:
+    def get_object_stat(self, name: str) -> dict[str, Any] | None:
         """Retrieve object stat.
 
         Args:
@@ -485,9 +485,7 @@ class SwiftConnector:
                 f"PUT request failed with error code {ret.status_code}"  # type: ignore
             )
 
-    def get_object(
-        self, name: str, range: Optional[str] = None
-    ) -> Optional[Union[bytes, BytesIO]]:
+    def get_object(self, name: str, range: str | None = None) -> bytes | BytesIO | None:
         """Retrieve an object.
 
         Args:
@@ -637,9 +635,7 @@ class SwiftPackData(PackData):
     using the Range header feature of Swift.
     """
 
-    def __init__(
-        self, scon: SwiftConnector, filename: Union[str, os.PathLike[str]]
-    ) -> None:
+    def __init__(self, scon: SwiftConnector, filename: str | os.PathLike[str]) -> None:
         """Initialize a SwiftPackReader.
 
         Args:
@@ -663,7 +659,7 @@ class SwiftPackData(PackData):
 
     def get_object_at(
         self, offset: int
-    ) -> tuple[int, Union[tuple[Union[bytes, int], list[bytes]], list[bytes]]]:
+    ) -> tuple[int, tuple[bytes | int, list[bytes]] | list[bytes]]:
         """Get the object at a specific offset in the pack.
 
         Args:
@@ -713,15 +709,15 @@ class SwiftPack(Pack):
         del kwargs["scon"]
         super().__init__(*args, **kwargs)  # type: ignore
         self._pack_info_path = self._basename + ".info"
-        self._pack_info: Optional[dict[str, Any]] = None
-        self._pack_info_load: Callable[[], Optional[dict[str, Any]]] = (
+        self._pack_info: dict[str, Any] | None = None
+        self._pack_info_load: Callable[[], dict[str, Any] | None] = (
             lambda: load_pack_info(self._pack_info_path, self.scon)
         )
         self._idx_load = lambda: swift_load_pack_index(self.scon, self._idx_path)
         self._data_load = lambda: SwiftPackData(self.scon, self._data_path)
 
     @property
-    def pack_info(self) -> Optional[dict[str, Any]]:
+    def pack_info(self) -> dict[str, Any] | None:
         """The pack data object being used."""
         if self._pack_info is None:
             self._pack_info = self._pack_info_load()
@@ -767,7 +763,7 @@ class SwiftObjectStore(PackBasedObjectStore):
         """Loose objects are not supported by this repository."""
         return iter([])
 
-    def pack_info_get(self, sha: bytes) -> Optional[tuple[Any, ...]]:
+    def pack_info_get(self, sha: bytes) -> tuple[Any, ...] | None:
         """Get pack info for a specific SHA.
 
         Args:
@@ -781,11 +777,11 @@ class SwiftObjectStore(PackBasedObjectStore):
                 if hasattr(pack, "pack_info"):
                     pack_info = pack.pack_info
                     if pack_info is not None:
-                        return cast(Optional[tuple[Any, ...]], pack_info.get(sha))
+                        return cast(tuple[Any, ...] | None, pack_info.get(sha))
         return None
 
     def _collect_ancestors(
-        self, heads: list[Any], common: Optional[set[Any]] = None
+        self, heads: list[Any], common: set[Any] | None = None
     ) -> tuple[set[Any], set[Any]]:
         if common is None:
             common = set()
@@ -987,8 +983,8 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
         super().__init__(f)
 
     def _load_check_ref(
-        self, name: bytes, old_ref: Optional[bytes]
-    ) -> Union[dict[bytes, bytes], bool]:
+        self, name: bytes, old_ref: bytes | None
+    ) -> dict[bytes, bytes] | bool:
         self._check_refname(name)
         obj = self.scon.get_object(self.filename)
         if not obj:
@@ -1012,12 +1008,12 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
     def set_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
+        old_ref: bytes | None,
         new_ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[float] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: float | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref."""
         if name == b"HEAD":
@@ -1033,7 +1029,7 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
     def remove_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
+        old_ref: bytes | None,
         committer: object = None,
         timestamp: object = None,
         timezone: object = None,
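
The `set_if_equals` signature above keeps the refs container's compare-and-swap contract stated in its docstring: the write happens only if the ref still equals `old_ref`. Roughly, ignoring locking, reflog recording, and the HEAD special case handled above:

```python
# Toy compare-and-swap over a dict, sketching the set_if_equals contract;
# the real container also takes committer/timestamp/timezone/message so the
# update can be recorded in the reflog.
refs: dict[bytes, bytes] = {b"refs/heads/master": b"a" * 40}

def set_if_equals(name: bytes, old_ref: bytes | None, new_ref: bytes) -> bool:
    if refs.get(name) != old_ref:
        return False  # someone else moved the ref; the caller must retry
    refs[name] = new_ref
    return True

assert set_if_equals(b"refs/heads/master", b"a" * 40, b"b" * 40)
assert not set_if_equals(b"refs/heads/master", b"a" * 40, b"c" * 40)
```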

+ 1 - 2
dulwich/credentials.py

@@ -28,7 +28,6 @@ https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage
 
 import sys
 from collections.abc import Iterator
-from typing import Optional
 from urllib.parse import ParseResult, urlparse
 
 from .config import ConfigDict, SectionLike
@@ -77,7 +76,7 @@ def match_partial_url(valid_url: ParseResult, partial_url: str) -> bool:
 
 
 def urlmatch_credential_sections(
-    config: ConfigDict, url: Optional[str]
+    config: ConfigDict, url: str | None
 ) -> Iterator[SectionLike]:
     """Returns credential sections from the config which match the given URL."""
     encoding = config.encoding or sys.getdefaultencoding()

+ 16 - 16
dulwich/diff.py

@@ -50,12 +50,12 @@ import os
 import stat
 import sys
 from collections.abc import Iterable, Sequence
-from typing import BinaryIO, Optional, Union
+from typing import BinaryIO
 
 if sys.version_info >= (3, 12):
     from collections.abc import Buffer
 else:
-    Buffer = Union[bytes, bytearray, memoryview]
+    Buffer = bytes | bytearray | memoryview
 
 from .index import ConflictedIndexEntry, commit_index
 from .object_store import iter_tree_contents
@@ -66,7 +66,7 @@ from .repo import Repo
 logger = logging.getLogger(__name__)
 
 
-def should_include_path(path: bytes, paths: Optional[Sequence[bytes]]) -> bool:
+def should_include_path(path: bytes, paths: Sequence[bytes] | None) -> bool:
     """Check if a path should be included based on path filters.
 
     Args:
@@ -84,9 +84,9 @@ def should_include_path(path: bytes, paths: Optional[Sequence[bytes]]) -> bool:
 def diff_index_to_tree(
     repo: Repo,
     outstream: BinaryIO,
-    commit_sha: Optional[bytes] = None,
-    paths: Optional[Sequence[bytes]] = None,
-    diff_algorithm: Optional[str] = None,
+    commit_sha: bytes | None = None,
+    paths: Sequence[bytes] | None = None,
+    diff_algorithm: str | None = None,
 ) -> None:
     """Show staged changes (index vs commit).
 
@@ -130,8 +130,8 @@ def diff_working_tree_to_tree(
     repo: Repo,
     outstream: BinaryIO,
     commit_sha: bytes,
-    paths: Optional[Sequence[bytes]] = None,
-    diff_algorithm: Optional[str] = None,
+    paths: Sequence[bytes] | None = None,
+    diff_algorithm: str | None = None,
 ) -> None:
     """Compare working tree to a specific commit.
 
@@ -375,8 +375,8 @@ def diff_working_tree_to_tree(
 def diff_working_tree_to_index(
     repo: Repo,
     outstream: BinaryIO,
-    paths: Optional[Sequence[bytes]] = None,
-    diff_algorithm: Optional[str] = None,
+    paths: Sequence[bytes] | None = None,
+    diff_algorithm: str | None = None,
 ) -> None:
     """Compare working tree to index.
 
@@ -572,7 +572,7 @@ class ColorizedDiffStream(BinaryIO):
         self.console = Console(file=self.text_wrapper, force_terminal=True)
         self.buffer = b""
 
-    def write(self, data: Union[bytes, Buffer]) -> int:  # type: ignore[override,unused-ignore]
+    def write(self, data: bytes | Buffer) -> int:  # type: ignore[override,unused-ignore]
         """Write data to the stream, applying colorization.
 
         Args:
@@ -593,7 +593,7 @@ class ColorizedDiffStream(BinaryIO):
 
         return len(data)
 
-    def writelines(self, lines: Iterable[Union[bytes, Buffer]]) -> None:  # type: ignore[override,unused-ignore]
+    def writelines(self, lines: Iterable[bytes | Buffer]) -> None:  # type: ignore[override,unused-ignore]
         """Write a list of lines to the stream.
 
         Args:
@@ -686,7 +686,7 @@ class ColorizedDiffStream(BinaryIO):
         """Tell is not supported on this stream."""
         raise io.UnsupportedOperation("not seekable")
 
-    def truncate(self, size: Optional[int] = None) -> int:
+    def truncate(self, size: int | None = None) -> int:
         """Truncate is not supported on this stream."""
         raise io.UnsupportedOperation("not truncatable")
 
@@ -700,9 +700,9 @@ class ColorizedDiffStream(BinaryIO):
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[object],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: object | None,
     ) -> None:
         """Context manager exit."""
         self.flush()
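
The `Buffer` shim at the top of this file is the one place a version check must stay: `collections.abc.Buffer` only exists on Python 3.12+ (PEP 688), so on 3.10 and 3.11 the fallback alias is now a runtime union type rather than `typing.Union`. The same pattern, self-contained:

```python
import sys

if sys.version_info >= (3, 12):
    from collections.abc import Buffer  # PEP 688, new in 3.12
else:
    # A types.UnionType works both as an annotation and with isinstance();
    # it covers the common bytes-like objects, which is all that is needed here.
    Buffer = bytes | bytearray | memoryview

def byte_length(data: Buffer) -> int:
    return len(memoryview(data))

assert byte_length(bytearray(b"abc")) == 3
```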

+ 26 - 26
dulwich/diff_tree.py

@@ -23,11 +23,11 @@
 
 import stat
 from collections import defaultdict
-from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Callable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
 from io import BytesIO
 from itertools import chain
-from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, TypeVar
+from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypeVar
 
 from .object_store import BaseObjectStore
 from .objects import S_ISGITLINK, ObjectID, ShaFile, Tree, TreeEntry
@@ -47,15 +47,15 @@ RENAME_CHANGE_TYPES = (CHANGE_RENAME, CHANGE_COPY)
 _MAX_SCORE = 100
 RENAME_THRESHOLD = 60
 MAX_FILES = 200
-REWRITE_THRESHOLD: Optional[int] = None
+REWRITE_THRESHOLD: int | None = None
 
 
 class TreeChange(NamedTuple):
     """Named tuple a single change between two trees."""
 
     type: str
-    old: Optional[TreeEntry]
-    new: Optional[TreeEntry]
+    old: TreeEntry | None
+    new: TreeEntry | None
 
     @classmethod
     def add(cls, new: TreeEntry) -> "TreeChange":
@@ -93,7 +93,7 @@ def _tree_entries(path: bytes, tree: Tree) -> list[TreeEntry]:
 
 def _merge_entries(
     path: bytes, tree1: Tree, tree2: Tree
-) -> list[tuple[Optional[TreeEntry], Optional[TreeEntry]]]:
+) -> list[tuple[TreeEntry | None, TreeEntry | None]]:
     """Merge the entries of two trees.
 
     Args:
@@ -112,7 +112,7 @@ def _merge_entries(
     len1 = len(entries1)
     len2 = len(entries2)
 
-    result: list[tuple[Optional[TreeEntry], Optional[TreeEntry]]] = []
+    result: list[tuple[TreeEntry | None, TreeEntry | None]] = []
     while i1 < len1 and i2 < len2:
         entry1 = entries1[i1]
         entry2 = entries2[i2]
@@ -133,7 +133,7 @@ def _merge_entries(
     return result
 
 
-def _is_tree(entry: Optional[TreeEntry]) -> bool:
+def _is_tree(entry: TreeEntry | None) -> bool:
     if entry is None or entry.mode is None:
         return False
     return stat.S_ISDIR(entry.mode)
@@ -141,11 +141,11 @@ def _is_tree(entry: Optional[TreeEntry]) -> bool:
 
 def walk_trees(
     store: BaseObjectStore,
-    tree1_id: Optional[ObjectID],
-    tree2_id: Optional[ObjectID],
+    tree1_id: ObjectID | None,
+    tree2_id: ObjectID | None,
     prune_identical: bool = False,
-    paths: Optional[Sequence[bytes]] = None,
-) -> Iterator[tuple[Optional[TreeEntry], Optional[TreeEntry]]]:
+    paths: Sequence[bytes] | None = None,
+) -> Iterator[tuple[TreeEntry | None, TreeEntry | None]]:
     """Recursively walk all the entries of two trees.
 
     Iteration is depth-first pre-order, as in e.g. os.walk.
@@ -167,7 +167,7 @@ def walk_trees(
     # case.
     entry1 = TreeEntry(b"", stat.S_IFDIR, tree1_id) if tree1_id else None
     entry2 = TreeEntry(b"", stat.S_IFDIR, tree2_id) if tree2_id else None
-    todo: list[tuple[Optional[TreeEntry], Optional[TreeEntry]]] = [(entry1, entry2)]
+    todo: list[tuple[TreeEntry | None, TreeEntry | None]] = [(entry1, entry2)]
     while todo:
         entry1, entry2 = todo.pop()
         is_tree1 = _is_tree(entry1)
@@ -247,7 +247,7 @@ def walk_trees(
                     break
 
 
-def _skip_tree(entry: Optional[TreeEntry], include_trees: bool) -> Optional[TreeEntry]:
+def _skip_tree(entry: TreeEntry | None, include_trees: bool) -> TreeEntry | None:
     if entry is None or entry.mode is None:
         return None
     if not include_trees and stat.S_ISDIR(entry.mode):
@@ -257,13 +257,13 @@ def _skip_tree(entry: Optional[TreeEntry], include_trees: bool) -> Optional[Tree
 
 def tree_changes(
     store: BaseObjectStore,
-    tree1_id: Optional[ObjectID],
-    tree2_id: Optional[ObjectID],
+    tree1_id: ObjectID | None,
+    tree2_id: ObjectID | None,
     want_unchanged: bool = False,
     rename_detector: Optional["RenameDetector"] = None,
     include_trees: bool = False,
     change_type_same: bool = False,
-    paths: Optional[Sequence[bytes]] = None,
+    paths: Sequence[bytes] | None = None,
 ) -> Iterator[TreeChange]:
     """Find the differences between the contents of two trees.
 
@@ -348,7 +348,7 @@ def tree_changes_for_merge(
     parent_tree_ids: Sequence[ObjectID],
     tree_id: ObjectID,
     rename_detector: Optional["RenameDetector"] = None,
-) -> Iterator[list[Optional[TreeChange]]]:
+) -> Iterator[list[TreeChange | None]]:
     """Get the tree changes for a merge tree relative to all its parents.
 
     Args:
@@ -374,7 +374,7 @@ def tree_changes_for_merge(
         for t in parent_tree_ids
     ]
     num_parents = len(parent_tree_ids)
-    changes_by_path: dict[bytes, list[Optional[TreeChange]]] = defaultdict(
+    changes_by_path: dict[bytes, list[TreeChange | None]] = defaultdict(
         lambda: [None] * num_parents
     )
 
@@ -390,7 +390,7 @@ def tree_changes_for_merge(
             assert path is not None
             changes_by_path[path][i] = change
 
-    def old_sha(c: TreeChange) -> Optional[ObjectID]:
+    def old_sha(c: TreeChange) -> ObjectID | None:
         return c.old.sha if c.old is not None else None
 
     def change_type(c: TreeChange) -> str:
@@ -477,7 +477,7 @@ def _common_bytes(blocks1: Mapping[int, int], blocks2: Mapping[int, int]) -> int
 def _similarity_score(
     obj1: ShaFile,
     obj2: ShaFile,
-    block_cache: Optional[dict[ObjectID, dict[int, int]]] = None,
+    block_cache: dict[ObjectID, dict[int, int]] | None = None,
 ) -> int:
     """Compute a similarity score for two objects.
 
@@ -531,8 +531,8 @@ class RenameDetector:
         self,
         store: BaseObjectStore,
         rename_threshold: int = RENAME_THRESHOLD,
-        max_files: Optional[int] = MAX_FILES,
-        rewrite_threshold: Optional[int] = REWRITE_THRESHOLD,
+        max_files: int | None = MAX_FILES,
+        rewrite_threshold: int | None = REWRITE_THRESHOLD,
         find_copies_harder: bool = False,
     ) -> None:
         """Initialize the rename detector.
@@ -597,7 +597,7 @@ class RenameDetector:
             self._changes.append(change)
 
     def _collect_changes(
-        self, tree1_id: Optional[ObjectID], tree2_id: Optional[ObjectID]
+        self, tree1_id: ObjectID | None, tree2_id: ObjectID | None
     ) -> None:
         want_unchanged = self._find_copies_harder or self._want_unchanged
         for change in tree_changes(
@@ -794,8 +794,8 @@ class RenameDetector:
 
     def changes_with_renames(
         self,
-        tree1_id: Optional[ObjectID],
-        tree2_id: Optional[ObjectID],
+        tree1_id: ObjectID | None,
+        tree2_id: ObjectID | None,
         want_unchanged: bool = False,
         include_trees: bool = False,
     ) -> list[TreeChange]:
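
As the `TreeChange` hunk shows, PEP 604 unions also work directly in `NamedTuple` field annotations, which are evaluated when the class is created. A toy equivalent, with a stand-in `Entry` instead of dulwich's `TreeEntry`:

```python
from typing import NamedTuple

class Entry(NamedTuple):
    path: bytes
    mode: int
    sha: bytes

class Change(NamedTuple):
    type: str
    old: Entry | None  # None for an add
    new: Entry | None  # None for a delete

add = Change("add", None, Entry(b"f.txt", 0o100644, b"0" * 40))
delete = Change("delete", Entry(b"g.txt", 0o100644, b"1" * 40), None)
assert add.old is None and delete.new is None
```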

+ 12 - 14
dulwich/dumb.py

@@ -24,9 +24,9 @@
 import os
 import tempfile
 import zlib
-from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Callable, Iterator, Mapping, Sequence
 from io import BytesIO
-from typing import Any, Callable, Optional
+from typing import Any, Optional
 from urllib.parse import urljoin
 
 from .errors import NotGitRepository, ObjectFormatException
@@ -65,9 +65,9 @@ class DumbHTTPObjectStore(BaseObjectStore):
         """
         self.base_url = base_url.rstrip("/") + "/"
         self._http_request = http_request_func
-        self._packs: Optional[list[tuple[str, Optional[PackIndex]]]] = None
+        self._packs: list[tuple[str, PackIndex | None]] | None = None
         self._cached_objects: dict[bytes, tuple[int, bytes]] = {}
-        self._temp_pack_dir: Optional[str] = None
+        self._temp_pack_dir: str | None = None
 
     def _ensure_temp_pack_dir(self) -> None:
         """Ensure we have a temporary directory for storing pack files."""
@@ -338,8 +338,8 @@ class DumbHTTPObjectStore(BaseObjectStore):
 
     def add_objects(
         self,
-        objects: Sequence[tuple[ShaFile, Optional[str]]],
-        progress: Optional[Callable[[str], None]] = None,
+        objects: Sequence[tuple[ShaFile, str | None]],
+        progress: Callable[[str], None] | None = None,
     ) -> Optional["Pack"]:
         """Add a set of objects to this object store."""
         raise NotImplementedError("Cannot add objects to dumb HTTP repository")
@@ -370,8 +370,8 @@ class DumbRemoteHTTPRepo:
         """
         self.base_url = base_url.rstrip("/") + "/"
         self._http_request = http_request_func
-        self._refs: Optional[dict[Ref, ObjectID]] = None
-        self._peeled: Optional[dict[Ref, ObjectID]] = None
+        self._refs: dict[Ref, ObjectID] | None = None
+        self._peeled: dict[Ref, ObjectID] | None = None
         self.object_store = DumbHTTPObjectStore(base_url, http_request_func)
 
     def _fetch_url(self, path: str) -> bytes:
@@ -434,14 +434,12 @@ class DumbRemoteHTTPRepo:
 
     def fetch_pack_data(
         self,
-        determine_wants: Callable[
-            [Mapping[Ref, ObjectID], Optional[int]], list[ObjectID]
-        ],
+        determine_wants: Callable[[Mapping[Ref, ObjectID], int | None], list[ObjectID]],
         graph_walker: object,
-        progress: Optional[Callable[[bytes], None]] = None,
+        progress: Callable[[bytes], None] | None = None,
         *,
-        get_tagged: Optional[bool] = None,
-        depth: Optional[int] = None,
+        get_tagged: bool | None = None,
+        depth: int | None = None,
     ) -> Iterator[UnpackedObject]:
         """Fetch pack data from the remote.
 

+ 5 - 6
dulwich/errors.py

@@ -28,7 +28,6 @@
 
 import binascii
 from collections.abc import Sequence
-from typing import Optional, Union
 
 
 class ChecksumMismatch(Exception):
@@ -36,9 +35,9 @@ class ChecksumMismatch(Exception):
 
     def __init__(
         self,
-        expected: Union[bytes, str],
-        got: Union[bytes, str],
-        extra: Optional[str] = None,
+        expected: bytes | str,
+        got: bytes | str,
+        extra: str | None = None,
     ) -> None:
         """Initialize a ChecksumMismatch exception.
 
@@ -198,7 +197,7 @@ class SendPackError(GitProtocolError):
 class HangupException(GitProtocolError):
     """Hangup exception."""
 
-    def __init__(self, stderr_lines: Optional[Sequence[bytes]] = None) -> None:
+    def __init__(self, stderr_lines: Sequence[bytes] | None = None) -> None:
         """Initialize a HangupException.
 
         Args:
@@ -232,7 +231,7 @@ class HangupException(GitProtocolError):
 class UnexpectedCommandError(GitProtocolError):
     """Unexpected command received in a proto line."""
 
-    def __init__(self, command: Optional[str]) -> None:
+    def __init__(self, command: str | None) -> None:
         """Initialize an UnexpectedCommandError.
 
         Args:

+ 6 - 6
dulwich/fastexport.py

@@ -24,7 +24,7 @@
 
 import stat
 from collections.abc import Generator
-from typing import TYPE_CHECKING, Any, BinaryIO, Optional
+from typing import TYPE_CHECKING, Any, BinaryIO
 
 from fastimport import commands, parser, processor
 from fastimport import errors as fastimport_errors
@@ -117,7 +117,7 @@ class GitFastExporter:
         return marker
 
     def _iter_files(
-        self, base_tree: Optional[bytes], new_tree: Optional[bytes]
+        self, base_tree: bytes | None, new_tree: bytes | None
     ) -> Generator[Any, None, None]:
         for (
             (old_path, new_path),
@@ -146,7 +146,7 @@ class GitFastExporter:
                 )
 
     def _export_commit(
-        self, commit: Commit, ref: Ref, base_tree: Optional[ObjectID] = None
+        self, commit: Commit, ref: Ref, base_tree: ObjectID | None = None
     ) -> tuple[Any, bytes]:
         file_cmds = list(self._iter_files(base_tree, commit.tree))
         marker = self._allocate_marker()
@@ -176,7 +176,7 @@ class GitFastExporter:
         return (cmd, marker)
 
     def emit_commit(
-        self, commit: Commit, ref: Ref, base_tree: Optional[ObjectID] = None
+        self, commit: Commit, ref: Ref, base_tree: ObjectID | None = None
     ) -> bytes:
         """Emit a commit in fast-export format.
 
@@ -201,9 +201,9 @@ class GitImportProcessor(processor.ImportProcessor):  # type: ignore[misc,unused
     def __init__(
         self,
         repo: "BaseRepo",
-        params: Optional[Any] = None,  # noqa: ANN401
+        params: Any | None = None,  # noqa: ANN401
         verbose: bool = False,
-        outf: Optional[BinaryIO] = None,
+        outf: BinaryIO | None = None,
     ) -> None:
         """Initialize GitImportProcessor.
 

+ 24 - 24
dulwich/file.py

@@ -26,16 +26,16 @@ import sys
 import warnings
 from collections.abc import Iterable, Iterator
 from types import TracebackType
-from typing import IO, Any, ClassVar, Literal, Optional, Union, overload
+from typing import IO, Any, ClassVar, Literal, overload
 
 if sys.version_info >= (3, 12):
     from collections.abc import Buffer
 else:
-    Buffer = Union[bytes, bytearray, memoryview]
+    Buffer = bytes | bytearray | memoryview
 
 
 def ensure_dir_exists(
-    dirname: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
+    dirname: str | bytes | os.PathLike[str] | os.PathLike[bytes],
 ) -> None:
     """Ensure a directory exists, creating if necessary."""
     try:
@@ -44,7 +44,7 @@ def ensure_dir_exists(
         pass
 
 
-def _fancy_rename(oldname: Union[str, bytes], newname: Union[str, bytes]) -> None:
+def _fancy_rename(oldname: str | bytes, newname: str | bytes) -> None:
     """Rename file with temporary backup file to rollback if rename fails."""
     if not os.path.exists(newname):
         os.rename(oldname, newname)
@@ -68,7 +68,7 @@ def _fancy_rename(oldname: Union[str, bytes], newname: Union[str, bytes]) -> Non
 
 @overload
 def GitFile(
-    filename: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
+    filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
     mode: Literal["wb"],
     bufsize: int = -1,
     mask: int = 0o644,
@@ -78,7 +78,7 @@ def GitFile(
 
 @overload
 def GitFile(
-    filename: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
+    filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
     mode: Literal["rb"] = "rb",
     bufsize: int = -1,
     mask: int = 0o644,
@@ -88,21 +88,21 @@ def GitFile(
 
 @overload
 def GitFile(
-    filename: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
+    filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
     mode: str = "rb",
     bufsize: int = -1,
     mask: int = 0o644,
     fsync: bool = True,
-) -> Union[IO[bytes], "_GitFile"]: ...
+) -> "IO[bytes] | _GitFile": ...
 
 
 def GitFile(
-    filename: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
+    filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
     mode: str = "rb",
     bufsize: int = -1,
     mask: int = 0o644,
     fsync: bool = True,
-) -> Union[IO[bytes], "_GitFile"]:
+) -> "IO[bytes] | _GitFile":
     """Create a file object that obeys the git file locking protocol.
 
     Returns: a builtin file object or a _GitFile object
@@ -142,8 +142,8 @@ class FileLocked(Exception):
 
     def __init__(
         self,
-        filename: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
-        lockfilename: Union[str, bytes],
+        filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
+        lockfilename: str | bytes,
     ) -> None:
         """Initialize FileLocked.
 
@@ -168,8 +168,8 @@ class _GitFile(IO[bytes]):
     """
 
     _file: IO[bytes]
-    _filename: Union[str, bytes]
-    _lockfilename: Union[str, bytes]
+    _filename: str | bytes
+    _lockfilename: str | bytes
     _closed: bool
 
     PROXY_PROPERTIES: ClassVar[set[str]] = {
@@ -201,17 +201,17 @@ class _GitFile(IO[bytes]):
 
     def __init__(
         self,
-        filename: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
+        filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
         mode: str,
         bufsize: int,
         mask: int,
         fsync: bool = True,
     ) -> None:
         # Convert PathLike to str/bytes for our internal use
-        self._filename: Union[str, bytes] = os.fspath(filename)
+        self._filename: str | bytes = os.fspath(filename)
         self._fsync = fsync
         if isinstance(self._filename, bytes):
-            self._lockfilename: Union[str, bytes] = self._filename + b".lock"
+            self._lockfilename: str | bytes = self._filename + b".lock"
         else:
             self._lockfilename = self._filename + ".lock"
         try:
@@ -286,16 +286,16 @@ class _GitFile(IO[bytes]):
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         if exc_type is not None:
             self.abort()
         else:
             self.close()
 
-    def __fspath__(self) -> Union[str, bytes]:
+    def __fspath__(self) -> str | bytes:
         """Return the file path for os.fspath() compatibility."""
         return self._filename
 
@@ -315,7 +315,7 @@ class _GitFile(IO[bytes]):
         return self._file.read(size)
 
     # TODO: Remove type: ignore when Python 3.10 support is dropped (Oct 2026)
-    # Python 3.9/3.10 have issues with IO[bytes] overload signatures
+    # Python 3.10 has issues with IO[bytes] overload signatures
     def write(self, data: Buffer, /) -> int:  # type: ignore[override,unused-ignore]
         return self._file.write(data)
 
@@ -326,7 +326,7 @@ class _GitFile(IO[bytes]):
         return self._file.readlines(hint)
 
     # TODO: Remove type: ignore when Python 3.10 support is dropped (Oct 2026)
-    # Python 3.9/3.10 have issues with IO[bytes] overload signatures
+    # Python 3.10 has issues with IO[bytes] overload signatures
     def writelines(self, lines: Iterable[Buffer], /) -> None:  # type: ignore[override,unused-ignore]
         return self._file.writelines(lines)
 
@@ -339,7 +339,7 @@ class _GitFile(IO[bytes]):
     def flush(self) -> None:
         return self._file.flush()
 
-    def truncate(self, size: Optional[int] = None) -> int:
+    def truncate(self, size: int | None = None) -> int:
         return self._file.truncate(size)
 
     def fileno(self) -> int:
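
Two details in the `GitFile` hunks are easy to miss. The `Literal["rb"]`/`Literal["wb"]` overloads let a type checker narrow the return type from the mode argument, and the catch-all overload's return annotation stays quoted as `"IO[bytes] | _GitFile"`: quoting the whole union keeps it a forward reference that is never evaluated at import time, so `_GitFile`, defined later in the module, need not exist yet. A reduced sketch with a hypothetical `LockedWriter` standing in for `_GitFile`:

```python
from typing import IO, Literal, overload

@overload
def open_git_file(name: str, mode: Literal["wb"]) -> "LockedWriter": ...
@overload
def open_git_file(name: str, mode: Literal["rb"] = "rb") -> IO[bytes]: ...
def open_git_file(name: str, mode: str = "rb") -> "IO[bytes] | LockedWriter":
    # Writes would go through a lock-protected proxy; reads return a plain file.
    if mode == "wb":
        return LockedWriter(name)
    return open(name, "rb")

class LockedWriter:  # defined after its first (quoted) mention, like _GitFile
    def __init__(self, name: str) -> None:
        self.name = name
```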

+ 15 - 15
dulwich/filter_branch.py

@@ -24,8 +24,8 @@
 import os
 import tempfile
 import warnings
-from collections.abc import Sequence
-from typing import Callable, Optional, TypedDict
+from collections.abc import Callable, Sequence
+from typing import TypedDict
 
 from .index import Index, build_index_from_tree
 from .object_store import BaseObjectStore
@@ -53,17 +53,17 @@ class CommitFilter:
         self,
         object_store: BaseObjectStore,
         *,
-        filter_fn: Optional[Callable[[Commit], Optional[CommitData]]] = None,
-        filter_author: Optional[Callable[[bytes], Optional[bytes]]] = None,
-        filter_committer: Optional[Callable[[bytes], Optional[bytes]]] = None,
-        filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
-        tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
-        index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
-        parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
-        commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
-        subdirectory_filter: Optional[bytes] = None,
+        filter_fn: Callable[[Commit], CommitData | None] | None = None,
+        filter_author: Callable[[bytes], bytes | None] | None = None,
+        filter_committer: Callable[[bytes], bytes | None] | None = None,
+        filter_message: Callable[[bytes], bytes | None] | None = None,
+        tree_filter: Callable[[bytes, str], bytes | None] | None = None,
+        index_filter: Callable[[bytes, str], bytes | None] | None = None,
+        parent_filter: Callable[[Sequence[bytes]], list[bytes]] | None = None,
+        commit_filter: Callable[[Commit, bytes], bytes | None] | None = None,
+        subdirectory_filter: bytes | None = None,
         prune_empty: bool = False,
-        tag_name_filter: Optional[Callable[[bytes], Optional[bytes]]] = None,
+        tag_name_filter: Callable[[bytes], bytes | None] | None = None,
     ):
         """Initialize a commit filter.
 
@@ -107,7 +107,7 @@ class CommitFilter:
 
     def _filter_tree_with_subdirectory(
         self, tree_sha: bytes, subdirectory: bytes
-    ) -> Optional[bytes]:
+    ) -> bytes | None:
         """Extract a subdirectory from a tree as the new root.
 
         Args:
@@ -217,7 +217,7 @@ class CommitFilter:
         finally:
             os.unlink(tmp_index_path)
 
-    def process_commit(self, commit_sha: bytes) -> Optional[bytes]:
+    def process_commit(self, commit_sha: bytes) -> bytes | None:
         """Process a single commit, creating a filtered version.
 
         Args:
@@ -383,7 +383,7 @@ def filter_refs(
     *,
     keep_original: bool = True,
     force: bool = False,
-    tag_callback: Optional[Callable[[bytes, bytes], None]] = None,
+    tag_callback: Callable[[bytes, bytes], None] | None = None,
 ) -> dict[bytes, bytes]:
     """Filter commits reachable from the given refs.
 

+ 20 - 19
dulwich/filters.py

@@ -24,7 +24,8 @@
 import logging
 import subprocess
 import threading
-from typing import TYPE_CHECKING, Callable, Optional
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Optional
 from typing import Protocol as TypingProtocol
 
 from .attrs import GitAttributes
@@ -114,11 +115,11 @@ class ProcessFilterDriver:
 
     def __init__(
         self,
-        clean_cmd: Optional[str] = None,
-        smudge_cmd: Optional[str] = None,
+        clean_cmd: str | None = None,
+        smudge_cmd: str | None = None,
         required: bool = False,
-        cwd: Optional[str] = None,
-        process_cmd: Optional[str] = None,
+        cwd: str | None = None,
+        process_cmd: str | None = None,
     ) -> None:
         """Initialize ProcessFilterDriver.
 
@@ -134,8 +135,8 @@ class ProcessFilterDriver:
         self.required = required
         self.cwd = cwd
         self.process_cmd = process_cmd
-        self._process: Optional[subprocess.Popen[bytes]] = None
-        self._protocol: Optional[Protocol] = None
+        self._process: subprocess.Popen[bytes] | None = None
+        self._protocol: Protocol | None = None
         self._capabilities: set[bytes] = set()
         self._process_lock = threading.Lock()
 
@@ -508,7 +509,7 @@ class FilterContext:
         self.filter_registry = filter_registry
         self._active_drivers: dict[str, FilterDriver] = {}
 
-    def get_driver(self, name: str) -> Optional[FilterDriver]:
+    def get_driver(self, name: str) -> FilterDriver | None:
         """Get a filter driver by name, managing stateful instances.
 
         This method handles driver instantiation and caching. Only drivers
@@ -520,7 +521,7 @@ class FilterContext:
         Returns:
             FilterDriver instance or None
         """
-        driver: Optional[FilterDriver] = None
+        driver: FilterDriver | None = None
         # Check if we have a cached instance that should be reused
         if name in self._active_drivers:
             driver = self._active_drivers[name]
@@ -616,7 +617,7 @@ class FilterRegistry:
         """Register a filter driver instance."""
         self._drivers[name] = driver
 
-    def get_driver(self, name: str) -> Optional[FilterDriver]:
+    def get_driver(self, name: str) -> FilterDriver | None:
         """Get a filter driver by name."""
         # Check if we already have an instance
         if name in self._drivers:
@@ -651,14 +652,14 @@ class FilterRegistry:
             # Don't raise exceptions in __del__
             pass
 
-    def _create_from_config(self, name: str) -> Optional[FilterDriver]:
+    def _create_from_config(self, name: str) -> FilterDriver | None:
         """Create a filter driver from config."""
         if self.config is None:
             return None
 
-        clean_cmd: Optional[str] = None
-        smudge_cmd: Optional[str] = None
-        process_cmd: Optional[str] = None
+        clean_cmd: str | None = None
+        smudge_cmd: str | None = None
+        process_cmd: str | None = None
 
         # Get process command (preferred over clean/smudge for performance)
         try:
@@ -764,9 +765,9 @@ class FilterRegistry:
 def get_filter_for_path(
     path: bytes,
     gitattributes: "GitAttributes",
-    filter_registry: Optional[FilterRegistry] = None,
-    filter_context: Optional[FilterContext] = None,
-) -> Optional[FilterDriver]:
+    filter_registry: FilterRegistry | None = None,
+    filter_context: FilterContext | None = None,
+) -> FilterDriver | None:
     """Get the appropriate filter driver for a given path.
 
     Args:
@@ -864,9 +865,9 @@ class FilterBlobNormalizer:
         self,
         config_stack: Optional["StackedConfig"],
         gitattributes: GitAttributes,
-        filter_registry: Optional[FilterRegistry] = None,
+        filter_registry: FilterRegistry | None = None,
         repo: Optional["BaseRepo"] = None,
-        filter_context: Optional[FilterContext] = None,
+        filter_context: FilterContext | None = None,
     ) -> None:
         """Initialize FilterBlobNormalizer.
 

+ 9 - 8
dulwich/gc.py

@@ -4,8 +4,9 @@ import collections
 import logging
 import os
 import time
+from collections.abc import Callable
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Callable, Optional
+from typing import TYPE_CHECKING, Optional
 
 from dulwich.object_store import (
     BaseObjectStore,
@@ -39,7 +40,7 @@ def find_reachable_objects(
     object_store: BaseObjectStore,
     refs_container: RefsContainer,
     include_reflogs: bool = True,
-    progress: Optional[Callable[[str], None]] = None,
+    progress: Callable[[str], None] | None = None,
 ) -> set[bytes]:
     """Find all reachable objects in the repository.
 
@@ -113,7 +114,7 @@ def find_unreachable_objects(
     object_store: BaseObjectStore,
     refs_container: RefsContainer,
     include_reflogs: bool = True,
-    progress: Optional[Callable[[str], None]] = None,
+    progress: Callable[[str], None] | None = None,
 ) -> set[bytes]:
     """Find all unreachable objects in the repository.
 
@@ -141,9 +142,9 @@ def find_unreachable_objects(
 def prune_unreachable_objects(
     object_store: DiskObjectStore,
     refs_container: RefsContainer,
-    grace_period: Optional[int] = None,
+    grace_period: int | None = None,
     dry_run: bool = False,
-    progress: Optional[Callable[[str], None]] = None,
+    progress: Callable[[str], None] | None = None,
 ) -> tuple[set[bytes], int]:
     """Remove unreachable objects from the repository.
 
@@ -211,9 +212,9 @@ def garbage_collect(
     auto: bool = False,
     aggressive: bool = False,
     prune: bool = True,
-    grace_period: Optional[int] = 1209600,  # 2 weeks default
+    grace_period: int | None = 1209600,  # 2 weeks default
     dry_run: bool = False,
-    progress: Optional[Callable[[str], None]] = None,
+    progress: Callable[[str], None] | None = None,
 ) -> GCStats:
     """Run garbage collection on a repository.
 
@@ -372,7 +373,7 @@ def should_run_gc(repo: "BaseRepo", config: Optional["Config"] = None) -> bool:
 def maybe_auto_gc(
     repo: "Repo",
     config: Optional["Config"] = None,
-    progress: Optional[Callable[[str], None]] = None,
+    progress: Callable[[str], None] | None = None,
 ) -> bool:
     """Run automatic garbage collection if needed.
 

+ 4 - 4
dulwich/graph.py

@@ -20,9 +20,9 @@
 
 """Implementation of merge-base following the approach of git."""
 
-from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Callable, Iterator, Mapping, Sequence
 from heapq import heappop, heappush
-from typing import TYPE_CHECKING, Callable, Generic, Optional, TypeVar
+from typing import TYPE_CHECKING, Generic, TypeVar
 
 if TYPE_CHECKING:
     from .repo import BaseRepo
@@ -52,7 +52,7 @@ class WorkList(Generic[T]):
         dt, cmt = item
         heappush(self.pq, (-dt, cmt))
 
-    def get(self) -> Optional[tuple[int, T]]:
+    def get(self) -> tuple[int, T] | None:
         """Get the highest priority item from the work list.
 
         Returns:
@@ -80,7 +80,7 @@ def _find_lcas(
     c2s: Sequence[ObjectID],
     lookup_stamp: Callable[[ObjectID], int],
     min_stamp: int = 0,
-    shallows: Optional[set[ObjectID]] = None,
+    shallows: set[ObjectID] | None = None,
 ) -> list[ObjectID]:
     """Find lowest common ancestors between commits.
 

+ 7 - 8
dulwich/greenthreads.py

@@ -23,8 +23,7 @@
 
 """Utility module for querying an ObjectStore with gevent."""
 
-from collections.abc import Sequence
-from typing import Callable, Optional
+from collections.abc import Callable, Sequence
 
 import gevent
 from gevent import pool
@@ -85,10 +84,10 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
         object_store: BaseObjectStore,
         haves: Sequence[ObjectID],
         wants: Sequence[ObjectID],
-        progress: Optional[Callable[[bytes], None]] = None,
-        get_tagged: Optional[Callable[[], dict[ObjectID, ObjectID]]] = None,
+        progress: Callable[[bytes], None] | None = None,
+        get_tagged: Callable[[], dict[ObjectID, ObjectID]] | None = None,
         concurrency: int = 1,
-        get_parents: Optional[Callable[[ObjectID], list[ObjectID]]] = None,
+        get_parents: Callable[[ObjectID], list[ObjectID]] | None = None,
     ) -> None:
         """Initialize GreenThreadsMissingObjectFinder.
 
@@ -131,9 +130,9 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
             self.sha_done.add(t)
         missing_tags = want_tags.difference(have_tags)
         all_wants = missing_commits.union(missing_tags)
-        self.objects_to_send: set[
-            tuple[ObjectID, Optional[bytes], Optional[int], bool]
-        ] = {(w, None, 0, False) for w in all_wants}
+        self.objects_to_send: set[tuple[ObjectID, bytes | None, int | None, bool]] = {
+            (w, None, 0, False) for w in all_wants
+        }
         if progress is None:
             self.progress: Callable[[bytes], None] = lambda x: None
         else:

+ 7 - 7
dulwich/hooks.py

@@ -23,8 +23,8 @@
 
 import os
 import subprocess
-from collections.abc import Sequence
-from typing import Any, Callable, Optional
+from collections.abc import Callable, Sequence
+from typing import Any
 
 from .errors import HookError
 
@@ -58,9 +58,9 @@ class ShellHook(Hook):
         name: str,
         path: str,
         numparam: int,
-        pre_exec_callback: Optional[Callable[..., Any]] = None,
-        post_exec_callback: Optional[Callable[..., Any]] = None,
-        cwd: Optional[str] = None,
+        pre_exec_callback: Callable[..., Any] | None = None,
+        post_exec_callback: Callable[..., Any] | None = None,
+        cwd: str | None = None,
     ) -> None:
         """Setup shell hook definition.
 
@@ -162,7 +162,7 @@ class CommitMsgShellHook(ShellHook):
 
             return (path,)
 
-        def clean_msg(success: int, *args: str) -> Optional[bytes]:
+        def clean_msg(success: int, *args: str) -> bytes | None:
             if success:
                 with open(args[0], "rb") as f:
                     new_msg = f.read()
@@ -191,7 +191,7 @@ class PostReceiveShellHook(ShellHook):
 
     def execute(
         self, client_refs: Sequence[tuple[bytes, bytes, bytes]]
-    ) -> Optional[bytes]:
+    ) -> bytes | None:
         """Execute the post-receive hook.
 
         Args:
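
`Callable[..., Any]` is one more variant worth noting: the ellipsis leaves the parameter list unconstrained, which suits user-supplied pre/post-exec callbacks whose signatures the hook machinery does not dictate. A sketch with stand-in logic, not the real hook runner:

```python
from collections.abc import Callable
from typing import Any

def run_with_callbacks(
    args: tuple[str, ...],
    pre_exec: Callable[..., Any] | None = None,
    post_exec: Callable[..., Any] | None = None,
) -> Any:
    if pre_exec is not None:
        args = pre_exec(*args)   # callback may reshape the arguments
    result = " ".join(args)      # stand-in for actually running a hook script
    return post_exec(result) if post_exec is not None else result

assert run_with_callbacks(("a", "b"), pre_exec=lambda *a: tuple(reversed(a))) == "b a"
```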

+ 10 - 10
dulwich/ignore.py

@@ -30,7 +30,7 @@ import os.path
 import re
 from collections.abc import Iterable, Sequence
 from contextlib import suppress
-from typing import TYPE_CHECKING, BinaryIO, Optional, Union
+from typing import TYPE_CHECKING, BinaryIO, Union
 
 if TYPE_CHECKING:
     from .repo import Repo
@@ -41,7 +41,7 @@ from .config import Config, get_xdg_config_home_path
 def _pattern_to_str(pattern: Union["Pattern", bytes, str]) -> str:
     """Convert a pattern to string, handling both Pattern objects and raw patterns."""
     if isinstance(pattern, Pattern):
-        pattern_data: Union[bytes, str] = pattern.pattern
+        pattern_data: bytes | str = pattern.pattern
     else:
         pattern_data = pattern
     return pattern_data.decode() if isinstance(pattern_data, bytes) else pattern_data
@@ -416,7 +416,7 @@ class IgnoreFilter:
         self,
         patterns: Iterable[bytes],
         ignorecase: bool = False,
-        path: Optional[str] = None,
+        path: str | None = None,
     ) -> None:
         """Initialize an IgnoreFilter with a set of patterns.
 
@@ -435,7 +435,7 @@ class IgnoreFilter:
         """Add a pattern to the set."""
         self._patterns.append(Pattern(pattern, self._ignorecase))
 
-    def find_matching(self, path: Union[bytes, str]) -> Iterable[Pattern]:
+    def find_matching(self, path: bytes | str) -> Iterable[Pattern]:
         """Yield all matching patterns for path.
 
         Args:
@@ -449,7 +449,7 @@ class IgnoreFilter:
             if pattern.match(path):
                 yield pattern
 
-    def is_ignored(self, path: Union[bytes, str]) -> Optional[bool]:
+    def is_ignored(self, path: bytes | str) -> bool | None:
         """Check whether a path is ignored using Git-compliant logic.
 
         For directories, include a trailing slash.
@@ -484,7 +484,7 @@ class IgnoreFilter:
 
     @classmethod
     def from_path(
-        cls, path: Union[str, os.PathLike[str]], ignorecase: bool = False
+        cls, path: str | os.PathLike[str], ignorecase: bool = False
     ) -> "IgnoreFilter":
         """Create an IgnoreFilter from a file path.
 
@@ -518,7 +518,7 @@ class IgnoreFilterStack:
         """
         self._filters = filters
 
-    def is_ignored(self, path: str) -> Optional[bool]:
+    def is_ignored(self, path: str) -> bool | None:
         """Check whether a path is explicitly included or excluded in ignores.
 
         Args:
@@ -580,7 +580,7 @@ class IgnoreFilterManager:
             global_filters: List of global ignore filters to apply.
             ignorecase: Whether to perform case-insensitive matching.
         """
-        self._path_filters: dict[str, Optional[IgnoreFilter]] = {}
+        self._path_filters: dict[str, IgnoreFilter | None] = {}
         self._top_path = top_path
         self._global_filters = global_filters
         self._ignorecase = ignorecase
@@ -589,7 +589,7 @@ class IgnoreFilterManager:
         """Return string representation of IgnoreFilterManager."""
         return f"{type(self).__name__}({self._top_path}, {self._global_filters!r}, {self._ignorecase!r})"
 
-    def _load_path(self, path: str) -> Optional[IgnoreFilter]:
+    def _load_path(self, path: str) -> IgnoreFilter | None:
         try:
             return self._path_filters[path]
         except KeyError:
@@ -638,7 +638,7 @@ class IgnoreFilterManager:
                 filters.insert(0, (i, ignore_filter))
         return iter(matches)
 
-    def is_ignored(self, path: str) -> Optional[bool]:
+    def is_ignored(self, path: str) -> bool | None:
         """Check whether a path is explicitly included or excluded in ignores.
 
         Args:
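
`is_ignored` returning `bool | None` is a genuine tri-state rather than a sloppy optional: per the docstrings above, True means a pattern ignores the path, False means a negated (`!`) pattern explicitly re-includes it, and None means no rule matched, which matters when several filters are stacked. A toy model of that contract, using simplified suffix matching instead of real gitignore globbing:

```python
def is_ignored(path: str, rules: list[tuple[str, bool]]) -> bool | None:
    """rules is a list of (suffix, negated); the last matching rule wins."""
    result: bool | None = None
    for suffix, negated in rules:
        if path.endswith(suffix):
            result = not negated
    return result

assert is_ignored("build/out.o", [(".o", False)]) is True                 # ignored
assert is_ignored("keep.o", [(".o", False), ("keep.o", True)]) is False   # re-included
assert is_ignored("main.py", [(".o", False)]) is None                     # no opinion
```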

+ 98 - 106
dulwich/index.py

@@ -28,7 +28,15 @@ import stat
 import struct
 import sys
 import types
-from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence, Set
+from collections.abc import (
+    Callable,
+    Generator,
+    Iterable,
+    Iterator,
+    Mapping,
+    Sequence,
+    Set,
+)
 from dataclasses import dataclass
 from enum import Enum
 from typing import (
@@ -36,7 +44,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Callable,
     Optional,
     Union,
 )
@@ -64,10 +71,7 @@ from .objects import (
 from .pack import ObjectContainer, SHA1Reader, SHA1Writer
 
 # Type alias for recursive tree structure used in commit_tree
-if sys.version_info >= (3, 10):
-    TreeDict = dict[bytes, Union["TreeDict", tuple[int, bytes]]]
-else:
-    TreeDict = dict[bytes, Any]
+TreeDict = dict[bytes, Union["TreeDict", tuple[int, bytes]]]
 
 # 2-bit stage (during merge)
 FLAG_STAGEMASK = 0x3000
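
Note that `Union` survives in the `TreeDict` alias even though the commit converts everything else to `|`: the alias is recursive, and a quoted self-reference such as `"TreeDict"` cannot be an operand of the runtime `|` operator (plain strings do not implement it), whereas `Union["TreeDict", ...]` wraps the string in a `ForwardRef`. So the 3.10 version gate could be dropped, but the `Union` spelling could not:

```python
from typing import Union

# Works: Union converts the quoted self-reference into a ForwardRef.
TreeDict = dict[bytes, Union["TreeDict", tuple[int, bytes]]]

nested: TreeDict = {b"src": {b"main.py": (0o100644, b"0" * 40)}}

# Fails at runtime: str | type raises TypeError, so the PEP 604 spelling
# is unavailable for runtime-evaluated recursive aliases.
try:
    BadDict = dict[bytes, "BadDict" | tuple[int, bytes]]  # type: ignore[operator]
except TypeError:
    pass
```
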
@@ -284,8 +288,8 @@ class SerializedIndexEntry:
     """
 
     name: bytes
-    ctime: Union[int, float, tuple[int, int]]
-    mtime: Union[int, float, tuple[int, int]]
+    ctime: int | float | tuple[int, int]
+    mtime: int | float | tuple[int, int]
     dev: int
     ino: int
     mode: int
@@ -495,8 +499,8 @@ class IndexEntry:
     parsed data and convenience methods.
     """
 
-    ctime: Union[int, float, tuple[int, int]]
-    mtime: Union[int, float, tuple[int, int]]
+    ctime: int | float | tuple[int, int]
+    mtime: int | float | tuple[int, int]
     dev: int
     ino: int
     mode: int
@@ -615,15 +619,15 @@ class IndexEntry:
 class ConflictedIndexEntry:
     """Index entry that represents a conflict."""
 
-    ancestor: Optional[IndexEntry]
-    this: Optional[IndexEntry]
-    other: Optional[IndexEntry]
+    ancestor: IndexEntry | None
+    this: IndexEntry | None
+    other: IndexEntry | None
 
     def __init__(
         self,
-        ancestor: Optional[IndexEntry] = None,
-        this: Optional[IndexEntry] = None,
-        other: Optional[IndexEntry] = None,
+        ancestor: IndexEntry | None = None,
+        this: IndexEntry | None = None,
+        other: IndexEntry | None = None,
     ) -> None:
         """Initialize ConflictedIndexEntry.
 
@@ -674,7 +678,7 @@ def read_cache_time(f: BinaryIO) -> tuple[int, int]:
     return struct.unpack(">LL", f.read(8))
 
 
-def write_cache_time(f: IO[bytes], t: Union[int, float, tuple[int, int]]) -> None:
+def write_cache_time(f: IO[bytes], t: int | float | tuple[int, int]) -> None:
     """Write a cache time.
 
     Args:
@@ -856,9 +860,7 @@ def read_index(f: BinaryIO) -> Iterator[SerializedIndexEntry]:
 
 def read_index_dict_with_version(
     f: BinaryIO,
-) -> tuple[
-    dict[bytes, Union[IndexEntry, ConflictedIndexEntry]], int, list[IndexExtension]
-]:
+) -> tuple[dict[bytes, IndexEntry | ConflictedIndexEntry], int, list[IndexExtension]]:
     """Read an index file and return it as a dictionary along with the version.
 
     Returns:
@@ -866,7 +868,7 @@ def read_index_dict_with_version(
     """
     version, num_entries = read_index_header(f)
 
-    ret: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]] = {}
+    ret: dict[bytes, IndexEntry | ConflictedIndexEntry] = {}
     previous_path = b""
     for i in range(num_entries):
         entry = read_cache_entry(f, version, previous_path)
@@ -927,7 +929,7 @@ def read_index_dict_with_version(
 
 def read_index_dict(
     f: BinaryIO,
-) -> dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]:
+) -> dict[bytes, IndexEntry | ConflictedIndexEntry]:
     """Read an index file and return it as a dictionary.
 
        Dict Key is tuple of path and stage number, as
@@ -935,7 +937,7 @@ def read_index_dict(
     Args:
       f: File object to read fromls.
     """
-    ret: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]] = {}
+    ret: dict[bytes, IndexEntry | ConflictedIndexEntry] = {}
     for entry in read_index(f):
         stage = entry.stage()
         if stage == Stage.NORMAL:
@@ -956,8 +958,8 @@ def read_index_dict(
 def write_index(
     f: IO[bytes],
     entries: Sequence[SerializedIndexEntry],
-    version: Optional[int] = None,
-    extensions: Optional[Sequence[IndexExtension]] = None,
+    version: int | None = None,
+    extensions: Sequence[IndexExtension] | None = None,
 ) -> None:
     """Write an index file.
 
@@ -996,9 +998,9 @@ def write_index(
 
 def write_index_dict(
     f: IO[bytes],
-    entries: Mapping[bytes, Union[IndexEntry, ConflictedIndexEntry]],
-    version: Optional[int] = None,
-    extensions: Optional[Sequence[IndexExtension]] = None,
+    entries: Mapping[bytes, IndexEntry | ConflictedIndexEntry],
+    version: int | None = None,
+    extensions: Sequence[IndexExtension] | None = None,
 ) -> None:
     """Write an index file based on the contents of a dictionary.
 
@@ -1052,14 +1054,14 @@ def cleanup_mode(mode: int) -> int:
 class Index:
     """A Git Index file."""
 
-    _byname: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]
+    _byname: dict[bytes, IndexEntry | ConflictedIndexEntry]
 
     def __init__(
         self,
-        filename: Union[bytes, str, os.PathLike[str]],
+        filename: bytes | str | os.PathLike[str],
         read: bool = True,
         skip_hash: bool = False,
-        version: Optional[int] = None,
+        version: int | None = None,
     ) -> None:
         """Create an index object associated with the given filename.
 
@@ -1079,7 +1081,7 @@ class Index:
             self.read()
 
     @property
-    def path(self) -> Union[bytes, str]:
+    def path(self) -> bytes | str:
         """Get the path to the index file.
 
         Returns:
@@ -1147,7 +1149,7 @@ class Index:
         """Number of entries in this index file."""
         return len(self._byname)
 
-    def __getitem__(self, key: bytes) -> Union[IndexEntry, ConflictedIndexEntry]:
+    def __getitem__(self, key: bytes) -> IndexEntry | ConflictedIndexEntry:
         """Retrieve entry by relative path and stage.
 
         Returns: Either a IndexEntry or a ConflictedIndexEntry
@@ -1201,7 +1203,7 @@ class Index:
         self._byname = {}
 
     def __setitem__(
-        self, name: bytes, value: Union[IndexEntry, ConflictedIndexEntry]
+        self, name: bytes, value: IndexEntry | ConflictedIndexEntry
     ) -> None:
         """Set an entry in the index."""
         assert isinstance(name, bytes)
@@ -1213,7 +1215,7 @@ class Index:
 
     def iteritems(
         self,
-    ) -> Iterator[tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]:
+    ) -> Iterator[tuple[bytes, IndexEntry | ConflictedIndexEntry]]:
         """Iterate over (path, entry) pairs in the index.
 
         Returns:
@@ -1221,7 +1223,7 @@ class Index:
         """
         return iter(self._byname.items())
 
-    def items(self) -> Iterator[tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]:
+    def items(self) -> Iterator[tuple[bytes, IndexEntry | ConflictedIndexEntry]]:
         """Get an iterator over (path, entry) pairs.
 
         Returns:
@@ -1229,9 +1231,7 @@ class Index:
         """
         return iter(self._byname.items())
 
-    def update(
-        self, entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]
-    ) -> None:
+    def update(self, entries: dict[bytes, IndexEntry | ConflictedIndexEntry]) -> None:
         """Update the index with multiple entries.
 
         Args:
@@ -1255,9 +1255,9 @@ class Index:
         want_unchanged: bool = False,
     ) -> Generator[
         tuple[
-            tuple[Optional[bytes], Optional[bytes]],
-            tuple[Optional[int], Optional[int]],
-            tuple[Optional[bytes], Optional[bytes]],
+            tuple[bytes | None, bytes | None],
+            tuple[int | None, int | None],
+            tuple[bytes | None, bytes | None],
         ],
         None,
         None,
@@ -1464,7 +1464,7 @@ class Index:
         tree: Tree,
         path: bytes,
         object_store: "BaseObjectStore",
-    ) -> Optional[bytes]:
+    ) -> bytes | None:
         """Find the SHA of a subtree at a given path.
 
         Args:
@@ -1561,13 +1561,13 @@ def changes_from_tree(
     names: Iterable[bytes],
     lookup_entry: Callable[[bytes], tuple[bytes, int]],
     object_store: ObjectContainer,
-    tree: Optional[bytes],
+    tree: bytes | None,
     want_unchanged: bool = False,
 ) -> Iterable[
     tuple[
-        tuple[Optional[bytes], Optional[bytes]],
-        tuple[Optional[int], Optional[int]],
-        tuple[Optional[bytes], Optional[bytes]],
+        tuple[bytes | None, bytes | None],
+        tuple[int | None, int | None],
+        tuple[bytes | None, bytes | None],
     ]
 ]:
     """Find the differences between the contents of a tree and a working copy.
@@ -1610,7 +1610,7 @@ def changes_from_tree(
 def index_entry_from_stat(
     stat_val: os.stat_result,
     hex_sha: bytes,
-    mode: Optional[int] = None,
+    mode: int | None = None,
 ) -> IndexEntry:
     """Create a new index entry from a stat value.
 
@@ -1651,7 +1651,7 @@ if sys.platform == "win32":
         typically due to lack of developer mode or administrator privileges.
         """
 
-        def __init__(self, errno: int, msg: str, filename: Optional[str]) -> None:
+        def __init__(self, errno: int, msg: str, filename: str | None) -> None:
             """Initialize WindowsSymlinkPermissionError."""
             super(PermissionError, self).__init__(
                 errno,
@@ -1660,11 +1660,11 @@ if sys.platform == "win32":
             )
 
     def symlink(
-        src: Union[str, bytes],
-        dst: Union[str, bytes],
+        src: str | bytes,
+        dst: str | bytes,
         target_is_directory: bool = False,
         *,
-        dir_fd: Optional[int] = None,
+        dir_fd: int | None = None,
     ) -> None:
         """Create a symbolic link on Windows with better error handling.
 
@@ -1696,12 +1696,10 @@ def build_file_from_blob(
     *,
     honor_filemode: bool = True,
     tree_encoding: str = "utf-8",
-    symlink_fn: Optional[
-        Callable[
-            [Union[str, bytes, os.PathLike[str]], Union[str, bytes, os.PathLike[str]]],
-            None,
-        ]
-    ] = None,
+    symlink_fn: Callable[
+        [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
+    ]
+    | None = None,
 ) -> os.stat_result:
     """Build a file or symlink on disk based on a Git object.
 
@@ -1880,18 +1878,16 @@ def validate_path(
 
 
 def build_index_from_tree(
-    root_path: Union[str, bytes],
-    index_path: Union[str, bytes],
+    root_path: str | bytes,
+    index_path: str | bytes,
     object_store: ObjectContainer,
     tree_id: bytes,
     honor_filemode: bool = True,
     validate_path_element: Callable[[bytes], bool] = validate_path_element_default,
-    symlink_fn: Optional[
-        Callable[
-            [Union[str, bytes, os.PathLike[str]], Union[str, bytes, os.PathLike[str]]],
-            None,
-        ]
-    ] = None,
+    symlink_fn: Callable[
+        [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
+    ]
+    | None = None,
     blob_normalizer: Optional["FilterBlobNormalizer"] = None,
     tree_encoding: str = "utf-8",
 ) -> None:
@@ -2014,7 +2010,7 @@ def blob_from_path_and_stat(
     return blob_from_path_and_mode(fs_path, st.st_mode, tree_encoding)
 
 
-def read_submodule_head(path: Union[str, bytes]) -> Optional[bytes]:
+def read_submodule_head(path: str | bytes) -> bytes | None:
     """Read the head commit of a submodule.
 
     Args:
@@ -2164,7 +2160,7 @@ def _check_file_matches(
     current_stat: os.stat_result,
     honor_filemode: bool,
     blob_normalizer: Optional["FilterBlobNormalizer"] = None,
-    tree_path: Optional[bytes] = None,
+    tree_path: bytes | None = None,
 ) -> bool:
     """Check if a file on disk matches the expected git object.
 
@@ -2226,8 +2222,8 @@ def _transition_to_submodule(
     repo: "Repo",
     path: bytes,
     full_path: bytes,
-    current_stat: Optional[os.stat_result],
-    entry: Union[IndexEntry, TreeEntry],
+    current_stat: os.stat_result | None,
+    entry: IndexEntry | TreeEntry,
     index: Index,
 ) -> None:
     """Transition any type to submodule."""
@@ -2251,16 +2247,14 @@ def _transition_to_file(
     object_store: "BaseObjectStore",
     path: bytes,
     full_path: bytes,
-    current_stat: Optional[os.stat_result],
-    entry: Union[IndexEntry, TreeEntry],
+    current_stat: os.stat_result | None,
+    entry: IndexEntry | TreeEntry,
     index: Index,
     honor_filemode: bool,
-    symlink_fn: Optional[
-        Callable[
-            [Union[str, bytes, os.PathLike[str]], Union[str, bytes, os.PathLike[str]]],
-            None,
-        ]
-    ],
+    symlink_fn: Callable[
+        [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
+    ]
+    | None,
     blob_normalizer: Optional["FilterBlobNormalizer"],
     tree_encoding: str = "utf-8",
 ) -> None:
@@ -2348,7 +2342,7 @@ def _transition_to_absent(
     repo: "Repo",
     path: bytes,
     full_path: bytes,
-    current_stat: Optional[os.stat_result],
+    current_stat: os.stat_result | None,
     index: Index,
 ) -> None:
     """Remove any type of entry."""
@@ -2511,17 +2505,15 @@ def detect_case_only_renames(
 
 def update_working_tree(
     repo: "Repo",
-    old_tree_id: Optional[bytes],
+    old_tree_id: bytes | None,
     new_tree_id: bytes,
     change_iterator: Iterator["TreeChange"],
     honor_filemode: bool = True,
-    validate_path_element: Optional[Callable[[bytes], bool]] = None,
-    symlink_fn: Optional[
-        Callable[
-            [Union[str, bytes, os.PathLike[str]], Union[str, bytes, os.PathLike[str]]],
-            None,
-        ]
-    ] = None,
+    validate_path_element: Callable[[bytes], bool] | None = None,
+    symlink_fn: Callable[
+        [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
+    ]
+    | None = None,
     force_remove_untracked: bool = False,
     blob_normalizer: Optional["FilterBlobNormalizer"] = None,
     tree_encoding: str = "utf-8",
@@ -2703,7 +2695,7 @@ def update_working_tree(
 
             full_path = _tree_to_fs_path(repo_path, path, tree_encoding)
             try:
-                delete_stat: Optional[os.stat_result] = os.lstat(full_path)
+                delete_stat: os.stat_result | None = os.lstat(full_path)
             except FileNotFoundError:
                 delete_stat = None
             except OSError as e:
@@ -2734,7 +2726,7 @@ def update_working_tree(
 
             full_path = _tree_to_fs_path(repo_path, path, tree_encoding)
             try:
-                modify_stat: Optional[os.stat_result] = os.lstat(full_path)
+                modify_stat: os.stat_result | None = os.lstat(full_path)
             except FileNotFoundError:
                 modify_stat = None
             except OSError as e:
@@ -2765,10 +2757,10 @@ def update_working_tree(
 
 def _check_entry_for_changes(
     tree_path: bytes,
-    entry: Union[IndexEntry, ConflictedIndexEntry],
+    entry: IndexEntry | ConflictedIndexEntry,
     root_path: bytes,
-    filter_blob_callback: Optional[Callable[[bytes, bytes], bytes]] = None,
-) -> Optional[bytes]:
+    filter_blob_callback: Callable[[bytes, bytes], bytes] | None = None,
+) -> bytes | None:
     """Check a single index entry for changes.
 
     Args:
@@ -2809,8 +2801,8 @@ def _check_entry_for_changes(
 
 def get_unstaged_changes(
     index: Index,
-    root_path: Union[str, bytes],
-    filter_blob_callback: Optional[Callable[..., Any]] = None,
+    root_path: str | bytes,
+    filter_blob_callback: Callable[..., Any] | None = None,
     preload_index: bool = False,
 ) -> Generator[bytes, None, None]:
     """Walk through an index and check for differences against working tree.
@@ -2902,7 +2894,7 @@ def _tree_to_fs_path(
     return os.path.join(root_path, sep_corrected_path)
 
 
-def _fs_to_tree_path(fs_path: Union[str, bytes], tree_encoding: str = "utf-8") -> bytes:
+def _fs_to_tree_path(fs_path: str | bytes, tree_encoding: str = "utf-8") -> bytes:
     """Convert a file system path to a git tree path.
 
     Args:
@@ -2933,7 +2925,7 @@ def _fs_to_tree_path(fs_path: Union[str, bytes], tree_encoding: str = "utf-8") -
     return tree_path
 
 
-def index_entry_from_directory(st: os.stat_result, path: bytes) -> Optional[IndexEntry]:
+def index_entry_from_directory(st: os.stat_result, path: bytes) -> IndexEntry | None:
     """Create an index entry for a directory.
 
     This is only used for submodules (directories containing .git).
@@ -2954,8 +2946,8 @@ def index_entry_from_directory(st: os.stat_result, path: bytes) -> Optional[Inde
 
 
 def index_entry_from_path(
-    path: bytes, object_store: Optional[ObjectContainer] = None
-) -> Optional[IndexEntry]:
+    path: bytes, object_store: ObjectContainer | None = None
+) -> IndexEntry | None:
     """Create an index from a filesystem path.
 
     This returns an index value for files, symlinks
@@ -2985,8 +2977,8 @@ def index_entry_from_path(
 def iter_fresh_entries(
     paths: Iterable[bytes],
     root_path: bytes,
-    object_store: Optional[ObjectContainer] = None,
-) -> Iterator[tuple[bytes, Optional[IndexEntry]]]:
+    object_store: ObjectContainer | None = None,
+) -> Iterator[tuple[bytes, IndexEntry | None]]:
     """Iterate over current versions of index entries on disk.
 
     Args:
@@ -3008,8 +3000,8 @@ def iter_fresh_objects(
     paths: Iterable[bytes],
     root_path: bytes,
     include_deleted: bool = False,
-    object_store: Optional[ObjectContainer] = None,
-) -> Iterator[tuple[bytes, Optional[bytes], Optional[int]]]:
+    object_store: ObjectContainer | None = None,
+) -> Iterator[tuple[bytes, bytes | None, int | None]]:
     """Iterate over versions of objects on disk referenced by index.
 
     Args:
@@ -3050,7 +3042,7 @@ class locked_index:
 
     _file: "_GitFile"
 
-    def __init__(self, path: Union[bytes, str]) -> None:
+    def __init__(self, path: bytes | str) -> None:
         """Initialize locked_index."""
         self._path = path
 
@@ -3063,9 +3055,9 @@ class locked_index:
 
     def __exit__(
         self,
-        exc_type: Optional[type],
-        exc_value: Optional[BaseException],
-        traceback: Optional[types.TracebackType],
+        exc_type: type | None,
+        exc_value: BaseException | None,
+        traceback: types.TracebackType | None,
     ) -> None:
         """Exit context manager and unlock index."""
         if exc_type is not None:

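The index.py hunks above are representative of the whole commit: with Python 3.9 support dropped, every Optional[X] becomes X | None and every Union[A, B] becomes A | B (PEP 604, available from Python 3.10), and long unions now wrap with a trailing "| None" line instead of an enclosing Optional[...]. A minimal before/after sketch of the pattern, using a hypothetical function rather than dulwich's own API:

    # Before (Python 3.9 compatible):
    #
    #     from typing import Optional, Union
    #
    #     def resolve(key: bytes, default: Optional[bytes] = None) -> Union[bytes, int]:
    #         ...
    #
    # After (requires Python >= 3.10, no typing imports needed):
    def resolve(key: bytes, default: bytes | None = None) -> bytes | int:
        """Return the default when given, otherwise the key's length."""
        if default is not None:
            return default
        return len(key)

    assert resolve(b"abc") == 3
    assert resolve(b"abc", default=b"x") == b"x"
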
+ 19 - 19
dulwich/lfs.py

@@ -39,7 +39,7 @@ import os
 import tempfile
 from collections.abc import Iterable, Mapping
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, BinaryIO, Optional, Union
+from typing import TYPE_CHECKING, Any, BinaryIO, Optional
 from urllib.parse import urljoin, urlparse
 from urllib.request import Request, urlopen
 
@@ -57,8 +57,8 @@ class LFSAction:
     """LFS action structure."""
 
     href: str
-    header: Optional[dict[str, str]] = None
-    expires_at: Optional[str] = None
+    header: dict[str, str] | None = None
+    expires_at: str | None = None
 
 
 @dataclass
@@ -75,9 +75,9 @@ class LFSBatchObject:
 
     oid: str
     size: int
-    authenticated: Optional[bool] = None
-    actions: Optional[dict[str, LFSAction]] = None
-    error: Optional[LFSErrorInfo] = None
+    authenticated: bool | None = None
+    actions: dict[str, LFSAction] | None = None
+    error: LFSErrorInfo | None = None
 
 
 @dataclass
@@ -86,7 +86,7 @@ class LFSBatchResponse:
 
     transfer: str
     objects: list[LFSBatchObject]
-    hash_algo: Optional[str] = None
+    hash_algo: str | None = None
 
 
 class LFSStore:
@@ -400,7 +400,7 @@ class LFSClient:
         """Get the LFS server URL without trailing slash."""
         return self._base_url.rstrip("/")
 
-    def download(self, oid: str, size: int, ref: Optional[str] = None) -> bytes:
+    def download(self, oid: str, size: int, ref: str | None = None) -> bytes:
         """Download an LFS object.
 
         Args:
@@ -414,7 +414,7 @@ class LFSClient:
         raise NotImplementedError
 
     def upload(
-        self, oid: str, size: int, content: bytes, ref: Optional[str] = None
+        self, oid: str, size: int, content: bytes, ref: str | None = None
     ) -> None:
         """Upload an LFS object.
 
@@ -499,7 +499,7 @@ class HTTPLFSClient(LFSClient):
             config: Optional git config for authentication/proxy settings
         """
         super().__init__(url, config)
-        self._pool_manager: Optional[urllib3.PoolManager] = None
+        self._pool_manager: urllib3.PoolManager | None = None
 
     def _get_pool_manager(self) -> "urllib3.PoolManager":
         """Get urllib3 pool manager with git config applied."""
@@ -513,8 +513,8 @@ class HTTPLFSClient(LFSClient):
         self,
         method: str,
         path: str,
-        data: Optional[bytes] = None,
-        headers: Optional[dict[str, str]] = None,
+        data: bytes | None = None,
+        headers: dict[str, str] | None = None,
     ) -> bytes:
         """Make an HTTP request to the LFS server."""
         url = urljoin(self._base_url, path)
@@ -538,8 +538,8 @@ class HTTPLFSClient(LFSClient):
     def batch(
         self,
         operation: str,
-        objects: list[dict[str, Union[str, int]]],
-        ref: Optional[str] = None,
+        objects: list[dict[str, str | int]],
+        ref: str | None = None,
     ) -> LFSBatchResponse:
         """Perform batch operation to get transfer URLs.
 
@@ -552,7 +552,7 @@ class HTTPLFSClient(LFSClient):
             Batch response from server
         """
         data: dict[
-            str, Union[str, list[str], list[dict[str, Union[str, int]]], dict[str, str]]
+            str, str | list[str] | list[dict[str, str | int]] | dict[str, str]
         ] = {
             "operation": operation,
             "transfers": ["basic"],
@@ -604,7 +604,7 @@ class HTTPLFSClient(LFSClient):
             hash_algo=data.get("hash_algo"),
         )
 
-    def download(self, oid: str, size: int, ref: Optional[str] = None) -> bytes:
+    def download(self, oid: str, size: int, ref: str | None = None) -> bytes:
         """Download an LFS object.
 
         Args:
@@ -652,7 +652,7 @@ class HTTPLFSClient(LFSClient):
         return content
 
     def upload(
-        self, oid: str, size: int, content: bytes, ref: Optional[str] = None
+        self, oid: str, size: int, content: bytes, ref: str | None = None
     ) -> None:
         """Upload an LFS object.
 
@@ -731,7 +731,7 @@ class FileLFSClient(LFSClient):
         path = url2pathname(parsed.path)
         self._local_store = LFSStore(path)
 
-    def download(self, oid: str, size: int, ref: Optional[str] = None) -> bytes:
+    def download(self, oid: str, size: int, ref: str | None = None) -> bytes:
         """Download an LFS object from local filesystem.
 
         Args:
@@ -763,7 +763,7 @@ class FileLFSClient(LFSClient):
         return content
 
     def upload(
-        self, oid: str, size: int, content: bytes, ref: Optional[str] = None
+        self, oid: str, size: int, content: bytes, ref: str | None = None
     ) -> None:
         """Upload an LFS object to local filesystem.
 

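The lfs.py dataclasses show the same rewrite applied to field annotations: X | None = None is valid directly in a dataclass body on 3.10+. A self-contained sketch mirroring the LFSAction fields from the hunk above (the class name here is illustrative):

    from dataclasses import dataclass

    @dataclass
    class LFSActionSketch:
        """Illustrative stand-in for LFSAction with PEP 604 optional fields."""

        href: str
        header: dict[str, str] | None = None
        expires_at: str | None = None

    action = LFSActionSketch(href="https://example.com/objects/abc")
    assert action.header is None and action.expires_at is None
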
+ 1 - 2
dulwich/lfs_server.py

@@ -27,7 +27,6 @@ import tempfile
 import typing
 from collections.abc import Mapping
 from http.server import BaseHTTPRequestHandler, HTTPServer
-from typing import Optional
 
 from .lfs import LFSStore
 
@@ -265,7 +264,7 @@ class LFSServer(HTTPServer):
 def run_lfs_server(
     host: str = "localhost",
     port: int = 0,
-    lfs_dir: Optional[str] = None,
+    lfs_dir: str | None = None,
     log_requests: bool = False,
 ) -> tuple[LFSServer, str]:
     """Run an LFS server.

+ 16 - 16
dulwich/line_ending.py

@@ -138,8 +138,8 @@ Sources:
 """
 
 import logging
-from collections.abc import Mapping
-from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+from collections.abc import Callable, Mapping
+from typing import TYPE_CHECKING, Any, Optional
 
 if TYPE_CHECKING:
     from .config import StackedConfig
@@ -163,8 +163,8 @@ class LineEndingFilter(FilterDriver):
 
     def __init__(
         self,
-        clean_conversion: Optional[Callable[[bytes], bytes]] = None,
-        smudge_conversion: Optional[Callable[[bytes], bytes]] = None,
+        clean_conversion: Callable[[bytes], bytes] | None = None,
+        smudge_conversion: Callable[[bytes], bytes] | None = None,
         binary_detection: bool = True,
         safecrlf: bytes = b"false",
     ):
@@ -237,7 +237,7 @@ class LineEndingFilter(FilterDriver):
             # For text attribute: always normalize to LF on checkin
             # Smudge behavior depends on core.eol and core.autocrlf
             smudge_filter = get_smudge_filter(core_eol, autocrlf)
-            clean_filter: Optional[Callable[[bytes], bytes]] = convert_crlf_to_lf
+            clean_filter: Callable[[bytes], bytes] | None = convert_crlf_to_lf
         else:
             # Normal autocrlf behavior
             smudge_filter = get_smudge_filter(core_eol, autocrlf)
@@ -373,7 +373,7 @@ def check_safecrlf(
 
 def get_smudge_filter(
     core_eol: str, core_autocrlf: bytes
-) -> Optional[Callable[[bytes], bytes]]:
+) -> Callable[[bytes], bytes] | None:
     """Returns the correct smudge filter based on the passed arguments."""
     # Git attributes handling is done by the filter infrastructure
     return get_smudge_filter_autocrlf(core_autocrlf)
@@ -381,7 +381,7 @@ def get_smudge_filter(
 
 def get_clean_filter(
     core_eol: str, core_autocrlf: bytes
-) -> Optional[Callable[[bytes], bytes]]:
+) -> Callable[[bytes], bytes] | None:
     """Returns the correct clean filter based on the passed arguments."""
     # Git attributes handling is done by the filter infrastructure
     return get_clean_filter_autocrlf(core_autocrlf)
@@ -389,7 +389,7 @@ def get_clean_filter(
 
 def get_smudge_filter_autocrlf(
     core_autocrlf: bytes,
-) -> Optional[Callable[[bytes], bytes]]:
+) -> Callable[[bytes], bytes] | None:
     """Returns the correct smudge filter base on autocrlf value.
 
     Args:
@@ -406,7 +406,7 @@ def get_smudge_filter_autocrlf(
 
 def get_clean_filter_autocrlf(
     core_autocrlf: bytes,
-) -> Optional[Callable[[bytes], bytes]]:
+) -> Callable[[bytes], bytes] | None:
     """Returns the correct clean filter base on autocrlf value.
 
     Args:
@@ -425,8 +425,8 @@ def get_clean_filter_autocrlf(
 # Backwards compatibility wrappers
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkout_filter(
-    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: Mapping[str, Any]
-) -> Optional[Callable[[bytes], bytes]]:
+    core_eol: str, core_autocrlf: bool | str, git_attributes: Mapping[str, Any]
+) -> Callable[[bytes], bytes] | None:
     """Deprecated: Use get_smudge_filter instead."""
     # Convert core_autocrlf to bytes for compatibility
     if isinstance(core_autocrlf, bool):
@@ -442,8 +442,8 @@ def get_checkout_filter(
 
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkin_filter(
-    core_eol: str, core_autocrlf: Union[bool, str], git_attributes: Mapping[str, Any]
-) -> Optional[Callable[[bytes], bytes]]:
+    core_eol: str, core_autocrlf: bool | str, git_attributes: Mapping[str, Any]
+) -> Callable[[bytes], bytes] | None:
     """Deprecated: Use get_clean_filter instead."""
     # Convert core_autocrlf to bytes for compatibility
     if isinstance(core_autocrlf, bool):
@@ -460,7 +460,7 @@ def get_checkin_filter(
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkout_filter_autocrlf(
     core_autocrlf: bytes,
-) -> Optional[Callable[[bytes], bytes]]:
+) -> Callable[[bytes], bytes] | None:
     """Deprecated: Use get_smudge_filter_autocrlf instead."""
     return get_smudge_filter_autocrlf(core_autocrlf)
 
@@ -468,7 +468,7 @@ def get_checkout_filter_autocrlf(
 @replace_me(since="0.23.1", remove_in="0.25.0")
 def get_checkin_filter_autocrlf(
     core_autocrlf: bytes,
-) -> Optional[Callable[[bytes], bytes]]:
+) -> Callable[[bytes], bytes] | None:
     """Deprecated: Use get_clean_filter_autocrlf instead."""
     return get_clean_filter_autocrlf(core_autocrlf)
 
@@ -637,7 +637,7 @@ class TreeBlobNormalizer(BlobNormalizer):
         config_stack: "StackedConfig",
         git_attributes: Mapping[str, Any],
         object_store: "BaseObjectStore",
-        tree: Optional[ObjectID] = None,
+        tree: ObjectID | None = None,
         core_eol: str = "native",
         autocrlf: bytes = b"false",
         safecrlf: bytes = b"false",

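line_ending.py additionally moves Callable from typing to collections.abc, where it has been subscriptable since Python 3.9 (typing.Callable is documented as a deprecated alias). Combined with PEP 604, that yields signatures like the filter parameters above; a runnable sketch:

    from collections.abc import Callable

    def apply_conversion(
        data: bytes, conv: Callable[[bytes], bytes] | None = None
    ) -> bytes:
        """Run an optional bytes-to-bytes conversion, else pass data through."""
        return data if conv is None else conv(data)

    def crlf_to_lf(data: bytes) -> bytes:
        """Convert CRLF line endings to LF."""
        return data.replace(b"\r\n", b"\n")

    assert apply_conversion(b"a\r\nb", crlf_to_lf) == b"a\nb"
    assert apply_conversion(b"a\nb") == b"a\nb"
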
+ 1 - 2
dulwich/log_utils.py

@@ -39,7 +39,6 @@ directly.
 import logging
 import os
 import sys
-from typing import Optional, Union
 
 getLogger = logging.getLogger
 
@@ -67,7 +66,7 @@ def _should_trace() -> bool:
     return True
 
 
-def _get_trace_target() -> Optional[Union[str, int]]:
+def _get_trace_target() -> str | int | None:
     """Get the trace target from GIT_TRACE environment variable.
 
     Returns:

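log_utils.py gains the flat three-member union str | int | None, which previously needed the nested Optional[Union[str, int]] spelling. A sketch of a trace-target lookup under that annotation (GIT_TRACE is named in the docstring above; the parsing rules below are illustrative, not dulwich's):

    import os

    def get_trace_target_sketch() -> str | int | None:
        """Illustrative: map GIT_TRACE to a file descriptor, a path, or None."""
        value = os.environ.get("GIT_TRACE")
        if not value:
            return None
        if value.isdigit():
            return int(value)  # numeric values are treated as file descriptors
        return value  # anything else is treated as a file path

    os.environ["GIT_TRACE"] = "2"
    assert get_trace_target_sketch() == 2
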
+ 17 - 17
dulwich/lru_cache.py

@@ -22,8 +22,8 @@
 
 """A simple least-recently-used (LRU) cache."""
 
-from collections.abc import Iterable, Iterator
-from typing import Callable, Generic, Optional, TypeVar, Union, cast
+from collections.abc import Callable, Iterable, Iterator
+from typing import Generic, Optional, TypeVar, cast
 
 _null_key = object()
 
@@ -38,11 +38,11 @@ class _LRUNode(Generic[K, V]):
     __slots__ = ("cleanup", "key", "next_key", "prev", "size", "value")
 
     prev: Optional["_LRUNode[K, V]"]
-    next_key: Union[K, object]
-    size: Optional[int]
+    next_key: K | object
+    size: int | None
 
     def __init__(
-        self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None
+        self, key: K, value: V, cleanup: Callable[[K, V], None] | None = None
     ) -> None:
         self.prev = None
         self.next_key = _null_key
@@ -72,11 +72,11 @@ class _LRUNode(Generic[K, V]):
 class LRUCache(Generic[K, V]):
     """A class which manages a cache of entries, removing unused ones."""
 
-    _least_recently_used: Optional[_LRUNode[K, V]]
-    _most_recently_used: Optional[_LRUNode[K, V]]
+    _least_recently_used: _LRUNode[K, V] | None
+    _most_recently_used: _LRUNode[K, V] | None
 
     def __init__(
-        self, max_cache: int = 100, after_cleanup_count: Optional[int] = None
+        self, max_cache: int = 100, after_cleanup_count: int | None = None
     ) -> None:
         """Initialize LRUCache.
 
@@ -171,7 +171,7 @@ class LRUCache(Generic[K, V]):
             node = node_next
 
     def add(
-        self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None
+        self, key: K, value: V, cleanup: Callable[[K, V], None] | None = None
     ) -> None:
         """Add a new value to the cache.
 
@@ -204,7 +204,7 @@ class LRUCache(Generic[K, V]):
         """Get the number of entries we will cache."""
         return self._max_cache
 
-    def get(self, key: K, default: Optional[V] = None) -> Optional[V]:
+    def get(self, key: K, default: V | None = None) -> V | None:
         """Get value from cache with default if not found.
 
         Args:
@@ -308,12 +308,12 @@ class LRUCache(Generic[K, V]):
         while self._cache:
             self._remove_lru()
 
-    def resize(self, max_cache: int, after_cleanup_count: Optional[int] = None) -> None:
+    def resize(self, max_cache: int, after_cleanup_count: int | None = None) -> None:
         """Change the number of entries that will be cached."""
         self._update_max_cache(max_cache, after_cleanup_count=after_cleanup_count)
 
     def _update_max_cache(
-        self, max_cache: int, after_cleanup_count: Optional[int] = None
+        self, max_cache: int, after_cleanup_count: int | None = None
     ) -> None:
         self._max_cache = max_cache
         if after_cleanup_count is None:
@@ -338,8 +338,8 @@ class LRUSizeCache(LRUCache[K, V]):
     def __init__(
         self,
         max_size: int = 1024 * 1024,
-        after_cleanup_size: Optional[int] = None,
-        compute_size: Optional[Callable[[V], int]] = None,
+        after_cleanup_size: int | None = None,
+        compute_size: Callable[[V], int] | None = None,
     ) -> None:
         """Create a new LRUSizeCache.
 
@@ -364,7 +364,7 @@ class LRUSizeCache(LRUCache[K, V]):
         LRUCache.__init__(self, max_cache=max(int(max_size / 512), 1))
 
     def add(
-        self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None
+        self, key: K, value: V, cleanup: Callable[[K, V], None] | None = None
     ) -> None:
         """Add a new value to the cache.
 
@@ -419,14 +419,14 @@ class LRUSizeCache(LRUCache[K, V]):
         self._value_size -= node.size
         LRUCache._remove_node(self, node)
 
-    def resize(self, max_size: int, after_cleanup_size: Optional[int] = None) -> None:
+    def resize(self, max_size: int, after_cleanup_size: int | None = None) -> None:
         """Change the number of bytes that will be cached."""
         self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
         max_cache = max(int(max_size / 512), 1)
         self._update_max_cache(max_cache)
 
     def _update_max_size(
-        self, max_size: int, after_cleanup_size: Optional[int] = None
+        self, max_size: int, after_cleanup_size: int | None = None
     ) -> None:
         self._max_size = max_size
         if after_cleanup_size is None:

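In lru_cache.py the unions sit inside a user-defined generic. A self-referential annotation still needs quoting, and PEP 604 syntax is fine inside the quoted string because it is only evaluated by type checkers. A reduced sketch of the _LRUNode shape:

    from typing import Generic, TypeVar

    K = TypeVar("K")
    V = TypeVar("V")

    class NodeSketch(Generic[K, V]):
        """Minimal stand-in for _LRUNode using PEP 604 annotations."""

        prev: "NodeSketch[K, V] | None"  # quoted: the class is not yet defined
        size: int | None

        def __init__(self, key: K, value: V) -> None:
            self.prev = None
            self.size = None
            self.key = key
            self.value = value

    node: NodeSketch[str, int] = NodeSketch("a", 1)
    assert node.prev is None and node.size is None
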
+ 18 - 19
dulwich/mailmap.py

@@ -22,10 +22,10 @@
 """Mailmap file reader."""
 
 from collections.abc import Iterator
-from typing import IO, Optional, Union
+from typing import IO
 
 
-def parse_identity(text: bytes) -> tuple[Optional[bytes], Optional[bytes]]:
+def parse_identity(text: bytes) -> tuple[bytes | None, bytes | None]:
     """Parse an identity string into name and email.
 
     Args:
@@ -39,8 +39,8 @@ def parse_identity(text: bytes) -> tuple[Optional[bytes], Optional[bytes]]:
     (name_str, email_str) = text.rsplit(b"<", 1)
     name_str = name_str.strip()
     email_str = email_str.rstrip(b">").strip()
-    name: Optional[bytes] = name_str if name_str else None
-    email: Optional[bytes] = email_str if email_str else None
+    name: bytes | None = name_str if name_str else None
+    email: bytes | None = email_str if email_str else None
     return (name, email)
 
 
@@ -48,8 +48,8 @@ def read_mailmap(
     f: IO[bytes],
 ) -> Iterator[
     tuple[
-        tuple[Optional[bytes], Optional[bytes]],
-        Optional[tuple[Optional[bytes], Optional[bytes]]],
+        tuple[bytes | None, bytes | None],
+        tuple[bytes | None, bytes | None] | None,
     ]
 ]:
     """Read a mailmap.
@@ -80,14 +80,13 @@ class Mailmap:
 
     def __init__(
         self,
-        map: Optional[
-            Iterator[
-                tuple[
-                    tuple[Optional[bytes], Optional[bytes]],
-                    Optional[tuple[Optional[bytes], Optional[bytes]]],
-                ]
+        map: Iterator[
+            tuple[
+                tuple[bytes | None, bytes | None],
+                tuple[bytes | None, bytes | None] | None,
             ]
-        ] = None,
+        ]
+        | None = None,
     ) -> None:
         """Initialize Mailmap.
 
@@ -95,8 +94,8 @@ class Mailmap:
           map: Optional iterator of (canonical_identity, from_identity) tuples
         """
         self._table: dict[
-            tuple[Optional[bytes], Optional[bytes]],
-            tuple[Optional[bytes], Optional[bytes]],
+            tuple[bytes | None, bytes | None],
+            tuple[bytes | None, bytes | None],
         ] = {}
         if map:
             for canonical_identity, from_identity in map:
@@ -104,8 +103,8 @@ class Mailmap:
 
     def add_entry(
         self,
-        canonical_identity: tuple[Optional[bytes], Optional[bytes]],
-        from_identity: Optional[tuple[Optional[bytes], Optional[bytes]]] = None,
+        canonical_identity: tuple[bytes | None, bytes | None],
+        from_identity: tuple[bytes | None, bytes | None] | None = None,
     ) -> None:
         """Add an entry to the mail mail.
 
@@ -128,8 +127,8 @@ class Mailmap:
             self._table[from_name, from_email] = canonical_identity
 
     def lookup(
-        self, identity: Union[bytes, tuple[Optional[bytes], Optional[bytes]]]
-    ) -> Union[bytes, tuple[Optional[bytes], Optional[bytes]]]:
+        self, identity: bytes | tuple[bytes | None, bytes | None]
+    ) -> bytes | tuple[bytes | None, bytes | None]:
         """Lookup an identity in this mailmail."""
         if not isinstance(identity, tuple):
             was_tuple = False

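mailmap.py is the densest case: an optional pair of optional parts becomes tuple[bytes | None, bytes | None] | None, which scans considerably better than the Optional[tuple[Optional[bytes], Optional[bytes]]] it replaces. A sketch producing such a pair, loosely following parse_identity above:

    def parse_identity_sketch(text: bytes) -> tuple[bytes | None, bytes | None]:
        """Split b"Name <email>" into (name, email), with None for empty parts."""
        name_part, email_part = text.rsplit(b"<", 1)
        name = name_part.strip() or None
        email = email_part.rstrip(b">").strip() or None
        return (name, email)

    assert parse_identity_sketch(b"Jane <j@example.com>") == (b"Jane", b"j@example.com")
    assert parse_identity_sketch(b"<j@example.com>") == (None, b"j@example.com")
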
+ 6 - 5
dulwich/maintenance.py

@@ -7,9 +7,10 @@ and maintaining Git repositories.
 import logging
 import os
 from abc import ABC, abstractmethod
+from collections.abc import Callable
 from dataclasses import dataclass, field
 from enum import Enum
-from typing import TYPE_CHECKING, Callable, Optional
+from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     from .repo import BaseRepo, Repo
@@ -44,7 +45,7 @@ class MaintenanceTask(ABC):
         self,
         repo: "BaseRepo",
         auto: bool = False,
-        progress: Optional[Callable[[str], None]] = None,
+        progress: Callable[[str], None] | None = None,
     ) -> None:
         """Initialize maintenance task.
 
@@ -312,7 +313,7 @@ MAINTENANCE_TASKS: dict[str, type[MaintenanceTask]] = {
 
 def get_enabled_tasks(
     repo: "BaseRepo",
-    task_filter: Optional[list[str]] = None,
+    task_filter: list[str] | None = None,
 ) -> list[str]:
     """Get list of enabled maintenance tasks.
 
@@ -341,9 +342,9 @@ def get_enabled_tasks(
 
 def run_maintenance(
     repo: "BaseRepo",
-    tasks: Optional[list[str]] = None,
+    tasks: list[str] | None = None,
     auto: bool = False,
-    progress: Optional[Callable[[str], None]] = None,
+    progress: Callable[[str], None] | None = None,
 ) -> MaintenanceResult:
     """Run maintenance tasks on a repository.
 

+ 5 - 5
dulwich/mbox.py

@@ -29,12 +29,12 @@ import mailbox
 import os
 from collections.abc import Iterable, Iterator
 from pathlib import Path
-from typing import BinaryIO, Union
+from typing import BinaryIO
 
 
 def split_mbox(
-    input_file: Union[str, bytes, BinaryIO],
-    output_dir: Union[str, bytes, Path],
+    input_file: str | bytes | BinaryIO,
+    output_dir: str | bytes | Path,
     start_number: int = 1,
     precision: int = 4,
     keep_cr: bool = False,
@@ -111,8 +111,8 @@ def split_mbox(
 
 
 def split_maildir(
-    maildir_path: Union[str, bytes, Path],
-    output_dir: Union[str, bytes, Path],
+    maildir_path: str | bytes | Path,
+    output_dir: str | bytes | Path,
     start_number: int = 1,
     precision: int = 4,
     keep_cr: bool = False,

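mbox.py's path parameters accept str | bytes | Path; at runtime os.fsdecode collapses all three forms to str, so the wider union costs nothing in the function body. A sketch:

    import os
    from pathlib import Path

    def as_str_path(p: str | bytes | Path) -> str:
        """Normalize any of the three accepted path forms to str."""
        return os.fsdecode(p)

    assert as_str_path("x") == as_str_path(b"x") == as_str_path(Path("x")) == "x"
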
+ 23 - 23
dulwich/merge.py

@@ -1,7 +1,7 @@
 """Git merge implementation."""
 
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     import merge3
@@ -24,7 +24,7 @@ def make_merge3(
     a: Sequence[bytes],
     b: Sequence[bytes],
     is_cherrypick: bool = False,
-    sequence_matcher: Optional[type["SequenceMatcherProtocol[bytes]"]] = None,
+    sequence_matcher: type["SequenceMatcherProtocol[bytes]"] | None = None,
 ) -> "merge3.Merge3[bytes]":
     """Return a Merge3 object, or raise ImportError if merge3 is not installed."""
     if merge3 is None:
@@ -118,12 +118,12 @@ def _merge_lines(
 
 
 def merge_blobs(
-    base_blob: Optional[Blob],
-    ours_blob: Optional[Blob],
-    theirs_blob: Optional[Blob],
-    path: Optional[bytes] = None,
-    gitattributes: Optional[GitAttributes] = None,
-    config: Optional[Config] = None,
+    base_blob: Blob | None,
+    ours_blob: Blob | None,
+    theirs_blob: Blob | None,
+    path: bytes | None = None,
+    gitattributes: GitAttributes | None = None,
+    config: Config | None = None,
 ) -> tuple[bytes, bool]:
     """Perform three-way merge on blob contents.
 
@@ -253,8 +253,8 @@ class Merger:
     def __init__(
         self,
         object_store: BaseObjectStore,
-        gitattributes: Optional[GitAttributes] = None,
-        config: Optional[Config] = None,
+        gitattributes: GitAttributes | None = None,
+        config: Config | None = None,
     ) -> None:
         """Initialize merger.
 
@@ -269,10 +269,10 @@ class Merger:
 
     def merge_blobs(
         self,
-        base_blob: Optional[Blob],
-        ours_blob: Optional[Blob],
-        theirs_blob: Optional[Blob],
-        path: Optional[bytes] = None,
+        base_blob: Blob | None,
+        ours_blob: Blob | None,
+        theirs_blob: Blob | None,
+        path: bytes | None = None,
     ) -> tuple[bytes, bool]:
         """Perform three-way merge on blob contents.
 
@@ -290,7 +290,7 @@ class Merger:
         )
 
     def merge_trees(
-        self, base_tree: Optional[Tree], ours_tree: Tree, theirs_tree: Tree
+        self, base_tree: Tree | None, ours_tree: Tree, theirs_tree: Tree
     ) -> tuple[Tree, list[bytes]]:
         """Perform three-way merge on trees.
 
@@ -303,7 +303,7 @@ class Merger:
             tuple of (merged_tree, list_of_conflicted_paths)
         """
         conflicts: list[bytes] = []
-        merged_entries: dict[bytes, tuple[Optional[int], Optional[bytes]]] = {}
+        merged_entries: dict[bytes, tuple[int | None, bytes | None]] = {}
 
         # Get all paths from all trees
         all_paths = set()
@@ -522,8 +522,8 @@ def recursive_merge(
     merge_bases: list[bytes],
     ours_commit: Commit,
     theirs_commit: Commit,
-    gitattributes: Optional[GitAttributes] = None,
-    config: Optional[Config] = None,
+    gitattributes: GitAttributes | None = None,
+    config: Config | None = None,
 ) -> tuple[Tree, list[bytes]]:
     """Perform a recursive merge with multiple merge bases.
 
@@ -622,11 +622,11 @@ def recursive_merge(
 
 def three_way_merge(
     object_store: BaseObjectStore,
-    base_commit: Optional[Commit],
+    base_commit: Commit | None,
     ours_commit: Commit,
     theirs_commit: Commit,
-    gitattributes: Optional[GitAttributes] = None,
-    config: Optional[Config] = None,
+    gitattributes: GitAttributes | None = None,
+    config: Config | None = None,
 ) -> tuple[Tree, list[bytes]]:
     """Perform a three-way merge between commits.
 
@@ -674,8 +674,8 @@ def octopus_merge(
     merge_bases: list[bytes],
     head_commit: Commit,
     other_commits: list[Commit],
-    gitattributes: Optional[GitAttributes] = None,
-    config: Optional[Config] = None,
+    gitattributes: GitAttributes | None = None,
+    config: Config | None = None,
 ) -> tuple[Tree, list[bytes]]:
     """Perform an octopus merge of multiple commits.
 

+ 9 - 8
dulwich/merge_drivers.py

@@ -23,7 +23,8 @@
 import os
 import subprocess
 import tempfile
-from typing import Callable, Optional, Protocol
+from collections.abc import Callable
+from typing import Protocol
 
 from .config import Config
 
@@ -36,7 +37,7 @@ class MergeDriver(Protocol):
         ancestor: bytes,
         ours: bytes,
         theirs: bytes,
-        path: Optional[str] = None,
+        path: str | None = None,
         marker_size: int = 7,
     ) -> tuple[bytes, bool]:
         """Perform a three-way merge.
@@ -73,7 +74,7 @@ class ProcessMergeDriver:
         ancestor: bytes,
         ours: bytes,
         theirs: bytes,
-        path: Optional[str] = None,
+        path: str | None = None,
         marker_size: int = 7,
     ) -> tuple[bytes, bool]:
         """Perform merge using external process.
@@ -136,7 +137,7 @@ class ProcessMergeDriver:
 class MergeDriverRegistry:
     """Registry for merge drivers."""
 
-    def __init__(self, config: Optional[Config] = None):
+    def __init__(self, config: Config | None = None):
         """Initialize merge driver registry.
 
         Args:
@@ -172,7 +173,7 @@ class MergeDriverRegistry:
         """
         self._factories[name] = factory
 
-    def get_driver(self, name: str) -> Optional[MergeDriver]:
+    def get_driver(self, name: str) -> MergeDriver | None:
         """Get a merge driver by name.
 
         Args:
@@ -200,7 +201,7 @@ class MergeDriverRegistry:
 
         return None
 
-    def _create_from_config(self, name: str) -> Optional[MergeDriver]:
+    def _create_from_config(self, name: str) -> MergeDriver | None:
         """Create a merge driver from git configuration.
 
         Args:
@@ -224,10 +225,10 @@ class MergeDriverRegistry:
 
 
 # Global registry instance
-_merge_driver_registry: Optional[MergeDriverRegistry] = None
+_merge_driver_registry: MergeDriverRegistry | None = None
 
 
-def get_merge_driver_registry(config: Optional[Config] = None) -> MergeDriverRegistry:
+def get_merge_driver_registry(config: Config | None = None) -> MergeDriverRegistry:
     """Get the global merge driver registry.
 
     Args:

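Protocol definitions such as MergeDriver take the new syntax unchanged, since structural typing only inspects the annotations. A minimal sketch of a merge-driver-style protocol and a conforming class (the names are illustrative, and the trivial strategy below is not dulwich's):

    from typing import Protocol

    class DriverSketch(Protocol):
        """Structural type for a three-way merge method."""

        def merge(
            self, ancestor: bytes, ours: bytes, theirs: bytes, path: str | None = None
        ) -> tuple[bytes, bool]:
            """Return (merged content, success flag)."""
            ...

    class TakeOurs:
        """Trivial driver that always resolves to 'ours'."""

        def merge(
            self, ancestor: bytes, ours: bytes, theirs: bytes, path: str | None = None
        ) -> tuple[bytes, bool]:
            return (ours, True)

    driver: DriverSketch = TakeOurs()  # accepted structurally by type checkers
    assert driver.merge(b"base", b"a", b"b") == (b"a", True)
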
+ 17 - 17
dulwich/notes.py

@@ -333,7 +333,7 @@ class NotesTree:
 
         return new_tree
 
-    def _get_note_sha(self, object_sha: bytes) -> Optional[bytes]:
+    def _get_note_sha(self, object_sha: bytes) -> bytes | None:
         """Get the SHA of the note blob for an object.
 
         Args:
@@ -365,7 +365,7 @@ class NotesTree:
         except KeyError:
             return None
 
-    def get_note(self, object_sha: bytes) -> Optional[bytes]:
+    def get_note(self, object_sha: bytes) -> bytes | None:
         """Get the note content for an object.
 
         Args:
@@ -470,7 +470,7 @@ class NotesTree:
         self._fanout_level = self._detect_fanout_level()
         return new_tree
 
-    def remove_note(self, object_sha: bytes) -> Optional[Tree]:
+    def remove_note(self, object_sha: bytes) -> Tree | None:
         """Remove a note for an object.
 
         Args:
@@ -487,7 +487,7 @@ class NotesTree:
         components = path.split(b"/")
 
         # Build new tree structure without the note
-        def remove_from_tree(tree: Tree, components: Sequence[bytes]) -> Optional[Tree]:
+        def remove_from_tree(tree: Tree, components: Sequence[bytes]) -> Tree | None:
             """Remove note entry from tree.
 
             Args:
@@ -608,7 +608,7 @@ class Notes:
 
     def get_notes_ref(
         self,
-        notes_ref: Optional[bytes] = None,
+        notes_ref: bytes | None = None,
         config: Optional["StackedConfig"] = None,
     ) -> bytes:
         """Get the notes reference to use.
@@ -630,9 +630,9 @@ class Notes:
     def get_note(
         self,
         object_sha: bytes,
-        notes_ref: Optional[bytes] = None,
+        notes_ref: bytes | None = None,
         config: Optional["StackedConfig"] = None,
-    ) -> Optional[bytes]:
+    ) -> bytes | None:
         """Get the note for an object.
 
         Args:
@@ -671,10 +671,10 @@ class Notes:
         self,
         object_sha: bytes,
         note_content: bytes,
-        notes_ref: Optional[bytes] = None,
-        author: Optional[bytes] = None,
-        committer: Optional[bytes] = None,
-        message: Optional[bytes] = None,
+        notes_ref: bytes | None = None,
+        author: bytes | None = None,
+        committer: bytes | None = None,
+        message: bytes | None = None,
         config: Optional["StackedConfig"] = None,
     ) -> bytes:
         """Set or update a note for an object.
@@ -755,12 +755,12 @@ class Notes:
     def remove_note(
         self,
         object_sha: bytes,
-        notes_ref: Optional[bytes] = None,
-        author: Optional[bytes] = None,
-        committer: Optional[bytes] = None,
-        message: Optional[bytes] = None,
+        notes_ref: bytes | None = None,
+        author: bytes | None = None,
+        committer: bytes | None = None,
+        message: bytes | None = None,
         config: Optional["StackedConfig"] = None,
-    ) -> Optional[bytes]:
+    ) -> bytes | None:
         """Remove a note for an object.
 
         Args:
@@ -836,7 +836,7 @@ class Notes:
 
     def list_notes(
         self,
-        notes_ref: Optional[bytes] = None,
+        notes_ref: bytes | None = None,
         config: Optional["StackedConfig"] = None,
     ) -> list[tuple[bytes, bytes]]:
         """List all notes in a notes ref.

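Note what the notes.py hunks deliberately leave alone: quoted forward references such as Optional["StackedConfig"] keep the old spelling, presumably because these modules evaluate annotations eagerly (no "from __future__ import annotations"). In that case "StackedConfig" | None would raise TypeError at import time, since str does not implement the | operator for building unions, while Optional[...] accepts the string form. A sketch with a placeholder class:

    from typing import Optional

    class Config:  # placeholder for a type that is only quoted at its use site
        pass

    def get_note_sketch(config: Optional["Config"] = None) -> str:
        """Optional[...] tolerates the quoted forward reference."""
        return "default" if config is None else "configured"

    # The PEP 604 spelling would fail eagerly at import time:
    #     def get_note_sketch(config: "Config" | None = None) -> str: ...
    # TypeError: unsupported operand type(s) for |: 'str' and 'NoneType'

    assert get_note_sketch() == "default"
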
+ 91 - 98
dulwich/object_store.py

@@ -29,17 +29,15 @@ import stat
 import sys
 import time
 import warnings
-from collections.abc import Iterable, Iterator, Mapping, Sequence, Set
+from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence, Set
 from contextlib import suppress
 from io import BytesIO
 from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     BinaryIO,
-    Callable,
     Optional,
     Protocol,
-    Union,
 )
 
 from .errors import NotTreeError
@@ -93,7 +91,7 @@ if TYPE_CHECKING:
 class GraphWalker(Protocol):
     """Protocol for graph walker objects."""
 
-    def __next__(self) -> Optional[bytes]:
+    def __next__(self) -> bytes | None:
         """Return the next object SHA to visit."""
         ...
 
@@ -181,7 +179,7 @@ def get_depth(
     store: ObjectContainer,
     head: bytes,
     get_parents: Callable[..., list[bytes]] = lambda commit: commit.parents,
-    max_depth: Optional[int] = None,
+    max_depth: int | None = None,
 ) -> int:
     """Return the current available depth for the given head.
 
@@ -232,7 +230,7 @@ class BaseObjectStore:
     """Object store interface."""
 
     def determine_wants_all(
-        self, refs: Mapping[Ref, ObjectID], depth: Optional[int] = None
+        self, refs: Mapping[Ref, ObjectID], depth: int | None = None
     ) -> list[ObjectID]:
         """Determine which objects are wanted based on refs."""
 
@@ -295,8 +293,8 @@ class BaseObjectStore:
 
     def add_objects(
         self,
-        objects: Sequence[tuple[ShaFile, Optional[str]]],
-        progress: Optional[Callable[..., None]] = None,
+        objects: Sequence[tuple[ShaFile, str | None]],
+        progress: Callable[..., None] | None = None,
     ) -> Optional["Pack"]:
         """Add a set of objects to this object store.
 
@@ -308,18 +306,18 @@ class BaseObjectStore:
 
     def tree_changes(
         self,
-        source: Optional[bytes],
-        target: Optional[bytes],
+        source: bytes | None,
+        target: bytes | None,
         want_unchanged: bool = False,
         include_trees: bool = False,
         change_type_same: bool = False,
         rename_detector: Optional["RenameDetector"] = None,
-        paths: Optional[Sequence[bytes]] = None,
+        paths: Sequence[bytes] | None = None,
     ) -> Iterator[
         tuple[
-            tuple[Optional[bytes], Optional[bytes]],
-            tuple[Optional[int], Optional[int]],
-            tuple[Optional[bytes], Optional[bytes]],
+            tuple[bytes | None, bytes | None],
+            tuple[int | None, int | None],
+            tuple[bytes | None, bytes | None],
         ]
     ]:
         """Find the differences between the contents of two trees.
@@ -444,11 +442,11 @@ class BaseObjectStore:
         self,
         haves: Iterable[bytes],
         wants: Iterable[bytes],
-        shallow: Optional[Set[bytes]] = None,
-        progress: Optional[Callable[..., None]] = None,
-        get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
+        shallow: Set[bytes] | None = None,
+        progress: Callable[..., None] | None = None,
+        get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
         get_parents: Callable[..., list[bytes]] = lambda commit: commit.parents,
-    ) -> Iterator[tuple[bytes, Optional[PackHint]]]:
+    ) -> Iterator[tuple[bytes, PackHint | None]]:
         """Find the missing objects required for a set of revisions.
 
         Args:
@@ -496,8 +494,8 @@ class BaseObjectStore:
         have: Iterable[bytes],
         want: Iterable[bytes],
         *,
-        shallow: Optional[Set[bytes]] = None,
-        progress: Optional[Callable[..., None]] = None,
+        shallow: Set[bytes] | None = None,
+        progress: Callable[..., None] | None = None,
         ofs_delta: bool = True,
     ) -> tuple[int, Iterator[UnpackedObject]]:
         """Generate pack data objects for a set of wants/haves.
@@ -541,7 +539,7 @@ class BaseObjectStore:
         self,
         head: bytes,
         get_parents: Callable[..., list[bytes]] = lambda commit: commit.parents,
-        max_depth: Optional[int] = None,
+        max_depth: int | None = None,
     ) -> int:
         """Return the current available depth for the given head.
 
@@ -559,7 +557,7 @@ class BaseObjectStore:
         """Close any files opened by this object store."""
         # Default implementation is a NO-OP
 
-    def prune(self, grace_period: Optional[int] = None) -> None:
+    def prune(self, grace_period: int | None = None) -> None:
         """Prune/clean up this object store.
 
         This includes removing orphaned temporary files and other
@@ -591,7 +589,7 @@ class BaseObjectStore:
         return None
 
     def write_commit_graph(
-        self, refs: Optional[Sequence[bytes]] = None, reachable: bool = True
+        self, refs: Sequence[bytes] | None = None, reachable: bool = True
     ) -> None:
         """Write a commit graph file for this object store.
 
@@ -641,7 +639,7 @@ class PackCapableObjectStore(BaseObjectStore, PackedObjectContainer):
         self,
         count: int,
         unpacked_objects: Iterator["UnpackedObject"],
-        progress: Optional[Callable[..., None]] = None,
+        progress: Callable[..., None] | None = None,
     ) -> Optional["Pack"]:
         """Add pack data to this object store.
 
@@ -700,13 +698,13 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
     def __init__(
         self,
         pack_compression_level: int = -1,
-        pack_index_version: Optional[int] = None,
-        pack_delta_window_size: Optional[int] = None,
-        pack_window_memory: Optional[int] = None,
-        pack_delta_cache_size: Optional[int] = None,
-        pack_depth: Optional[int] = None,
-        pack_threads: Optional[int] = None,
-        pack_big_file_threshold: Optional[int] = None,
+        pack_index_version: int | None = None,
+        pack_delta_window_size: int | None = None,
+        pack_window_memory: int | None = None,
+        pack_delta_cache_size: int | None = None,
+        pack_depth: int | None = None,
+        pack_threads: int | None = None,
+        pack_big_file_threshold: int | None = None,
     ) -> None:
         """Initialize a PackBasedObjectStore.
 
@@ -738,7 +736,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         self,
         count: int,
         unpacked_objects: Iterator[UnpackedObject],
-        progress: Optional[Callable[..., None]] = None,
+        progress: Callable[..., None] | None = None,
     ) -> Optional["Pack"]:
         """Add pack data to this object store.
 
@@ -808,8 +806,8 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         have: Iterable[bytes],
         want: Iterable[bytes],
         *,
-        shallow: Optional[Set[bytes]] = None,
-        progress: Optional[Callable[..., None]] = None,
+        shallow: Set[bytes] | None = None,
+        progress: Callable[..., None] | None = None,
         ofs_delta: bool = True,
     ) -> tuple[int, Iterator[UnpackedObject]]:
         """Generate pack data objects for a set of wants/haves.
@@ -882,7 +880,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         """Iterate over the SHAs of all loose objects."""
         raise NotImplementedError(self._iter_loose_objects)
 
-    def _get_loose_object(self, sha: bytes) -> Optional[ShaFile]:
+    def _get_loose_object(self, sha: bytes) -> ShaFile | None:
         raise NotImplementedError(self._get_loose_object)
 
     def delete_loose_object(self, sha: bytes) -> None:
@@ -896,9 +894,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
     def _remove_pack(self, pack: "Pack") -> None:
         raise NotImplementedError(self._remove_pack)
 
-    def pack_loose_objects(
-        self, progress: Optional[Callable[[str], None]] = None
-    ) -> int:
+    def pack_loose_objects(self, progress: Callable[[str], None] | None = None) -> int:
         """Pack loose objects.
 
         Args:
@@ -918,8 +914,8 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
 
     def repack(
         self,
-        exclude: Optional[Set[bytes]] = None,
-        progress: Optional[Callable[[str], None]] = None,
+        exclude: Set[bytes] | None = None,
+        progress: Callable[[str], None] | None = None,
     ) -> int:
         """Repack the packs in this repository.
 
@@ -1119,7 +1115,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
                 yield o
                 todo.remove(o.id)
         for oid in todo:
-            loose_obj: Optional[ShaFile] = self._get_loose_object(oid)
+            loose_obj: ShaFile | None = self._get_loose_object(oid)
             if loose_obj is not None:
                 yield loose_obj
             elif not allow_missing:
@@ -1168,8 +1164,8 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
 
     def add_objects(
         self,
-        objects: Sequence[tuple[ShaFile, Optional[str]]],
-        progress: Optional[Callable[[str], None]] = None,
+        objects: Sequence[tuple[ShaFile, str | None]],
+        progress: Callable[[str], None] | None = None,
     ) -> Optional["Pack"]:
         """Add a set of objects to this object store.
 
@@ -1187,23 +1183,23 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
 class DiskObjectStore(PackBasedObjectStore):
     """Git-style object store that exists on disk."""
 
-    path: Union[str, os.PathLike[str]]
-    pack_dir: Union[str, os.PathLike[str]]
-    _alternates: Optional[list["BaseObjectStore"]]
+    path: str | os.PathLike[str]
+    pack_dir: str | os.PathLike[str]
+    _alternates: list["BaseObjectStore"] | None
     _commit_graph: Optional["CommitGraph"]
 
     def __init__(
         self,
-        path: Union[str, os.PathLike[str]],
+        path: str | os.PathLike[str],
         loose_compression_level: int = -1,
         pack_compression_level: int = -1,
-        pack_index_version: Optional[int] = None,
-        pack_delta_window_size: Optional[int] = None,
-        pack_window_memory: Optional[int] = None,
-        pack_delta_cache_size: Optional[int] = None,
-        pack_depth: Optional[int] = None,
-        pack_threads: Optional[int] = None,
-        pack_big_file_threshold: Optional[int] = None,
+        pack_index_version: int | None = None,
+        pack_delta_window_size: int | None = None,
+        pack_window_memory: int | None = None,
+        pack_delta_cache_size: int | None = None,
+        pack_depth: int | None = None,
+        pack_threads: int | None = None,
+        pack_big_file_threshold: int | None = None,
         fsync_object_files: bool = False,
     ) -> None:
         """Open an object store.
@@ -1253,7 +1249,7 @@ class DiskObjectStore(PackBasedObjectStore):
 
     @classmethod
     def from_config(
-        cls, path: Union[str, os.PathLike[str]], config: "Config"
+        cls, path: str | os.PathLike[str], config: "Config"
     ) -> "DiskObjectStore":
         """Create a DiskObjectStore from a configuration object.
 
@@ -1372,7 +1368,7 @@ class DiskObjectStore(PackBasedObjectStore):
                 else:
                     yield os.fsdecode(os.path.join(os.fsencode(self.path), line))
 
-    def add_alternate_path(self, path: Union[str, os.PathLike[str]]) -> None:
+    def add_alternate_path(self, path: str | os.PathLike[str]) -> None:
         """Add an alternate path to this object store."""
         try:
             os.mkdir(os.path.join(self.path, INFODIR))
@@ -1470,7 +1466,7 @@ class DiskObjectStore(PackBasedObjectStore):
 
         return count
 
-    def _get_loose_object(self, sha: bytes) -> Optional[ShaFile]:
+    def _get_loose_object(self, sha: bytes) -> ShaFile | None:
         path = self._get_shafile_path(sha)
         try:
             return ShaFile.from_path(path)
@@ -1534,7 +1530,7 @@ class DiskObjectStore(PackBasedObjectStore):
             os.remove(pack.index.path)
 
     def _get_pack_basepath(
-        self, entries: Iterable[tuple[bytes, int, Union[int, None]]]
+        self, entries: Iterable[tuple[bytes, int, int | None]]
     ) -> str:
         suffix_bytes = iter_sha1(entry[0] for entry in entries)
         # TODO: Handle self.pack_dir being bytes
@@ -1547,7 +1543,7 @@ class DiskObjectStore(PackBasedObjectStore):
         path: str,
         num_objects: int,
         indexer: PackIndexer,
-        progress: Optional[Callable[..., None]] = None,
+        progress: Callable[..., None] | None = None,
     ) -> Pack:
         """Move a specific file containing a pack into the pack directory.
 
@@ -1628,8 +1624,8 @@ class DiskObjectStore(PackBasedObjectStore):
     def add_thin_pack(
         self,
         read_all: Callable[[int], bytes],
-        read_some: Optional[Callable[[int], bytes]],
-        progress: Optional[Callable[..., None]] = None,
+        read_some: Callable[[int], bytes] | None,
+        progress: Callable[..., None] | None = None,
     ) -> "Pack":
         """Add a new thin pack to this object store.
 
@@ -1712,7 +1708,7 @@ class DiskObjectStore(PackBasedObjectStore):
             )
 
     @classmethod
-    def init(cls, path: Union[str, os.PathLike[str]]) -> "DiskObjectStore":
+    def init(cls, path: str | os.PathLike[str]) -> "DiskObjectStore":
         """Initialize a new disk object store.
 
         Creates the necessary directory structure for a Git object store.
@@ -1792,7 +1788,7 @@ class DiskObjectStore(PackBasedObjectStore):
         return self._commit_graph
 
     def write_commit_graph(
-        self, refs: Optional[Iterable[bytes]] = None, reachable: bool = True
+        self, refs: Iterable[bytes] | None = None, reachable: bool = True
     ) -> None:
         """Write a commit graph file for this object store.
 
@@ -1863,7 +1859,7 @@ class DiskObjectStore(PackBasedObjectStore):
             # Clear cached commit graph so it gets reloaded
             self._commit_graph = None
 
-    def prune(self, grace_period: Optional[int] = None) -> None:
+    def prune(self, grace_period: int | None = None) -> None:
         """Prune/clean up this object store.
 
         This removes temporary files that were left behind by interrupted
@@ -1985,8 +1981,8 @@ class MemoryObjectStore(PackCapableObjectStore):
 
     def add_objects(
         self,
-        objects: Iterable[tuple[ShaFile, Optional[str]]],
-        progress: Optional[Callable[[str], None]] = None,
+        objects: Iterable[tuple[ShaFile, str | None]],
+        progress: Callable[[str], None] | None = None,
     ) -> None:
         """Add a set of objects to this object store.
 
@@ -2032,7 +2028,7 @@ class MemoryObjectStore(PackCapableObjectStore):
         self,
         count: int,
         unpacked_objects: Iterator[UnpackedObject],
-        progress: Optional[Callable[[str], None]] = None,
+        progress: Callable[[str], None] | None = None,
     ) -> None:
         """Add pack data to this object store.
 
@@ -2065,7 +2061,7 @@ class MemoryObjectStore(PackCapableObjectStore):
         self,
         read_all: Callable[[], bytes],
         read_some: Callable[[int], bytes],
-        progress: Optional[Callable[[str], None]] = None,
+        progress: Callable[[str], None] | None = None,
     ) -> None:
         """Add a new thin pack to this object store.
 
@@ -2204,9 +2200,9 @@ class MissingObjectFinder:
         haves: Iterable[bytes],
         wants: Iterable[bytes],
         *,
-        shallow: Optional[Set[bytes]] = None,
-        progress: Optional[Callable[[bytes], None]] = None,
-        get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
+        shallow: Set[bytes] | None = None,
+        progress: Callable[[bytes], None] | None = None,
+        get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
         get_parents: Callable[[Commit], list[bytes]] = lambda commit: commit.parents,
     ) -> None:
         """Initialize a MissingObjectFinder.
@@ -2270,9 +2266,9 @@ class MissingObjectFinder:
 
         # in fact, what we 'want' is commits, tags, and others
         # we've found missing
-        self.objects_to_send: set[
-            tuple[ObjectID, Optional[bytes], Optional[int], bool]
-        ] = {(w, None, Commit.type_num, False) for w in missing_commits}
+        self.objects_to_send: set[tuple[ObjectID, bytes | None, int | None, bool]] = {
+            (w, None, Commit.type_num, False) for w in missing_commits
+        }
         missing_tags = want_tags.difference(have_tags)
         self.objects_to_send.update(
             {(w, None, Tag.type_num, False) for w in missing_tags}
@@ -2295,7 +2291,7 @@ class MissingObjectFinder:
         return self.remote_has
 
     def add_todo(
-        self, entries: Iterable[tuple[ObjectID, Optional[bytes], Optional[int], bool]]
+        self, entries: Iterable[tuple[ObjectID, bytes | None, int | None, bool]]
     ) -> None:
         """Add objects to the todo list.
 
@@ -2304,7 +2300,7 @@ class MissingObjectFinder:
         """
         self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done])
 
-    def __next__(self) -> tuple[bytes, Optional[PackHint]]:
+    def __next__(self) -> tuple[bytes, PackHint | None]:
         """Get the next object to send.
 
         Returns:
@@ -2355,7 +2351,7 @@ class MissingObjectFinder:
             pack_hint = (type_num, name)
         return (sha, pack_hint)
 
-    def __iter__(self) -> Iterator[tuple[bytes, Optional[PackHint]]]:
+    def __iter__(self) -> Iterator[tuple[bytes, PackHint | None]]:
         """Return iterator over objects to send.
 
         Returns:
@@ -2379,10 +2375,9 @@ class ObjectStoreGraphWalker:
         self,
         local_heads: Iterable[ObjectID],
         get_parents: Callable[[ObjectID], list[ObjectID]],
-        shallow: Optional[set[ObjectID]] = None,
-        update_shallow: Optional[
-            Callable[[Optional[set[ObjectID]], Optional[set[ObjectID]]], None]
-        ] = None,
+        shallow: set[ObjectID] | None = None,
+        update_shallow: Callable[[set[ObjectID] | None, set[ObjectID] | None], None]
+        | None = None,
     ) -> None:
         """Create a new instance.
 
@@ -2394,7 +2389,7 @@ class ObjectStoreGraphWalker:
         """
         self.heads = set(local_heads)
         self.get_parents = get_parents
-        self.parents: dict[ObjectID, Optional[list[ObjectID]]] = {}
+        self.parents: dict[ObjectID, list[ObjectID] | None] = {}
         if shallow is None:
             shallow = set()
         self.shallow = shallow
@@ -2429,7 +2424,7 @@ class ObjectStoreGraphWalker:
 
             ancestors = new_ancestors
 
-    def next(self) -> Optional[ObjectID]:
+    def next(self) -> ObjectID | None:
         """Iterate over ancestors of heads in the target."""
         if self.heads:
             ret = self.heads.pop()
@@ -2447,8 +2442,8 @@ class ObjectStoreGraphWalker:
 
 def commit_tree_changes(
     object_store: BaseObjectStore,
-    tree: Union[ObjectID, Tree],
-    changes: Sequence[tuple[bytes, Optional[int], Optional[bytes]]],
+    tree: ObjectID | Tree,
+    changes: Sequence[tuple[bytes, int | None, bytes | None]],
 ) -> ObjectID:
     """Commit a specified set of changes to a tree structure.
 
@@ -2479,7 +2474,7 @@ def commit_tree_changes(
         sha_obj = object_store[tree]
         assert isinstance(sha_obj, Tree)
         tree_obj = sha_obj
-    nested_changes: dict[bytes, list[tuple[bytes, Optional[int], Optional[bytes]]]] = {}
+    nested_changes: dict[bytes, list[tuple[bytes, int | None, bytes | None]]] = {}
     for path, new_mode, new_sha in changes:
         try:
             (dirname, subpath) = path.split(b"/", 1)
@@ -2493,7 +2488,7 @@ def commit_tree_changes(
             nested_changes.setdefault(dirname, []).append((subpath, new_mode, new_sha))
     for name, subchanges in nested_changes.items():
         try:
-            orig_subtree_id: Union[bytes, Tree] = tree_obj[name][1]
+            orig_subtree_id: bytes | Tree = tree_obj[name][1]
         except KeyError:
             # For new directories, pass an empty Tree object
             orig_subtree_id = Tree()
@@ -2514,7 +2509,7 @@ class OverlayObjectStore(BaseObjectStore):
     def __init__(
         self,
         bases: list[BaseObjectStore],
-        add_store: Optional[BaseObjectStore] = None,
+        add_store: BaseObjectStore | None = None,
     ) -> None:
         """Initialize an OverlayObjectStore.
 
@@ -2540,9 +2535,9 @@ class OverlayObjectStore(BaseObjectStore):
 
     def add_objects(
         self,
-        objects: Sequence[tuple[ShaFile, Optional[str]]],
-        progress: Optional[Callable[[str], None]] = None,
-    ) -> Optional[Pack]:
+        objects: Sequence[tuple[ShaFile, str | None]],
+        progress: Callable[[str], None] | None = None,
+    ) -> Pack | None:
         """Add multiple objects to the store.
 
         Args:
@@ -2725,9 +2720,7 @@ class BucketBasedObjectStore(PackBasedObjectStore):
         """
         # Doesn't exist..
 
-    def pack_loose_objects(
-        self, progress: Optional[Callable[[str], None]] = None
-    ) -> int:
+    def pack_loose_objects(self, progress: Callable[[str], None] | None = None) -> int:
         """Pack loose objects. Returns number of objects packed.
 
         BucketBasedObjectStore doesn't support loose objects, so this is a no-op.
@@ -2780,7 +2773,7 @@ class BucketBasedObjectStore(PackBasedObjectStore):
             max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-"
         )
 
-        def commit() -> Optional[Pack]:
+        def commit() -> Pack | None:
             if pf.tell() == 0:
                 pf.close()
                 return None
@@ -2870,7 +2863,7 @@ def _collect_ancestors(
 
 
 def iter_tree_contents(
-    store: ObjectContainer, tree_id: Optional[ObjectID], *, include_trees: bool = False
+    store: ObjectContainer, tree_id: ObjectID | None, *, include_trees: bool = False
 ) -> Iterator[TreeEntry]:
     """Iterate the contents of a tree and all subtrees.
 
@@ -2906,9 +2899,9 @@ def iter_tree_contents(
 
 def iter_commit_contents(
     store: ObjectContainer,
-    commit: Union[Commit, bytes],
+    commit: Commit | bytes,
     *,
-    include: Optional[Sequence[Union[str, bytes, Path]]] = None,
+    include: Sequence[str | bytes | Path] | None = None,
 ) -> Iterator[TreeEntry]:
     """Iterate the contents of the repository at the specified commit.
 

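Every object_store.py hunk above applies the same mechanical rewrite: Optional[X] becomes X | None and Union[A, B] becomes A | B, the PEP 604 syntax that is only safe to require once Python 3.10 is the minimum supported version. A minimal sketch of the pattern, using a hypothetical helper rather than real dulwich API:

    # Illustrative only: count_objects is a made-up name, not dulwich API.
    from collections.abc import Callable, Iterable

    def count_objects(
        shas: Iterable[bytes],
        progress: Callable[[str], None] | None = None,  # was Optional[Callable[[str], None]]
    ) -> int:
        """Count SHAs, invoking the progress callback if one was given."""
        n = 0
        for _ in shas:
            n += 1
            if progress is not None:
                progress(f"{n} objects\r")
        return n

    count_objects([b"a" * 40, b"b" * 40], progress=print)

The behaviour is unchanged; only the spelling of the annotations differs, which is why these hunks touch signatures and attribute declarations but no logic.
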
+ 74 - 72
dulwich/objects.py

@@ -35,7 +35,6 @@ from typing import (
     IO,
     TYPE_CHECKING,
     NamedTuple,
-    Optional,
     TypeVar,
     Union,
 )
@@ -45,10 +44,7 @@ if sys.version_info >= (3, 11):
 else:
     from typing_extensions import Self
 
-if sys.version_info >= (3, 10):
-    from typing import TypeGuard
-else:
-    from typing_extensions import TypeGuard
+from typing import TypeGuard
 
 from . import replace_me
 from .errors import (
@@ -129,7 +125,7 @@ def sha_to_hex(sha: ObjectID) -> bytes:
     return hexsha
 
 
-def hex_to_sha(hex: Union[bytes, str]) -> bytes:
+def hex_to_sha(hex: bytes | str) -> bytes:
     """Takes a hex sha and returns a binary sha."""
     assert len(hex) == 40, f"Incorrect length of hexsha: {hex!r}"
     try:
@@ -140,7 +136,7 @@ def hex_to_sha(hex: Union[bytes, str]) -> bytes:
         raise ValueError(exc.args[0]) from exc
 
 
-def valid_hexsha(hex: Union[bytes, str]) -> bool:
+def valid_hexsha(hex: bytes | str) -> bool:
     """Check if a string is a valid hex SHA.
 
     Args:
@@ -162,7 +158,7 @@ def valid_hexsha(hex: Union[bytes, str]) -> bool:
 PathT = TypeVar("PathT", str, bytes)
 
 
-def hex_to_filename(path: PathT, hex: Union[str, bytes]) -> PathT:
+def hex_to_filename(path: PathT, hex: str | bytes) -> PathT:
     """Takes a hex sha and returns its filename relative to the given path."""
     # os.path.join accepts bytes or unicode, but all args must be of the same
     # type. Make sure that hex which is expected to be bytes, is the same type
@@ -190,7 +186,7 @@ def hex_to_filename(path: PathT, hex: Union[str, bytes]) -> PathT:
         return result_b
 
 
-def filename_to_hex(filename: Union[str, bytes]) -> str:
+def filename_to_hex(filename: str | bytes) -> str:
     """Takes an object filename and returns its corresponding hex sha."""
     # grab the last (up to) two path components
     errmsg = f"Invalid object filename: {filename!r}"
@@ -223,7 +219,7 @@ def object_header(num_type: int, length: int) -> bytes:
     return cls.type_name + b" " + str(length).encode("ascii") + b"\0"
 
 
-def serializable_property(name: str, docstring: Optional[str] = None) -> property:
+def serializable_property(name: str, docstring: str | None = None) -> property:
     """A property that helps tracking whether serialization is necessary."""
 
     def set(obj: "ShaFile", value: object) -> None:
@@ -250,7 +246,7 @@ def serializable_property(name: str, docstring: Optional[str] = None) -> propert
     return property(get, set, doc=docstring)
 
 
-def object_class(type: Union[bytes, int]) -> Optional[type["ShaFile"]]:
+def object_class(type: bytes | int) -> type["ShaFile"] | None:
     """Get the object class corresponding to the given type.
 
     Args:
@@ -261,7 +257,7 @@ def object_class(type: Union[bytes, int]) -> Optional[type["ShaFile"]]:
     return _TYPE_MAP.get(type, None)
 
 
-def check_hexsha(hex: Union[str, bytes], error_msg: str) -> None:
+def check_hexsha(hex: str | bytes, error_msg: str) -> None:
     """Check if a string is a valid hex sha string.
 
     Args:
@@ -274,7 +270,7 @@ def check_hexsha(hex: Union[str, bytes], error_msg: str) -> None:
         raise ObjectFormatException(f"{error_msg} {hex!r}")
 
 
-def check_identity(identity: Optional[bytes], error_msg: str) -> None:
+def check_identity(identity: bytes | None, error_msg: str) -> None:
     """Check if the specified identity is valid.
 
     This will raise an exception if the identity is not valid.
@@ -300,7 +296,7 @@ def check_identity(identity: Optional[bytes], error_msg: str) -> None:
         raise ObjectFormatException(error_msg)
 
 
-def _path_to_bytes(path: Union[str, bytes]) -> bytes:
+def _path_to_bytes(path: str | bytes) -> bytes:
     """Convert a path to bytes for use in error messages."""
     if isinstance(path, str):
         return path.encode("utf-8", "surrogateescape")
@@ -331,7 +327,7 @@ class FixedSha:
 
     __slots__ = ("_hexsha", "_sha")
 
-    def __init__(self, hexsha: Union[str, bytes]) -> None:
+    def __init__(self, hexsha: str | bytes) -> None:
         """Initialize FixedSha with a fixed SHA value.
 
         Args:
@@ -398,7 +394,7 @@ class ShaFile:
     _needs_serialization: bool
     type_name: bytes
     type_num: int
-    _chunked_text: Optional[list[bytes]]
+    _chunked_text: list[bytes] | None
     _sha: Union[FixedSha, None, "HASH"]
 
     @staticmethod
@@ -486,15 +482,13 @@ class ShaFile:
         """Return a string representing this object, fit for display."""
         return self.as_raw_string().decode("utf-8", "replace")
 
-    def set_raw_string(self, text: bytes, sha: Optional[ObjectID] = None) -> None:
+    def set_raw_string(self, text: bytes, sha: ObjectID | None = None) -> None:
         """Set the contents of this object from a serialized string."""
         if not isinstance(text, bytes):
             raise TypeError(f"Expected bytes for text, got {text!r}")
         self.set_raw_chunks([text], sha)
 
-    def set_raw_chunks(
-        self, chunks: list[bytes], sha: Optional[ObjectID] = None
-    ) -> None:
+    def set_raw_chunks(self, chunks: list[bytes], sha: ObjectID | None = None) -> None:
         """Set the contents of this object from a list of chunks."""
         self._chunked_text = chunks
         self._deserialize(chunks)
@@ -561,7 +555,7 @@ class ShaFile:
         raise NotImplementedError(self._serialize)
 
     @classmethod
-    def from_path(cls, path: Union[str, bytes]) -> "ShaFile":
+    def from_path(cls, path: str | bytes) -> "ShaFile":
         """Open a SHA file from disk."""
         with GitFile(path, "rb") as f:
             return cls.from_file(f)
@@ -578,7 +572,7 @@ class ShaFile:
 
     @staticmethod
     def from_raw_string(
-        type_num: int, string: bytes, sha: Optional[ObjectID] = None
+        type_num: int, string: bytes, sha: ObjectID | None = None
     ) -> "ShaFile":
         """Creates an object of the indicated type from the raw string given.
 
@@ -596,7 +590,7 @@ class ShaFile:
 
     @staticmethod
     def from_raw_chunks(
-        type_num: int, chunks: list[bytes], sha: Optional[ObjectID] = None
+        type_num: int, chunks: list[bytes], sha: ObjectID | None = None
     ) -> "ShaFile":
         """Creates an object of the indicated type from the raw chunks given.
 
@@ -754,7 +748,7 @@ class Blob(ShaFile):
     )
 
     @classmethod
-    def from_path(cls, path: Union[str, bytes]) -> "Blob":
+    def from_path(cls, path: str | bytes) -> "Blob":
         """Read a blob from a file on disk.
 
         Args:
@@ -810,7 +804,7 @@ class Blob(ShaFile):
 
 def _parse_message(
     chunks: Iterable[bytes],
-) -> Iterator[Union[tuple[None, None], tuple[Optional[bytes], bytes]]]:
+) -> Iterator[tuple[None, None] | tuple[bytes | None, bytes]]:
     """Parse a message with a list of fields and a body.
 
     Args:
@@ -865,7 +859,7 @@ def _parse_message(
 
 
 def _format_message(
-    headers: Sequence[tuple[bytes, bytes]], body: Optional[bytes]
+    headers: Sequence[tuple[bytes, bytes]], body: bytes | None
 ) -> Iterator[bytes]:
     for field, value in headers:
         lines = value.split(b"\n")
@@ -895,15 +889,15 @@ class Tag(ShaFile):
         "_tagger",
     )
 
-    _message: Optional[bytes]
-    _name: Optional[bytes]
-    _object_class: Optional[type["ShaFile"]]
-    _object_sha: Optional[bytes]
-    _signature: Optional[bytes]
-    _tag_time: Optional[int]
-    _tag_timezone: Optional[int]
-    _tag_timezone_neg_utc: Optional[bool]
-    _tagger: Optional[bytes]
+    _message: bytes | None
+    _name: bytes | None
+    _object_class: type["ShaFile"] | None
+    _object_sha: bytes | None
+    _signature: bytes | None
+    _tag_time: int | None
+    _tag_timezone: int | None
+    _tag_timezone_neg_utc: bool | None
+    _tagger: bytes | None
 
     def __init__(self) -> None:
         """Initialize a new Tag object."""
@@ -912,10 +906,10 @@ class Tag(ShaFile):
         self._tag_time = None
         self._tag_timezone = None
         self._tag_timezone_neg_utc = False
-        self._signature: Optional[bytes] = None
+        self._signature: bytes | None = None
 
     @classmethod
-    def from_path(cls, filename: Union[str, bytes]) -> "Tag":
+    def from_path(cls, filename: str | bytes) -> "Tag":
         """Read a tag from a file on disk.
 
         Args:
@@ -1086,7 +1080,7 @@ class Tag(ShaFile):
 
     signature = serializable_property("signature", "Optional detached GPG signature")
 
-    def sign(self, keyid: Optional[str] = None) -> None:
+    def sign(self, keyid: str | None = None) -> None:
         """Sign this tag with a GPG key.
 
         Args:
@@ -1118,7 +1112,7 @@ class Tag(ShaFile):
             ret = ret[: -len(self._signature)]
         return ret
 
-    def extract_signature(self) -> tuple[bytes, Optional[bytes], Optional[bytes]]:
+    def extract_signature(self) -> tuple[bytes, bytes | None, bytes | None]:
         """Extract the payload, signature, and signature type from this tag.
 
         Returns:
@@ -1146,7 +1140,7 @@ class Tag(ShaFile):
 
         return payload, self._signature, sig_type
 
-    def verify(self, keyids: Optional[Iterable[str]] = None) -> None:
+    def verify(self, keyids: Iterable[str] | None = None) -> None:
         """Verify GPG signature for this tag (if it is signed).
 
         Args:
@@ -1333,7 +1327,7 @@ class Tree(ShaFile):
         self._entries: dict[bytes, tuple[int, bytes]] = {}
 
     @classmethod
-    def from_path(cls, filename: Union[str, bytes]) -> "Tree":
+    def from_path(cls, filename: str | bytes) -> "Tree":
         """Read a tree from a file on disk.
 
         Args:
@@ -1495,7 +1489,7 @@ class Tree(ShaFile):
 
         parts = path.split(b"/")
         sha = self.id
-        mode: Optional[int] = None
+        mode: int | None = None
         for i, p in enumerate(parts):
             if not p:
                 continue
@@ -1559,7 +1553,7 @@ def format_timezone(offset: int, unnecessary_negative_timezone: bool = False) ->
 
 def parse_time_entry(
     value: bytes,
-) -> tuple[bytes, Optional[int], tuple[Optional[int], bool]]:
+) -> tuple[bytes, int | None, tuple[int | None, bool]]:
     """Parse event.
 
     Args:
@@ -1598,14 +1592,14 @@ def format_time_entry(
 def parse_commit(
     chunks: Iterable[bytes],
 ) -> tuple[
-    Optional[bytes],
+    bytes | None,
     list[bytes],
-    tuple[Optional[bytes], Optional[int], tuple[Optional[int], Optional[bool]]],
-    tuple[Optional[bytes], Optional[int], tuple[Optional[int], Optional[bool]]],
-    Optional[bytes],
+    tuple[bytes | None, int | None, tuple[int | None, bool | None]],
+    tuple[bytes | None, int | None, tuple[int | None, bool | None]],
+    bytes | None,
     list[Tag],
-    Optional[bytes],
-    Optional[bytes],
+    bytes | None,
+    bytes | None,
     list[tuple[bytes, bytes]],
 ]:
     """Parse a commit object from chunks.
@@ -1618,12 +1612,16 @@ def parse_commit(
     parents = []
     extra = []
     tree = None
-    author_info: tuple[
-        Optional[bytes], Optional[int], tuple[Optional[int], Optional[bool]]
-    ] = (None, None, (None, None))
-    commit_info: tuple[
-        Optional[bytes], Optional[int], tuple[Optional[int], Optional[bool]]
-    ] = (None, None, (None, None))
+    author_info: tuple[bytes | None, int | None, tuple[int | None, bool | None]] = (
+        None,
+        None,
+        (None, None),
+    )
+    commit_info: tuple[bytes | None, int | None, tuple[int | None, bool | None]] = (
+        None,
+        None,
+        (None, None),
+    )
     encoding = None
     mergetag = []
     message = None
@@ -1702,15 +1700,15 @@ class Commit(ShaFile):
         """Initialize an empty Commit."""
         super().__init__()
         self._parents: list[bytes] = []
-        self._encoding: Optional[bytes] = None
+        self._encoding: bytes | None = None
         self._mergetag: list[Tag] = []
-        self._gpgsig: Optional[bytes] = None
-        self._extra: list[tuple[bytes, Optional[bytes]]] = []
-        self._author_timezone_neg_utc: Optional[bool] = False
-        self._commit_timezone_neg_utc: Optional[bool] = False
+        self._gpgsig: bytes | None = None
+        self._extra: list[tuple[bytes, bytes | None]] = []
+        self._author_timezone_neg_utc: bool | None = False
+        self._commit_timezone_neg_utc: bool | None = False
 
     @classmethod
-    def from_path(cls, path: Union[str, bytes]) -> "Commit":
+    def from_path(cls, path: str | bytes) -> "Commit":
         """Read a commit from a file on disk.
 
         Args:
@@ -1731,12 +1729,16 @@ class Commit(ShaFile):
         self._parents = []
         self._extra = []
         self._tree = None
-        author_info: tuple[
-            Optional[bytes], Optional[int], tuple[Optional[int], Optional[bool]]
-        ] = (None, None, (None, None))
-        commit_info: tuple[
-            Optional[bytes], Optional[int], tuple[Optional[int], Optional[bool]]
-        ] = (None, None, (None, None))
+        author_info: tuple[bytes | None, int | None, tuple[int | None, bool | None]] = (
+            None,
+            None,
+            (None, None),
+        )
+        commit_info: tuple[bytes | None, int | None, tuple[int | None, bool | None]] = (
+            None,
+            None,
+            (None, None),
+        )
         self._encoding = None
         self._mergetag = []
         self._message = None
@@ -1833,7 +1835,7 @@ class Commit(ShaFile):
 
         # TODO: optionally check for duplicate parents
 
-    def sign(self, keyid: Optional[str] = None) -> None:
+    def sign(self, keyid: str | None = None) -> None:
         """Sign this commit with a GPG key.
 
         Args:
@@ -1866,7 +1868,7 @@ class Commit(ShaFile):
         tmp.gpgsig = None
         return tmp.as_raw_string()
 
-    def extract_signature(self) -> tuple[bytes, Optional[bytes], Optional[bytes]]:
+    def extract_signature(self) -> tuple[bytes, bytes | None, bytes | None]:
         """Extract the payload, signature, and signature type from this commit.
 
         Returns:
@@ -1894,7 +1896,7 @@ class Commit(ShaFile):
 
         return payload, self._gpgsig, sig_type
 
-    def verify(self, keyids: Optional[Iterable[str]] = None) -> None:
+    def verify(self, keyids: Iterable[str] | None = None) -> None:
         """Verify GPG signature for this commit (if it is signed).
 
         Args:
@@ -1991,7 +1993,7 @@ class Commit(ShaFile):
     )
 
     @replace_me(since="0.21.0", remove_in="0.24.0")
-    def _get_extra(self) -> list[tuple[bytes, Optional[bytes]]]:
+    def _get_extra(self) -> list[tuple[bytes, bytes | None]]:
         """Return extra settings of this commit."""
         return self._extra
 
@@ -2044,7 +2046,7 @@ OBJECT_CLASSES = (
     Tag,
 )
 
-_TYPE_MAP: dict[Union[bytes, int], type[ShaFile]] = {}
+_TYPE_MAP: dict[bytes | int, type[ShaFile]] = {}
 
 for cls in OBJECT_CLASSES:
     _TYPE_MAP[cls.type_name] = cls

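One hunk in objects.py goes beyond annotation spelling: the version-gated import of TypeGuard collapses to a plain "from typing import TypeGuard", because TypeGuard has lived in typing since Python 3.10 and the typing_extensions fallback only existed for 3.9. A small sketch of what such a guard buys; the narrowing function below is hypothetical, not dulwich code:

    from typing import TypeGuard

    def is_bytes_list(values: list[object]) -> TypeGuard[list[bytes]]:
        """Tell the type checker that values may be treated as list[bytes]."""
        return all(isinstance(v, bytes) for v in values)

    chunks: list[object] = [b"blob 4\0", b"data"]
    if is_bytes_list(chunks):
        print(b"".join(chunks))  # checker narrows chunks to list[bytes] here
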
+ 12 - 18
dulwich/objectspec.py

@@ -34,7 +34,7 @@ if TYPE_CHECKING:
     from .repo import Repo
 
 
-def to_bytes(text: Union[str, bytes]) -> bytes:
+def to_bytes(text: str | bytes) -> bytes:
     """Convert text to bytes.
 
     Args:
@@ -75,7 +75,7 @@ def _parse_number_suffix(suffix: bytes) -> tuple[int, bytes]:
     return int(suffix[:end]), suffix[end:]
 
 
-def parse_object(repo: "Repo", objectish: Union[bytes, str]) -> "ShaFile":
+def parse_object(repo: "Repo", objectish: bytes | str) -> "ShaFile":
     """Parse a string referring to an object.
 
     Args:
@@ -237,9 +237,7 @@ def parse_object(repo: "Repo", objectish: Union[bytes, str]) -> "ShaFile":
     return _resolve_object(repo, objectish)
 
 
-def parse_tree(
-    repo: "BaseRepo", treeish: Union[bytes, str, Tree, Commit, Tag]
-) -> "Tree":
+def parse_tree(repo: "BaseRepo", treeish: bytes | str | Tree | Commit | Tag) -> "Tree":
     """Parse a string referring to a tree.
 
     Args:
@@ -292,9 +290,7 @@ def parse_tree(
     return o
 
 
-def parse_ref(
-    container: Union["Repo", "RefsContainer"], refspec: Union[str, bytes]
-) -> "Ref":
+def parse_ref(container: Union["Repo", "RefsContainer"], refspec: str | bytes) -> "Ref":
     """Parse a string referring to a reference.
 
     Args:
@@ -322,7 +318,7 @@ def parse_ref(
 def parse_reftuple(
     lh_container: Union["Repo", "RefsContainer"],
     rh_container: Union["Repo", "RefsContainer"],
-    refspec: Union[str, bytes],
+    refspec: str | bytes,
     force: bool = False,
 ) -> tuple[Optional["Ref"], Optional["Ref"], bool]:
     """Parse a reftuple spec.
@@ -340,8 +336,8 @@ def parse_reftuple(
     if refspec.startswith(b"+"):
         force = True
         refspec = refspec[1:]
-    lh: Optional[bytes]
-    rh: Optional[bytes]
+    lh: bytes | None
+    rh: bytes | None
     if b":" in refspec:
         (lh, rh) = refspec.split(b":")
     else:
@@ -365,7 +361,7 @@ def parse_reftuple(
 def parse_reftuples(
     lh_container: Union["Repo", "RefsContainer"],
     rh_container: Union["Repo", "RefsContainer"],
-    refspecs: Union[bytes, Sequence[bytes]],
+    refspecs: bytes | Sequence[bytes],
     force: bool = False,
 ) -> list[tuple[Optional["Ref"], Optional["Ref"], bool]]:
     """Parse a list of reftuple specs to a list of reftuples.
@@ -390,7 +386,7 @@ def parse_reftuples(
 
 def parse_refs(
     container: Union["Repo", "RefsContainer"],
-    refspecs: Union[bytes, str, Sequence[Union[bytes, str]]],
+    refspecs: bytes | str | Sequence[bytes | str],
 ) -> list["Ref"]:
     """Parse a list of refspecs to a list of refs.
 
@@ -411,8 +407,8 @@ def parse_refs(
 
 
 def parse_commit_range(
-    repo: "Repo", committish: Union[str, bytes]
-) -> Optional[tuple["Commit", "Commit"]]:
+    repo: "Repo", committish: str | bytes
+) -> tuple["Commit", "Commit"] | None:
     """Parse a string referring to a commit range.
 
     Args:
@@ -473,9 +469,7 @@ def scan_for_short_id(
     raise AmbiguousShortId(prefix, ret)
 
 
-def parse_commit(
-    repo: "BaseRepo", committish: Union[str, bytes, Commit, Tag]
-) -> "Commit":
+def parse_commit(repo: "BaseRepo", committish: str | bytes | Commit | Tag) -> "Commit":
     """Parse a string referring to a single commit.
 
     Args:

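Worth noting is what the sweep leaves untouched in objectspec.py: parse_ref, parse_reftuple, and parse_reftuples keep Union["Repo", "RefsContainer"] and Optional["Ref"]. The likely reason (my inference, though consistent with how CPython evaluates annotations) is that these operands are string forward references to TYPE_CHECKING-only imports, and the | operator on two strings is evaluated eagerly when the def statement runs:

    # Stand-in demonstration; Repo/RefsContainer are dulwich names, but the
    # failure below is generic to any pair of string forward references.
    from typing import Union

    def ok(container: Union["Repo", "RefsContainer"]) -> None:
        ...  # typing.Union accepts strings and defers resolving them

    try:
        "Repo" | "RefsContainer"  # what the rewritten annotation would evaluate
    except TypeError as exc:
        print(exc)  # unsupported operand type(s) for |: 'str' and 'str'

typing.Union (or quoting the whole annotation as one string) tolerates forward references; the runtime union operator does not.
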
+ 184 - 192
dulwich/pack.py

@@ -48,7 +48,7 @@ import struct
 import sys
 import warnings
 import zlib
-from collections.abc import Iterable, Iterator, Sequence, Set
+from collections.abc import Callable, Iterable, Iterator, Sequence, Set
 from hashlib import sha1
 from itertools import chain
 from os import SEEK_CUR, SEEK_END
@@ -59,7 +59,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Callable,
     Generic,
     Optional,
     Protocol,
@@ -77,7 +76,7 @@ else:
 if sys.version_info >= (3, 12):
     from collections.abc import Buffer
 else:
-    Buffer = Union[bytes, bytearray, memoryview]
+    Buffer = bytes | bytearray | memoryview
 
 if TYPE_CHECKING:
     from _hashlib import HASH as HashObject
@@ -110,10 +109,10 @@ PACK_SPOOL_FILE_MAX_SIZE = 16 * 1024 * 1024
 DEFAULT_PACK_INDEX_VERSION = 2
 
 
-OldUnpackedObject = Union[tuple[Union[bytes, int], list[bytes]], list[bytes]]
+OldUnpackedObject = tuple[bytes | int, list[bytes]] | list[bytes]
 ResolveExtRefFn = Callable[[bytes], tuple[int, OldUnpackedObject]]
 ProgressFn = Callable[[int, str], None]
-PackHint = tuple[int, Optional[bytes]]
+PackHint = tuple[int, bytes | None]
 
 
 class UnresolvedDeltas(Exception):
@@ -136,8 +135,8 @@ class ObjectContainer(Protocol):
 
     def add_objects(
         self,
-        objects: Sequence[tuple[ShaFile, Optional[str]]],
-        progress: Optional[Callable[..., None]] = None,
+        objects: Sequence[tuple[ShaFile, str | None]],
+        progress: Callable[..., None] | None = None,
     ) -> Optional["Pack"]:
         """Add a set of objects to this object store.
 
@@ -228,8 +227,8 @@ class UnpackedObjectStream:
 
 
 def take_msb_bytes(
-    read: Callable[[int], bytes], crc32: Optional[int] = None
-) -> tuple[list[int], Optional[int]]:
+    read: Callable[[int], bytes], crc32: int | None = None
+) -> tuple[list[int], int | None]:
     """Read bytes marked with most significant bit.
 
     Args:
@@ -284,16 +283,16 @@ class UnpackedObject:
         "pack_type_num",  # Type of this object in the pack (may be a delta).
     ]
 
-    obj_type_num: Optional[int]
-    obj_chunks: Optional[list[bytes]]
-    delta_base: Union[None, bytes, int]
+    obj_type_num: int | None
+    obj_chunks: list[bytes] | None
+    delta_base: None | bytes | int
     decomp_chunks: list[bytes]
-    comp_chunks: Optional[list[bytes]]
-    decomp_len: Optional[int]
-    crc32: Optional[int]
-    offset: Optional[int]
+    comp_chunks: list[bytes] | None
+    decomp_len: int | None
+    crc32: int | None
+    offset: int | None
     pack_type_num: int
-    _sha: Optional[bytes]
+    _sha: bytes | None
 
     # TODO(dborowitz): read_zlib_chunks and unpack_object could very well be
     # methods of this object.
@@ -301,12 +300,12 @@ class UnpackedObject:
         self,
         pack_type_num: int,
         *,
-        delta_base: Union[None, bytes, int] = None,
-        decomp_len: Optional[int] = None,
-        crc32: Optional[int] = None,
-        sha: Optional[bytes] = None,
-        decomp_chunks: Optional[list[bytes]] = None,
-        offset: Optional[int] = None,
+        delta_base: None | bytes | int = None,
+        decomp_len: int | None = None,
+        crc32: int | None = None,
+        sha: bytes | None = None,
+        decomp_chunks: list[bytes] | None = None,
+        offset: int | None = None,
     ) -> None:
         """Initialize an UnpackedObject.
 
@@ -464,7 +463,7 @@ def iter_sha1(iter: Iterable[bytes]) -> bytes:
     return sha.hexdigest().encode("ascii")
 
 
-def load_pack_index(path: Union[str, os.PathLike[str]]) -> "PackIndex":
+def load_pack_index(path: str | os.PathLike[str]) -> "PackIndex":
     """Load an index file by path.
 
     Args:
@@ -476,8 +475,8 @@ def load_pack_index(path: Union[str, os.PathLike[str]]) -> "PackIndex":
 
 
 def _load_file_contents(
-    f: Union[IO[bytes], _GitFile], size: Optional[int] = None
-) -> tuple[Union[bytes, Any], int]:
+    f: IO[bytes] | _GitFile, size: int | None = None
+) -> tuple[bytes | Any, int]:
     """Load contents from a file, preferring mmap when possible.
 
     Args:
@@ -507,7 +506,7 @@ def _load_file_contents(
 
 
 def load_pack_index_file(
-    path: Union[str, os.PathLike[str]], f: Union[IO[bytes], _GitFile]
+    path: str | os.PathLike[str], f: IO[bytes] | _GitFile
 ) -> "PackIndex":
     """Load an index file from a file-like object.
 
@@ -531,7 +530,7 @@ def load_pack_index_file(
 
 def bisect_find_sha(
     start: int, end: int, sha: bytes, unpack_name: Callable[[int], bytes]
-) -> Optional[int]:
+) -> int | None:
     """Find a SHA in a data blob with sorted SHAs.
 
     Args:
@@ -554,7 +553,7 @@ def bisect_find_sha(
     return None
 
 
-PackIndexEntry = tuple[bytes, int, Optional[int]]
+PackIndexEntry = tuple[bytes, int, int | None]
 
 
 class PackIndex:
@@ -600,7 +599,7 @@ class PackIndex:
         """
         raise NotImplementedError(self.iterentries)
 
-    def get_pack_checksum(self) -> Optional[bytes]:
+    def get_pack_checksum(self) -> bytes | None:
         """Return the SHA1 checksum stored for the corresponding packfile.
 
         Returns: 20-byte binary digest, or None if not available
@@ -679,8 +678,8 @@ class MemoryPackIndex(PackIndex):
 
     def __init__(
         self,
-        entries: list[tuple[bytes, int, Optional[int]]],
-        pack_checksum: Optional[bytes] = None,
+        entries: list[tuple[bytes, int, int | None]],
+        pack_checksum: bytes | None = None,
     ) -> None:
         """Create a new MemoryPackIndex.
 
@@ -696,7 +695,7 @@ class MemoryPackIndex(PackIndex):
         self._entries = entries
         self._pack_checksum = pack_checksum
 
-    def get_pack_checksum(self) -> Optional[bytes]:
+    def get_pack_checksum(self) -> bytes | None:
         """Return the SHA checksum stored for the corresponding packfile."""
         return self._pack_checksum
 
@@ -753,14 +752,14 @@ class FilePackIndex(PackIndex):
     """
 
     _fan_out_table: list[int]
-    _file: Union[IO[bytes], _GitFile]
+    _file: IO[bytes] | _GitFile
 
     def __init__(
         self,
-        filename: Union[str, os.PathLike[str]],
-        file: Optional[Union[IO[bytes], _GitFile]] = None,
-        contents: Optional[Union[bytes, "mmap.mmap"]] = None,
-        size: Optional[int] = None,
+        filename: str | os.PathLike[str],
+        file: IO[bytes] | _GitFile | None = None,
+        contents: Union[bytes, "mmap.mmap"] | None = None,
+        size: int | None = None,
     ) -> None:
         """Create a pack index object.
 
@@ -823,7 +822,7 @@ class FilePackIndex(PackIndex):
         """Unpack the i-th object offset from the index file."""
         raise NotImplementedError(self._unpack_offset)
 
-    def _unpack_crc32_checksum(self, i: int) -> Optional[int]:
+    def _unpack_crc32_checksum(self, i: int) -> int | None:
         """Unpack the crc32 checksum for the ith object from the index file."""
         raise NotImplementedError(self._unpack_crc32_checksum)
 
@@ -950,10 +949,10 @@ class PackIndex1(FilePackIndex):
 
     def __init__(
         self,
-        filename: Union[str, os.PathLike[str]],
-        file: Optional[Union[IO[bytes], _GitFile]] = None,
-        contents: Optional[bytes] = None,
-        size: Optional[int] = None,
+        filename: str | os.PathLike[str],
+        file: IO[bytes] | _GitFile | None = None,
+        contents: bytes | None = None,
+        size: int | None = None,
     ) -> None:
         """Initialize a version 1 pack index.
 
@@ -991,10 +990,10 @@ class PackIndex2(FilePackIndex):
 
     def __init__(
         self,
-        filename: Union[str, os.PathLike[str]],
-        file: Optional[Union[IO[bytes], _GitFile]] = None,
-        contents: Optional[bytes] = None,
-        size: Optional[int] = None,
+        filename: str | os.PathLike[str],
+        file: IO[bytes] | _GitFile | None = None,
+        contents: bytes | None = None,
+        size: int | None = None,
     ) -> None:
         """Initialize a version 2 pack index.
 
@@ -1055,10 +1054,10 @@ class PackIndex3(FilePackIndex):
 
     def __init__(
         self,
-        filename: Union[str, os.PathLike[str]],
-        file: Optional[Union[IO[bytes], _GitFile]] = None,
-        contents: Optional[bytes] = None,
-        size: Optional[int] = None,
+        filename: str | os.PathLike[str],
+        file: IO[bytes] | _GitFile | None = None,
+        contents: bytes | None = None,
+        size: int | None = None,
     ) -> None:
         """Initialize a version 3 pack index.
 
@@ -1147,7 +1146,7 @@ def read_pack_header(read: Callable[[int], bytes]) -> tuple[int, int]:
     return (version, num_objects)
 
 
-def chunks_length(chunks: Union[bytes, Iterable[bytes]]) -> int:
+def chunks_length(chunks: bytes | Iterable[bytes]) -> int:
     """Get the total length of a sequence of chunks.
 
     Args:
@@ -1162,7 +1161,7 @@ def chunks_length(chunks: Union[bytes, Iterable[bytes]]) -> int:
 
 def unpack_object(
     read_all: Callable[[int], bytes],
-    read_some: Optional[Callable[[int], bytes]] = None,
+    read_some: Callable[[int], bytes] | None = None,
     compute_crc32: bool = False,
     include_comp: bool = False,
     zlib_bufsize: int = _ZLIB_BUFSIZE,
@@ -1203,7 +1202,7 @@ def unpack_object(
     for i, byte in enumerate(raw[1:]):
         size += (byte & 0x7F) << ((i * 7) + 4)
 
-    delta_base: Union[int, bytes, None]
+    delta_base: int | bytes | None
     raw_base = len(raw)
     if type_num == OFS_DELTA:
         raw, crc32 = take_msb_bytes(read_all, crc32=crc32)
@@ -1255,7 +1254,7 @@ class PackStreamReader:
     def __init__(
         self,
         read_all: Callable[[int], bytes],
-        read_some: Optional[Callable[[int], bytes]] = None,
+        read_some: Callable[[int], bytes] | None = None,
         zlib_bufsize: int = _ZLIB_BUFSIZE,
     ) -> None:
         """Initialize pack stream reader.
@@ -1410,7 +1409,7 @@ class PackStreamCopier(PackStreamReader):
     def __init__(
         self,
         read_all: Callable[[int], bytes],
-        read_some: Optional[Callable[[int], bytes]],
+        read_some: Callable[[int], bytes] | None,
         outfile: IO[bytes],
         delta_iter: Optional["DeltaChainIterator[UnpackedObject]"] = None,
     ) -> None:
@@ -1435,7 +1434,7 @@ class PackStreamCopier(PackStreamReader):
         self.outfile.write(data)
         return data
 
-    def verify(self, progress: Optional[Callable[..., None]] = None) -> None:
+    def verify(self, progress: Callable[..., None] | None = None) -> None:
         """Verify a pack stream and write it to the output file.
 
         See PackStreamReader.iterobjects for a list of exceptions this may
@@ -1451,7 +1450,7 @@ class PackStreamCopier(PackStreamReader):
             progress(f"copied {i} pack entries\n".encode("ascii"))
 
 
-def obj_sha(type: int, chunks: Union[bytes, Iterable[bytes]]) -> bytes:
+def obj_sha(type: int, chunks: bytes | Iterable[bytes]) -> bytes:
     """Compute the SHA for a numeric type and object chunks."""
     sha = sha1()
     sha.update(object_header(type, chunks_length(chunks)))
@@ -1521,16 +1520,16 @@ class PackData:
 
     def __init__(
         self,
-        filename: Union[str, os.PathLike[str]],
-        file: Optional[IO[bytes]] = None,
-        size: Optional[int] = None,
+        filename: str | os.PathLike[str],
+        file: IO[bytes] | None = None,
+        size: int | None = None,
         *,
-        delta_window_size: Optional[int] = None,
-        window_memory: Optional[int] = None,
-        delta_cache_size: Optional[int] = None,
-        depth: Optional[int] = None,
-        threads: Optional[int] = None,
-        big_file_threshold: Optional[int] = None,
+        delta_window_size: int | None = None,
+        window_memory: int | None = None,
+        delta_cache_size: int | None = None,
+        depth: int | None = None,
+        threads: int | None = None,
+        big_file_threshold: int | None = None,
     ) -> None:
         """Create a PackData object representing the pack in the given filename.
 
@@ -1573,7 +1572,7 @@ class PackData:
         return os.path.basename(self._filename)
 
     @property
-    def path(self) -> Union[str, os.PathLike[str]]:
+    def path(self) -> str | os.PathLike[str]:
         """Get the full path of the pack file.
 
         Returns:
@@ -1582,7 +1581,7 @@ class PackData:
         return self._filename
 
     @classmethod
-    def from_file(cls, file: IO[bytes], size: Optional[int] = None) -> "PackData":
+    def from_file(cls, file: IO[bytes], size: int | None = None) -> "PackData":
         """Create a PackData object from an open file.
 
         Args:
@@ -1595,7 +1594,7 @@ class PackData:
         return cls(str(file), file=file, size=size)
 
     @classmethod
-    def from_path(cls, path: Union[str, os.PathLike[str]]) -> "PackData":
+    def from_path(cls, path: str | os.PathLike[str]) -> "PackData":
         """Create a PackData object from a file path.
 
         Args:
@@ -1616,9 +1615,9 @@ class PackData:
 
     def __exit__(
         self,
-        exc_type: Optional[type],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """Exit context manager."""
         self.close()
@@ -1668,9 +1667,9 @@ class PackData:
 
     def iterentries(
         self,
-        progress: Optional[Callable[[int, int], None]] = None,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
-    ) -> Iterator[tuple[bytes, int, Optional[int]]]:
+        progress: Callable[[int, int], None] | None = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
+    ) -> Iterator[tuple[bytes, int, int | None]]:
         """Yield entries summarizing the contents of this pack.
 
         Args:
@@ -1688,8 +1687,8 @@ class PackData:
 
     def sorted_entries(
         self,
-        progress: Optional[ProgressFn] = None,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        progress: ProgressFn | None = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
     ) -> list[tuple[bytes, int, int]]:
         """Return entries in this pack, sorted by SHA.
 
@@ -1706,8 +1705,8 @@ class PackData:
     def create_index_v1(
         self,
         filename: str,
-        progress: Optional[Callable[..., None]] = None,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        progress: Callable[..., None] | None = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
     ) -> bytes:
         """Create a version 1 file for this data file.
 
@@ -1732,8 +1731,8 @@ class PackData:
     def create_index_v2(
         self,
         filename: str,
-        progress: Optional[Callable[..., None]] = None,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        progress: Callable[..., None] | None = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
     ) -> bytes:
         """Create a version 2 index file for this data file.
 
@@ -1752,8 +1751,8 @@ class PackData:
     def create_index_v3(
         self,
         filename: str,
-        progress: Optional[Callable[..., None]] = None,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        progress: Callable[..., None] | None = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
         hash_algorithm: int = 1,
     ) -> bytes:
         """Create a version 3 index file for this data file.
@@ -1776,9 +1775,9 @@ class PackData:
     def create_index(
         self,
         filename: str,
-        progress: Optional[Callable[..., None]] = None,
+        progress: Callable[..., None] | None = None,
         version: int = 2,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
         hash_algorithm: int = 1,
     ) -> bytes:
         """Create an  index file for this data file.
@@ -1875,9 +1874,9 @@ class DeltaChainIterator(Generic[T]):
 
     def __init__(
         self,
-        file_obj: Optional[IO[bytes]],
+        file_obj: IO[bytes] | None,
         *,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
     ) -> None:
         """Initialize DeltaChainIterator.
 
@@ -1894,7 +1893,7 @@ class DeltaChainIterator(Generic[T]):
 
     @classmethod
     def for_pack_data(
-        cls, pack_data: PackData, resolve_ext_ref: Optional[ResolveExtRefFn] = None
+        cls, pack_data: PackData, resolve_ext_ref: ResolveExtRefFn | None = None
     ) -> "DeltaChainIterator[T]":
         """Create a DeltaChainIterator from pack data.
 
@@ -1918,7 +1917,7 @@ class DeltaChainIterator(Generic[T]):
         shas: Iterable[bytes],
         *,
         allow_missing: bool = False,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
     ) -> "DeltaChainIterator[T]":
         """Create a DeltaChainIterator for a subset of objects.
 
@@ -2027,7 +2026,7 @@ class DeltaChainIterator(Generic[T]):
         raise NotImplementedError
 
     def _resolve_object(
-        self, offset: int, obj_type_num: int, base_chunks: Optional[list[bytes]]
+        self, offset: int, obj_type_num: int, base_chunks: list[bytes] | None
     ) -> UnpackedObject:
         assert self._file is not None
         self._file.seek(offset)
@@ -2046,7 +2045,7 @@ class DeltaChainIterator(Generic[T]):
         return unpacked
 
     def _follow_chain(
-        self, offset: int, obj_type_num: int, base_chunks: Optional[list[bytes]]
+        self, offset: int, obj_type_num: int, base_chunks: list[bytes] | None
     ) -> Iterator[T]:
         # Unlike PackData.get_object_at, there is no need to cache offsets as
         # this approach by design inflates each object exactly once.
@@ -2095,7 +2094,7 @@ class PackIndexer(DeltaChainIterator[PackIndexEntry]):
 
     _compute_crc32 = True
 
-    def _result(self, unpacked: UnpackedObject) -> tuple[bytes, int, Optional[int]]:
+    def _result(self, unpacked: UnpackedObject) -> tuple[bytes, int, int | None]:
         """Convert unpacked object to pack index entry.
 
         Args:
@@ -2239,9 +2238,9 @@ class SHA1Reader(BinaryIO):
 
     def __exit__(
         self,
-        type: Optional[type],
-        value: Optional[BaseException],
-        traceback: Optional[TracebackType],
+        type: type | None,
+        value: BaseException | None,
+        traceback: TracebackType | None,
     ) -> None:
         """Exit context manager and close file."""
         self.close()
@@ -2272,7 +2271,7 @@ class SHA1Reader(BinaryIO):
         """Check if file is a terminal."""
         return getattr(self.f, "isatty", lambda: False)()
 
-    def truncate(self, size: Optional[int] = None) -> int:
+    def truncate(self, size: int | None = None) -> int:
         """Not supported for read-only file.
 
         Raises:
@@ -2284,7 +2283,7 @@ class SHA1Reader(BinaryIO):
 class SHA1Writer(BinaryIO):
     """Wrapper for file-like object that remembers the SHA1 of its data."""
 
-    def __init__(self, f: Union[BinaryIO, IO[bytes]]) -> None:
+    def __init__(self, f: BinaryIO | IO[bytes]) -> None:
         """Initialize SHA1Writer.
 
         Args:
@@ -2293,9 +2292,9 @@ class SHA1Writer(BinaryIO):
         self.f = f
         self.length = 0
         self.sha1 = sha1(b"")
-        self.digest: Optional[bytes] = None
+        self.digest: bytes | None = None
 
-    def write(self, data: Union[bytes, bytearray, memoryview], /) -> int:  # type: ignore[override]
+    def write(self, data: bytes | bytearray | memoryview, /) -> int:  # type: ignore[override]
         """Write data and update SHA1.
 
         Args:
@@ -2407,9 +2406,9 @@ class SHA1Writer(BinaryIO):
 
     def __exit__(
         self,
-        type: Optional[type],
-        value: Optional[BaseException],
-        traceback: Optional[TracebackType],
+        type: type | None,
+        value: BaseException | None,
+        traceback: TracebackType | None,
     ) -> None:
         """Exit context manager and close file."""
         self.close()
@@ -2434,7 +2433,7 @@ class SHA1Writer(BinaryIO):
         """Check if file is a terminal."""
         return getattr(self.f, "isatty", lambda: False)()
 
-    def truncate(self, size: Optional[int] = None) -> int:
+    def truncate(self, size: int | None = None) -> int:
         """Not supported for write-only file.
 
         Raises:
@@ -2444,7 +2443,7 @@ class SHA1Writer(BinaryIO):
 
 
 def pack_object_header(
-    type_num: int, delta_base: Optional[Union[bytes, int]], size: int
+    type_num: int, delta_base: bytes | int | None, size: int
 ) -> bytearray:
     """Create a pack object header for the given object info.
 
@@ -2480,7 +2479,7 @@ def pack_object_header(
 
 def pack_object_chunks(
     type: int,
-    object: Union[list[bytes], tuple[Union[bytes, int], list[bytes]]],
+    object: list[bytes] | tuple[bytes | int, list[bytes]],
     compression_level: int = -1,
 ) -> Iterator[bytes]:
     """Generate chunks for a pack object.
@@ -2520,7 +2519,7 @@ def pack_object_chunks(
 def write_pack_object(
     write: Callable[[bytes], int],
     type: int,
-    object: Union[list[bytes], tuple[Union[bytes, int], list[bytes]]],
+    object: list[bytes] | tuple[bytes | int, list[bytes]],
     sha: Optional["HashObject"] = None,
     compression_level: int = -1,
 ) -> int:
@@ -2545,10 +2544,10 @@ def write_pack_object(
 
 def write_pack(
     filename: str,
-    objects: Union[Sequence[ShaFile], Sequence[tuple[ShaFile, Optional[bytes]]]],
+    objects: Sequence[ShaFile] | Sequence[tuple[ShaFile, bytes | None]],
     *,
-    deltify: Optional[bool] = None,
-    delta_window_size: Optional[int] = None,
+    deltify: bool | None = None,
+    delta_window_size: int | None = None,
     compression_level: int = -1,
 ) -> tuple[bytes, bytes]:
     """Write a new pack data file.
@@ -2583,7 +2582,7 @@ def pack_header_chunks(num_objects: int) -> Iterator[bytes]:
 
 
 def write_pack_header(
-    write: Union[Callable[[bytes], int], IO[bytes]], num_objects: int
+    write: Callable[[bytes], int] | IO[bytes], num_objects: int
 ) -> None:
     """Write a pack header for the given number of objects."""
     write_fn: Callable[[bytes], int]
@@ -2604,8 +2603,8 @@ def find_reusable_deltas(
     container: PackedObjectContainer,
     object_ids: Set[bytes],
     *,
-    other_haves: Optional[Set[bytes]] = None,
-    progress: Optional[Callable[..., None]] = None,
+    other_haves: Set[bytes] | None = None,
+    progress: Callable[..., None] | None = None,
 ) -> Iterator[UnpackedObject]:
     """Find deltas in a pack that can be reused.
 
@@ -2638,10 +2637,10 @@ def find_reusable_deltas(
 
 
 def deltify_pack_objects(
-    objects: Union[Iterator[ShaFile], Iterator[tuple[ShaFile, Optional[bytes]]]],
+    objects: Iterator[ShaFile] | Iterator[tuple[ShaFile, bytes | None]],
     *,
-    window_size: Optional[int] = None,
-    progress: Optional[Callable[..., None]] = None,
+    window_size: int | None = None,
+    progress: Callable[..., None] | None = None,
 ) -> Iterator[UnpackedObject]:
     """Generate deltas for pack objects.
 
@@ -2653,7 +2652,7 @@ def deltify_pack_objects(
         delta_base is None for full text entries
     """
 
-    def objects_with_hints() -> Iterator[tuple[ShaFile, tuple[int, Optional[bytes]]]]:
+    def objects_with_hints() -> Iterator[tuple[ShaFile, tuple[int, bytes | None]]]:
         for e in objects:
             if isinstance(e, ShaFile):
                 yield (e, (e.type_num, None))
@@ -2669,8 +2668,8 @@ def deltify_pack_objects(
 
 
 def sort_objects_for_delta(
-    objects: Union[Iterator[ShaFile], Iterator[tuple[ShaFile, Optional[PackHint]]]],
-) -> Iterator[tuple[ShaFile, Optional[bytes]]]:
+    objects: Iterator[ShaFile] | Iterator[tuple[ShaFile, PackHint | None]],
+) -> Iterator[tuple[ShaFile, bytes | None]]:
     """Sort objects for optimal delta compression.
 
     Args:
@@ -2700,9 +2699,9 @@ def sort_objects_for_delta(
 
 
 def deltas_from_sorted_objects(
-    objects: Iterator[tuple[ShaFile, Optional[bytes]]],
-    window_size: Optional[int] = None,
-    progress: Optional[Callable[..., None]] = None,
+    objects: Iterator[tuple[ShaFile, bytes | None]],
+    window_size: int | None = None,
+    progress: Callable[..., None] | None = None,
 ) -> Iterator[UnpackedObject]:
     """Create deltas from sorted objects.
 
@@ -2753,16 +2752,14 @@ def deltas_from_sorted_objects(
 
 
 def pack_objects_to_data(
-    objects: Union[
-        Sequence[ShaFile],
-        Sequence[tuple[ShaFile, Optional[bytes]]],
-        Sequence[tuple[ShaFile, Optional[PackHint]]],
-    ],
+    objects: Sequence[ShaFile]
+    | Sequence[tuple[ShaFile, bytes | None]]
+    | Sequence[tuple[ShaFile, PackHint | None]],
     *,
-    deltify: Optional[bool] = None,
-    delta_window_size: Optional[int] = None,
+    deltify: bool | None = None,
+    delta_window_size: int | None = None,
     ofs_delta: bool = True,
-    progress: Optional[Callable[..., None]] = None,
+    progress: Callable[..., None] | None = None,
 ) -> tuple[int, Iterator[UnpackedObject]]:
     """Create pack data from objects.
 
@@ -2803,13 +2800,13 @@ def pack_objects_to_data(
 
 def generate_unpacked_objects(
     container: PackedObjectContainer,
-    object_ids: Sequence[tuple[ObjectID, Optional[PackHint]]],
-    delta_window_size: Optional[int] = None,
-    deltify: Optional[bool] = None,
+    object_ids: Sequence[tuple[ObjectID, PackHint | None]],
+    delta_window_size: int | None = None,
+    deltify: bool | None = None,
     reuse_deltas: bool = True,
     ofs_delta: bool = True,
-    other_haves: Optional[set[bytes]] = None,
-    progress: Optional[Callable[..., None]] = None,
+    other_haves: set[bytes] | None = None,
+    progress: Callable[..., None] | None = None,
 ) -> Iterator[UnpackedObject]:
     """Create pack data from objects.
 
@@ -2860,18 +2857,16 @@ def full_unpacked_object(o: ShaFile) -> UnpackedObject:
 
 
 def write_pack_from_container(
-    write: Union[
-        Callable[[bytes], None],
-        Callable[[Union[bytes, bytearray, memoryview]], int],
-        IO[bytes],
-    ],
+    write: Callable[[bytes], None]
+    | Callable[[bytes | bytearray | memoryview], int]
+    | IO[bytes],
     container: PackedObjectContainer,
-    object_ids: Sequence[tuple[ObjectID, Optional[PackHint]]],
-    delta_window_size: Optional[int] = None,
-    deltify: Optional[bool] = None,
+    object_ids: Sequence[tuple[ObjectID, PackHint | None]],
+    delta_window_size: int | None = None,
+    deltify: bool | None = None,
     reuse_deltas: bool = True,
     compression_level: int = -1,
-    other_haves: Optional[set[bytes]] = None,
+    other_haves: set[bytes] | None = None,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -2906,11 +2901,11 @@ def write_pack_from_container(
 
 
 def write_pack_objects(
-    write: Union[Callable[[bytes], None], IO[bytes]],
-    objects: Union[Sequence[ShaFile], Sequence[tuple[ShaFile, Optional[bytes]]]],
+    write: Callable[[bytes], None] | IO[bytes],
+    objects: Sequence[ShaFile] | Sequence[tuple[ShaFile, bytes | None]],
     *,
-    delta_window_size: Optional[int] = None,
-    deltify: Optional[bool] = None,
+    delta_window_size: int | None = None,
+    deltify: bool | None = None,
     compression_level: int = -1,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
@@ -2939,9 +2934,9 @@ class PackChunkGenerator:
 
     def __init__(
         self,
-        num_records: Optional[int] = None,
-        records: Optional[Iterator[UnpackedObject]] = None,
-        progress: Optional[Callable[..., None]] = None,
+        num_records: int | None = None,
+        records: Iterator[UnpackedObject] | None = None,
+        progress: Callable[..., None] | None = None,
         compression_level: int = -1,
         reuse_compressed: bool = True,
     ) -> None:
@@ -2978,8 +2973,8 @@ class PackChunkGenerator:
         self,
         records: Iterator[UnpackedObject],
         *,
-        num_records: Optional[int] = None,
-        progress: Optional[Callable[..., None]] = None,
+        num_records: int | None = None,
+        progress: Callable[..., None] | None = None,
         compression_level: int = -1,
         reuse_compressed: bool = True,
     ) -> Iterator[bytes]:
@@ -3006,7 +3001,7 @@ class PackChunkGenerator:
             type_num = unpacked.pack_type_num
             if progress is not None and i % 1000 == 0:
                 progress((f"writing pack data: {i}/{num_records}\r").encode("ascii"))
-            raw: Union[list[bytes], tuple[int, list[bytes]], tuple[bytes, list[bytes]]]
+            raw: list[bytes] | tuple[int, list[bytes]] | tuple[bytes, list[bytes]]
             if unpacked.delta_base is not None:
                 assert isinstance(unpacked.delta_base, bytes), (
                     f"Expected bytes, got {type(unpacked.delta_base)}"
@@ -3022,7 +3017,7 @@ class PackChunkGenerator:
                     raw = (offset - base_offset, unpacked.decomp_chunks)
             else:
                 raw = unpacked.decomp_chunks
-            chunks: Union[list[bytes], Iterator[bytes]]
+            chunks: list[bytes] | Iterator[bytes]
             if unpacked.comp_chunks is not None and reuse_compressed:
                 chunks = unpacked.comp_chunks
             else:
@@ -3048,15 +3043,13 @@ class PackChunkGenerator:
 
 
 def write_pack_data(
-    write: Union[
-        Callable[[bytes], None],
-        Callable[[Union[bytes, bytearray, memoryview]], int],
-        IO[bytes],
-    ],
+    write: Callable[[bytes], None]
+    | Callable[[bytes | bytearray | memoryview], int]
+    | IO[bytes],
     records: Iterator[UnpackedObject],
     *,
-    num_records: Optional[int] = None,
-    progress: Optional[Callable[..., None]] = None,
+    num_records: int | None = None,
+    progress: Callable[..., None] | None = None,
     compression_level: int = -1,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
@@ -3085,7 +3078,7 @@ def write_pack_data(
 
 def write_pack_index_v1(
     f: IO[bytes],
-    entries: Iterable[tuple[bytes, int, Union[int, None]]],
+    entries: Iterable[tuple[bytes, int, int | None]],
     pack_checksum: bytes,
 ) -> bytes:
     """Write a new pack index file.
@@ -3196,7 +3189,7 @@ create_delta = _create_delta_py
 
 
 def apply_delta(
-    src_buf: Union[bytes, list[bytes]], delta: Union[bytes, list[bytes]]
+    src_buf: bytes | list[bytes], delta: bytes | list[bytes]
 ) -> list[bytes]:
     """Based on the similar function in git's patch-delta.c.
 
@@ -3273,7 +3266,7 @@ def apply_delta(
 
 def write_pack_index_v2(
     f: IO[bytes],
-    entries: Iterable[tuple[bytes, int, Union[int, None]]],
+    entries: Iterable[tuple[bytes, int, int | None]],
     pack_checksum: bytes,
 ) -> bytes:
     """Write a new pack index file.
@@ -3315,7 +3308,7 @@ def write_pack_index_v2(
 
 def write_pack_index_v3(
     f: IO[bytes],
-    entries: Iterable[tuple[bytes, int, Union[int, None]]],
+    entries: Iterable[tuple[bytes, int, int | None]],
     pack_checksum: bytes,
     hash_algorithm: int = 1,
 ) -> bytes:
@@ -3395,10 +3388,10 @@ def write_pack_index_v3(
 
 def write_pack_index(
     f: IO[bytes],
-    entries: Iterable[tuple[bytes, int, Union[int, None]]],
+    entries: Iterable[tuple[bytes, int, int | None]],
     pack_checksum: bytes,
-    progress: Optional[Callable[..., None]] = None,
-    version: Optional[int] = None,
+    progress: Callable[..., None] | None = None,
+    version: int | None = None,
 ) -> bytes:
     """Write a pack index file.
 
@@ -3428,24 +3421,24 @@ def write_pack_index(
 class Pack:
     """A Git pack object."""
 
-    _data_load: Optional[Callable[[], PackData]]
-    _idx_load: Optional[Callable[[], PackIndex]]
+    _data_load: Callable[[], PackData] | None
+    _idx_load: Callable[[], PackIndex] | None
 
-    _data: Optional[PackData]
-    _idx: Optional[PackIndex]
-    _bitmap: Optional["PackBitmap"]
+    _data: PackData | None
+    _idx: PackIndex | None
+    _bitmap: "PackBitmap | None"
 
     def __init__(
         self,
         basename: str,
-        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+        resolve_ext_ref: ResolveExtRefFn | None = None,
         *,
-        delta_window_size: Optional[int] = None,
-        window_memory: Optional[int] = None,
-        delta_cache_size: Optional[int] = None,
-        depth: Optional[int] = None,
-        threads: Optional[int] = None,
-        big_file_threshold: Optional[int] = None,
+        delta_window_size: int | None = None,
+        window_memory: int | None = None,
+        delta_cache_size: int | None = None,
+        depth: int | None = None,
+        threads: int | None = None,
+        big_file_threshold: int | None = None,
     ) -> None:
         """Initialize a Pack object.
 
@@ -3558,9 +3551,9 @@ class Pack:
 
     def __exit__(
         self,
-        exc_type: Optional[type],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """Exit context manager."""
         self.close()
@@ -3712,7 +3705,7 @@ class Pack:
             unpacked.crc32 = crc32
             yield unpacked
 
-    def keep(self, msg: Optional[bytes] = None) -> str:
+    def keep(self, msg: bytes | None = None) -> str:
         """Add a .keep file for the pack, preventing git from garbage collecting it.
 
         Args:
@@ -3727,7 +3720,7 @@ class Pack:
                 keepfile.write(b"\n")
         return keepfile_name
 
-    def get_ref(self, sha: bytes) -> tuple[Optional[int], int, OldUnpackedObject]:
+    def get_ref(self, sha: bytes) -> tuple[int | None, int, OldUnpackedObject]:
         """Get the object for a ref SHA, only looking in this pack."""
         # TODO: cache these results
         try:
@@ -3747,9 +3740,8 @@ class Pack:
         offset: int,
         type: int,
         obj: OldUnpackedObject,
-        get_ref: Optional[
-            Callable[[bytes], tuple[Optional[int], int, OldUnpackedObject]]
-        ] = None,
+        get_ref: Callable[[bytes], tuple[int | None, int, OldUnpackedObject]]
+        | None = None,
     ) -> tuple[int, OldUnpackedObject]:
         """Resolve an object, possibly resolving deltas when necessary.
 
@@ -3808,7 +3800,7 @@ class Pack:
         return base_type, chunks
 
     def entries(
-        self, progress: Optional[Callable[[int, int], None]] = None
+        self, progress: Callable[[int, int], None] | None = None
     ) -> Iterator[PackIndexEntry]:
         """Yield entries summarizing the contents of this pack.
 
@@ -3822,7 +3814,7 @@ class Pack:
         )
 
     def sorted_entries(
-        self, progress: Optional[ProgressFn] = None
+        self, progress: ProgressFn | None = None
     ) -> Iterator[PackIndexEntry]:
         """Return entries in this pack, sorted by SHA.
 
@@ -3862,7 +3854,7 @@ def extend_pack(
     get_raw: Callable[[ObjectID], tuple[int, bytes]],
     *,
     compression_level: int = -1,
-    progress: Optional[Callable[[bytes], None]] = None,
+    progress: Callable[[bytes], None] | None = None,
 ) -> tuple[bytes, list[tuple[bytes, int, int]]]:
     """Extend a pack file with more objects.
 

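The pack.py hunks above are representative of the whole commit: every Optional[X] becomes X | None and every Union[A, B] becomes A | B. A minimal sketch (not dulwich code) of why this change had to wait for the 3.9 drop: PEP 604 unions in annotations are evaluated at function-definition time, so on Python 3.9 they raise a TypeError unless evaluation is deferred with "from __future__ import annotations".

# Minimal sketch, assuming Python 3.10+; both spellings describe the same type.
from typing import Optional, Union

def old_style(data: Optional[bytes], out: Union[bytes, bytearray]) -> None: ...
def new_style(data: bytes | None, out: bytes | bytearray) -> None: ...

assert Optional[bytes] == (bytes | None)
assert Union[bytes, bytearray] == (bytes | bytearray)
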
+ 23 - 24
dulwich/patch.py

@@ -35,7 +35,6 @@ from typing import (
     BinaryIO,
     Optional,
     TextIO,
-    Union,
 )
 
 if TYPE_CHECKING:
@@ -75,10 +74,10 @@ class DiffAlgorithmNotAvailable(Exception):
 def write_commit_patch(
     f: IO[bytes],
     commit: "Commit",
-    contents: Union[str, bytes],
+    contents: str | bytes,
     progress: tuple[int, int],
-    version: Optional[str] = None,
-    encoding: Optional[str] = None,
+    version: str | None = None,
+    encoding: str | None = None,
 ) -> None:
     """Write a individual file patch.
 
@@ -255,7 +254,7 @@ def unified_diff_with_algorithm(
     lineterm: str = "\n",
     tree_encoding: str = "utf-8",
     output_encoding: str = "utf-8",
-    algorithm: Optional[str] = None,
+    algorithm: str | None = None,
 ) -> Generator[bytes, None, None]:
     """Generate unified diff with specified algorithm.
 
@@ -327,7 +326,7 @@ def is_binary(content: bytes) -> bool:
     return b"\0" in content[:FIRST_FEW_BYTES]
 
 
-def shortid(hexsha: Optional[bytes]) -> bytes:
+def shortid(hexsha: bytes | None) -> bytes:
     """Get short object ID.
 
     Args:
@@ -342,7 +341,7 @@ def shortid(hexsha: Optional[bytes]) -> bytes:
         return hexsha[:7]
 
 
-def patch_filename(p: Optional[bytes], root: bytes) -> bytes:
+def patch_filename(p: bytes | None, root: bytes) -> bytes:
     """Generate patch filename.
 
     Args:
@@ -361,10 +360,10 @@ def patch_filename(p: Optional[bytes], root: bytes) -> bytes:
 def write_object_diff(
     f: IO[bytes],
     store: "BaseObjectStore",
-    old_file: tuple[Optional[bytes], Optional[int], Optional[bytes]],
-    new_file: tuple[Optional[bytes], Optional[int], Optional[bytes]],
+    old_file: tuple[bytes | None, int | None, bytes | None],
+    new_file: tuple[bytes | None, int | None, bytes | None],
     diff_binary: bool = False,
-    diff_algorithm: Optional[str] = None,
+    diff_algorithm: str | None = None,
 ) -> None:
     """Write the diff for an object.
 
@@ -384,7 +383,7 @@ def write_object_diff(
     patched_old_path = patch_filename(old_path, b"a")
     patched_new_path = patch_filename(new_path, b"b")
 
-    def content(mode: Optional[int], hexsha: Optional[bytes]) -> Blob:
+    def content(mode: int | None, hexsha: bytes | None) -> Blob:
         """Get blob content for a file.
 
         Args:
@@ -448,9 +447,9 @@ def write_object_diff(
 
 # TODO(jelmer): Support writing unicode, rather than bytes.
 def gen_diff_header(
-    paths: tuple[Optional[bytes], Optional[bytes]],
-    modes: tuple[Optional[int], Optional[int]],
-    shas: tuple[Optional[bytes], Optional[bytes]],
+    paths: tuple[bytes | None, bytes | None],
+    modes: tuple[int | None, int | None],
+    shas: tuple[bytes | None, bytes | None],
 ) -> Generator[bytes, None, None]:
     """Write a blob diff header.
 
@@ -486,9 +485,9 @@ def gen_diff_header(
 # TODO(jelmer): Support writing unicode, rather than bytes.
 def write_blob_diff(
     f: IO[bytes],
-    old_file: tuple[Optional[bytes], Optional[int], Optional["Blob"]],
-    new_file: tuple[Optional[bytes], Optional[int], Optional["Blob"]],
-    diff_algorithm: Optional[str] = None,
+    old_file: tuple[bytes | None, int | None, Optional["Blob"]],
+    new_file: tuple[bytes | None, int | None, Optional["Blob"]],
+    diff_algorithm: str | None = None,
 ) -> None:
     """Write blob diff.
 
@@ -542,10 +541,10 @@ def write_blob_diff(
 def write_tree_diff(
     f: IO[bytes],
     store: "BaseObjectStore",
-    old_tree: Optional[bytes],
-    new_tree: Optional[bytes],
+    old_tree: bytes | None,
+    new_tree: bytes | None,
     diff_binary: bool = False,
-    diff_algorithm: Optional[str] = None,
+    diff_algorithm: str | None = None,
 ) -> None:
     """Write tree diff.
 
@@ -571,8 +570,8 @@ def write_tree_diff(
 
 
 def git_am_patch_split(
-    f: Union[TextIO, BinaryIO], encoding: Optional[str] = None
-) -> tuple["Commit", bytes, Optional[bytes]]:
+    f: TextIO | BinaryIO, encoding: str | None = None
+) -> tuple["Commit", bytes, bytes | None]:
     """Parse a git-am-style patch and split it up into bits.
 
     Args:
@@ -593,8 +592,8 @@ def git_am_patch_split(
 
 
 def parse_patch_message(
-    msg: "email.message.Message", encoding: Optional[str] = None
-) -> tuple["Commit", bytes, Optional[bytes]]:
+    msg: "email.message.Message", encoding: str | None = None
+) -> tuple["Commit", bytes, bytes | None]:
     """Extract a Commit object and patch from an e-mail message.
 
     Args:

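Note that patch.py keeps Optional["Blob"] in write_blob_diff even after the migration. That is deliberate: a bare string cannot be an operand of | at runtime, so a PEP 604 union with a forward reference must quote the whole annotation instead (the style pack.py uses for _bitmap: "PackBitmap | None"). A short sketch with an illustrative class:

# Sketch of the forward-reference constraint (illustrative names only).
from typing import Optional

class Blob:  # stand-in; dulwich forward-references dulwich.objects.Blob
    pass

def f(b: Optional["Blob"]) -> None: ...   # valid: typing accepts string arguments
def g(b: "Blob | None") -> None: ...      # valid: the whole annotation is a string
# def h(b: "Blob" | None) -> None: ...    # TypeError: unsupported operand
#                                         # type(s) for |: 'str' and 'NoneType'
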
File diff suppressed because it is too large
+ 183 - 189
dulwich/porcelain.py


+ 20 - 21
dulwich/protocol.py

@@ -23,10 +23,9 @@
 """Generic functions for talking the git smart server protocol."""
 
 import types
-from collections.abc import Iterable, Sequence
+from collections.abc import Callable, Iterable, Sequence
 from io import BytesIO
 from os import SEEK_END
-from typing import Callable, Optional
 
 import dulwich
 
@@ -173,7 +172,7 @@ def extract_capability_names(capabilities: Iterable[bytes]) -> set[bytes]:
     return {parse_capability(c)[0] for c in capabilities}
 
 
-def parse_capability(capability: bytes) -> tuple[bytes, Optional[bytes]]:
+def parse_capability(capability: bytes) -> tuple[bytes, bytes | None]:
     """Parse a capability string into name and value.
 
     Args:
@@ -238,7 +237,7 @@ def parse_cmd_pkt(line: bytes) -> tuple[bytes, list[bytes]]:
     return cmd, args[:-1].split(b"\0")
 
 
-def pkt_line(data: Optional[bytes]) -> bytes:
+def pkt_line(data: bytes | None) -> bytes:
     """Wrap data in a pkt-line.
 
     Args:
@@ -251,7 +250,7 @@ def pkt_line(data: Optional[bytes]) -> bytes:
     return ("%04x" % (len(data) + 4)).encode("ascii") + data
 
 
-def pkt_seq(*seq: Optional[bytes]) -> bytes:
+def pkt_seq(*seq: bytes | None) -> bytes:
     """Wrap a sequence of data in pkt-lines.
 
     Args:
@@ -275,9 +274,9 @@ class Protocol:
     def __init__(
         self,
         read: Callable[[int], bytes],
-        write: Callable[[bytes], Optional[int]],
-        close: Optional[Callable[[], None]] = None,
-        report_activity: Optional[Callable[[int, str], None]] = None,
+        write: Callable[[bytes], int | None],
+        close: Callable[[], None] | None = None,
+        report_activity: Callable[[int, str], None] | None = None,
     ) -> None:
         """Initialize Protocol.
 
@@ -291,7 +290,7 @@ class Protocol:
         self.write = write
         self._close = close
         self.report_activity = report_activity
-        self._readahead: Optional[BytesIO] = None
+        self._readahead: BytesIO | None = None
 
     def close(self) -> None:
         """Close the underlying transport if a close function was provided."""
@@ -304,14 +303,14 @@ class Protocol:
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[types.TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: types.TracebackType | None,
     ) -> None:
         """Exit context manager and close transport."""
         self.close()
 
-    def read_pkt_line(self) -> Optional[bytes]:
+    def read_pkt_line(self) -> bytes | None:
         """Reads a pkt-line from the remote git process.
 
         This method may read from the readahead buffer; see unread_pkt_line.
@@ -363,7 +362,7 @@ class Protocol:
         self.unread_pkt_line(next_line)
         return False
 
-    def unread_pkt_line(self, data: Optional[bytes]) -> None:
+    def unread_pkt_line(self, data: bytes | None) -> None:
         """Unread a single line of data into the readahead buffer.
 
         This method can be used to unread a single pkt-line into a fixed
@@ -390,7 +389,7 @@ class Protocol:
             yield pkt
             pkt = self.read_pkt_line()
 
-    def write_pkt_line(self, line: Optional[bytes]) -> None:
+    def write_pkt_line(self, line: bytes | None) -> None:
         """Sends a pkt-line to the remote git process.
 
         Args:
@@ -461,9 +460,9 @@ class ReceivableProtocol(Protocol):
     def __init__(
         self,
         recv: Callable[[int], bytes],
-        write: Callable[[bytes], Optional[int]],
-        close: Optional[Callable[[], None]] = None,
-        report_activity: Optional[Callable[[int, str], None]] = None,
+        write: Callable[[bytes], int | None],
+        close: Callable[[], None] | None = None,
+        report_activity: Callable[[int, str], None] | None = None,
         rbufsize: int = _RBUFSIZE,
     ) -> None:
         """Initialize ReceivableProtocol.
@@ -630,7 +629,7 @@ class BufferedPktLineWriter:
     """
 
     def __init__(
-        self, write: Callable[[bytes], Optional[int]], bufsize: int = 65515
+        self, write: Callable[[bytes], int | None], bufsize: int = 65515
     ) -> None:
         """Initialize the BufferedPktLineWriter.
 
@@ -670,7 +669,7 @@ class BufferedPktLineWriter:
 class PktLineParser:
     """Packet line parser that hands completed packets off to a callback."""
 
-    def __init__(self, handle_pkt: Callable[[Optional[bytes]], None]) -> None:
+    def __init__(self, handle_pkt: Callable[[bytes | None], None]) -> None:
         """Initialize PktLineParser.
 
         Args:
@@ -716,7 +715,7 @@ def format_capability_line(capabilities: Iterable[bytes]) -> bytes:
 
 
 def format_ref_line(
-    ref: bytes, sha: bytes, capabilities: Optional[Sequence[bytes]] = None
+    ref: bytes, sha: bytes, capabilities: Sequence[bytes] | None = None
 ) -> bytes:
     """Format a ref advertisement line.
 

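The pkt_line hunk above shows the complete framing rule, so it can be restated as a runnable snippet: four hex digits covering the payload length plus the four header bytes, followed by the payload. The None branch here is an assumption following the standard git flush-pkt convention; it sits outside the visible hunk.

def pkt_line(data: bytes | None) -> bytes:
    if data is None:
        return b"0000"  # flush-pkt (assumed branch, not shown in the hunk)
    return ("%04x" % (len(data) + 4)).encode("ascii") + data

assert pkt_line(b"hello\n") == b"000ahello\n"  # 6 payload bytes + 4 header = 0x000a
assert pkt_line(None) == b"0000"
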
+ 52 - 52
dulwich/rebase.py

@@ -24,10 +24,10 @@
 import os
 import shutil
 import subprocess
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from dataclasses import dataclass
 from enum import Enum
-from typing import Callable, Optional, Protocol, TypedDict
+from typing import Optional, Protocol, TypedDict
 
 from dulwich.graph import find_merge_base
 from dulwich.merge import three_way_merge
@@ -119,9 +119,9 @@ class RebaseTodoEntry:
     """Represents a single entry in a rebase todo list."""
 
     command: RebaseTodoCommand
-    commit_sha: Optional[bytes] = None  # Store as hex string encoded as bytes
-    short_message: Optional[str] = None
-    arguments: Optional[str] = None
+    commit_sha: bytes | None = None  # Store as hex string encoded as bytes
+    short_message: str | None = None
+    arguments: str | None = None
 
     def to_string(self, abbreviate: bool = False) -> str:
         """Convert to git-rebase-todo format string.
@@ -226,7 +226,7 @@ class RebaseTodoEntry:
 class RebaseTodo:
     """Manages the git-rebase-todo file for interactive rebase."""
 
-    def __init__(self, entries: Optional[list[RebaseTodoEntry]] = None):
+    def __init__(self, entries: list[RebaseTodoEntry] | None = None):
         """Initialize RebaseTodo.
 
         Args:
@@ -239,7 +239,7 @@ class RebaseTodo:
         """Add an entry to the todo list."""
         self.entries.append(entry)
 
-    def get_current(self) -> Optional[RebaseTodoEntry]:
+    def get_current(self) -> RebaseTodoEntry | None:
         """Get the current todo entry."""
         if self.current_index < len(self.entries):
             return self.entries[self.current_index]
@@ -373,9 +373,9 @@ class RebaseStateManager(Protocol):
 
     def save(
         self,
-        original_head: Optional[bytes],
-        rebasing_branch: Optional[bytes],
-        onto: Optional[bytes],
+        original_head: bytes | None,
+        rebasing_branch: bytes | None,
+        onto: bytes | None,
         todo: list[Commit],
         done: list[Commit],
     ) -> None:
@@ -385,9 +385,9 @@ class RebaseStateManager(Protocol):
     def load(
         self,
     ) -> tuple[
-        Optional[bytes],  # original_head
-        Optional[bytes],  # rebasing_branch
-        Optional[bytes],  # onto
+        bytes | None,  # original_head
+        bytes | None,  # rebasing_branch
+        bytes | None,  # onto
         list[Commit],  # todo
         list[Commit],  # done
     ]:
@@ -406,7 +406,7 @@ class RebaseStateManager(Protocol):
         """Save interactive rebase todo list."""
         ...
 
-    def load_todo(self) -> Optional[RebaseTodo]:
+    def load_todo(self) -> RebaseTodo | None:
         """Load interactive rebase todo list."""
         ...
 
@@ -424,9 +424,9 @@ class DiskRebaseStateManager:
 
     def save(
         self,
-        original_head: Optional[bytes],
-        rebasing_branch: Optional[bytes],
-        onto: Optional[bytes],
+        original_head: bytes | None,
+        rebasing_branch: bytes | None,
+        onto: bytes | None,
         todo: list[Commit],
         done: list[Commit],
     ) -> None:
@@ -466,9 +466,9 @@ class DiskRebaseStateManager:
     def load(
         self,
     ) -> tuple[
-        Optional[bytes],
-        Optional[bytes],
-        Optional[bytes],
+        bytes | None,
+        bytes | None,
+        bytes | None,
         list[Commit],
         list[Commit],
     ]:
@@ -486,7 +486,7 @@ class DiskRebaseStateManager:
 
         return original_head, rebasing_branch, onto, todo, done
 
-    def _read_file(self, name: str) -> Optional[bytes]:
+    def _read_file(self, name: str) -> bytes | None:
         """Read content from a file in the rebase directory."""
         try:
             with open(os.path.join(self.path, name), "rb") as f:
@@ -515,7 +515,7 @@ class DiskRebaseStateManager:
         todo_content = todo.to_string()
         self._write_file("git-rebase-todo", todo_content.encode("utf-8"))
 
-    def load_todo(self) -> Optional[RebaseTodo]:
+    def load_todo(self) -> RebaseTodo | None:
         """Load the interactive rebase todo list.
 
         Returns:
@@ -531,9 +531,9 @@ class DiskRebaseStateManager:
 class RebaseState(TypedDict):
     """Type definition for rebase state."""
 
-    original_head: Optional[bytes]
-    rebasing_branch: Optional[bytes]
-    onto: Optional[bytes]
+    original_head: bytes | None
+    rebasing_branch: bytes | None
+    onto: bytes | None
     todo: list[Commit]
     done: list[Commit]
 
@@ -548,14 +548,14 @@ class MemoryRebaseStateManager:
           repo: Repository instance
         """
         self.repo = repo
-        self._state: Optional[RebaseState] = None
-        self._todo: Optional[RebaseTodo] = None
+        self._state: RebaseState | None = None
+        self._todo: RebaseTodo | None = None
 
     def save(
         self,
-        original_head: Optional[bytes],
-        rebasing_branch: Optional[bytes],
-        onto: Optional[bytes],
+        original_head: bytes | None,
+        rebasing_branch: bytes | None,
+        onto: bytes | None,
         todo: list[Commit],
         done: list[Commit],
     ) -> None:
@@ -571,9 +571,9 @@ class MemoryRebaseStateManager:
     def load(
         self,
     ) -> tuple[
-        Optional[bytes],
-        Optional[bytes],
-        Optional[bytes],
+        bytes | None,
+        bytes | None,
+        bytes | None,
         list[Commit],
         list[Commit],
     ]:
@@ -606,7 +606,7 @@ class MemoryRebaseStateManager:
         """
         self._todo = todo
 
-    def load_todo(self) -> Optional[RebaseTodo]:
+    def load_todo(self) -> RebaseTodo | None:
         """Load the interactive rebase todo list.
 
         Returns:
@@ -629,17 +629,17 @@ class Rebaser:
         self._state_manager = repo.get_rebase_state_manager()
 
         # Initialize state
-        self._original_head: Optional[bytes] = None
-        self._onto: Optional[bytes] = None
+        self._original_head: bytes | None = None
+        self._onto: bytes | None = None
         self._todo: list[Commit] = []
         self._done: list[Commit] = []
-        self._rebasing_branch: Optional[bytes] = None
+        self._rebasing_branch: bytes | None = None
 
         # Load any existing rebase state
         self._load_rebase_state()
 
     def _get_commits_to_rebase(
-        self, upstream: bytes, branch: Optional[bytes] = None
+        self, upstream: bytes, branch: bytes | None = None
     ) -> list[Commit]:
         """Get list of commits to rebase.
 
@@ -689,7 +689,7 @@ class Rebaser:
 
     def _cherry_pick(
         self, commit: Commit, onto: bytes
-    ) -> tuple[Optional[bytes], list[bytes]]:
+    ) -> tuple[bytes | None, list[bytes]]:
         """Cherry-pick a commit onto another commit.
 
         Args:
@@ -740,8 +740,8 @@ class Rebaser:
     def start(
         self,
         upstream: bytes,
-        onto: Optional[bytes] = None,
-        branch: Optional[bytes] = None,
+        onto: bytes | None = None,
+        branch: bytes | None = None,
     ) -> list[Commit]:
         """Start a rebase.
 
@@ -790,7 +790,7 @@ class Rebaser:
 
         return commits
 
-    def continue_(self) -> Optional[tuple[bytes, list[bytes]]]:
+    def continue_(self) -> tuple[bytes, list[bytes]] | None:
         """Continue an in-progress rebase.
 
         Returns:
@@ -914,8 +914,8 @@ class Rebaser:
 def rebase(
     repo: Repo,
     upstream: bytes,
-    onto: Optional[bytes] = None,
-    branch: Optional[bytes] = None,
+    onto: bytes | None = None,
+    branch: bytes | None = None,
 ) -> list[bytes]:
     """Perform a git rebase operation.
 
@@ -950,9 +950,9 @@ def rebase(
 def start_interactive(
     repo: Repo,
     upstream: bytes,
-    onto: Optional[bytes] = None,
-    branch: Optional[bytes] = None,
-    editor_callback: Optional[Callable[[bytes], bytes]] = None,
+    onto: bytes | None = None,
+    branch: bytes | None = None,
+    editor_callback: Callable[[bytes], bytes] | None = None,
 ) -> RebaseTodo:
     """Start an interactive rebase.
 
@@ -1052,9 +1052,9 @@ def edit_todo(repo: Repo, editor_callback: Callable[[bytes], bytes]) -> RebaseTo
 
 def process_interactive_rebase(
     repo: Repo,
-    todo: Optional[RebaseTodo] = None,
-    editor_callback: Optional[Callable[[bytes], bytes]] = None,
-) -> tuple[bool, Optional[str]]:
+    todo: RebaseTodo | None = None,
+    editor_callback: Callable[[bytes], bytes] | None = None,
+) -> tuple[bool, str | None]:
     """Process an interactive rebase.
 
     This function executes the commands in the todo list sequentially.
@@ -1201,8 +1201,8 @@ def _squash_commits(
     rebaser: Rebaser,
     entry: RebaseTodoEntry,
     keep_message: bool,
-    editor_callback: Optional[Callable[[bytes], bytes]] = None,
-) -> Optional[str]:
+    editor_callback: Callable[[bytes], bytes] | None = None,
+) -> str | None:
     """Helper to squash/fixup commits.
 
     Args:

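RebaseTodoEntry above stores the command, the full hex SHA as bytes, and a short message, and to_string(abbreviate=...) renders one git-rebase-todo line. A simplified stand-in (hypothetical names, not dulwich's exact implementation) showing the intended output shape:

from dataclasses import dataclass

@dataclass
class TodoEntry:  # simplified stand-in for RebaseTodoEntry
    command: str
    commit_sha: bytes | None = None   # hex SHA stored as bytes, as in the hunk
    short_message: str | None = None

    def to_string(self, abbreviate: bool = False) -> str:
        parts = [self.command]
        if self.commit_sha:
            sha = self.commit_sha.decode("ascii")
            parts.append(sha[:7] if abbreviate else sha)
        if self.short_message:
            parts.append(self.short_message)
        return " ".join(parts)

entry = TodoEntry("pick", b"c593e5afa3", "Drop support for Python 3.9")
assert entry.to_string(abbreviate=True) == "pick c593e5a Drop support for Python 3.9"
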
+ 8 - 10
dulwich/reflog.py

@@ -23,7 +23,7 @@
 
 import collections
 from collections.abc import Callable, Generator
-from typing import IO, BinaryIO, Optional, Union
+from typing import IO, BinaryIO
 
 from .file import _GitFile
 from .objects import ZERO_SHA, format_timezone, parse_timezone
@@ -34,7 +34,7 @@ Entry = collections.namedtuple(
 )
 
 
-def parse_reflog_spec(refspec: Union[str, bytes]) -> tuple[bytes, int]:
+def parse_reflog_spec(refspec: str | bytes) -> tuple[bytes, int]:
     """Parse a reflog specification like 'HEAD@{1}' or 'refs/heads/master@{2}'.
 
     Args:
@@ -74,10 +74,10 @@ def parse_reflog_spec(refspec: Union[str, bytes]) -> tuple[bytes, int]:
 
 
 def format_reflog_line(
-    old_sha: Optional[bytes],
+    old_sha: bytes | None,
     new_sha: bytes,
     committer: bytes,
-    timestamp: Union[int, float],
+    timestamp: int | float,
     timezone: int,
     message: bytes,
 ) -> bytes:
@@ -130,7 +130,7 @@ def parse_reflog_line(line: bytes) -> Entry:
 
 
 def read_reflog(
-    f: Union[BinaryIO, IO[bytes], _GitFile],
+    f: BinaryIO | IO[bytes] | _GitFile,
 ) -> Generator[Entry, None, None]:
     """Read reflog.
 
@@ -203,11 +203,9 @@ def drop_reflog_entry(f: BinaryIO, index: int, rewrite: bool = False) -> None:
 
 def expire_reflog(
     f: BinaryIO,
-    expire_time: Optional[int] = None,
-    expire_unreachable_time: Optional[int] = None,
-    # String annotation to work around typing module bug in Python 3.9.0/3.9.1
-    # See: https://github.com/jelmer/dulwich/issues/1948
-    reachable_checker: "Optional[Callable[[bytes], bool]]" = None,
+    expire_time: int | None = None,
+    expire_unreachable_time: int | None = None,
+    reachable_checker: Callable[[bytes], bool] | None = None,
 ) -> int:
     """Expire reflog entries based on age and reachability.
 

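format_reflog_line, whose signature appears above, emits the standard reflog record: old SHA, new SHA, committer, timestamp and timezone, then a tab and the message. A hedged sketch of that layout; the zero-SHA default and the +HHMM timezone rendering are assumptions based on git's reflog format, not on the visible hunk:

def format_reflog_line(old_sha: bytes | None, new_sha: bytes, committer: bytes,
                       timestamp: int | float, timezone: int, message: bytes) -> bytes:
    if old_sha is None:
        old_sha = b"0" * 40  # creation events use the all-zero SHA (assumption)
    sign = b"+" if timezone >= 0 else b"-"
    offset = abs(timezone)
    tz = sign + b"%02d%02d" % (offset // 3600, (offset % 3600) // 60)
    return (old_sha + b" " + new_sha + b" " + committer + b" "
            + str(int(timestamp)).encode("ascii") + b" " + tz + b"\t" + message + b"\n")

line = format_reflog_line(None, b"c" * 40, b"Example Committer <committer@example.com>",
                          1700000000, 3600, b"commit (initial): import")
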
+ 149 - 173
dulwich/refs.py

@@ -25,17 +25,14 @@
 import os
 import types
 import warnings
-from collections.abc import Iterable, Iterator, Mapping
+from collections.abc import Callable, Iterable, Iterator, Mapping
 from contextlib import suppress
 from typing import (
     IO,
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Callable,
-    Optional,
     TypeVar,
-    Union,
 )
 
 if TYPE_CHECKING:
@@ -149,20 +146,10 @@ class RefsContainer:
 
     def __init__(
         self,
-        logger: Optional[
-            Callable[
-                [
-                    bytes,
-                    bytes,
-                    bytes,
-                    Optional[bytes],
-                    Optional[int],
-                    Optional[int],
-                    bytes,
-                ],
-                None,
-            ]
-        ] = None,
+        logger: Callable[
+            [bytes, bytes, bytes, bytes | None, int | None, int | None, bytes], None
+        ]
+        | None = None,
     ) -> None:
         """Initialize RefsContainer with optional logger function."""
         self._logger = logger
@@ -170,12 +157,12 @@ class RefsContainer:
     def _log(
         self,
         ref: bytes,
-        old_sha: Optional[bytes],
-        new_sha: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_sha: bytes | None,
+        new_sha: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> None:
         if self._logger is None:
             return
@@ -192,10 +179,10 @@ class RefsContainer:
         self,
         name: bytes,
         other: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> None:
         """Make a ref point at another ref.
 
@@ -219,7 +206,7 @@ class RefsContainer:
         """
         raise NotImplementedError(self.get_packed_refs)
 
-    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, ObjectID | None]) -> None:
         """Add the given refs as packed refs.
 
         Args:
@@ -228,7 +215,7 @@ class RefsContainer:
         """
         raise NotImplementedError(self.add_packed_refs)
 
-    def get_peeled(self, name: bytes) -> Optional[ObjectID]:
+    def get_peeled(self, name: bytes) -> ObjectID | None:
         """Return the cached peeled value of a ref, if available.
 
         Args:
@@ -243,10 +230,10 @@ class RefsContainer:
         self,
         base: Ref,
         other: Mapping[Ref, ObjectID],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[bytes] = None,
-        timezone: Optional[bytes] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: bytes | None = None,
+        timezone: bytes | None = None,
+        message: bytes | None = None,
         prune: bool = False,
     ) -> None:
         """Import refs from another repository.
@@ -287,7 +274,7 @@ class RefsContainer:
         """Iterate over all reference keys."""
         return iter(self.allkeys())
 
-    def keys(self, base: Optional[bytes] = None) -> set[bytes]:
+    def keys(self, base: bytes | None = None) -> set[bytes]:
         """Refs present in this container.
 
         Args:
@@ -315,7 +302,7 @@ class RefsContainer:
                 keys.add(refname[base_len:])
         return keys
 
-    def as_dict(self, base: Optional[bytes] = None) -> dict[Ref, ObjectID]:
+    def as_dict(self, base: bytes | None = None) -> dict[Ref, ObjectID]:
         """Return the contents of this container as a dictionary."""
         ret = {}
         keys = self.keys(base)
@@ -350,7 +337,7 @@ class RefsContainer:
         if not name.startswith(b"refs/") or not check_ref_format(name[5:]):
             raise RefFormatError(name)
 
-    def read_ref(self, refname: bytes) -> Optional[bytes]:
+    def read_ref(self, refname: bytes) -> bytes | None:
         """Read a reference without following any references.
 
         Args:
@@ -363,7 +350,7 @@ class RefsContainer:
             contents = self.get_packed_refs().get(refname, None)
         return contents
 
-    def read_loose_ref(self, name: bytes) -> Optional[bytes]:
+    def read_loose_ref(self, name: bytes) -> bytes | None:
         """Read a loose reference and return its contents.
 
         Args:
@@ -373,13 +360,13 @@ class RefsContainer:
         """
         raise NotImplementedError(self.read_loose_ref)
 
-    def follow(self, name: bytes) -> tuple[list[bytes], Optional[bytes]]:
+    def follow(self, name: bytes) -> tuple[list[bytes], bytes | None]:
         """Follow a reference name.
 
         Returns: a tuple of (refnames, sha), where refnames are the names of
             references in the chain
         """
-        contents: Optional[bytes] = SYMREF + name
+        contents: bytes | None = SYMREF + name
         depth = 0
         refnames = []
         while contents and contents.startswith(SYMREF):
@@ -412,12 +399,12 @@ class RefsContainer:
     def set_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
+        old_ref: bytes | None,
         new_ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref.
 
@@ -442,10 +429,10 @@ class RefsContainer:
         self,
         name: bytes,
         ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Add a new reference only if it does not already exist.
 
@@ -480,11 +467,11 @@ class RefsContainer:
     def remove_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_ref: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Remove a refname only if it currently equals old_ref.
 
@@ -555,20 +542,19 @@ class DictRefsContainer(RefsContainer):
     def __init__(
         self,
         refs: dict[bytes, bytes],
-        logger: Optional[
-            Callable[
-                [
-                    bytes,
-                    Optional[bytes],
-                    Optional[bytes],
-                    Optional[bytes],
-                    Optional[int],
-                    Optional[int],
-                    Optional[bytes],
-                ],
-                None,
-            ]
-        ] = None,
+        logger: Callable[
+            [
+                bytes,
+                bytes | None,
+                bytes | None,
+                bytes | None,
+                int | None,
+                int | None,
+                bytes | None,
+            ],
+            None,
+        ]
+        | None = None,
     ) -> None:
         """Initialize DictRefsContainer with refs dictionary and optional logger."""
         super().__init__(logger=logger)
@@ -580,7 +566,7 @@ class DictRefsContainer(RefsContainer):
         """Return all reference keys."""
         return set(self._refs.keys())
 
-    def read_loose_ref(self, name: bytes) -> Optional[bytes]:
+    def read_loose_ref(self, name: bytes) -> bytes | None:
         """Read a loose reference."""
         return self._refs.get(name, None)
 
@@ -588,7 +574,7 @@ class DictRefsContainer(RefsContainer):
         """Get packed references."""
         return {}
 
-    def _notify(self, ref: bytes, newsha: Optional[bytes]) -> None:
+    def _notify(self, ref: bytes, newsha: bytes | None) -> None:
         for watcher in self._watchers:
             watcher._notify((ref, newsha))
 
@@ -596,10 +582,10 @@ class DictRefsContainer(RefsContainer):
         self,
         name: Ref,
         other: Ref,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> None:
         """Make a ref point at another ref.
 
@@ -628,12 +614,12 @@ class DictRefsContainer(RefsContainer):
     def set_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
+        old_ref: bytes | None,
         new_ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref.
 
@@ -675,10 +661,10 @@ class DictRefsContainer(RefsContainer):
         self,
         name: Ref,
         ref: ObjectID,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Add a new reference only if it does not already exist.
 
@@ -711,11 +697,11 @@ class DictRefsContainer(RefsContainer):
     def remove_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_ref: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Remove a refname only if it currently equals old_ref.
 
@@ -753,7 +739,7 @@ class DictRefsContainer(RefsContainer):
             )
         return True
 
-    def get_peeled(self, name: bytes) -> Optional[bytes]:
+    def get_peeled(self, name: bytes) -> bytes | None:
         """Get peeled version of a reference."""
         return self._peeled.get(name)
 
@@ -783,7 +769,7 @@ class InfoRefsContainer(RefsContainer):
         """Return all reference keys."""
         return set(self._refs.keys())
 
-    def read_loose_ref(self, name: bytes) -> Optional[bytes]:
+    def read_loose_ref(self, name: bytes) -> bytes | None:
         """Read a loose reference."""
         return self._refs.get(name, None)
 
@@ -791,7 +777,7 @@ class InfoRefsContainer(RefsContainer):
         """Get packed references."""
         return {}
 
-    def get_peeled(self, name: bytes) -> Optional[bytes]:
+    def get_peeled(self, name: bytes) -> bytes | None:
         """Get peeled version of a reference."""
         try:
             return self._peeled[name]
@@ -804,22 +790,12 @@ class DiskRefsContainer(RefsContainer):
 
     def __init__(
         self,
-        path: Union[str, bytes, os.PathLike[str]],
-        worktree_path: Optional[Union[str, bytes, os.PathLike[str]]] = None,
-        logger: Optional[
-            Callable[
-                [
-                    bytes,
-                    bytes,
-                    bytes,
-                    Optional[bytes],
-                    Optional[int],
-                    Optional[int],
-                    bytes,
-                ],
-                None,
-            ]
-        ] = None,
+        path: str | bytes | os.PathLike[str],
+        worktree_path: str | bytes | os.PathLike[str] | None = None,
+        logger: Callable[
+            [bytes, bytes, bytes, bytes | None, int | None, int | None, bytes], None
+        ]
+        | None = None,
     ) -> None:
         """Initialize DiskRefsContainer."""
         super().__init__(logger=logger)
@@ -829,8 +805,8 @@ class DiskRefsContainer(RefsContainer):
             self.worktree_path = self.path
         else:
             self.worktree_path = os.fsencode(os.fspath(worktree_path))
-        self._packed_refs: Optional[dict[bytes, bytes]] = None
-        self._peeled_refs: Optional[dict[bytes, bytes]] = None
+        self._packed_refs: dict[bytes, bytes] | None = None
+        self._peeled_refs: dict[bytes, bytes] | None = None
 
     def __repr__(self) -> str:
         """Return string representation of DiskRefsContainer."""
@@ -840,7 +816,7 @@ class DiskRefsContainer(RefsContainer):
         self,
         path: bytes,
         base: bytes,
-        dir_filter: Optional[Callable[[bytes], bool]] = None,
+        dir_filter: Callable[[bytes], bool] | None = None,
     ) -> Iterator[bytes]:
         refspath = os.path.join(path, base.rstrip(b"/"))
         prefix_len = len(os.path.join(path, b""))
@@ -861,7 +837,7 @@ class DiskRefsContainer(RefsContainer):
 
     def _iter_loose_refs(self, base: bytes = b"refs/") -> Iterator[bytes]:
         base = base.rstrip(b"/") + b"/"
-        search_paths: list[tuple[bytes, Optional[Callable[[bytes], bool]]]] = []
+        search_paths: list[tuple[bytes, Callable[[bytes], bool] | None]] = []
         if base != b"refs/":
             path = self.worktree_path if is_per_worktree_ref(base) else self.path
             search_paths.append((path, None))
@@ -941,7 +917,7 @@ class DiskRefsContainer(RefsContainer):
                         self._packed_refs[name] = sha
         return self._packed_refs
 
-    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, ObjectID | None]) -> None:
         """Add the given refs as packed refs.
 
         Args:
@@ -977,7 +953,7 @@ class DiskRefsContainer(RefsContainer):
 
             self._packed_refs = packed_refs
 
-    def get_peeled(self, name: bytes) -> Optional[bytes]:
+    def get_peeled(self, name: bytes) -> bytes | None:
         """Return the cached peeled value of a ref, if available.
 
         Args:
@@ -1000,7 +976,7 @@ class DiskRefsContainer(RefsContainer):
             # Known not peelable
             return self[name]
 
-    def read_loose_ref(self, name: bytes) -> Optional[bytes]:
+    def read_loose_ref(self, name: bytes) -> bytes | None:
         """Read a reference file and return its contents.
 
         If the reference file is a symbolic reference, only read the first line of
@@ -1058,10 +1034,10 @@ class DiskRefsContainer(RefsContainer):
         self,
         name: bytes,
         other: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> None:
         """Make a ref point at another ref.
 
@@ -1098,12 +1074,12 @@ class DiskRefsContainer(RefsContainer):
     def set_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
+        old_ref: bytes | None,
         new_ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref.
 
@@ -1183,10 +1159,10 @@ class DiskRefsContainer(RefsContainer):
         self,
         name: bytes,
         ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Add a new reference only if it does not already exist.
 
@@ -1236,11 +1212,11 @@ class DiskRefsContainer(RefsContainer):
     def remove_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_ref: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Remove a refname only if it currently equals old_ref.
 
@@ -1323,7 +1299,7 @@ class DiskRefsContainer(RefsContainer):
         Args:
             all: If True, pack all refs. If False, only pack tags.
         """
-        refs_to_pack: dict[Ref, Optional[ObjectID]] = {}
+        refs_to_pack: dict[Ref, ObjectID | None] = {}
         for ref in self.allkeys():
             if ref == HEADREF:
                 # Never pack HEAD
@@ -1372,7 +1348,7 @@ def read_packed_refs(f: IO[bytes]) -> Iterator[tuple[bytes, bytes]]:
 
 def read_packed_refs_with_peeled(
     f: IO[bytes],
-) -> Iterator[tuple[bytes, bytes, Optional[bytes]]]:
+) -> Iterator[tuple[bytes, bytes, bytes | None]]:
     """Read a packed refs file including peeled refs.
 
     Assumes the "# pack-refs with: peeled" line was already read. Yields tuples
@@ -1407,7 +1383,7 @@ def read_packed_refs_with_peeled(
 def write_packed_refs(
     f: IO[bytes],
     packed_refs: Mapping[bytes, bytes],
-    peeled_refs: Optional[Mapping[bytes, bytes]] = None,
+    peeled_refs: Mapping[bytes, bytes] | None = None,
 ) -> None:
     """Write a packed refs file.
 
@@ -1601,7 +1577,7 @@ def shorten_ref_name(ref: bytes) -> bytes:
     return ref
 
 
-T = TypeVar("T", dict[bytes, bytes], dict[bytes, Optional[bytes]])
+T = TypeVar("T", dict[bytes, bytes], dict[bytes, bytes | None])
 
 
 def strip_peeled_refs(refs: T) -> T:
@@ -1626,7 +1602,7 @@ def split_peeled_refs(refs: T) -> tuple[T, dict[bytes, bytes]]:
 
 
 def _set_origin_head(
-    refs: RefsContainer, origin: bytes, origin_head: Optional[bytes]
+    refs: RefsContainer, origin: bytes, origin_head: bytes | None
 ) -> None:
     # set refs/remotes/origin/HEAD
     origin_base = b"refs/remotes/" + origin + b"/"
@@ -1640,9 +1616,9 @@ def _set_origin_head(
 def _set_default_branch(
     refs: RefsContainer,
     origin: bytes,
-    origin_head: Optional[bytes],
-    branch: Optional[bytes],
-    ref_message: Optional[bytes],
+    origin_head: bytes | None,
+    branch: bytes | None,
+    ref_message: bytes | None,
 ) -> bytes:
     """Set the default branch."""
     origin_base = b"refs/remotes/" + origin + b"/"
@@ -1672,8 +1648,8 @@ def _set_default_branch(
 
 
 def _set_head(
-    refs: RefsContainer, head_ref: bytes, ref_message: Optional[bytes]
-) -> Optional[bytes]:
+    refs: RefsContainer, head_ref: bytes, ref_message: bytes | None
+) -> bytes | None:
     if head_ref.startswith(LOCAL_TAG_PREFIX):
         # detach HEAD at specified tag
         head = refs[head_ref]
@@ -1696,8 +1672,8 @@ def _set_head(
 def _import_remote_refs(
     refs_container: RefsContainer,
     remote_name: str,
-    refs: dict[bytes, Optional[bytes]],
-    message: Optional[bytes] = None,
+    refs: dict[bytes, bytes | None],
+    message: bytes | None = None,
     prune: bool = False,
     prune_tags: bool = False,
 ) -> None:
@@ -1774,8 +1750,8 @@ class locked_ref:
         """
         self._refs_container = refs_container
         self._refname = refname
-        self._file: Optional[_GitFile] = None
-        self._realname: Optional[Ref] = None
+        self._file: _GitFile | None = None
+        self._realname: Ref | None = None
         self._deleted = False
 
     def __enter__(self) -> "locked_ref":
@@ -1802,9 +1778,9 @@ class locked_ref:
 
     def __exit__(
         self,
-        exc_type: Optional[type],
-        exc_value: Optional[BaseException],
-        traceback: Optional[types.TracebackType],
+        exc_type: type | None,
+        exc_value: BaseException | None,
+        traceback: types.TracebackType | None,
     ) -> None:
         """Exit the context manager and release the lock.
 
@@ -1819,7 +1795,7 @@ class locked_ref:
             else:
                 self._file.close()
 
-    def get(self) -> Optional[bytes]:
+    def get(self) -> bytes | None:
         """Get the current value of the ref."""
         if not self._file:
             raise RuntimeError("locked_ref not in context")
@@ -1832,7 +1808,7 @@ class locked_ref:
             )
         return current_ref
 
-    def ensure_equals(self, expected_value: Optional[bytes]) -> bool:
+    def ensure_equals(self, expected_value: bytes | None) -> bool:
         """Ensure the ref currently equals the expected value.
 
         Args:
@@ -1928,7 +1904,7 @@ class NamespacedRefsContainer(RefsContainer):
             return name
         return self._namespace_prefix + name
 
-    def _strip_namespace(self, name: bytes) -> Optional[bytes]:
+    def _strip_namespace(self, name: bytes) -> bytes | None:
         """Remove namespace prefix from a ref name.
 
         Returns None if the ref is not in our namespace.
@@ -1949,7 +1925,7 @@ class NamespacedRefsContainer(RefsContainer):
                 keys.add(stripped)
         return keys
 
-    def read_loose_ref(self, name: bytes) -> Optional[bytes]:
+    def read_loose_ref(self, name: bytes) -> bytes | None:
         """Read a loose reference."""
         return self._refs.read_loose_ref(self._apply_namespace(name))
 
@@ -1962,14 +1938,14 @@ class NamespacedRefsContainer(RefsContainer):
                 packed[stripped] = value
         return packed
 
-    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[Ref, ObjectID | None]) -> None:
         """Add packed refs with namespace prefix."""
         namespaced_refs = {
             self._apply_namespace(name): value for name, value in new_refs.items()
         }
         self._refs.add_packed_refs(namespaced_refs)
 
-    def get_peeled(self, name: bytes) -> Optional[ObjectID]:
+    def get_peeled(self, name: bytes) -> ObjectID | None:
         """Return the cached peeled value of a ref."""
         return self._refs.get_peeled(self._apply_namespace(name))
 
@@ -1977,10 +1953,10 @@ class NamespacedRefsContainer(RefsContainer):
         self,
         name: bytes,
         other: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> None:
         """Make a ref point at another ref."""
         self._refs.set_symbolic_ref(
@@ -1995,12 +1971,12 @@ class NamespacedRefsContainer(RefsContainer):
     def set_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
+        old_ref: bytes | None,
         new_ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref."""
         return self._refs.set_if_equals(
@@ -2017,10 +1993,10 @@ class NamespacedRefsContainer(RefsContainer):
         self,
         name: bytes,
         ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Add a new reference only if it does not already exist."""
         return self._refs.add_if_new(
@@ -2035,11 +2011,11 @@ class NamespacedRefsContainer(RefsContainer):
     def remove_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_ref: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Remove a refname only if it currently equals old_ref."""
         return self._refs.remove_if_equals(

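The follow hunk above primes its loop with SYMREF + name and then chases "ref: " pointers until it reaches a SHA or a dangling entry. The same logic over a plain dict (hypothetical container; the depth limit of 5 mirrors git's symref nesting limit and is an assumption here):

SYMREF = b"ref: "

def follow(refs: dict[bytes, bytes], name: bytes) -> tuple[list[bytes], bytes | None]:
    contents: bytes | None = SYMREF + name  # prime the loop, as in the hunk
    depth = 0
    refnames: list[bytes] = []
    while contents and contents.startswith(SYMREF):
        refname = contents[len(SYMREF):]
        refnames.append(refname)
        contents = refs.get(refname)
        depth += 1
        if depth > 5:  # guard against symref cycles (limit is an assumption)
            raise KeyError(name)
    return refnames, contents

refs = {
    b"HEAD": SYMREF + b"refs/heads/main",
    b"refs/heads/main": b"c593e5afa3" + b"0" * 30,
}
assert follow(refs, b"HEAD") == ([b"HEAD", b"refs/heads/main"], refs[b"refs/heads/main"])
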
+ 40 - 50
dulwich/reftable.py

@@ -13,11 +13,11 @@ import shutil
 import struct
 import time
 import zlib
-from collections.abc import Mapping
+from collections.abc import Callable, Mapping
 from dataclasses import dataclass
 from io import BytesIO
 from types import TracebackType
-from typing import BinaryIO, Callable, Optional, Union
+from typing import BinaryIO
 
 from dulwich.objects import ObjectID
 from dulwich.refs import (
@@ -26,7 +26,7 @@ from dulwich.refs import (
 )
 
 
-def decode_varint_from_stream(stream: BinaryIO) -> Optional[int]:
+def decode_varint_from_stream(stream: BinaryIO) -> int | None:
     """Decode a variable-length integer from a stream."""
     result = 0
     shift = 0
@@ -65,7 +65,7 @@ def _encode_reftable_suffix_and_type(value: int) -> bytes:
     return bytes([0x80, value - 0x80])
 
 
-def _decode_reftable_suffix_and_type(stream: BinaryIO) -> Optional[int]:
+def _decode_reftable_suffix_and_type(stream: BinaryIO) -> int | None:
     """Decode suffix_and_type handling both Git's broken and standard formats."""
     pos = stream.tell()
     first_byte_data = stream.read(1)
@@ -214,7 +214,7 @@ class RefValue:
         """Check if this is a peeled reference."""
         return self.value_type == REF_VALUE_PEELED
 
-    def get_sha(self) -> Optional[bytes]:
+    def get_sha(self) -> bytes | None:
         """Get the SHA1 value (for regular or peeled refs)."""
         if self.value_type == REF_VALUE_REF:
             return self.value
@@ -754,7 +754,7 @@ class ReftableReader:
             # Store all refs including deletion records - deletion handling is done at container level
             self.refs[ref.refname] = (ref.value_type, ref.value)
 
-    def get_ref(self, refname: bytes) -> Optional[tuple[int, bytes]]:
+    def get_ref(self, refname: bytes) -> tuple[int, bytes] | None:
         """Get a reference by name."""
         return self.refs.get(refname)
 
@@ -775,9 +775,9 @@ class _ReftableBatchContext:
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         self.refs_container._batch_mode = False
         if exc_type is None:  # Only flush if no exception occurred
@@ -789,21 +789,11 @@ class ReftableRefsContainer(RefsContainer):
 
     def __init__(
         self,
-        path: Union[str, bytes],
-        logger: Optional[
-            Callable[
-                [
-                    bytes,
-                    bytes,
-                    bytes,
-                    Optional[bytes],
-                    Optional[int],
-                    Optional[int],
-                    bytes,
-                ],
-                None,
-            ]
-        ] = None,
+        path: str | bytes,
+        logger: Callable[
+            [bytes, bytes, bytes, bytes | None, int | None, int | None, bytes], None
+        ]
+        | None = None,
     ) -> None:
         """Initialize a reftable refs container.
 
@@ -1040,7 +1030,7 @@ class ReftableRefsContainer(RefsContainer):
                 result[name] = value[:SHA1_HEX_SIZE]  # First SHA1 hex chars
         return result
 
-    def get_peeled(self, name: bytes) -> Optional[bytes]:
+    def get_peeled(self, name: bytes) -> bytes | None:
         """Return the cached peeled value of a ref, if available.
 
         Args:
@@ -1068,8 +1058,8 @@ class ReftableRefsContainer(RefsContainer):
 
     def _generate_table_path(
         self,
-        min_update_index: Optional[int] = None,
-        max_update_index: Optional[int] = None,
+        min_update_index: int | None = None,
+        max_update_index: int | None = None,
     ) -> str:
         """Generate a new reftable file path."""
         if min_update_index is None or max_update_index is None:
@@ -1083,14 +1073,14 @@ class ReftableRefsContainer(RefsContainer):
         table_name = f"0x{min_idx:016x}-0x{max_idx:016x}-{hash_part:08x}.ref"
         return os.path.join(self.reftable_dir, table_name)
 
-    def add_packed_refs(self, new_refs: Mapping[bytes, Optional[bytes]]) -> None:
+    def add_packed_refs(self, new_refs: Mapping[bytes, bytes | None]) -> None:
         """Add packed refs. Creates a new reftable file with all refs consolidated."""
         if not new_refs:
             return
 
         self._write_batch_updates(new_refs)
 
-    def _write_batch_updates(self, updates: Mapping[bytes, Optional[bytes]]) -> None:
+    def _write_batch_updates(self, updates: Mapping[bytes, bytes | None]) -> None:
         """Write multiple ref updates to a single reftable file."""
         if not updates:
             return
@@ -1111,12 +1101,12 @@ class ReftableRefsContainer(RefsContainer):
     def set_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
-        new_ref: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_ref: bytes | None,
+        new_ref: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Atomically set a ref if it currently equals old_ref."""
         # For now, implement a simple non-atomic version
@@ -1142,10 +1132,10 @@ class ReftableRefsContainer(RefsContainer):
         self,
         name: bytes,
         ref: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Add a ref only if it doesn't exist."""
         try:
@@ -1159,11 +1149,11 @@ class ReftableRefsContainer(RefsContainer):
     def remove_if_equals(
         self,
         name: bytes,
-        old_ref: Optional[bytes],
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        old_ref: bytes | None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> bool:
         """Remove a ref if it equals old_ref."""
         return self.set_if_equals(
@@ -1180,10 +1170,10 @@ class ReftableRefsContainer(RefsContainer):
         self,
         name: bytes,
         other: bytes,
-        committer: Optional[bytes] = None,
-        timestamp: Optional[int] = None,
-        timezone: Optional[int] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        timestamp: int | None = None,
+        timezone: int | None = None,
+        message: bytes | None = None,
     ) -> None:
         """Set a symbolic reference."""
         self._write_ref_update(name, REF_VALUE_SYMREF, other)
@@ -1273,7 +1263,7 @@ class ReftableRefsContainer(RefsContainer):
 
     def _process_pending_updates(
         self,
-    ) -> tuple[Optional[tuple[bytes, int, bytes]], list[tuple[bytes, int, bytes]]]:
+    ) -> tuple[tuple[bytes, int, bytes] | None, list[tuple[bytes, int, bytes]]]:
         """Process pending updates and return (head_update, other_updates)."""
         head_update = None
         other_updates = []
@@ -1334,7 +1324,7 @@ class ReftableRefsContainer(RefsContainer):
         self,
         all_refs: dict[bytes, tuple[int, bytes]],
         other_updates: list[tuple[bytes, int, bytes]],
-        head_update: Optional[tuple[bytes, int, bytes]],
+        head_update: tuple[bytes, int, bytes] | None,
         batch_update_index: int,
     ) -> None:
         """Apply batch updates to the refs dict and update indices."""

+ 116 - 118
dulwich/repo.py

@@ -34,17 +34,15 @@ import stat
 import sys
 import time
 import warnings
-from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
+from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence
 from io import BytesIO
 from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Callable,
     Optional,
     TypeVar,
-    Union,
 )
 
 if TYPE_CHECKING:
@@ -200,7 +198,7 @@ def _get_default_identity() -> tuple[str, str]:
     return (fullname, email)
 
 
-def get_user_identity(config: "StackedConfig", kind: Optional[str] = None) -> bytes:
+def get_user_identity(config: "StackedConfig", kind: str | None = None) -> bytes:
     """Determine the identity to use for new commits.
 
     If kind is set, this first checks
@@ -222,8 +220,8 @@ def get_user_identity(config: "StackedConfig", kind: Optional[str] = None) -> by
     Returns:
       A user identity
     """
-    user: Optional[bytes] = None
-    email: Optional[bytes] = None
+    user: bytes | None = None
+    email: bytes | None = None
     if kind:
         user_uc = os.environ.get("GIT_" + kind + "_NAME")
         if user_uc is not None:
@@ -370,7 +368,7 @@ class ParentsProvider:
         self.commit_graph = store.get_commit_graph()
 
     def get_parents(
-        self, commit_id: bytes, commit: Optional[Commit] = None
+        self, commit_id: bytes, commit: Commit | None = None
     ) -> list[bytes]:
         """Get parents for a commit using the parents provider."""
         try:
@@ -443,7 +441,7 @@ class BaseRepo:
         return sys.platform != "win32"
 
     def _init_files(
-        self, bare: bool, symlinks: Optional[bool] = None, format: Optional[int] = None
+        self, bare: bool, symlinks: bool | None = None, format: int | None = None
     ) -> None:
         """Initialize a default set of named files."""
         from .config import ConfigFile
@@ -473,7 +471,7 @@ class BaseRepo:
         self._put_named_file("config", f.getvalue())
         self._put_named_file(os.path.join("info", "exclude"), b"")
 
-    def get_named_file(self, path: str) -> Optional[BinaryIO]:
+    def get_named_file(self, path: str) -> BinaryIO | None:
         """Get a file from the control dir with a specific name.
 
         Although the filename should be interpreted as a filename relative to
@@ -511,11 +509,10 @@ class BaseRepo:
     def fetch(
         self,
         target: "BaseRepo",
-        determine_wants: Optional[
-            Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]]
-        ] = None,
-        progress: Optional[Callable[..., None]] = None,
-        depth: Optional[int] = None,
+        determine_wants: Callable[[Mapping[bytes, bytes], int | None], list[bytes]]
+        | None = None,
+        progress: Callable[..., None] | None = None,
+        depth: int | None = None,
     ) -> dict[bytes, bytes]:
         """Fetch objects into another repository.
 
@@ -540,12 +537,12 @@ class BaseRepo:
 
     def fetch_pack_data(
         self,
-        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], int | None], list[bytes]],
         graph_walker: "GraphWalker",
-        progress: Optional[Callable[[bytes], None]],
+        progress: Callable[[bytes], None] | None,
         *,
-        get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
-        depth: Optional[int] = None,
+        get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
+        depth: int | None = None,
     ) -> tuple[int, Iterator["UnpackedObject"]]:
         """Fetch the pack data required for a set of revisions.
 
@@ -575,13 +572,13 @@ class BaseRepo:
 
     def find_missing_objects(
         self,
-        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], int | None], list[bytes]],
         graph_walker: "GraphWalker",
-        progress: Optional[Callable[[bytes], None]],
+        progress: Callable[[bytes], None] | None,
         *,
-        get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
-        depth: Optional[int] = None,
-    ) -> Optional[MissingObjectFinder]:
+        get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
+        depth: int | None = None,
+    ) -> MissingObjectFinder | None:
         """Fetch the missing objects required for a set of revisions.
 
         Args:
@@ -674,9 +671,9 @@ class BaseRepo:
         have: set[ObjectID],
         want: set[ObjectID],
         *,
-        shallow: Optional[set[ObjectID]] = None,
-        progress: Optional[Callable[[str], None]] = None,
-        ofs_delta: Optional[bool] = None,
+        shallow: set[ObjectID] | None = None,
+        progress: Callable[[str], None] | None = None,
+        ofs_delta: bool | None = None,
     ) -> tuple[int, Iterator["UnpackedObject"]]:
         """Generate pack data objects for a set of wants/haves.
 
@@ -698,7 +695,7 @@ class BaseRepo:
         )
 
     def get_graph_walker(
-        self, heads: Optional[list[ObjectID]] = None
+        self, heads: list[ObjectID] | None = None
     ) -> ObjectStoreGraphWalker:
         """Retrieve a graph walker.
 
@@ -774,7 +771,7 @@ class BaseRepo:
             shallows=self.get_shallow(),
         )
 
-    def get_parents(self, sha: bytes, commit: Optional[Commit] = None) -> list[bytes]:
+    def get_parents(self, sha: bytes, commit: Commit | None = None) -> list[bytes]:
         """Retrieve the parents of a specific commit.
 
         If the specific commit is a graftpoint, the graft parents
@@ -798,7 +795,7 @@ class BaseRepo:
         """Retrieve the worktree config object."""
         raise NotImplementedError(self.get_worktree_config)
 
-    def get_description(self) -> Optional[bytes]:
+    def get_description(self) -> bytes | None:
         """Retrieve the description for this repository.
 
         Returns: Bytes with the description of the repository
@@ -828,7 +825,7 @@ class BaseRepo:
         """
         raise NotImplementedError(self.get_blob_normalizer)
 
-    def get_gitattributes(self, tree: Optional[bytes] = None) -> "GitAttributes":
+    def get_gitattributes(self, tree: bytes | None = None) -> "GitAttributes":
         """Read gitattributes for the repository.
 
         Args:
@@ -870,7 +867,7 @@ class BaseRepo:
             return {line.strip() for line in f}
 
     def update_shallow(
-        self, new_shallow: Optional[set[bytes]], new_unshallow: Optional[set[bytes]]
+        self, new_shallow: set[bytes] | None, new_unshallow: set[bytes] | None
     ) -> None:
         """Update the list of shallow objects.
 
@@ -915,17 +912,17 @@ class BaseRepo:
 
     def get_walker(
         self,
-        include: Optional[Sequence[bytes]] = None,
-        exclude: Optional[Sequence[bytes]] = None,
+        include: Sequence[bytes] | None = None,
+        exclude: Sequence[bytes] | None = None,
         order: str = "date",
         reverse: bool = False,
-        max_entries: Optional[int] = None,
-        paths: Optional[Sequence[bytes]] = None,
+        max_entries: int | None = None,
+        paths: Sequence[bytes] | None = None,
         rename_detector: Optional["RenameDetector"] = None,
         follow: bool = False,
-        since: Optional[int] = None,
-        until: Optional[int] = None,
-        queue_cls: Optional[type] = None,
+        since: int | None = None,
+        until: int | None = None,
+        queue_cls: type | None = None,
     ) -> "Walker":
         """Obtain a walker for this repository.
 
@@ -974,7 +971,7 @@ class BaseRepo:
             queue_cls=queue_cls if queue_cls is not None else _CommitTimeQueue,
         )
 
-    def __getitem__(self, name: Union[ObjectID, Ref]) -> "ShaFile":
+    def __getitem__(self, name: ObjectID | Ref) -> "ShaFile":
         """Retrieve a Git object by SHA1 or ref.
 
         Args:
@@ -1006,7 +1003,7 @@ class BaseRepo:
         else:
             return name in self.refs
 
-    def __setitem__(self, name: bytes, value: Union[ShaFile, bytes]) -> None:
+    def __setitem__(self, name: bytes, value: ShaFile | bytes) -> None:
         """Set a ref.
 
         Args:
@@ -1035,7 +1032,7 @@ class BaseRepo:
             raise ValueError(name)
 
     def _get_user_identity(
-        self, config: "StackedConfig", kind: Optional[str] = None
+        self, config: "StackedConfig", kind: str | None = None
     ) -> bytes:
         """Determine the identity to use for new commits."""
         warnings.warn(
@@ -1089,17 +1086,17 @@ class BaseRepo:
     @replace_me(remove_in="0.26.0")
     def do_commit(
         self,
-        message: Optional[bytes] = None,
-        committer: Optional[bytes] = None,
-        author: Optional[bytes] = None,
-        commit_timestamp: Optional[float] = None,
-        commit_timezone: Optional[int] = None,
-        author_timestamp: Optional[float] = None,
-        author_timezone: Optional[int] = None,
-        tree: Optional[ObjectID] = None,
-        encoding: Optional[bytes] = None,
-        ref: Optional[Ref] = b"HEAD",
-        merge_heads: Optional[list[ObjectID]] = None,
+        message: bytes | None = None,
+        committer: bytes | None = None,
+        author: bytes | None = None,
+        commit_timestamp: float | None = None,
+        commit_timezone: int | None = None,
+        author_timestamp: float | None = None,
+        author_timezone: int | None = None,
+        tree: ObjectID | None = None,
+        encoding: bytes | None = None,
+        ref: Ref | None = b"HEAD",
+        merge_heads: list[ObjectID] | None = None,
         no_verify: bool = False,
         sign: bool = False,
     ) -> bytes:
@@ -1215,9 +1212,9 @@ class Repo(BaseRepo):
 
     def __init__(
         self,
-        root: Union[str, bytes, os.PathLike[str]],
-        object_store: Optional[PackBasedObjectStore] = None,
-        bare: Optional[bool] = None,
+        root: str | bytes | os.PathLike[str],
+        object_store: PackBasedObjectStore | None = None,
+        bare: bool | None = None,
     ) -> None:
         """Open a repository on disk.
 
@@ -1350,9 +1347,9 @@ class Repo(BaseRepo):
         ref: bytes,
         old_sha: bytes,
         new_sha: bytes,
-        committer: Optional[bytes],
-        timestamp: Optional[int],
-        timezone: Optional[int],
+        committer: bytes | None,
+        timestamp: int | None,
+        timezone: int | None,
         message: bytes,
     ) -> None:
         from .reflog import format_reflog_line
@@ -1404,7 +1401,7 @@ class Repo(BaseRepo):
             return
 
     @classmethod
-    def discover(cls, start: Union[str, bytes, os.PathLike[str]] = ".") -> "Repo":
+    def discover(cls, start: str | bytes | os.PathLike[str] = ".") -> "Repo":
         """Iterate parent directories to discover a repository.
 
         Return a Repo object for the first parent directory that looks like a
@@ -1491,9 +1488,9 @@ class Repo(BaseRepo):
 
     def get_named_file(
         self,
-        path: Union[str, bytes],
-        basedir: Optional[str] = None,
-    ) -> Optional[BinaryIO]:
+        path: str | bytes,
+        basedir: str | None = None,
+    ) -> BinaryIO | None:
         """Get a file from the control dir with a specific name.
 
         Although the filename should be interpreted as a filename relative to
@@ -1568,9 +1565,10 @@ class Repo(BaseRepo):
     @replace_me(remove_in="0.26.0")
     def stage(
         self,
-        fs_paths: Union[
-            str, bytes, os.PathLike[str], Iterable[Union[str, bytes, os.PathLike[str]]]
-        ],
+        fs_paths: str
+        | bytes
+        | os.PathLike[str]
+        | Iterable[str | bytes | os.PathLike[str]],
     ) -> None:
         """Stage a set of paths.
 
@@ -1591,16 +1589,16 @@ class Repo(BaseRepo):
 
     def clone(
         self,
-        target_path: Union[str, bytes, os.PathLike[str]],
+        target_path: str | bytes | os.PathLike[str],
         *,
         mkdir: bool = True,
         bare: bool = False,
         origin: bytes = b"origin",
-        checkout: Optional[bool] = None,
-        branch: Optional[bytes] = None,
-        progress: Optional[Callable[[str], None]] = None,
-        depth: Optional[int] = None,
-        symlinks: Optional[bool] = None,
+        checkout: bool | None = None,
+        branch: bytes | None = None,
+        progress: Callable[[str], None] | None = None,
+        depth: int | None = None,
+        symlinks: bool | None = None,
     ) -> "Repo":
         """Clone this repository.
 
@@ -1685,7 +1683,7 @@ class Repo(BaseRepo):
         return target
 
     @replace_me(remove_in="0.26.0")
-    def reset_index(self, tree: Optional[bytes] = None) -> None:
+    def reset_index(self, tree: bytes | None = None) -> None:
         """Reset the index back to a specific tree.
 
         Args:
@@ -1828,7 +1826,7 @@ class Repo(BaseRepo):
         path = os.path.join(self.controldir(), "rebase-merge")
         return DiskRebaseStateManager(path)
 
-    def get_description(self) -> Optional[bytes]:
+    def get_description(self) -> bytes | None:
         """Retrieve the description of this repository.
 
         Returns: Description as bytes or None.
@@ -1855,14 +1853,14 @@ class Repo(BaseRepo):
     @classmethod
     def _init_maybe_bare(
         cls,
-        path: Union[str, bytes, os.PathLike[str]],
-        controldir: Union[str, bytes, os.PathLike[str]],
+        path: str | bytes | os.PathLike[str],
+        controldir: str | bytes | os.PathLike[str],
         bare: bool,
-        object_store: Optional[PackBasedObjectStore] = None,
+        object_store: PackBasedObjectStore | None = None,
         config: Optional["StackedConfig"] = None,
-        default_branch: Optional[bytes] = None,
-        symlinks: Optional[bool] = None,
-        format: Optional[int] = None,
+        default_branch: bytes | None = None,
+        symlinks: bool | None = None,
+        format: int | None = None,
     ) -> "Repo":
         path = os.fspath(path)
         if isinstance(path, bytes):
@@ -1891,13 +1889,13 @@ class Repo(BaseRepo):
     @classmethod
     def init(
         cls,
-        path: Union[str, bytes, os.PathLike[str]],
+        path: str | bytes | os.PathLike[str],
         *,
         mkdir: bool = False,
         config: Optional["StackedConfig"] = None,
-        default_branch: Optional[bytes] = None,
-        symlinks: Optional[bool] = None,
-        format: Optional[int] = None,
+        default_branch: bytes | None = None,
+        symlinks: bool | None = None,
+        format: int | None = None,
     ) -> "Repo":
         """Create a new repository.
 
@@ -1931,9 +1929,9 @@ class Repo(BaseRepo):
     @classmethod
     def _init_new_working_directory(
         cls,
-        path: Union[str, bytes, os.PathLike[str]],
+        path: str | bytes | os.PathLike[str],
         main_repo: "Repo",
-        identifier: Optional[str] = None,
+        identifier: str | None = None,
         mkdir: bool = False,
     ) -> "Repo":
         """Create a new working directory linked to a repository.
@@ -1980,13 +1978,13 @@ class Repo(BaseRepo):
     @classmethod
     def init_bare(
         cls,
-        path: Union[str, bytes, os.PathLike[str]],
+        path: str | bytes | os.PathLike[str],
         *,
         mkdir: bool = False,
-        object_store: Optional[PackBasedObjectStore] = None,
+        object_store: PackBasedObjectStore | None = None,
         config: Optional["StackedConfig"] = None,
-        default_branch: Optional[bytes] = None,
-        format: Optional[int] = None,
+        default_branch: bytes | None = None,
+        format: int | None = None,
     ) -> "Repo":
         """Create a new bare repository.
 
@@ -2032,9 +2030,9 @@ class Repo(BaseRepo):
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """Exit context manager and close repository."""
         self.close()
@@ -2099,7 +2097,7 @@ class Repo(BaseRepo):
             config_stack, git_attributes, filter_context=self.filter_context
         )
 
-    def get_gitattributes(self, tree: Optional[bytes] = None) -> "GitAttributes":
+    def get_gitattributes(self, tree: bytes | None = None) -> "GitAttributes":
         """Read gitattributes for the repository.
 
         Args:
@@ -2202,7 +2200,7 @@ class Repo(BaseRepo):
         return self.get_worktree().set_sparse_checkout_patterns(patterns)
 
     @replace_me(remove_in="0.26.0")
-    def set_cone_mode_patterns(self, dirs: Union[Sequence[str], None] = None) -> None:
+    def set_cone_mode_patterns(self, dirs: Sequence[str] | None = None) -> None:
         """Write the given cone-mode directory patterns into info/sparse-checkout.
 
         For each directory to include, add an inclusion line that "undoes" the prior
@@ -2231,18 +2229,18 @@ class MemoryRepo(BaseRepo):
         self._named_files: dict[str, bytes] = {}
         self.bare = True
         self._config = ConfigFile()
-        self._description: Optional[bytes] = None
+        self._description: bytes | None = None
         self.filter_context = None
 
     def _append_reflog(
         self,
         ref: bytes,
-        old_sha: Optional[bytes],
-        new_sha: Optional[bytes],
-        committer: Optional[bytes],
-        timestamp: Optional[int],
-        timezone: Optional[int],
-        message: Optional[bytes],
+        old_sha: bytes | None,
+        new_sha: bytes | None,
+        committer: bytes | None,
+        timestamp: int | None,
+        timezone: int | None,
+        message: bytes | None,
     ) -> None:
         self._reflog.append(
             (ref, old_sha, new_sha, committer, timestamp, timezone, message)
@@ -2256,7 +2254,7 @@ class MemoryRepo(BaseRepo):
         """
         self._description = description
 
-    def get_description(self) -> Optional[bytes]:
+    def get_description(self) -> bytes | None:
         """Get the description of this repository.
 
         Returns:
@@ -2295,9 +2293,9 @@ class MemoryRepo(BaseRepo):
 
     def get_named_file(
         self,
-        path: Union[str, bytes],
-        basedir: Optional[str] = None,
-    ) -> Optional[BytesIO]:
+        path: str | bytes,
+        basedir: str | None = None,
+    ) -> BytesIO | None:
         """Get a file from the control dir with a specific name.
 
         Although the filename should be interpreted as a filename relative to
@@ -2360,7 +2358,7 @@ class MemoryRepo(BaseRepo):
             config_stack, git_attributes, filter_context=self.filter_context
         )
 
-    def get_gitattributes(self, tree: Optional[bytes] = None) -> "GitAttributes":
+    def get_gitattributes(self, tree: bytes | None = None) -> "GitAttributes":
         """Read gitattributes for the repository."""
         from .attrs import GitAttributes
 
@@ -2377,17 +2375,17 @@ class MemoryRepo(BaseRepo):
 
     def do_commit(
         self,
-        message: Optional[bytes] = None,
-        committer: Optional[bytes] = None,
-        author: Optional[bytes] = None,
-        commit_timestamp: Optional[float] = None,
-        commit_timezone: Optional[int] = None,
-        author_timestamp: Optional[float] = None,
-        author_timezone: Optional[int] = None,
-        tree: Optional[ObjectID] = None,
-        encoding: Optional[bytes] = None,
-        ref: Optional[Ref] = b"HEAD",
-        merge_heads: Optional[list[ObjectID]] = None,
+        message: bytes | None = None,
+        committer: bytes | None = None,
+        author: bytes | None = None,
+        commit_timestamp: float | None = None,
+        commit_timezone: int | None = None,
+        author_timestamp: float | None = None,
+        author_timezone: int | None = None,
+        tree: ObjectID | None = None,
+        encoding: bytes | None = None,
+        ref: Ref | None = b"HEAD",
+        merge_heads: list[ObjectID] | None = None,
         no_verify: bool = False,
         sign: bool = False,
     ) -> bytes:
@@ -2510,7 +2508,7 @@ class MemoryRepo(BaseRepo):
         cls,
         objects: Iterable[ShaFile],
         refs: Mapping[bytes, bytes],
-        format: Optional[int] = None,
+        format: int | None = None,
     ) -> "MemoryRepo":
         """Create a new bare repository in memory.
 

+ 34 - 36
dulwich/server.py

@@ -50,18 +50,16 @@ import socketserver
 import sys
 import time
 import zlib
-from collections.abc import Iterable, Iterator, Mapping, Sequence
+from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
 from functools import partial
-from typing import IO, TYPE_CHECKING, Callable, Optional, Union
+from typing import IO, TYPE_CHECKING, Optional
 from typing import Protocol as TypingProtocol
 
 if sys.version_info >= (3, 12):
     from collections.abc import Buffer
 else:
-    from typing import Union
-
-    Buffer = Union[bytes, bytearray, memoryview]
+    Buffer = bytes | bytearray | memoryview
 
 if TYPE_CHECKING:
     from .object_store import BaseObjectStore
@@ -163,7 +161,7 @@ class BackendRepo(TypingProtocol):
         """
         raise NotImplementedError
 
-    def get_peeled(self, name: bytes) -> Optional[bytes]:
+    def get_peeled(self, name: bytes) -> bytes | None:
         """Return the cached peeled value of a ref, if available.
 
         Args:
@@ -177,12 +175,12 @@ class BackendRepo(TypingProtocol):
 
     def find_missing_objects(
         self,
-        determine_wants: Callable[[Mapping[bytes, bytes], Optional[int]], list[bytes]],
+        determine_wants: Callable[[Mapping[bytes, bytes], int | None], list[bytes]],
         graph_walker: "_ProtocolGraphWalker",
-        progress: Optional[Callable[[bytes], None]],
+        progress: Callable[[bytes], None] | None,
         *,
-        get_tagged: Optional[Callable[[], dict[bytes, bytes]]] = None,
-        depth: Optional[int] = None,
+        get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
+        depth: int | None = None,
     ) -> Optional["MissingObjectFinder"]:
         """Yield the objects required for a list of commits.
 
@@ -201,7 +199,7 @@ class DictBackend(Backend):
     """Trivial backend that looks up Git repositories in a dictionary."""
 
     def __init__(
-        self, repos: Union[dict[bytes, "BackendRepo"], dict[str, "BackendRepo"]]
+        self, repos: dict[bytes, "BackendRepo"] | dict[str, "BackendRepo"]
     ) -> None:
         """Initialize a DictBackend.
 
@@ -315,7 +313,7 @@ class PackHandler(Handler):
           stateless_rpc: Whether this is a stateless RPC session
         """
         super().__init__(backend, proto, stateless_rpc)
-        self._client_capabilities: Optional[set[bytes]] = None
+        self._client_capabilities: set[bytes] | None = None
         # Flags needed for the no-done capability
         self._done_received = False
         self.advertise_refs = False
@@ -473,8 +471,8 @@ class UploadPackHandler(PackHandler):
 
     def get_tagged(
         self,
-        refs: Optional[Mapping[bytes, bytes]] = None,
-        repo: Optional[BackendRepo] = None,
+        refs: Mapping[bytes, bytes] | None = None,
+        repo: BackendRepo | None = None,
     ) -> dict[ObjectID, ObjectID]:
         """Get a dict of peeled values of tags to their original tag shas.
 
@@ -527,7 +525,7 @@ class UploadPackHandler(PackHandler):
         wants = []
 
         def wants_wrapper(
-            refs: Mapping[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: int | None = None
         ) -> list[bytes]:
             wants.extend(graph_walker.determine_wants(refs, depth))
             return wants
@@ -572,8 +570,8 @@ class UploadPackHandler(PackHandler):
 
 
 def _split_proto_line(
-    line: Optional[bytes], allowed: Optional[Iterable[Optional[bytes]]]
-) -> tuple[Optional[bytes], Optional[Union[bytes, int]]]:
+    line: bytes | None, allowed: Iterable[bytes | None] | None
+) -> tuple[bytes | None, bytes | int | None]:
     """Split a line read from the wire.
 
     Args:
@@ -593,7 +591,7 @@ def _split_proto_line(
         allowed return values.
     """
     if not line:
-        fields: list[Optional[bytes]] = [None]
+        fields: list[bytes | None] = [None]
     else:
         fields = list(line.rstrip(b"\n").split(b" ", 1))
     command = fields[0]
@@ -719,7 +717,7 @@ class _ProtocolGraphWalker:
         self,
         handler: PackHandler,
         object_store: ObjectContainer,
-        get_peeled: Callable[[bytes], Optional[bytes]],
+        get_peeled: Callable[[bytes], bytes | None],
         get_symrefs: Callable[[], dict[bytes, bytes]],
     ) -> None:
         """Initialize a ProtocolGraphWalker.
@@ -744,10 +742,10 @@ class _ProtocolGraphWalker:
         self._cached = False
         self._cache: list[bytes] = []
         self._cache_index = 0
-        self._impl: Optional[AckGraphWalkerImpl] = None
+        self._impl: AckGraphWalkerImpl | None = None
 
     def determine_wants(
-        self, heads: Mapping[bytes, bytes], depth: Optional[int] = None
+        self, heads: Mapping[bytes, bytes], depth: int | None = None
     ) -> list[bytes]:
         """Determine the wants for a set of heads.
 
@@ -834,7 +832,7 @@ class _ProtocolGraphWalker:
 
         return want_revs
 
-    def unread_proto_line(self, command: bytes, value: Union[bytes, int]) -> None:
+    def unread_proto_line(self, command: bytes, value: bytes | int) -> None:
         """Push a command back to be read again.
 
         Args:
@@ -867,7 +865,7 @@ class _ProtocolGraphWalker:
         self._cached = True
         self._cache_index = 0
 
-    def next(self) -> Optional[bytes]:
+    def next(self) -> bytes | None:
         """Get the next SHA from the graph walker.
 
         Returns: Next SHA or None if done
@@ -885,8 +883,8 @@ class _ProtocolGraphWalker:
     __next__ = next
 
     def read_proto_line(
-        self, allowed: Optional[Iterable[Optional[bytes]]]
-    ) -> tuple[Optional[bytes], Optional[Union[bytes, int]]]:
+        self, allowed: Iterable[bytes | None] | None
+    ) -> tuple[bytes | None, bytes | int | None]:
         """Read a line from the wire.
 
         Args:
@@ -1027,7 +1025,7 @@ class SingleAckGraphWalkerImpl(AckGraphWalkerImpl):
             self.walker.send_ack(have_ref)
             self._common.append(have_ref)
 
-    def next(self) -> Optional[bytes]:
+    def next(self) -> bytes | None:
         """Get next SHA from graph walker.
 
         Returns:
@@ -1101,7 +1099,7 @@ class MultiAckGraphWalkerImpl(AckGraphWalkerImpl):
                 self._found_base = True
         # else we blind ack within next
 
-    def next(self) -> Optional[bytes]:
+    def next(self) -> bytes | None:
         """Get next SHA from graph walker.
 
         Returns:
@@ -1181,7 +1179,7 @@ class MultiAckDetailedGraphWalkerImpl(AckGraphWalkerImpl):
         self._common.append(have_ref)
         self.walker.send_ack(have_ref, b"common")
 
-    def next(self) -> Optional[bytes]:
+    def next(self) -> bytes | None:
         """Get next SHA from graph walker.
 
         Returns:
@@ -1592,7 +1590,7 @@ class TCPGitServer(socketserver.TCPServer):
         backend: Backend,
         listen_addr: str,
         port: int = TCP_GIT_PORT,
-        handlers: Optional[dict[bytes, type[Handler]]] = None,
+        handlers: dict[bytes, type[Handler]] | None = None,
     ) -> None:
         """Initialize TCP git server.
 
@@ -1611,8 +1609,8 @@ class TCPGitServer(socketserver.TCPServer):
 
     def verify_request(
         self,
-        request: Union[socket.socket, tuple[bytes, socket.socket]],
-        client_address: Union[tuple[str, int], socket.socket],
+        request: socket.socket | tuple[bytes, socket.socket],
+        client_address: tuple[str, int] | socket.socket,
     ) -> bool:
         """Verify incoming request.
 
@@ -1628,8 +1626,8 @@ class TCPGitServer(socketserver.TCPServer):
 
     def handle_error(
         self,
-        request: Union[socket.socket, tuple[bytes, socket.socket]],
-        client_address: Union[tuple[str, int], socket.socket],
+        request: socket.socket | tuple[bytes, socket.socket],
+        client_address: tuple[str, int] | socket.socket,
     ) -> None:
         """Handle request processing errors.
 
@@ -1679,9 +1677,9 @@ def main(argv: list[str] = sys.argv) -> None:
 def serve_command(
     handler_cls: type[Handler],
     argv: list[str] = sys.argv,
-    backend: Optional[Backend] = None,
-    inf: Optional[IO[bytes]] = None,
-    outf: Optional[IO[bytes]] = None,
+    backend: Backend | None = None,
+    inf: IO[bytes] | None = None,
+    outf: IO[bytes] | None = None,
 ) -> int:
     """Serve a single command.
 

+ 7 - 7
dulwich/stash.py

@@ -23,7 +23,7 @@
 
 import os
 import sys
-from typing import TYPE_CHECKING, Optional, TypedDict, Union
+from typing import TYPE_CHECKING, TypedDict
 
 from .diff_tree import tree_changes
 from .file import GitFile
@@ -163,11 +163,11 @@ class Stash:
         else:
 
             def symlink_fn(  # type: ignore[misc,unused-ignore]
-                src: Union[str, bytes],
-                dst: Union[str, bytes],
+                src: str | bytes,
+                dst: str | bytes,
                 target_is_directory: bool = False,
                 *,
-                dir_fd: Optional[int] = None,
+                dir_fd: int | None = None,
             ) -> None:
                 mode = "w" + ("b" if isinstance(src, bytes) else "")
                 with open(dst, mode) as f:
@@ -280,9 +280,9 @@ class Stash:
 
     def push(
         self,
-        committer: Optional[bytes] = None,
-        author: Optional[bytes] = None,
-        message: Optional[bytes] = None,
+        committer: bytes | None = None,
+        author: bytes | None = None,
+        message: bytes | None = None,
     ) -> ObjectID:
         """Create a new stash.
 

+ 2 - 2
dulwich/submodule.py

@@ -23,7 +23,7 @@
 
 import os
 from collections.abc import Iterator
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING
 
 from .object_store import iter_tree_contents
 from .objects import S_ISGITLINK
@@ -55,7 +55,7 @@ def iter_cached_submodules(
 
 def ensure_submodule_placeholder(
     repo: "Repo",
-    submodule_path: Union[str, bytes],
+    submodule_path: str | bytes,
 ) -> None:
     """Create a submodule placeholder directory with .git file.
 

+ 2 - 2
dulwich/tests/test_object_store.py

@@ -21,8 +21,8 @@
 
 """Tests for the object store interface."""
 
-from collections.abc import Iterator, Sequence
-from typing import TYPE_CHECKING, Any, Callable
+from collections.abc import Callable, Iterator, Sequence
+from typing import TYPE_CHECKING, Any
 from unittest import TestCase
 from unittest.mock import patch
 

+ 7 - 7
dulwich/tests/utils.py

@@ -30,7 +30,8 @@ import tempfile
 import time
 import types
 import warnings
-from typing import Any, BinaryIO, Callable, Optional, TypeVar, Union
+from collections.abc import Callable
+from typing import Any, BinaryIO, TypeVar
 from unittest import SkipTest
 
 from dulwich.index import commit_tree
@@ -54,7 +55,7 @@ F = 0o100644  # Shorthand mode for Files.
 T = TypeVar("T", bound=ShaFile)
 
 
-def open_repo(name: str, temp_dir: Optional[str] = None) -> Repo:
+def open_repo(name: str, temp_dir: str | None = None) -> Repo:
     """Open a copy of a repo in a temporary directory.
 
     Use this function for accessing repos in dulwich/tests/data/repos to avoid
@@ -211,7 +212,7 @@ def ext_functest_builder(
 def build_pack(
     f: BinaryIO,
     objects_spec: list[tuple[int, Any]],
-    store: Optional[BaseObjectStore] = None,
+    store: BaseObjectStore | None = None,
 ) -> list[tuple[int, int, bytes, bytes, int]]:
     """Write test pack data from a concise spec.
 
@@ -293,10 +294,9 @@ def build_pack(
 def build_commit_graph(
     object_store: BaseObjectStore,
     commit_spec: list[list[int]],
-    trees: Optional[
-        dict[int, list[Union[tuple[bytes, ShaFile], tuple[bytes, ShaFile, int]]]]
-    ] = None,
-    attrs: Optional[dict[int, dict[str, Any]]] = None,
+    trees: dict[int, list[tuple[bytes, ShaFile] | tuple[bytes, ShaFile, int]]]
+    | None = None,
+    attrs: dict[int, dict[str, Any]] | None = None,
 ) -> list[Commit]:
     """Build a commit graph from a concise specification.
 

+ 1 - 3
dulwich/trailers.py

@@ -31,8 +31,6 @@ They are similar to RFC 822 email headers and appear at the end of commit
 messages after free-form content.
 """
 
-from typing import Optional
-
 
 class Trailer:
     """Represents a single Git trailer.
@@ -260,7 +258,7 @@ def _parse_trailer_lines(lines: list[str], separators: str) -> list[Trailer]:
         List of parsed Trailer objects
     """
     trailers: list[Trailer] = []
-    current_trailer: Optional[Trailer] = None
+    current_trailer: Trailer | None = None
 
     for line in lines:
         stripped = line.rstrip()
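
The `current_trailer` state above lets a value span lines: in git's trailer format, a line beginning with whitespace folds into the previous trailer, which is presumably why the parser tracks the trailer in progress. An illustrative block (the content is hypothetical):

```python
message = """Fix ref handling

Signed-off-by: Alice <alice@example.com>
Reviewed-by: Bob <bob@example.com>
 with a folded continuation line
"""
```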

+ 17 - 17
dulwich/walk.py

@@ -23,9 +23,9 @@
 
 import collections
 import heapq
-from collections.abc import Iterator, Sequence
+from collections.abc import Callable, Iterator, Sequence
 from itertools import chain
-from typing import TYPE_CHECKING, Any, Callable, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
 
 if TYPE_CHECKING:
     from .object_store import BaseObjectStore
@@ -62,12 +62,12 @@ class WalkEntry:
         self.commit = commit
         self._store = walker.store
         self._get_parents = walker.get_parents
-        self._changes: dict[Optional[bytes], list[TreeChange]] = {}
+        self._changes: dict[bytes | None, list[TreeChange]] = {}
         self._rename_detector = walker.rename_detector
 
     def changes(
-        self, path_prefix: Optional[bytes] = None
-    ) -> Union[list[TreeChange], list[list[TreeChange]]]:
+        self, path_prefix: bytes | None = None
+    ) -> list[TreeChange] | list[list[TreeChange]]:
         """Get the tree changes for this entry.
 
         Args:
@@ -169,7 +169,7 @@ class _CommitTimeQueue:
         self._seen: set[ObjectID] = set()
         self._done: set[ObjectID] = set()
         self._min_time = walker.since
-        self._last: Optional[Commit] = None
+        self._last: Commit | None = None
         self._extra_commits_left = _MAX_EXTRA_COMMITS
         self._is_finished = False
 
@@ -210,7 +210,7 @@ class _CommitTimeQueue:
                     todo.append(parent_commit)
                 excluded.add(parent)
 
-    def next(self) -> Optional[WalkEntry]:
+    def next(self) -> WalkEntry | None:
         if self._is_finished:
             return None
         while self._pq:
@@ -275,15 +275,15 @@ class Walker:
         self,
         store: "BaseObjectStore",
         include: Sequence[bytes],
-        exclude: Optional[Sequence[bytes]] = None,
+        exclude: Sequence[bytes] | None = None,
         order: str = "date",
         reverse: bool = False,
-        max_entries: Optional[int] = None,
-        paths: Optional[Sequence[bytes]] = None,
-        rename_detector: Optional[RenameDetector] = None,
+        max_entries: int | None = None,
+        paths: Sequence[bytes] | None = None,
+        rename_detector: RenameDetector | None = None,
         follow: bool = False,
-        since: Optional[int] = None,
-        until: Optional[int] = None,
+        since: int | None = None,
+        until: int | None = None,
         get_parents: Callable[[Commit], list[bytes]] = lambda commit: commit.parents,
         queue_cls: type = _CommitTimeQueue,
     ) -> None:
@@ -340,7 +340,7 @@ class Walker:
         self._queue = queue_cls(self)
         self._out_queue: collections.deque[WalkEntry] = collections.deque()
 
-    def _path_matches(self, changed_path: Optional[bytes]) -> bool:
+    def _path_matches(self, changed_path: bytes | None) -> bool:
         if changed_path is None:
             return False
         if self.paths is None:
@@ -373,7 +373,7 @@ class Walker:
             return True
         return False
 
-    def _should_return(self, entry: WalkEntry) -> Optional[bool]:
+    def _should_return(self, entry: WalkEntry) -> bool | None:
         """Determine if a walk entry should be returned..
 
         Args:
@@ -429,7 +429,7 @@ class Walker:
                         return True
         return None
 
-    def _next(self) -> Optional[WalkEntry]:
+    def _next(self) -> WalkEntry | None:
         max_entries = self.max_entries
         while max_entries is None or self._num_entries < max_entries:
             entry = next(self._queue)
@@ -446,7 +446,7 @@ class Walker:
 
     def _reorder(
         self, results: Iterator[WalkEntry]
-    ) -> Union[Iterator[WalkEntry], list[WalkEntry]]:
+    ) -> Iterator[WalkEntry] | list[WalkEntry]:
         """Possibly reorder a results iterator.
 
         Args:
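
A minimal sketch of driving `Walker` through `BaseRepo.get_walker` with the keyword arguments shown in the repo.py hunks above (the path is illustrative):

```python
from dulwich.repo import Repo

repo = Repo(".")
# Show the last ten commits touching a single path.
for entry in repo.get_walker(max_entries=10, paths=[b"dulwich/walk.py"]):
    print(entry.commit.id, entry.commit.message.splitlines()[0])
```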

+ 24 - 34
dulwich/web.py

@@ -51,16 +51,14 @@ import os
 import re
 import sys
 import time
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from io import BytesIO
 from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Callable,
     ClassVar,
-    Optional,
     Union,
     cast,
 )
@@ -92,9 +90,7 @@ else:
                     self,
                     status: str,
                     response_headers: list[tuple[str, str]],
-                    exc_info: Optional[
-                        tuple[type, BaseException, TracebackType]
-                    ] = None,
+                    exc_info: tuple[type, BaseException, TracebackType] | None = None,
                 ) -> Callable[[bytes], None]:
                     """Start the response with status and headers."""
                     ...
@@ -171,7 +167,7 @@ NO_CACHE_HEADERS = [
 ]
 
 
-def cache_forever_headers(now: Optional[float] = None) -> list[tuple[str, str]]:
+def cache_forever_headers(now: float | None = None) -> list[tuple[str, str]]:
     """Generate headers for caching forever.
 
     Args:
@@ -189,7 +185,7 @@ def cache_forever_headers(now: Optional[float] = None) -> list[tuple[str, str]]:
     ]
 
 
-def date_time_string(timestamp: Optional[float] = None) -> str:
+def date_time_string(timestamp: float | None = None) -> str:
     """Convert a timestamp to an HTTP date string.
 
     Args:
@@ -252,7 +248,7 @@ def get_repo(backend: "Backend", mat: re.Match[str]) -> BaseRepo:
 
 
 def send_file(
-    req: "HTTPGitRequest", f: Optional[BinaryIO], content_type: str
+    req: "HTTPGitRequest", f: BinaryIO | None, content_type: str
 ) -> Iterator[bytes]:
     """Send a file-like object to the request output.
 
@@ -407,7 +403,7 @@ def get_info_refs(
         req.nocache()
         write = req.respond(HTTP_OK, f"application/x-{service}-advertisement")
 
-        def write_fn(data: bytes) -> Optional[int]:
+        def write_fn(data: bytes) -> int | None:
             result = write(data)
             return len(data) if result is not None else None
 
@@ -556,7 +552,7 @@ def handle_service_request(
     req.nocache()
     write = req.respond(HTTP_OK, f"application/x-{service}-result")
 
-    def write_fn(data: bytes) -> Optional[int]:
+    def write_fn(data: bytes) -> int | None:
         result = write(data)
         return len(data) if result is not None else None
 
@@ -586,9 +582,8 @@ class HTTPGitRequest:
         environ: WSGIEnvironment,
         start_response: StartResponse,
         dumb: bool = False,
-        handlers: Optional[
-            dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
-        ] = None,
+        handlers: dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
+        | None = None,
     ) -> None:
         """Initialize HTTPGitRequest.
 
@@ -612,8 +607,8 @@ class HTTPGitRequest:
     def respond(
         self,
         status: str = HTTP_OK,
-        content_type: Optional[str] = None,
-        headers: Optional[Sequence[tuple[str, str]]] = None,
+        content_type: str | None = None,
+        headers: Sequence[tuple[str, str]] | None = None,
     ) -> Callable[[bytes], object]:
         """Begin a response with the given status and other headers."""
         if headers:
@@ -692,10 +687,9 @@ class HTTPGitApplication:
         self,
         backend: Backend,
         dumb: bool = False,
-        handlers: Optional[
-            dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
-        ] = None,
-        fallback_app: Optional[WSGIApplication] = None,
+        handlers: dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
+        | None = None,
+        fallback_app: WSGIApplication | None = None,
     ) -> None:
         """Initialize HTTPGitApplication.
 
@@ -707,8 +701,8 @@ class HTTPGitApplication:
         """
         self.backend = backend
         self.dumb = dumb
-        self.handlers: dict[bytes, Union[HandlerConstructor, Callable[..., Any]]] = (
-            dict(DEFAULT_HANDLERS)
+        self.handlers: dict[bytes, HandlerConstructor | Callable[..., Any]] = dict(
+            DEFAULT_HANDLERS
         )
         self.fallback_app = fallback_app
         if handlers is not None:
@@ -798,8 +792,8 @@ class LimitedInputFilter:
 def make_wsgi_chain(
     backend: Backend,
     dumb: bool = False,
-    handlers: Optional[dict[bytes, Callable[..., Any]]] = None,
-    fallback_app: Optional[WSGIApplication] = None,
+    handlers: dict[bytes, Callable[..., Any]] | None = None,
+    fallback_app: WSGIApplication | None = None,
 ) -> WSGIApplication:
     """Factory function to create an instance of HTTPGitApplication.
 
@@ -817,11 +811,9 @@ class ServerHandlerLogger(ServerHandler):
 
     def log_exception(
         self,
-        exc_info: Union[
-            tuple[type[BaseException], BaseException, TracebackType],
-            tuple[None, None, None],
-            None,
-        ],
+        exc_info: tuple[type[BaseException], BaseException, TracebackType]
+        | tuple[None, None, None]
+        | None,
     ) -> None:
         """Log exception using dulwich logger."""
         logger.exception(
@@ -843,11 +835,9 @@ class WSGIRequestHandlerLogger(WSGIRequestHandler):
 
     def log_exception(
         self,
-        exc_info: Union[
-            tuple[type[BaseException], BaseException, TracebackType],
-            tuple[None, None, None],
-            None,
-        ],
+        exc_info: tuple[type[BaseException], BaseException, TracebackType]
+        | tuple[None, None, None]
+        | None,
     ) -> None:
         """Log exception using dulwich logger."""
         logger.exception(
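
A minimal sketch of serving smart HTTP with `make_wsgi_chain` from the hunk above (host and port are illustrative):

```python
from wsgiref.simple_server import make_server

from dulwich.repo import Repo
from dulwich.server import DictBackend
from dulwich.web import make_wsgi_chain

# Chain the HTTP git application over a one-repo backend.
app = make_wsgi_chain(DictBackend({b"/": Repo(".")}))
make_server("localhost", 8000, app).serve_forever()
```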

+ 2 - 3
dulwich/whitespace.py

@@ -25,7 +25,6 @@ whitespace error detection capabilities.
 """
 
 from collections.abc import Sequence, Set
-from typing import Optional
 
 # Default whitespace errors Git checks for
 DEFAULT_WHITESPACE_ERRORS = {
@@ -47,7 +46,7 @@ WHITESPACE_ERROR_TYPES = {
 }
 
 
-def parse_whitespace_config(value: Optional[str]) -> tuple[set[str], int]:
+def parse_whitespace_config(value: str | None) -> tuple[set[str], int]:
     """Parse core.whitespace configuration value.
 
     Args:
@@ -220,7 +219,7 @@ class WhitespaceChecker:
 def fix_whitespace_errors(
     content: bytes,
     errors: Sequence[tuple[str, int]],
-    fix_types: Optional[Set[str]] = None,
+    fix_types: Set[str] | None = None,
 ) -> bytes:
     """Fix whitespace errors in content.
 

+ 5 - 5
dulwich/worktree.py

@@ -31,10 +31,10 @@ import sys
 import tempfile
 import time
 import warnings
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Any, Callable, Union
+from typing import Any
 
 from .errors import CommitError, HookError
 from .objects import Blob, Commit, ObjectID, Tag, Tree
@@ -416,7 +416,7 @@ class WorkTree:
 
     def commit(
         self,
-        message: Union[str, bytes, Callable[[Any, Commit], bytes], None] = None,
+        message: str | bytes | Callable[[Any, Commit], bytes] | None = None,
         committer: bytes | None = None,
         author: bytes | None = None,
         commit_timestamp: float | None = None,
@@ -706,8 +706,8 @@ class WorkTree:
         else:
 
             def symlink_fn(  # type: ignore[misc,unused-ignore]
-                src: Union[str, bytes],
-                dst: Union[str, bytes],
+                src: str | bytes,
+                dst: str | bytes,
                 target_is_directory: bool = False,
                 *,
                 dir_fd: int | None = None,
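
`WorkTree.commit` above accepts a callable message as well as `str`/`bytes`; reading the `Callable[[Any, Commit], bytes]` annotation, the callable receives the repository and the in-progress commit. A minimal sketch (`render_message` is a hypothetical helper):

```python
from dulwich.repo import Repo

def render_message(repo, commit) -> bytes:
    # Build the message from the Commit object being created.
    return b"Snapshot of tree " + commit.tree

repo = Repo(".")
sha = repo.get_worktree().commit(message=render_message)
```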

+ 1 - 2
examples/merge_driver.py

@@ -12,7 +12,6 @@ This example:
 
 import json
 import os
-from typing import Optional
 
 from dulwich import porcelain
 from dulwich.merge_drivers import get_merge_driver_registry
@@ -27,7 +26,7 @@ class JSONMergeDriver:
         ancestor: bytes,
         ours: bytes,
         theirs: bytes,
-        path: Optional[str] = None,
+        path: str | None = None,
         marker_size: int = 7,
     ) -> tuple[bytes, bool]:
         """Merge JSON files by combining objects."""

+ 1 - 2
fuzzing/fuzz-targets/fuzz_bundle.py

@@ -2,7 +2,6 @@
 
 import sys
 from io import BytesIO
-from typing import Optional
 
 import atheris
 
@@ -14,7 +13,7 @@ with atheris.instrument_imports():
     from dulwich.pack import PackData, write_pack_objects
 
 
-def TestOneInput(data) -> Optional[int]:
+def TestOneInput(data) -> int | None:
     fdp = EnhancedFuzzedDataProvider(data)
     bundle = Bundle()
     bundle.version = fdp.PickValueInList([2, 3, None])

+ 1 - 2
fuzzing/fuzz-targets/fuzz_configfile.py

@@ -2,7 +2,6 @@
 
 import sys
 from io import BytesIO
-from typing import Optional
 
 import atheris
 from test_utils import is_expected_exception
@@ -11,7 +10,7 @@ with atheris.instrument_imports():
     from dulwich.config import ConfigFile
 
 
-def TestOneInput(data) -> Optional[int]:
+def TestOneInput(data) -> int | None:
     try:
         ConfigFile.from_file(BytesIO(data))
     except ValueError as e:

+ 1 - 2
fuzzing/fuzz-targets/fuzz_object_store.py

@@ -3,7 +3,6 @@
 import stat
 import sys
 from io import BytesIO
-from typing import Optional
 
 import atheris
 
@@ -20,7 +19,7 @@ with atheris.instrument_imports():
     )
 
 
-def TestOneInput(data) -> Optional[int]:
+def TestOneInput(data) -> int | None:
     fdp = EnhancedFuzzedDataProvider(data)
     repo = MemoryRepo()
     blob = Blob.from_string(fdp.ConsumeRandomBytes())

+ 1 - 2
fuzzing/fuzz-targets/fuzz_repo.py

@@ -3,7 +3,6 @@
 import os
 import sys
 import tempfile
-from typing import Optional
 
 import atheris
 
@@ -17,7 +16,7 @@ with atheris.instrument_imports():
     )
 
 
-def TestOneInput(data) -> Optional[int]:
+def TestOneInput(data) -> int | None:
     fdp = EnhancedFuzzedDataProvider(data)
     with tempfile.TemporaryDirectory() as temp_dir:
         repo = Repo.init(temp_dir)

+ 1 - 4
pyproject.toml

@@ -11,7 +11,6 @@ license = "Apache-2.0 OR GPL-2.0-or-later"
 keywords = ["vcs", "git"]
 classifiers = [
     "Development Status :: 4 - Beta",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -23,7 +22,7 @@ classifiers = [
     "Operating System :: Microsoft :: Windows",
     "Topic :: Software Development :: Version Control",
 ]
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 dependencies = [
     "urllib3>=2.2.2",
     'typing_extensions >=4.6.0 ; python_version < "3.12"',
@@ -140,7 +139,6 @@ ignore = [
     "ANN205",
     "ANN206",
     "E501",  # line too long
-    "UP007",  # Use X | Y for type annotations (Python 3.10+ syntax, but we support 3.9+)
 ]
 
 [tool.ruff.lint.pydocstyle]
@@ -168,7 +166,6 @@ before-build = "pip install -U setuptools-rust && yum -y install libatomic && cu
 archs = ["auto", "universal2", "x86_64", "arm64"]
 before-all = "rustup target add x86_64-apple-darwin aarch64-apple-darwin"
 skip = """\
-    cp39-macosx_x86_64 cp39-macosx_universal2 \
     cp310-macosx_x86_64 cp310-macosx_universal2 \
     cp311-macosx_x86_64 cp311-macosx_universal2 \
     cp312-macosx_x86_64 cp312-macosx_universal2 \
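
Dropping the `UP007` ignore means ruff now flags the old union spellings project-wide; a before/after sketch:

```python
from typing import Optional, Union

# Flagged by UP007 once the ignore is removed:
def old(x: Optional[int], y: Union[str, bytes]) -> Optional[str]: ...

# The accepted PEP 604 form:
def new(x: int | None, y: str | bytes) -> str | None: ...
```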

+ 3 - 3
tests/__init__.py

@@ -40,7 +40,7 @@ import tempfile
 # If Python itself provides an exception, use that
 import unittest
 from collections.abc import Sequence
-from typing import ClassVar, Optional
+from typing import ClassVar
 from unittest import SkipTest, expectedFailure, skipIf
 from unittest import TestCase as _TestCase
 
@@ -56,7 +56,7 @@ class TestCase(_TestCase):
         self.overrideEnv("HOME", "/nonexistent")
         self.overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
 
-    def overrideEnv(self, name: str, value: Optional[str]) -> None:
+    def overrideEnv(self, name: str, value: str | None) -> None:
         def restore() -> None:
             if oldval is not None:
                 os.environ[name] = oldval
@@ -211,7 +211,7 @@ def tutorial_test_suite() -> unittest.TestSuite:
 
     to_restore = []
 
-    def overrideEnv(name: str, value: Optional[str]) -> None:
+    def overrideEnv(name: str, value: str | None) -> None:
         oldval = os.environ.get(name)
         if value is not None:
             os.environ[name] = value

+ 1 - 1
tests/compat/utils.py

@@ -234,7 +234,7 @@ def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT) -> boo
         try:
             s.connect(("localhost", port))
             return True
-        except socket.timeout:
+        except TimeoutError:
             pass
         except OSError as e:
             if getattr(e, "errno", False) and e.errno != errno.ECONNREFUSED:

+ 3 - 4
tests/contrib/test_paramiko_vendor.py

@@ -26,7 +26,6 @@ import tempfile
 import threading
 import time
 from io import StringIO
-from typing import Optional
 from unittest import skipIf
 from unittest.mock import patch
 
@@ -134,7 +133,7 @@ else:
                     conn_thread.start()
                     self.connection_threads.append(conn_thread)
 
-                except socket.timeout:
+                except TimeoutError:
                     # Normal timeout, continue to check if we should keep running
                     continue
                 except OSError as e:
@@ -192,7 +191,7 @@ else:
                             break
                         # Echo the data back immediately
                         channel.send(data)
-                    except socket.timeout:
+                    except TimeoutError:
                         # No more data available, break
                         break
 
@@ -284,7 +283,7 @@ class ParamikoSSHVendorTests(TestCase):
     def tearDown(self) -> None:
         self.thread.join()
 
-    def _run(self) -> Optional[bool]:
+    def _run(self) -> bool | None:
         try:
             conn, _addr = self.socket.accept()
         except OSError:

+ 2 - 2
tests/contrib/test_release_robot.py

@@ -29,7 +29,7 @@ import sys
 import tempfile
 import time
 import unittest
-from typing import ClassVar, Optional
+from typing import ClassVar
 from unittest.mock import MagicMock, patch
 
 from dulwich.contrib import release_robot
@@ -89,7 +89,7 @@ class GetRecentTagsTest(unittest.TestCase):
     committer = b"Mark Mikofski <mark.mikofski@sunpowercorp.com>"
     test_tags: ClassVar[list[bytes]] = [b"v0.1a", b"v0.1"]
     tag_test_data: ClassVar[
-        dict[bytes, tuple[int, bytes, Optional[tuple[int, bytes]]]]
+        dict[bytes, tuple[int, bytes, tuple[int, bytes] | None]]
     ] = {
         test_tags[0]: (1484788003, b"3" * 40, None),
         test_tags[1]: (1484788314, b"1" * 40, (1484788401, b"2" * 40)),

+ 4 - 4
tests/test_annotate.py

@@ -21,7 +21,7 @@
 import os
 import tempfile
 import unittest
-from typing import Any, Optional
+from typing import Any
 from unittest import TestCase
 
 from dulwich.annotate import annotate_lines, update_lines
@@ -134,7 +134,7 @@ class AnnotateLinesTestCase(TestCase):
         shutil.rmtree(self.temp_dir)
 
     def _make_commit(
-        self, blob_content: bytes, message: str, parent: Optional[bytes] = None
+        self, blob_content: bytes, message: str, parent: bytes | None = None
     ) -> bytes:
         """Helper to create a commit with a single file."""
         # Create blob
@@ -231,7 +231,7 @@ class PorcelainAnnotateTestCase(TestCase):
         filename: str,
         content: bytes,
         message: str,
-        parent: Optional[bytes] = None,
+        parent: bytes | None = None,
     ) -> bytes:
         """Helper to create a commit with a file."""
         # Create blob
@@ -330,7 +330,7 @@ class IntegrationTestCase(TestCase):
         filename: str,
         content: bytes,
         message: str,
-        parent: Optional[bytes] = None,
+        parent: bytes | None = None,
     ) -> bytes:
         """Helper to create a commit with file content."""
         # Write file to working directory

+ 1 - 2
tests/test_archive.py

@@ -24,7 +24,6 @@
 import struct
 import tarfile
 from io import BytesIO
-from typing import Optional
 from unittest.mock import patch
 
 from dulwich.archive import tar_stream
@@ -88,7 +87,7 @@ class ArchiveTests(TestCase):
         self.assertEqual(stream.getvalue()[4:8], expected_mtime)
 
     def test_same_file(self) -> None:
-        contents: list[Optional[bytes]] = [None, None]
+        contents: list[bytes | None] = [None, None]
         for format in ["", "gz", "bz2"]:
             for i in [0, 1]:
                 with patch("time.time", return_value=i):

+ 10 - 11
tests/test_dumb.py

@@ -22,8 +22,7 @@
 """Tests for dumb HTTP git repositories."""
 
 import zlib
-from collections.abc import Mapping
-from typing import Callable, Optional, Union
+from collections.abc import Callable, Mapping
 from unittest import TestCase
 from unittest.mock import Mock
 
@@ -37,7 +36,7 @@ class MockResponse:
         self,
         status: int = 200,
         content: bytes = b"",
-        headers: Optional[dict[str, str]] = None,
+        headers: dict[str, str] | None = None,
     ) -> None:
         self.status = status
         self.content = content
@@ -53,12 +52,12 @@ class DumbHTTPObjectStoreTests(TestCase):
 
     def setUp(self) -> None:
         self.base_url = "https://example.com/repo.git/"
-        self.responses: dict[str, dict[str, Union[int, bytes]]] = {}
+        self.responses: dict[str, dict[str, int | bytes]] = {}
         self.store = DumbHTTPObjectStore(self.base_url, self._mock_http_request)
 
     def _mock_http_request(
         self, url: str, headers: dict[str, str]
-    ) -> tuple[MockResponse, Callable[[Optional[int]], bytes]]:
+    ) -> tuple[MockResponse, Callable[[int | None], bytes]]:
         """Mock HTTP request function."""
         if url in self.responses:
             resp_data = self.responses[url]
@@ -69,7 +68,7 @@ class DumbHTTPObjectStoreTests(TestCase):
             content = resp.content
             offset = [0]  # Use list to make it mutable in closure
 
-            def read_func(size: Optional[int] = None) -> bytes:
+            def read_func(size: int | None = None) -> bytes:
                 if offset[0] >= len(content):
                     return b""
                 if size is None:
@@ -188,12 +187,12 @@ class DumbRemoteHTTPRepoTests(TestCase):
 
     def setUp(self) -> None:
         self.base_url = "https://example.com/repo.git/"
-        self.responses: dict[str, dict[str, Union[int, bytes]]] = {}
+        self.responses: dict[str, dict[str, int | bytes]] = {}
         self.repo = DumbRemoteHTTPRepo(self.base_url, self._mock_http_request)
 
     def _mock_http_request(
         self, url: str, headers: dict[str, str]
-    ) -> tuple[MockResponse, Callable[[Optional[int]], bytes]]:
+    ) -> tuple[MockResponse, Callable[[int | None], bytes]]:
         """Mock HTTP request function."""
         if url in self.responses:
             resp_data = self.responses[url]
@@ -204,7 +203,7 @@ class DumbRemoteHTTPRepoTests(TestCase):
             content = resp.content
             offset = [0]  # Use list to make it mutable in closure
 
-            def read_func(size: Optional[int] = None) -> bytes:
+            def read_func(size: int | None = None) -> bytes:
                 if offset[0] >= len(content):
                     return b""
                 if size is None:
@@ -265,7 +264,7 @@ fedcba9876543210fedcba9876543210fedcba98\trefs/tags/v1.0
         graph_walker = Mock()
 
         def determine_wants(
-            refs: Mapping[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: int | None = None
         ) -> list[bytes]:
             return []
 
@@ -292,7 +291,7 @@ fedcba9876543210fedcba9876543210fedcba98\trefs/tags/v1.0
         graph_walker.ack.return_value = []  # No existing objects
 
         def determine_wants(
-            refs: Mapping[bytes, bytes], depth: Optional[int] = None
+            refs: Mapping[bytes, bytes], depth: int | None = None
         ) -> list[bytes]:
             return [blob_sha]
 

+ 1 - 2
tests/test_log_utils.py

@@ -24,7 +24,6 @@
 import logging
 import os
 import tempfile
-from typing import Optional
 
 from dulwich.log_utils import (
     _DULWICH_LOGGER,
@@ -60,7 +59,7 @@ class LogUtilsTests(TestCase):
             os.environ["GIT_TRACE"] = self.original_git_trace
         super().tearDown()
 
-    def _set_git_trace(self, value: Optional[str]) -> None:
+    def _set_git_trace(self, value: str | None) -> None:
         """Helper to set GIT_TRACE environment variable."""
         if value is None:
             os.environ.pop("GIT_TRACE", None)

+ 1 - 2
tests/test_merge_drivers.py

@@ -23,7 +23,6 @@
 import importlib.util
 import sys
 import unittest
-from typing import Optional
 
 from dulwich.attrs import GitAttributes, Pattern
 from dulwich.config import ConfigDict
@@ -51,7 +50,7 @@ class _TestMergeDriver:
         ancestor: bytes,
         ours: bytes,
         theirs: bytes,
-        path: Optional[str] = None,
+        path: str | None = None,
         marker_size: int = 7,
     ) -> tuple[bytes, bool]:
         """Test merge implementation."""

Some files were not shown because too many files changed in this diff