Quellcode durchsuchen

Merge branch 'master' into bitmap-support

Jelmer Vernooij vor 1 Monat
Ursprung
Commit
da53eaf766

+ 19 - 3
NEWS

@@ -1,5 +1,18 @@
 0.25.0	UNRELEASED
 0.25.0	UNRELEASED
 
 
+ * Implement support for ``core.sharedRepository`` configuration option.
+   Repository files and directories now respect shared repository permissions
+   for group-writable or world-writable repositories. Affects loose objects,
+   pack files, pack indexes, index files, and other git metadata files.
+   (Jelmer Vernooij, #1804)
+
+ * Optimize status performance by using stat matching to skip reading
+   and filtering unchanged files. This provides significant performance
+   improvements for repositories with LFS filters, where filter operations can
+   be very expensive. The optimization matches Git's behavior of using mtime
+   and size comparisons to determine if files need processing.
+   (Jelmer Vernooij, #1999)
+
  * Drop support for Python 3.9. (Jelmer Vernooij)
  * Drop support for Python 3.9. (Jelmer Vernooij)
 
 
  * Add support for pack bitmap indexes for fast reachability queries.
  * Add support for pack bitmap indexes for fast reachability queries.
@@ -22,7 +35,10 @@
    Dulwich version, and installed dependencies with their versions.
    Dulwich version, and installed dependencies with their versions.
    (Jelmer Vernooij, #1835)
    (Jelmer Vernooij, #1835)
 
 
-024.10	2025-11-10
+ * Add basic ``dulwich restore`` and ``dulwich switch``
+   commands. (Jelmer Vernooij, #1777)
+
+0.24.10	2025-11-10
 
 
  * Fix compatibility with python 3.9. (Jelmer Vernooij, #1991)
  * Fix compatibility with python 3.9. (Jelmer Vernooij, #1991)
 
 
@@ -134,7 +150,7 @@
    commit metadata. Supports automatic upstream detection from tracking branches
    commit metadata. Supports automatic upstream detection from tracking branches
    and verbose mode to display commit messages. (Jelmer Vernooij, #1782)
    and verbose mode to display commit messages. (Jelmer Vernooij, #1782)
 
 
- * Add support for ``git mailsplit`` command to split mbox files and Maildir
+ * Add support for ``dulwich mailsplit`` command to split mbox files and Maildir
    into individual message files. Supports mboxrd format, custom precision,
    into individual message files. Supports mboxrd format, custom precision,
    and all standard git mailsplit options. (Jelmer Vernooij, #1840)
    and all standard git mailsplit options. (Jelmer Vernooij, #1840)
 
 
@@ -490,7 +506,7 @@
 
 
  * Add ``merge-tree`` plumbing command to ``dulwich.porcelain`` and CLI.
  * Add ``merge-tree`` plumbing command to ``dulwich.porcelain`` and CLI.
    This command performs three-way tree merges without touching the working
    This command performs three-way tree merges without touching the working
-   directory or creating commits, similar to ``git merge-tree``. It outputs
+   directory or creating commits, similar to ``git merge-tree``. It outputs
    the merged tree SHA and lists any conflicted paths. (Jelmer Vernooij)
    the merged tree SHA and lists any conflicted paths. (Jelmer Vernooij)
 
 
  * Add ``porcelain.count_objects()`` function to count unpacked objects and
  * Add ``porcelain.count_objects()`` function to count unpacked objects and

+ 3 - 4
SECURITY.md

@@ -2,11 +2,10 @@
 
 
 ## Supported Versions
 ## Supported Versions
 
 
-| Version  | Supported          |
+| Version  | Supported          |
 | -------- | ------------------ |
 | -------- | ------------------ |
-| 0.21.x   | :white_check_mark: |
-| 0.20.x   | :white_check_mark: |
-| < 0.20.x | :x:                |
+| 0.24.x   | :white_check_mark: |
+| < 0.24.x | :x:                |
 
 
 ## Reporting a Vulnerability
 ## Reporting a Vulnerability
 
 

+ 10 - 0
dulwich/_typing.py

@@ -0,0 +1,10 @@
+"""Common type definitions for Dulwich."""
+
+import sys
+
+if sys.version_info >= (3, 12):
+    from collections.abc import Buffer
+else:
+    Buffer = bytes | bytearray | memoryview
+
+__all__ = ["Buffer"]

+ 3 - 5
dulwich/bitmap.py

@@ -33,7 +33,7 @@ import struct
 from collections import deque
 from collections import deque
 from collections.abc import Callable, Iterable, Iterator
 from collections.abc import Callable, Iterable, Iterator
 from io import BytesIO
 from io import BytesIO
-from typing import IO, TYPE_CHECKING, Optional
+from typing import IO, TYPE_CHECKING
 
 
 from .file import GitFile
 from .file import GitFile
 from .objects import Blob, Commit, Tag, Tree
 from .objects import Blob, Commit, Tag, Tree
@@ -466,7 +466,7 @@ class PackBitmap:
 
 
 def read_bitmap(
 def read_bitmap(
     filename: str | os.PathLike[str],
     filename: str | os.PathLike[str],
-    pack_index: Optional["PackIndex"] = None,
+    pack_index: "PackIndex | None" = None,
 ) -> PackBitmap:
 ) -> PackBitmap:
     """Read a bitmap index file.
     """Read a bitmap index file.
 
 
@@ -485,9 +485,7 @@ def read_bitmap(
         return read_bitmap_file(f, pack_index=pack_index)
         return read_bitmap_file(f, pack_index=pack_index)
 
 
 
 
-def read_bitmap_file(
-    f: IO[bytes], pack_index: Optional["PackIndex"] = None
-) -> PackBitmap:
+def read_bitmap_file(f: IO[bytes], pack_index: "PackIndex | None" = None) -> PackBitmap:
     """Read bitmap data from a file object.
     """Read bitmap data from a file object.
 
 
     Args:
     Args:

+ 114 - 5
dulwich/cli.py

@@ -51,12 +51,8 @@ from typing import (
     TextIO,
     TextIO,
 )
 )
 
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
 from dulwich import porcelain
 from dulwich import porcelain
+from dulwich._typing import Buffer
 
 
 from .bundle import Bundle, create_bundle_from_repo, read_bundle, write_bundle
 from .bundle import Bundle, create_bundle_from_repo, read_bundle, write_bundle
 from .client import get_transport_and_path
 from .client import get_transport_and_path
@@ -3850,6 +3846,117 @@ class cmd_checkout(Command):
         return 0
         return 0
 
 
 
 
+class cmd_restore(Command):
+    """Restore working tree files."""
+
+    def run(self, args: Sequence[str]) -> int | None:
+        """Execute the restore command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser()
+        parser.add_argument(
+            "paths",
+            nargs="+",
+            type=str,
+            help="Paths to restore",
+        )
+        parser.add_argument(
+            "-s",
+            "--source",
+            type=str,
+            help="Restore from a specific commit (default: HEAD for --staged, index for worktree)",
+        )
+        parser.add_argument(
+            "--staged",
+            action="store_true",
+            help="Restore files in the index",
+        )
+        parser.add_argument(
+            "--worktree",
+            action="store_true",
+            help="Restore files in the working tree",
+        )
+        parsed_args = parser.parse_args(args)
+
+        # If neither --staged nor --worktree is specified, default to --worktree
+        if not parsed_args.staged and not parsed_args.worktree:
+            worktree = True
+            staged = False
+        else:
+            worktree = parsed_args.worktree
+            staged = parsed_args.staged
+
+        try:
+            porcelain.restore(
+                ".",
+                paths=parsed_args.paths,
+                source=parsed_args.source,
+                staged=staged,
+                worktree=worktree,
+            )
+        except porcelain.CheckoutError as e:
+            sys.stderr.write(f"{e}\n")
+            return 1
+        return 0
+
+
+class cmd_switch(Command):
+    """Switch branches."""
+
+    def run(self, args: Sequence[str]) -> int | None:
+        """Execute the switch command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser()
+        parser.add_argument(
+            "target",
+            type=str,
+            help="Branch or commit to switch to",
+        )
+        parser.add_argument(
+            "-c",
+            "--create",
+            type=str,
+            help="Create a new branch at the target and switch to it",
+        )
+        parser.add_argument(
+            "-f",
+            "--force",
+            action="store_true",
+            help="Force switch even if there are local changes",
+        )
+        parser.add_argument(
+            "-d",
+            "--detach",
+            action="store_true",
+            help="Switch to a commit in detached HEAD state",
+        )
+        parsed_args = parser.parse_args(args)
+
+        if not parsed_args.target:
+            logger.error(
+                "Usage: dulwich switch TARGET [-c NEW_BRANCH] [--force] [--detach]"
+            )
+            return 1
+
+        try:
+            porcelain.switch(
+                ".",
+                target=parsed_args.target,
+                create=parsed_args.create,
+                force=parsed_args.force,
+                detach=parsed_args.detach,
+            )
+        except porcelain.CheckoutError as e:
+            sys.stderr.write(f"{e}\n")
+            return 1
+        return 0
+
+
 class cmd_stash_list(Command):
 class cmd_stash_list(Command):
     """List stash entries."""
     """List stash entries."""
 
 
@@ -6559,6 +6666,7 @@ commands = {
     "repack": cmd_repack,
     "repack": cmd_repack,
     "replace": cmd_replace,
     "replace": cmd_replace,
     "reset": cmd_reset,
     "reset": cmd_reset,
+    "restore": cmd_restore,
     "revert": cmd_revert,
     "revert": cmd_revert,
     "rev-list": cmd_rev_list,
     "rev-list": cmd_rev_list,
     "rm": cmd_rm,
     "rm": cmd_rm,
@@ -6570,6 +6678,7 @@ commands = {
     "status": cmd_status,
     "status": cmd_status,
     "stripspace": cmd_stripspace,
     "stripspace": cmd_stripspace,
     "shortlog": cmd_shortlog,
     "shortlog": cmd_shortlog,
+    "switch": cmd_switch,
     "symbolic-ref": cmd_symbolic_ref,
     "symbolic-ref": cmd_symbolic_ref,
     "submodule": cmd_submodule,
     "submodule": cmd_submodule,
     "tag": cmd_tag,
     "tag": cmd_tag,

+ 9 - 11
dulwich/client.py

@@ -55,8 +55,6 @@ from typing import (
     TYPE_CHECKING,
     TYPE_CHECKING,
     Any,
     Any,
     ClassVar,
     ClassVar,
-    Optional,
-    Union,
 )
 )
 from urllib.parse import ParseResult, urljoin, urlparse, urlunparse, urlunsplit
 from urllib.parse import ParseResult, urljoin, urlparse, urlunparse, urlunsplit
 from urllib.parse import quote as urlquote
 from urllib.parse import quote as urlquote
@@ -1110,7 +1108,7 @@ class GitClient:
         self,
         self,
         path: bytes,
         path: bytes,
         target: BaseRepo,
         target: BaseRepo,
-        determine_wants: Optional["DetermineWantsFunc"] = None,
+        determine_wants: "DetermineWantsFunc | None" = None,
         progress: Callable[[bytes], None] | None = None,
         progress: Callable[[bytes], None] | None = None,
         depth: int | None = None,
         depth: int | None = None,
         ref_prefix: Sequence[Ref] | None = None,
         ref_prefix: Sequence[Ref] | None = None,
@@ -2304,7 +2302,7 @@ class LocalGitClient(GitClient):
         self,
         self,
         path: bytes,
         path: bytes,
         target: BaseRepo,
         target: BaseRepo,
-        determine_wants: Optional["DetermineWantsFunc"] = None,
+        determine_wants: "DetermineWantsFunc | None" = None,
         progress: Callable[[bytes], None] | None = None,
         progress: Callable[[bytes], None] | None = None,
         depth: int | None = None,
         depth: int | None = None,
         ref_prefix: Sequence[bytes] | None = None,
         ref_prefix: Sequence[bytes] | None = None,
@@ -2632,7 +2630,7 @@ class BundleClient(GitClient):
         self,
         self,
         path: bytes,
         path: bytes,
         target: BaseRepo,
         target: BaseRepo,
-        determine_wants: Optional["DetermineWantsFunc"] = None,
+        determine_wants: "DetermineWantsFunc | None" = None,
         progress: Callable[[bytes], None] | None = None,
         progress: Callable[[bytes], None] | None = None,
         depth: int | None = None,
         depth: int | None = None,
         ref_prefix: Sequence[Ref] | None = None,
         ref_prefix: Sequence[Ref] | None = None,
@@ -3221,7 +3219,7 @@ def default_urllib3_manager(
     base_url: str | None = None,
     base_url: str | None = None,
     timeout: float | None = None,
     timeout: float | None = None,
     cert_reqs: str | None = None,
     cert_reqs: str | None = None,
-) -> Union["urllib3.ProxyManager", "urllib3.PoolManager"]:
+) -> "urllib3.ProxyManager | urllib3.PoolManager":
     """Return urllib3 connection pool manager.
     """Return urllib3 connection pool manager.
 
 
     Honour detected proxy configurations.
     Honour detected proxy configurations.
@@ -3989,7 +3987,7 @@ class AbstractHttpGitClient(GitClient):
         username: str | None = None,
         username: str | None = None,
         password: str | None = None,
         password: str | None = None,
         config: Config | None = None,
         config: Config | None = None,
-        pool_manager: Optional["urllib3.PoolManager"] = None,
+        pool_manager: "urllib3.PoolManager | None" = None,
     ) -> "AbstractHttpGitClient":
     ) -> "AbstractHttpGitClient":
         """Create an AbstractHttpGitClient from a parsed URL.
         """Create an AbstractHttpGitClient from a parsed URL.
 
 
@@ -4084,7 +4082,7 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
         self,
         self,
         base_url: str,
         base_url: str,
         dumb: bool | None = None,
         dumb: bool | None = None,
-        pool_manager: Optional["urllib3.PoolManager"] = None,
+        pool_manager: "urllib3.PoolManager | None" = None,
         config: Config | None = None,
         config: Config | None = None,
         username: str | None = None,
         username: str | None = None,
         password: str | None = None,
         password: str | None = None,
@@ -4229,7 +4227,7 @@ def get_transport_and_path_from_url(
     password: str | None = None,
     password: str | None = None,
     key_filename: str | None = None,
     key_filename: str | None = None,
     ssh_command: str | None = None,
     ssh_command: str | None = None,
-    pool_manager: Optional["urllib3.PoolManager"] = None,
+    pool_manager: "urllib3.PoolManager | None" = None,
 ) -> tuple[GitClient, str]:
 ) -> tuple[GitClient, str]:
     """Obtain a git client from a URL.
     """Obtain a git client from a URL.
 
 
@@ -4282,7 +4280,7 @@ def _get_transport_and_path_from_url(
     password: str | None = None,
     password: str | None = None,
     key_filename: str | None = None,
     key_filename: str | None = None,
     ssh_command: str | None = None,
     ssh_command: str | None = None,
-    pool_manager: Optional["urllib3.PoolManager"] = None,
+    pool_manager: "urllib3.PoolManager | None" = None,
 ) -> tuple[GitClient, str]:
 ) -> tuple[GitClient, str]:
     parsed = urlparse(url)
     parsed = urlparse(url)
     if parsed.scheme == "git":
     if parsed.scheme == "git":
@@ -4377,7 +4375,7 @@ def get_transport_and_path(
     password: str | None = None,
     password: str | None = None,
     key_filename: str | None = None,
     key_filename: str | None = None,
     ssh_command: str | None = None,
     ssh_command: str | None = None,
-    pool_manager: Optional["urllib3.PoolManager"] = None,
+    pool_manager: "urllib3.PoolManager | None" = None,
 ) -> tuple[GitClient, str]:
 ) -> tuple[GitClient, str]:
     """Obtain a git client from a URL.
     """Obtain a git client from a URL.
 
 

+ 1 - 3
dulwich/config.py

@@ -45,7 +45,6 @@ from typing import (
     IO,
     IO,
     Generic,
     Generic,
     TypeVar,
     TypeVar,
-    Union,
     overload,
     overload,
 )
 )
 
 
@@ -197,8 +196,7 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
     @classmethod
     @classmethod
     def make(
     def make(
         cls,
         cls,
-        dict_in: Union[MutableMapping[K, V], "CaseInsensitiveOrderedMultiDict[K, V]"]
-        | None = None,
+        dict_in: "MutableMapping[K, V] | CaseInsensitiveOrderedMultiDict[K, V] | None" = None,
         default_factory: Callable[[], V] | None = None,
         default_factory: Callable[[], V] | None = None,
     ) -> "CaseInsensitiveOrderedMultiDict[K, V]":
     ) -> "CaseInsensitiveOrderedMultiDict[K, V]":
         """Create a CaseInsensitiveOrderedMultiDict from an existing mapping.
         """Create a CaseInsensitiveOrderedMultiDict from an existing mapping.

+ 3 - 3
dulwich/contrib/requests_vendor.py

@@ -33,7 +33,7 @@ This implementation is experimental and does not have any tests.
 
 
 from collections.abc import Callable, Iterator
 from collections.abc import Callable, Iterator
 from io import BytesIO
 from io import BytesIO
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     from ..config import ConfigFile
     from ..config import ConfigFile
@@ -56,7 +56,7 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
         self,
         self,
         base_url: str,
         base_url: str,
         dumb: bool | None = None,
         dumb: bool | None = None,
-        config: Optional["ConfigFile"] = None,
+        config: "ConfigFile | None" = None,
         username: str | None = None,
         username: str | None = None,
         password: str | None = None,
         password: str | None = None,
         thin_packs: bool = True,
         thin_packs: bool = True,
@@ -133,7 +133,7 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
         return resp, read
         return resp, read
 
 
 
 
-def get_session(config: Optional["ConfigFile"]) -> Session:
+def get_session(config: "ConfigFile | None") -> Session:
     """Create a requests session with Git configuration.
     """Create a requests session with Git configuration.
 
 
     Args:
     Args:

+ 3 - 3
dulwich/contrib/swift.py

@@ -39,7 +39,7 @@ import zlib
 from collections.abc import Callable, Iterator, Mapping
 from collections.abc import Callable, Iterator, Mapping
 from configparser import ConfigParser
 from configparser import ConfigParser
 from io import BytesIO
 from io import BytesIO
-from typing import Any, BinaryIO, Optional, cast
+from typing import Any, BinaryIO, cast
 
 
 from geventhttpclient import HTTPClient
 from geventhttpclient import HTTPClient
 
 
@@ -231,7 +231,7 @@ def pack_info_create(pack_data: "PackData", pack_index: "PackIndex") -> bytes:
 
 
 def load_pack_info(
 def load_pack_info(
     filename: str,
     filename: str,
-    scon: Optional["SwiftConnector"] = None,
+    scon: "SwiftConnector | None" = None,
     file: BinaryIO | None = None,
     file: BinaryIO | None = None,
 ) -> dict[str, Any] | None:
 ) -> dict[str, Any] | None:
     """Load pack info from Swift or file.
     """Load pack info from Swift or file.
@@ -821,7 +821,7 @@ class SwiftObjectStore(PackBasedObjectStore):
         """
         """
         f = BytesIO()
         f = BytesIO()
 
 
-        def commit() -> Optional["SwiftPack"]:
+        def commit() -> "SwiftPack | None":
             """Commit the pack to Swift storage.
             """Commit the pack to Swift storage.
 
 
             Returns:
             Returns:

+ 1 - 6
dulwich/diff.py

@@ -48,15 +48,10 @@ import io
 import logging
 import logging
 import os
 import os
 import stat
 import stat
-import sys
 from collections.abc import Iterable, Sequence
 from collections.abc import Iterable, Sequence
 from typing import BinaryIO
 from typing import BinaryIO
 
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
+from ._typing import Buffer
 from .index import ConflictedIndexEntry, commit_index
 from .index import ConflictedIndexEntry, commit_index
 from .object_store import iter_tree_contents
 from .object_store import iter_tree_contents
 from .objects import S_ISGITLINK, Blob, Commit
 from .objects import S_ISGITLINK, Blob, Commit

+ 3 - 3
dulwich/diff_tree.py

@@ -27,7 +27,7 @@ from collections.abc import Callable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
 from collections.abc import Set as AbstractSet
 from io import BytesIO
 from io import BytesIO
 from itertools import chain
 from itertools import chain
-from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypeVar
+from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar
 
 
 from .object_store import BaseObjectStore
 from .object_store import BaseObjectStore
 from .objects import S_ISGITLINK, ObjectID, ShaFile, Tree, TreeEntry
 from .objects import S_ISGITLINK, ObjectID, ShaFile, Tree, TreeEntry
@@ -260,7 +260,7 @@ def tree_changes(
     tree1_id: ObjectID | None,
     tree1_id: ObjectID | None,
     tree2_id: ObjectID | None,
     tree2_id: ObjectID | None,
     want_unchanged: bool = False,
     want_unchanged: bool = False,
-    rename_detector: Optional["RenameDetector"] = None,
+    rename_detector: "RenameDetector | None" = None,
     include_trees: bool = False,
     include_trees: bool = False,
     change_type_same: bool = False,
     change_type_same: bool = False,
     paths: Sequence[bytes] | None = None,
     paths: Sequence[bytes] | None = None,
@@ -347,7 +347,7 @@ def tree_changes_for_merge(
     store: BaseObjectStore,
     store: BaseObjectStore,
     parent_tree_ids: Sequence[ObjectID],
     parent_tree_ids: Sequence[ObjectID],
     tree_id: ObjectID,
     tree_id: ObjectID,
-    rename_detector: Optional["RenameDetector"] = None,
+    rename_detector: "RenameDetector | None" = None,
 ) -> Iterator[list[TreeChange | None]]:
 ) -> Iterator[list[TreeChange | None]]:
     """Get the tree changes for a merge tree relative to all its parents.
     """Get the tree changes for a merge tree relative to all its parents.
 
 

+ 2 - 2
dulwich/dumb.py

@@ -26,7 +26,7 @@ import tempfile
 import zlib
 import zlib
 from collections.abc import Callable, Iterator, Mapping, Sequence
 from collections.abc import Callable, Iterator, Mapping, Sequence
 from io import BytesIO
 from io import BytesIO
-from typing import Any, Optional
+from typing import Any
 from urllib.parse import urljoin
 from urllib.parse import urljoin
 
 
 from .errors import NotGitRepository, ObjectFormatException
 from .errors import NotGitRepository, ObjectFormatException
@@ -340,7 +340,7 @@ class DumbHTTPObjectStore(BaseObjectStore):
         self,
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[[str], None] | None = None,
         progress: Callable[[str], None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store."""
         """Add a set of objects to this object store."""
         raise NotImplementedError("Cannot add objects to dumb HTTP repository")
         raise NotImplementedError("Cannot add objects to dumb HTTP repository")
 
 

+ 1 - 4
dulwich/file.py

@@ -28,10 +28,7 @@ from collections.abc import Iterable, Iterator
 from types import TracebackType
 from types import TracebackType
 from typing import IO, Any, ClassVar, Literal, overload
 from typing import IO, Any, ClassVar, Literal, overload
 
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
+from ._typing import Buffer
 
 
 
 
 def ensure_dir_exists(
 def ensure_dir_exists(

+ 6 - 6
dulwich/filters.py

@@ -25,7 +25,7 @@ import logging
 import subprocess
 import subprocess
 import threading
 import threading
 from collections.abc import Callable
 from collections.abc import Callable
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 from typing import Protocol as TypingProtocol
 from typing import Protocol as TypingProtocol
 
 
 from .attrs import GitAttributes
 from .attrs import GitAttributes
@@ -140,7 +140,7 @@ class ProcessFilterDriver:
         self._capabilities: set[bytes] = set()
         self._capabilities: set[bytes] = set()
         self._process_lock = threading.Lock()
         self._process_lock = threading.Lock()
 
 
-    def _get_or_start_process(self) -> Optional["Protocol"]:
+    def _get_or_start_process(self) -> "Protocol | None":
         """Get or start the long-running process filter."""
         """Get or start the long-running process filter."""
         if self._process is None and self.process_cmd:
         if self._process is None and self.process_cmd:
             from .errors import GitProtocolError, HangupException
             from .errors import GitProtocolError, HangupException
@@ -602,8 +602,8 @@ class FilterRegistry:
 
 
     def __init__(
     def __init__(
         self,
         self,
-        config: Optional["StackedConfig"] = None,
-        repo: Optional["BaseRepo"] = None,
+        config: "StackedConfig | None" = None,
+        repo: "BaseRepo | None" = None,
     ) -> None:
     ) -> None:
         """Initialize FilterRegistry.
         """Initialize FilterRegistry.
 
 
@@ -879,10 +879,10 @@ class FilterBlobNormalizer:
 
 
     def __init__(
     def __init__(
         self,
         self,
-        config_stack: Optional["StackedConfig"],
+        config_stack: "StackedConfig | None",
         gitattributes: GitAttributes,
         gitattributes: GitAttributes,
         filter_registry: FilterRegistry | None = None,
         filter_registry: FilterRegistry | None = None,
-        repo: Optional["BaseRepo"] = None,
+        repo: "BaseRepo | None" = None,
         filter_context: FilterContext | None = None,
         filter_context: FilterContext | None = None,
     ) -> None:
     ) -> None:
         """Initialize FilterBlobNormalizer.
         """Initialize FilterBlobNormalizer.

+ 5 - 5
dulwich/gc.py

@@ -1,12 +1,12 @@
 """Git garbage collection implementation."""
 """Git garbage collection implementation."""
 
 
-import collections
 import logging
 import logging
 import os
 import os
 import time
 import time
+from collections import deque
 from collections.abc import Callable
 from collections.abc import Callable
 from dataclasses import dataclass, field
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 
 from dulwich.object_store import (
 from dulwich.object_store import (
     BaseObjectStore,
     BaseObjectStore,
@@ -54,7 +54,7 @@ def find_reachable_objects(
         Set of reachable object SHAs
         Set of reachable object SHAs
     """
     """
     reachable = set()
     reachable = set()
-    pending: collections.deque[ObjectID] = collections.deque()
+    pending: deque[ObjectID] = deque()
 
 
     # Start with all refs
     # Start with all refs
     for ref in refs_container.allkeys():
     for ref in refs_container.allkeys():
@@ -313,7 +313,7 @@ def garbage_collect(
     return stats
     return stats
 
 
 
 
-def should_run_gc(repo: "BaseRepo", config: Optional["Config"] = None) -> bool:
+def should_run_gc(repo: "BaseRepo", config: "Config | None" = None) -> bool:
     """Check if automatic garbage collection should run.
     """Check if automatic garbage collection should run.
 
 
     Args:
     Args:
@@ -372,7 +372,7 @@ def should_run_gc(repo: "BaseRepo", config: Optional["Config"] = None) -> bool:
 
 
 def maybe_auto_gc(
 def maybe_auto_gc(
     repo: "Repo",
     repo: "Repo",
-    config: Optional["Config"] = None,
+    config: "Config | None" = None,
     progress: Callable[[str], None] | None = None,
     progress: Callable[[str], None] | None = None,
 ) -> bool:
 ) -> bool:
     """Run automatic garbage collection if needed.
     """Run automatic garbage collection if needed.

+ 6 - 3
dulwich/hooks.py

@@ -226,9 +226,12 @@ class PostReceiveShellHook(ShellHook):
             out_data, err_data = p.communicate(in_data)
             out_data, err_data = p.communicate(in_data)
 
 
             if (p.returncode != 0) or err_data:
             if (p.returncode != 0) or err_data:
-                err_fmt = b"post-receive exit code: %d\n" + b"stdout:\n%s\nstderr:\n%s"
-                err_msg = err_fmt % (p.returncode, out_data, err_data)
-                raise HookError(err_msg.decode("utf-8", "backslashreplace"))
+                err_msg = (
+                    f"post-receive exit code: {p.returncode}\n"
+                    f"stdout:\n{out_data.decode('utf-8', 'backslashreplace')}\n"
+                    f"stderr:\n{err_data.decode('utf-8', 'backslashreplace')}"
+                )
+                raise HookError(err_msg)
             return out_data
             return out_data
         except OSError as err:
         except OSError as err:
             raise HookError(repr(err)) from err
             raise HookError(repr(err)) from err

+ 2 - 2
dulwich/ignore.py

@@ -30,7 +30,7 @@ import os.path
 import re
 import re
 from collections.abc import Iterable, Sequence
 from collections.abc import Iterable, Sequence
 from contextlib import suppress
 from contextlib import suppress
-from typing import TYPE_CHECKING, BinaryIO, Union
+from typing import TYPE_CHECKING, BinaryIO
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     from .repo import Repo
     from .repo import Repo
@@ -38,7 +38,7 @@ if TYPE_CHECKING:
 from .config import Config, get_xdg_config_home_path
 from .config import Config, get_xdg_config_home_path
 
 
 
 
-def _pattern_to_str(pattern: Union["Pattern", bytes, str]) -> str:
+def _pattern_to_str(pattern: "Pattern | bytes | str") -> str:
     """Convert a pattern to string, handling both Pattern objects and raw patterns."""
     """Convert a pattern to string, handling both Pattern objects and raw patterns."""
     if isinstance(pattern, Pattern):
     if isinstance(pattern, Pattern):
         pattern_data: bytes | str = pattern.pattern
         pattern_data: bytes | str = pattern.pattern

+ 55 - 11
dulwich/index.py

@@ -44,8 +44,6 @@ from typing import (
     TYPE_CHECKING,
     TYPE_CHECKING,
     Any,
     Any,
     BinaryIO,
     BinaryIO,
-    Optional,
-    Union,
 )
 )
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
@@ -71,7 +69,7 @@ from .objects import (
 from .pack import ObjectContainer, SHA1Reader, SHA1Writer
 from .pack import ObjectContainer, SHA1Reader, SHA1Writer
 
 
 # Type alias for recursive tree structure used in commit_tree
 # Type alias for recursive tree structure used in commit_tree
-TreeDict = dict[bytes, Union["TreeDict", tuple[int, bytes]]]
+TreeDict = dict[bytes, "TreeDict | tuple[int, bytes]"]
 
 
 # 2-bit stage (during merge)
 # 2-bit stage (during merge)
 FLAG_STAGEMASK = 0x3000
 FLAG_STAGEMASK = 0x3000
@@ -1062,6 +1060,8 @@ class Index:
         read: bool = True,
         read: bool = True,
         skip_hash: bool = False,
         skip_hash: bool = False,
         version: int | None = None,
         version: int | None = None,
+        *,
+        file_mode: int | None = None,
     ) -> None:
     ) -> None:
         """Create an index object associated with the given filename.
         """Create an index object associated with the given filename.
 
 
@@ -1070,11 +1070,13 @@ class Index:
           read: Whether to initialize the index from the given file, should it exist.
           read: Whether to initialize the index from the given file, should it exist.
           skip_hash: Whether to skip SHA1 hash when writing (for manyfiles feature)
           skip_hash: Whether to skip SHA1 hash when writing (for manyfiles feature)
           version: Index format version to use (None = auto-detect from file or use default)
           version: Index format version to use (None = auto-detect from file or use default)
+          file_mode: Optional file permission mask for shared repository
         """
         """
         self._filename = os.fspath(filename)
         self._filename = os.fspath(filename)
         # TODO(jelmer): Store the version returned by read_index
         # TODO(jelmer): Store the version returned by read_index
         self._version = version
         self._version = version
         self._skip_hash = skip_hash
         self._skip_hash = skip_hash
+        self._file_mode = file_mode
         self._extensions: list[IndexExtension] = []
         self._extensions: list[IndexExtension] = []
         self.clear()
         self.clear()
         if read:
         if read:
@@ -1095,7 +1097,8 @@ class Index:
 
 
     def write(self) -> None:
     def write(self) -> None:
         """Write current contents of index to disk."""
         """Write current contents of index to disk."""
-        f = GitFile(self._filename, "wb")
+        mask = self._file_mode if self._file_mode is not None else 0o644
+        f = GitFile(self._filename, "wb", mask=mask)
         try:
         try:
             # Filter out extensions with no meaningful data
             # Filter out extensions with no meaningful data
             meaningful_extensions = []
             meaningful_extensions = []
@@ -1653,7 +1656,7 @@ if sys.platform == "win32":
 
 
         def __init__(self, errno: int, msg: str, filename: str | None) -> None:
         def __init__(self, errno: int, msg: str, filename: str | None) -> None:
             """Initialize WindowsSymlinkPermissionError."""
             """Initialize WindowsSymlinkPermissionError."""
-            super(PermissionError, self).__init__(
+            super().__init__(
                 errno,
                 errno,
                 f"Unable to create symlink; do you have developer mode enabled? {msg}",
                 f"Unable to create symlink; do you have developer mode enabled? {msg}",
                 filename,
                 filename,
@@ -1888,7 +1891,7 @@ def build_index_from_tree(
         [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
         [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
     ]
     ]
     | None = None,
     | None = None,
-    blob_normalizer: Optional["FilterBlobNormalizer"] = None,
+    blob_normalizer: "FilterBlobNormalizer | None" = None,
     tree_encoding: str = "utf-8",
     tree_encoding: str = "utf-8",
 ) -> None:
 ) -> None:
     """Generate and materialize index from a tree.
     """Generate and materialize index from a tree.
@@ -2122,7 +2125,7 @@ def _remove_empty_parents(path: bytes, stop_at: bytes) -> None:
             # Directory doesn't exist - stop trying
             # Directory doesn't exist - stop trying
             break
             break
         except OSError as e:
         except OSError as e:
-            if e.errno == errno.ENOTEMPTY:
+            if e.errno in (errno.ENOTEMPTY, errno.EEXIST):
                 # Directory not empty - stop trying
                 # Directory not empty - stop trying
                 break
                 break
             raise
             raise
@@ -2159,7 +2162,7 @@ def _check_file_matches(
     entry_mode: int,
     entry_mode: int,
     current_stat: os.stat_result,
     current_stat: os.stat_result,
     honor_filemode: bool,
     honor_filemode: bool,
-    blob_normalizer: Optional["FilterBlobNormalizer"] = None,
+    blob_normalizer: "FilterBlobNormalizer | None" = None,
     tree_path: bytes | None = None,
     tree_path: bytes | None = None,
 ) -> bool:
 ) -> bool:
     """Check if a file on disk matches the expected git object.
     """Check if a file on disk matches the expected git object.
@@ -2255,7 +2258,7 @@ def _transition_to_file(
         [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
         [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
     ]
     ]
     | None,
     | None,
-    blob_normalizer: Optional["FilterBlobNormalizer"],
+    blob_normalizer: "FilterBlobNormalizer | None",
     tree_encoding: str = "utf-8",
     tree_encoding: str = "utf-8",
 ) -> None:
 ) -> None:
     """Transition any type to regular file or symlink."""
     """Transition any type to regular file or symlink."""
@@ -2311,7 +2314,7 @@ def _transition_to_file(
             try:
             try:
                 os.rmdir(full_path)
                 os.rmdir(full_path)
             except OSError as e:
             except OSError as e:
-                if e.errno == errno.ENOTEMPTY:
+                if e.errno in (errno.ENOTEMPTY, errno.EEXIST):
                     raise IsADirectoryError(
                     raise IsADirectoryError(
                         f"Cannot replace non-empty directory with file: {full_path!r}"
                         f"Cannot replace non-empty directory with file: {full_path!r}"
                     )
                     )
@@ -2515,7 +2518,7 @@ def update_working_tree(
     ]
     ]
     | None = None,
     | None = None,
     force_remove_untracked: bool = False,
     force_remove_untracked: bool = False,
-    blob_normalizer: Optional["FilterBlobNormalizer"] = None,
+    blob_normalizer: "FilterBlobNormalizer | None" = None,
     tree_encoding: str = "utf-8",
     tree_encoding: str = "utf-8",
     allow_overwrite_modified: bool = False,
     allow_overwrite_modified: bool = False,
 ) -> None:
 ) -> None:
@@ -2755,6 +2758,37 @@ def update_working_tree(
     index.write()
     index.write()
 
 
 
 
+def _stat_matches_entry(st: os.stat_result, entry: IndexEntry) -> bool:
+    """Check if filesystem stat matches index entry stat.
+
+    This is used to determine if a file might have changed without reading its content.
+    Git uses this optimization to avoid expensive filter operations on unchanged files.
+
+    Args:
+      st: Filesystem stat result
+      entry: Index entry to compare against
+    Returns: True if stat matches and file is likely unchanged
+    """
+    # Get entry mtime
+    if isinstance(entry.mtime, tuple):
+        entry_mtime_sec = entry.mtime[0]
+    else:
+        entry_mtime_sec = int(entry.mtime)
+
+    # Compare modification time (seconds only for now)
+    # Note: We use int() to compare only seconds, as nanosecond precision
+    # can vary across filesystems
+    if int(st.st_mtime) != entry_mtime_sec:
+        return False
+
+    # Compare file size
+    if st.st_size != entry.size:
+        return False
+
+    # If both mtime and size match, file is likely unchanged
+    return True
+
+
 def _check_entry_for_changes(
 def _check_entry_for_changes(
     tree_path: bytes,
     tree_path: bytes,
     entry: IndexEntry | ConflictedIndexEntry,
     entry: IndexEntry | ConflictedIndexEntry,
@@ -2785,6 +2819,16 @@ def _check_entry_for_changes(
         if not stat.S_ISREG(st.st_mode) and not stat.S_ISLNK(st.st_mode):
         if not stat.S_ISREG(st.st_mode) and not stat.S_ISLNK(st.st_mode):
             return None
             return None
 
 
+        # Optimization: If stat matches index entry (mtime and size unchanged),
+        # we can skip reading and filtering the file entirely. This is a significant
+        # performance improvement for repositories with many unchanged files.
+        # Even with filters (e.g., LFS), if the file hasn't been modified (stat unchanged),
+        # the filter output would be the same, so we can safely skip the expensive
+        # filter operation. This addresses performance issues with LFS repositories
+        # where filter operations can be very slow.
+        if _stat_matches_entry(st, entry):
+            return None
+
         blob = blob_from_path_and_stat(full_path, st)
         blob = blob_from_path_and_stat(full_path, st)
 
 
         if filter_blob_callback is not None:
         if filter_blob_callback is not None:

+ 9 - 11
dulwich/lfs.py

@@ -39,7 +39,7 @@ import os
 import tempfile
 import tempfile
 from collections.abc import Iterable, Mapping
 from collections.abc import Iterable, Mapping
 from dataclasses import dataclass
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, BinaryIO, Optional
+from typing import TYPE_CHECKING, Any, BinaryIO
 from urllib.parse import urljoin, urlparse
 from urllib.parse import urljoin, urlparse
 from urllib.request import Request, urlopen
 from urllib.request import Request, urlopen
 
 
@@ -182,7 +182,7 @@ class LFSPointer:
         self.size = size
         self.size = size
 
 
     @classmethod
     @classmethod
-    def from_bytes(cls, data: bytes) -> Optional["LFSPointer"]:
+    def from_bytes(cls, data: bytes) -> "LFSPointer | None":
         """Parse LFS pointer from bytes.
         """Parse LFS pointer from bytes.
 
 
         Returns None if data is not a valid LFS pointer.
         Returns None if data is not a valid LFS pointer.
@@ -243,9 +243,7 @@ class LFSPointer:
 class LFSFilterDriver:
 class LFSFilterDriver:
     """LFS filter driver implementation."""
     """LFS filter driver implementation."""
 
 
-    def __init__(
-        self, lfs_store: "LFSStore", config: Optional["Config"] = None
-    ) -> None:
+    def __init__(self, lfs_store: "LFSStore", config: "Config | None" = None) -> None:
         """Initialize LFSFilterDriver."""
         """Initialize LFSFilterDriver."""
         self.lfs_store = lfs_store
         self.lfs_store = lfs_store
         self.config = config
         self.config = config
@@ -328,13 +326,13 @@ class LFSFilterDriver:
         """Clean up any resources held by this filter driver."""
         """Clean up any resources held by this filter driver."""
         # LFSFilterDriver doesn't hold any resources that need cleanup
         # LFSFilterDriver doesn't hold any resources that need cleanup
 
 
-    def reuse(self, config: Optional["Config"], filter_name: str) -> bool:
+    def reuse(self, config: "Config | None", filter_name: str) -> bool:
         """Check if this filter driver should be reused with the given configuration."""
         """Check if this filter driver should be reused with the given configuration."""
         # LFSFilterDriver is stateless and lightweight, no need to cache
         # LFSFilterDriver is stateless and lightweight, no need to cache
         return False
         return False
 
 
 
 
-def _get_lfs_user_agent(config: Optional["Config"]) -> str:
+def _get_lfs_user_agent(config: "Config | None") -> str:
     """Get User-Agent string for LFS requests, respecting git config."""
     """Get User-Agent string for LFS requests, respecting git config."""
     try:
     try:
         if config:
         if config:
@@ -385,7 +383,7 @@ def _is_valid_lfs_url(url: str) -> bool:
 class LFSClient:
 class LFSClient:
     """Base class for LFS client operations."""
     """Base class for LFS client operations."""
 
 
-    def __init__(self, url: str, config: Optional["Config"] = None) -> None:
+    def __init__(self, url: str, config: "Config | None" = None) -> None:
         """Initialize LFS client.
         """Initialize LFS client.
 
 
         Args:
         Args:
@@ -427,7 +425,7 @@ class LFSClient:
         raise NotImplementedError
         raise NotImplementedError
 
 
     @classmethod
     @classmethod
-    def from_config(cls, config: "Config") -> Optional["LFSClient"]:
+    def from_config(cls, config: "Config") -> "LFSClient | None":
         """Create LFS client from git config.
         """Create LFS client from git config.
 
 
         Returns the appropriate subclass (HTTPLFSClient or FileLFSClient)
         Returns the appropriate subclass (HTTPLFSClient or FileLFSClient)
@@ -491,7 +489,7 @@ class LFSClient:
 class HTTPLFSClient(LFSClient):
 class HTTPLFSClient(LFSClient):
     """LFS client for HTTP/HTTPS operations."""
     """LFS client for HTTP/HTTPS operations."""
 
 
-    def __init__(self, url: str, config: Optional["Config"] = None) -> None:
+    def __init__(self, url: str, config: "Config | None" = None) -> None:
         """Initialize HTTP LFS client.
         """Initialize HTTP LFS client.
 
 
         Args:
         Args:
@@ -711,7 +709,7 @@ class HTTPLFSClient(LFSClient):
 class FileLFSClient(LFSClient):
 class FileLFSClient(LFSClient):
     """LFS client for file:// URLs that accesses local filesystem."""
     """LFS client for file:// URLs that accesses local filesystem."""
 
 
-    def __init__(self, url: str, config: Optional["Config"] = None) -> None:
+    def __init__(self, url: str, config: "Config | None" = None) -> None:
         """Initialize File LFS client.
         """Initialize File LFS client.
 
 
         Args:
         Args:

+ 2 - 2
dulwich/line_ending.py

@@ -139,7 +139,7 @@ Sources:
 
 
 import logging
 import logging
 from collections.abc import Callable, Mapping
 from collections.abc import Callable, Mapping
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     from .config import StackedConfig
     from .config import StackedConfig
@@ -176,7 +176,7 @@ class LineEndingFilter(FilterDriver):
 
 
     @classmethod
     @classmethod
     def from_config(
     def from_config(
-        cls, config: Optional["StackedConfig"], for_text_attr: bool = False
+        cls, config: "StackedConfig | None", for_text_attr: bool = False
     ) -> "LineEndingFilter":
     ) -> "LineEndingFilter":
         """Create a LineEndingFilter from git configuration.
         """Create a LineEndingFilter from git configuration.
 
 

+ 2 - 2
dulwich/lru_cache.py

@@ -23,7 +23,7 @@
 """A simple least-recently-used (LRU) cache."""
 """A simple least-recently-used (LRU) cache."""
 
 
 from collections.abc import Callable, Iterable, Iterator
 from collections.abc import Callable, Iterable, Iterator
-from typing import Generic, Optional, TypeVar, cast
+from typing import Generic, TypeVar, cast
 
 
 _null_key = object()
 _null_key = object()
 
 
@@ -37,7 +37,7 @@ class _LRUNode(Generic[K, V]):
 
 
     __slots__ = ("cleanup", "key", "next_key", "prev", "size", "value")
     __slots__ = ("cleanup", "key", "next_key", "prev", "size", "value")
 
 
-    prev: Optional["_LRUNode[K, V]"]
+    prev: "_LRUNode[K, V] | None"
     next_key: K | object
     next_key: K | object
     size: int | None
     size: int | None
 
 

+ 1 - 1
dulwich/merge.py

@@ -24,7 +24,7 @@ def make_merge3(
     a: Sequence[bytes],
     a: Sequence[bytes],
     b: Sequence[bytes],
     b: Sequence[bytes],
     is_cherrypick: bool = False,
     is_cherrypick: bool = False,
-    sequence_matcher: type["SequenceMatcherProtocol[bytes]"] | None = None,
+    sequence_matcher: "type[SequenceMatcherProtocol[bytes]] | None" = None,
 ) -> "merge3.Merge3[bytes]":
 ) -> "merge3.Merge3[bytes]":
     """Return a Merge3 object, or raise ImportError if merge3 is not installed."""
     """Return a Merge3 object, or raise ImportError if merge3 is not installed."""
     if merge3 is None:
     if merge3 is None:

+ 6 - 6
dulwich/notes.py

@@ -22,7 +22,7 @@
 
 
 import stat
 import stat
 from collections.abc import Iterator, Sequence
 from collections.abc import Iterator, Sequence
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 
 from .objects import Blob, Tree
 from .objects import Blob, Tree
 
 
@@ -609,7 +609,7 @@ class Notes:
     def get_notes_ref(
     def get_notes_ref(
         self,
         self,
         notes_ref: bytes | None = None,
         notes_ref: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes:
     ) -> bytes:
         """Get the notes reference to use.
         """Get the notes reference to use.
 
 
@@ -631,7 +631,7 @@ class Notes:
         self,
         self,
         object_sha: bytes,
         object_sha: bytes,
         notes_ref: bytes | None = None,
         notes_ref: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes | None:
     ) -> bytes | None:
         """Get the note for an object.
         """Get the note for an object.
 
 
@@ -675,7 +675,7 @@ class Notes:
         author: bytes | None = None,
         author: bytes | None = None,
         committer: bytes | None = None,
         committer: bytes | None = None,
         message: bytes | None = None,
         message: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes:
     ) -> bytes:
         """Set or update a note for an object.
         """Set or update a note for an object.
 
 
@@ -759,7 +759,7 @@ class Notes:
         author: bytes | None = None,
         author: bytes | None = None,
         committer: bytes | None = None,
         committer: bytes | None = None,
         message: bytes | None = None,
         message: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes | None:
     ) -> bytes | None:
         """Remove a note for an object.
         """Remove a note for an object.
 
 
@@ -837,7 +837,7 @@ class Notes:
     def list_notes(
     def list_notes(
         self,
         self,
         notes_ref: bytes | None = None,
         notes_ref: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> list[tuple[bytes, bytes]]:
     ) -> list[tuple[bytes, bytes]]:
         """List all notes in a notes ref.
         """List all notes in a notes ref.
 
 

+ 80 - 35
dulwich/object_store.py

@@ -36,7 +36,6 @@ from pathlib import Path
 from typing import (
 from typing import (
     TYPE_CHECKING,
     TYPE_CHECKING,
     BinaryIO,
     BinaryIO,
-    Optional,
     Protocol,
     Protocol,
 )
 )
 
 
@@ -353,7 +352,7 @@ class BaseObjectStore:
         self,
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[..., None] | None = None,
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store.
         """Add a set of objects to this object store.
 
 
         Args:
         Args:
@@ -387,7 +386,7 @@ class BaseObjectStore:
         want_unchanged: bool = False,
         want_unchanged: bool = False,
         include_trees: bool = False,
         include_trees: bool = False,
         change_type_same: bool = False,
         change_type_same: bool = False,
-        rename_detector: Optional["RenameDetector"] = None,
+        rename_detector: "RenameDetector | None" = None,
         paths: Sequence[bytes] | None = None,
         paths: Sequence[bytes] | None = None,
     ) -> Iterator[
     ) -> Iterator[
         tuple[
         tuple[
@@ -659,7 +658,7 @@ class BaseObjectStore:
             if sha.startswith(prefix):
             if sha.startswith(prefix):
                 yield sha
                 yield sha
 
 
-    def get_commit_graph(self) -> Optional["CommitGraph"]:
+    def get_commit_graph(self) -> "CommitGraph | None":
         """Get the commit graph for this object store.
         """Get the commit graph for this object store.
 
 
         Returns:
         Returns:
@@ -719,7 +718,7 @@ class PackCapableObjectStore(BaseObjectStore, PackedObjectContainer):
         count: int,
         count: int,
         unpacked_objects: Iterator["UnpackedObject"],
         unpacked_objects: Iterator["UnpackedObject"],
         progress: Callable[..., None] | None = None,
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add pack data to this object store.
         """Add pack data to this object store.
 
 
         Args:
         Args:
@@ -849,7 +848,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         count: int,
         count: int,
         unpacked_objects: Iterator[UnpackedObject],
         unpacked_objects: Iterator[UnpackedObject],
         progress: Callable[..., None] | None = None,
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add pack data to this object store.
         """Add pack data to this object store.
 
 
         Args:
         Args:
@@ -1310,7 +1309,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         self,
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[[str], None] | None = None,
         progress: Callable[[str], None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store.
         """Add a set of objects to this object store.
 
 
         Args:
         Args:
@@ -1329,12 +1328,13 @@ class DiskObjectStore(PackBasedObjectStore):
 
 
     path: str | os.PathLike[str]
     path: str | os.PathLike[str]
     pack_dir: str | os.PathLike[str]
     pack_dir: str | os.PathLike[str]
-    _alternates: list["BaseObjectStore"] | None
-    _commit_graph: Optional["CommitGraph"]
+    _alternates: "list[BaseObjectStore] | None"
+    _commit_graph: "CommitGraph | None"
 
 
     def __init__(
     def __init__(
         self,
         self,
         path: str | os.PathLike[str],
         path: str | os.PathLike[str],
+        *,
         loose_compression_level: int = -1,
         loose_compression_level: int = -1,
         pack_compression_level: int = -1,
         pack_compression_level: int = -1,
         pack_index_version: int | None = None,
         pack_index_version: int | None = None,
@@ -1348,6 +1348,8 @@ class DiskObjectStore(PackBasedObjectStore):
         pack_write_bitmaps: bool = False,
         pack_write_bitmaps: bool = False,
         pack_write_bitmap_hash_cache: bool = True,
         pack_write_bitmap_hash_cache: bool = True,
         pack_write_bitmap_lookup_table: bool = True,
         pack_write_bitmap_lookup_table: bool = True,
+        file_mode: int | None = None,
+        dir_mode: int | None = None,
     ) -> None:
     ) -> None:
         """Open an object store.
         """Open an object store.
 
 
@@ -1366,6 +1368,8 @@ class DiskObjectStore(PackBasedObjectStore):
           pack_write_bitmaps: whether to write bitmap indexes for packs
           pack_write_bitmaps: whether to write bitmap indexes for packs
           pack_write_bitmap_hash_cache: whether to include name-hash cache in bitmaps
           pack_write_bitmap_hash_cache: whether to include name-hash cache in bitmaps
           pack_write_bitmap_lookup_table: whether to include lookup table in bitmaps
           pack_write_bitmap_lookup_table: whether to include lookup table in bitmaps
+          file_mode: File permission mask for shared repository
+          dir_mode: Directory permission mask for shared repository
         """
         """
         super().__init__(
         super().__init__(
             pack_compression_level=pack_compression_level,
             pack_compression_level=pack_compression_level,
@@ -1387,6 +1391,8 @@ class DiskObjectStore(PackBasedObjectStore):
         self.pack_write_bitmaps = pack_write_bitmaps
         self.pack_write_bitmaps = pack_write_bitmaps
         self.pack_write_bitmap_hash_cache = pack_write_bitmap_hash_cache
         self.pack_write_bitmap_hash_cache = pack_write_bitmap_hash_cache
         self.pack_write_bitmap_lookup_table = pack_write_bitmap_lookup_table
         self.pack_write_bitmap_lookup_table = pack_write_bitmap_lookup_table
+        self.file_mode = file_mode
+        self.dir_mode = dir_mode
 
 
         # Commit graph support - lazy loaded
         # Commit graph support - lazy loaded
         self._commit_graph = None
         self._commit_graph = None
@@ -1402,13 +1408,20 @@ class DiskObjectStore(PackBasedObjectStore):
 
 
     @classmethod
     @classmethod
     def from_config(
     def from_config(
-        cls, path: str | os.PathLike[str], config: "Config"
+        cls,
+        path: str | os.PathLike[str],
+        config: "Config",
+        *,
+        file_mode: int | None = None,
+        dir_mode: int | None = None,
     ) -> "DiskObjectStore":
     ) -> "DiskObjectStore":
         """Create a DiskObjectStore from a configuration object.
         """Create a DiskObjectStore from a configuration object.
 
 
         Args:
         Args:
           path: Path to the object store directory
           path: Path to the object store directory
           config: Configuration object to read settings from
           config: Configuration object to read settings from
+          file_mode: Optional file permission mask for shared repository
+          dir_mode: Optional directory permission mask for shared repository
 
 
         Returns:
         Returns:
           New DiskObjectStore instance configured according to config
           New DiskObjectStore instance configured according to config
@@ -1490,19 +1503,21 @@ class DiskObjectStore(PackBasedObjectStore):
 
 
         instance = cls(
         instance = cls(
             path,
             path,
-            loose_compression_level,
-            pack_compression_level,
-            pack_index_version,
-            pack_delta_window_size,
-            pack_window_memory,
-            pack_delta_cache_size,
-            pack_depth,
-            pack_threads,
-            pack_big_file_threshold,
-            fsync_object_files,
-            pack_write_bitmaps,
-            pack_write_bitmap_hash_cache,
-            pack_write_bitmap_lookup_table,
+            loose_compression_level=loose_compression_level,
+            pack_compression_level=pack_compression_level,
+            pack_index_version=pack_index_version,
+            pack_delta_window_size=pack_delta_window_size,
+            pack_window_memory=pack_window_memory,
+            pack_delta_cache_size=pack_delta_cache_size,
+            pack_depth=pack_depth,
+            pack_threads=pack_threads,
+            pack_big_file_threshold=pack_big_file_threshold,
+            fsync_object_files=fsync_object_files,
+            pack_write_bitmaps=pack_write_bitmaps,
+            pack_write_bitmap_hash_cache=pack_write_bitmap_hash_cache,
+            pack_write_bitmap_lookup_table=pack_write_bitmap_lookup_table,
+            file_mode=file_mode,
+            dir_mode=dir_mode,
         )
         )
         instance._use_commit_graph = use_commit_graph
         instance._use_commit_graph = use_commit_graph
         return instance
         return instance
@@ -1540,12 +1555,16 @@ class DiskObjectStore(PackBasedObjectStore):
 
 
     def add_alternate_path(self, path: str | os.PathLike[str]) -> None:
     def add_alternate_path(self, path: str | os.PathLike[str]) -> None:
         """Add an alternate path to this object store."""
         """Add an alternate path to this object store."""
+        info_dir = os.path.join(self.path, INFODIR)
         try:
         try:
-            os.mkdir(os.path.join(self.path, INFODIR))
+            os.mkdir(info_dir)
+            if self.dir_mode is not None:
+                os.chmod(info_dir, self.dir_mode)
         except FileExistsError:
         except FileExistsError:
             pass
             pass
         alternates_path = os.path.join(self.path, INFODIR, "alternates")
         alternates_path = os.path.join(self.path, INFODIR, "alternates")
-        with GitFile(alternates_path, "wb") as f:
+        mask = self.file_mode if self.file_mode is not None else 0o644
+        with GitFile(alternates_path, "wb", mask=mask) as f:
             try:
             try:
                 orig_f = open(alternates_path, "rb")
                 orig_f = open(alternates_path, "rb")
             except FileNotFoundError:
             except FileNotFoundError:
@@ -1772,8 +1791,12 @@ class DiskObjectStore(PackBasedObjectStore):
         os.rename(path, target_pack_path)
         os.rename(path, target_pack_path)
 
 
         # Write the index.
         # Write the index.
+        mask = self.file_mode if self.file_mode is not None else PACK_MODE
         with GitFile(
         with GitFile(
-            target_index_path, "wb", mask=PACK_MODE, fsync=self.fsync_object_files
+            target_index_path,
+            "wb",
+            mask=mask,
+            fsync=self.fsync_object_files,
         ) as index_file:
         ) as index_file:
             write_pack_index(
             write_pack_index(
                 index_file, entries, pack_sha, version=self.pack_index_version
                 index_file, entries, pack_sha, version=self.pack_index_version
@@ -1871,9 +1894,10 @@ class DiskObjectStore(PackBasedObjectStore):
 
 
         fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
         fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
         f = os.fdopen(fd, "w+b")
         f = os.fdopen(fd, "w+b")
-        os.chmod(path, PACK_MODE)
+        mask = self.file_mode if self.file_mode is not None else PACK_MODE
+        os.chmod(path, mask)
 
 
-        def commit() -> Optional["Pack"]:
+        def commit() -> "Pack | None":
             if f.tell() > 0:
             if f.tell() > 0:
                 f.seek(0)
                 f.seek(0)
 
 
@@ -1904,34 +1928,52 @@ class DiskObjectStore(PackBasedObjectStore):
         dir = os.path.dirname(path)
         dir = os.path.dirname(path)
         try:
         try:
             os.mkdir(dir)
             os.mkdir(dir)
+            if self.dir_mode is not None:
+                os.chmod(dir, self.dir_mode)
         except FileExistsError:
         except FileExistsError:
             pass
             pass
         if os.path.exists(path):
         if os.path.exists(path):
             return  # Already there, no need to write again
             return  # Already there, no need to write again
-        with GitFile(path, "wb", mask=PACK_MODE, fsync=self.fsync_object_files) as f:
+        mask = self.file_mode if self.file_mode is not None else PACK_MODE
+        with GitFile(path, "wb", mask=mask, fsync=self.fsync_object_files) as f:
             f.write(
             f.write(
                 obj.as_legacy_object(compression_level=self.loose_compression_level)
                 obj.as_legacy_object(compression_level=self.loose_compression_level)
             )
             )
 
 
     @classmethod
     @classmethod
-    def init(cls, path: str | os.PathLike[str]) -> "DiskObjectStore":
+    def init(
+        cls,
+        path: str | os.PathLike[str],
+        *,
+        file_mode: int | None = None,
+        dir_mode: int | None = None,
+    ) -> "DiskObjectStore":
         """Initialize a new disk object store.
         """Initialize a new disk object store.
 
 
         Creates the necessary directory structure for a Git object store.
         Creates the necessary directory structure for a Git object store.
 
 
         Args:
         Args:
           path: Path where the object store should be created
           path: Path where the object store should be created
+          file_mode: Optional file permission mask for shared repository
+          dir_mode: Optional directory permission mask for shared repository
 
 
         Returns:
         Returns:
           New DiskObjectStore instance
           New DiskObjectStore instance
         """
         """
         try:
         try:
             os.mkdir(path)
             os.mkdir(path)
+            if dir_mode is not None:
+                os.chmod(path, dir_mode)
         except FileExistsError:
         except FileExistsError:
             pass
             pass
-        os.mkdir(os.path.join(path, "info"))
-        os.mkdir(os.path.join(path, PACKDIR))
-        return cls(path)
+        info_path = os.path.join(path, "info")
+        pack_path = os.path.join(path, PACKDIR)
+        os.mkdir(info_path)
+        os.mkdir(pack_path)
+        if dir_mode is not None:
+            os.chmod(info_path, dir_mode)
+            os.chmod(pack_path, dir_mode)
+        return cls(path, file_mode=file_mode, dir_mode=dir_mode)
 
 
     def iter_prefix(self, prefix: bytes) -> Iterator[bytes]:
     def iter_prefix(self, prefix: bytes) -> Iterator[bytes]:
         """Iterate over all object SHAs with the given prefix.
         """Iterate over all object SHAs with the given prefix.
@@ -1975,7 +2017,7 @@ class DiskObjectStore(PackBasedObjectStore):
                     seen.add(sha)
                     seen.add(sha)
                     yield sha
                     yield sha
 
 
-    def get_commit_graph(self) -> Optional["CommitGraph"]:
+    def get_commit_graph(self) -> "CommitGraph | None":
         """Get the commit graph for this object store.
         """Get the commit graph for this object store.
 
 
         Returns:
         Returns:
@@ -2053,10 +2095,13 @@ class DiskObjectStore(PackBasedObjectStore):
                 # Ensure the info directory exists
                 # Ensure the info directory exists
                 info_dir = os.path.join(self.path, "info")
                 info_dir = os.path.join(self.path, "info")
                 os.makedirs(info_dir, exist_ok=True)
                 os.makedirs(info_dir, exist_ok=True)
+                if self.dir_mode is not None:
+                    os.chmod(info_dir, self.dir_mode)
 
 
                 # Write using GitFile for atomic operation
                 # Write using GitFile for atomic operation
                 graph_path = os.path.join(info_dir, "commit-graph")
                 graph_path = os.path.join(info_dir, "commit-graph")
-                with GitFile(graph_path, "wb") as f:
+                mask = self.file_mode if self.file_mode is not None else 0o644
+                with GitFile(graph_path, "wb", mask=mask) as f:
                     assert isinstance(
                     assert isinstance(
                         f, _GitFile
                         f, _GitFile
                     )  # GitFile in write mode always returns _GitFile
                     )  # GitFile in write mode always returns _GitFile

+ 7 - 8
dulwich/objects.py

@@ -36,7 +36,6 @@ from typing import (
     TYPE_CHECKING,
     TYPE_CHECKING,
     NamedTuple,
     NamedTuple,
     TypeVar,
     TypeVar,
-    Union,
 )
 )
 
 
 if sys.version_info >= (3, 11):
 if sys.version_info >= (3, 11):
@@ -395,11 +394,11 @@ class ShaFile:
     type_name: bytes
     type_name: bytes
     type_num: int
     type_num: int
     _chunked_text: list[bytes] | None
     _chunked_text: list[bytes] | None
-    _sha: Union[FixedSha, None, "HASH"]
+    _sha: "FixedSha | None | HASH"
 
 
     @staticmethod
     @staticmethod
     def _parse_legacy_object_header(
     def _parse_legacy_object_header(
-        magic: bytes, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]
+        magic: bytes, f: BufferedIOBase | IO[bytes] | "_GitFile"
     ) -> "ShaFile":
     ) -> "ShaFile":
         """Parse a legacy object, creating it but not reading the file."""
         """Parse a legacy object, creating it but not reading the file."""
         bufsize = 1024
         bufsize = 1024
@@ -500,7 +499,7 @@ class ShaFile:
 
 
     @staticmethod
     @staticmethod
     def _parse_object_header(
     def _parse_object_header(
-        magic: bytes, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]
+        magic: bytes, f: BufferedIOBase | IO[bytes] | "_GitFile"
     ) -> "ShaFile":
     ) -> "ShaFile":
         """Parse a new style object, creating it but not reading the file."""
         """Parse a new style object, creating it but not reading the file."""
         num_type = (ord(magic[0:1]) >> 4) & 7
         num_type = (ord(magic[0:1]) >> 4) & 7
@@ -529,7 +528,7 @@ class ShaFile:
         return (b0 & 0x8F) == 0x08 and (word % 31) == 0
         return (b0 & 0x8F) == 0x08 and (word % 31) == 0
 
 
     @classmethod
     @classmethod
-    def _parse_file(cls, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]) -> "ShaFile":
+    def _parse_file(cls, f: BufferedIOBase | IO[bytes] | "_GitFile") -> "ShaFile":
         map = f.read()
         map = f.read()
         if not map:
         if not map:
             raise EmptyFileException("Corrupted empty file detected")
             raise EmptyFileException("Corrupted empty file detected")
@@ -561,7 +560,7 @@ class ShaFile:
             return cls.from_file(f)
             return cls.from_file(f)
 
 
     @classmethod
     @classmethod
-    def from_file(cls, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]) -> "ShaFile":
+    def from_file(cls, f: BufferedIOBase | IO[bytes] | "_GitFile") -> "ShaFile":
         """Get the contents of a SHA file on disk."""
         """Get the contents of a SHA file on disk."""
         try:
         try:
             obj = cls._parse_file(f)
             obj = cls._parse_file(f)
@@ -655,7 +654,7 @@ class ShaFile:
         """Returns the length of the raw string of this object."""
         """Returns the length of the raw string of this object."""
         return sum(map(len, self.as_raw_chunks()))
         return sum(map(len, self.as_raw_chunks()))
 
 
-    def sha(self) -> Union[FixedSha, "HASH"]:
+    def sha(self) -> "FixedSha | HASH":
         """The SHA1 object that is the name of this object."""
         """The SHA1 object that is the name of this object."""
         if self._sha is None or self._needs_serialization:
         if self._sha is None or self._needs_serialization:
             # this is a local because as_raw_chunks() overwrites self._sha
             # this is a local because as_raw_chunks() overwrites self._sha
@@ -891,7 +890,7 @@ class Tag(ShaFile):
 
 
     _message: bytes | None
     _message: bytes | None
     _name: bytes | None
     _name: bytes | None
-    _object_class: type["ShaFile"] | None
+    _object_class: "type[ShaFile] | None"
     _object_sha: bytes | None
     _object_sha: bytes | None
     _signature: bytes | None
     _signature: bytes | None
     _tag_time: int | None
     _tag_time: int | None

+ 9 - 9
dulwich/objectspec.py

@@ -22,7 +22,7 @@
 """Object specification."""
 """Object specification."""
 
 
 from collections.abc import Sequence
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING
 
 
 from .objects import Commit, ShaFile, Tag, Tree
 from .objects import Commit, ShaFile, Tag, Tree
 from .refs import local_branch_name, local_tag_name
 from .refs import local_branch_name, local_tag_name
@@ -290,7 +290,7 @@ def parse_tree(repo: "BaseRepo", treeish: bytes | str | Tree | Commit | Tag) ->
     return o
     return o
 
 
 
 
-def parse_ref(container: Union["Repo", "RefsContainer"], refspec: str | bytes) -> "Ref":
+def parse_ref(container: "Repo | RefsContainer", refspec: str | bytes) -> "Ref":
     """Parse a string referring to a reference.
     """Parse a string referring to a reference.
 
 
     Args:
     Args:
@@ -316,11 +316,11 @@ def parse_ref(container: Union["Repo", "RefsContainer"], refspec: str | bytes) -
 
 
 
 
 def parse_reftuple(
 def parse_reftuple(
-    lh_container: Union["Repo", "RefsContainer"],
-    rh_container: Union["Repo", "RefsContainer"],
+    lh_container: "Repo | RefsContainer",
+    rh_container: "Repo | RefsContainer",
     refspec: str | bytes,
     refspec: str | bytes,
     force: bool = False,
     force: bool = False,
-) -> tuple[Optional["Ref"], Optional["Ref"], bool]:
+) -> tuple["Ref | None", "Ref | None", bool]:
     """Parse a reftuple spec.
     """Parse a reftuple spec.
 
 
     Args:
     Args:
@@ -359,11 +359,11 @@ def parse_reftuple(
 
 
 
 
 def parse_reftuples(
 def parse_reftuples(
-    lh_container: Union["Repo", "RefsContainer"],
-    rh_container: Union["Repo", "RefsContainer"],
+    lh_container: "Repo | RefsContainer",
+    rh_container: "Repo | RefsContainer",
     refspecs: bytes | Sequence[bytes],
     refspecs: bytes | Sequence[bytes],
     force: bool = False,
     force: bool = False,
-) -> list[tuple[Optional["Ref"], Optional["Ref"], bool]]:
+) -> list[tuple["Ref | None", "Ref | None", bool]]:
     """Parse a list of reftuple specs to a list of reftuples.
     """Parse a list of reftuple specs to a list of reftuples.
 
 
     Args:
     Args:
@@ -385,7 +385,7 @@ def parse_reftuples(
 
 
 
 
 def parse_refs(
 def parse_refs(
-    container: Union["Repo", "RefsContainer"],
+    container: "Repo | RefsContainer",
     refspecs: bytes | str | Sequence[bytes | str],
     refspecs: bytes | str | Sequence[bytes | str],
 ) -> list["Ref"]:
 ) -> list["Ref"]:
     """Parse a list of refspecs to a list of refs.
     """Parse a list of refspecs to a list of refs.

+ 6 - 13
dulwich/pack.py

@@ -60,10 +60,8 @@ from typing import (
     Any,
     Any,
     BinaryIO,
     BinaryIO,
     Generic,
     Generic,
-    Optional,
     Protocol,
     Protocol,
     TypeVar,
     TypeVar,
-    Union,
 )
 )
 
 
 try:
 try:
@@ -73,11 +71,6 @@ except ImportError:
 else:
 else:
     has_mmap = True
     has_mmap = True
 
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     from _hashlib import HASH as HashObject
     from _hashlib import HASH as HashObject
 
 
@@ -138,7 +131,7 @@ class ObjectContainer(Protocol):
         self,
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[..., None] | None = None,
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store.
         """Add a set of objects to this object store.
 
 
         Args:
         Args:
@@ -153,7 +146,7 @@ class ObjectContainer(Protocol):
     def __getitem__(self, sha1: bytes) -> ShaFile:
     def __getitem__(self, sha1: bytes) -> ShaFile:
         """Retrieve an object."""
         """Retrieve an object."""
 
 
-    def get_commit_graph(self) -> Optional["CommitGraph"]:
+    def get_commit_graph(self) -> "CommitGraph | None":
         """Get the commit graph for this object store.
         """Get the commit graph for this object store.
 
 
         Returns:
         Returns:
@@ -759,7 +752,7 @@ class FilePackIndex(PackIndex):
         self,
         self,
         filename: str | os.PathLike[str],
         filename: str | os.PathLike[str],
         file: IO[bytes] | _GitFile | None = None,
         file: IO[bytes] | _GitFile | None = None,
-        contents: Union[bytes, "mmap.mmap"] | None = None,
+        contents: "bytes | mmap.mmap | None" = None,
         size: int | None = None,
         size: int | None = None,
     ) -> None:
     ) -> None:
         """Create a pack index object.
         """Create a pack index object.
@@ -1412,7 +1405,7 @@ class PackStreamCopier(PackStreamReader):
         read_all: Callable[[int], bytes],
         read_all: Callable[[int], bytes],
         read_some: Callable[[int], bytes] | None,
         read_some: Callable[[int], bytes] | None,
         outfile: IO[bytes],
         outfile: IO[bytes],
-        delta_iter: Optional["DeltaChainIterator[UnpackedObject]"] = None,
+        delta_iter: "DeltaChainIterator[UnpackedObject] | None" = None,
     ) -> None:
     ) -> None:
         """Initialize the copier.
         """Initialize the copier.
 
 
@@ -2521,7 +2514,7 @@ def write_pack_object(
     write: Callable[[bytes], int],
     write: Callable[[bytes], int],
     type: int,
     type: int,
     object: list[bytes] | tuple[bytes | int, list[bytes]],
     object: list[bytes] | tuple[bytes | int, list[bytes]],
-    sha: Optional["HashObject"] = None,
+    sha: "HashObject | None" = None,
     compression_level: int = -1,
     compression_level: int = -1,
 ) -> int:
 ) -> int:
     """Write pack object to a file.
     """Write pack object to a file.
@@ -3524,7 +3517,7 @@ class Pack:
         return self._idx
         return self._idx
 
 
     @property
     @property
-    def bitmap(self) -> Optional["PackBitmap"]:
+    def bitmap(self) -> "PackBitmap | None":
         """The bitmap being used, if available.
         """The bitmap being used, if available.
 
 
         Returns:
         Returns:

+ 3 - 4
dulwich/patch.py

@@ -37,7 +37,6 @@ from typing import (
     IO,
     IO,
     TYPE_CHECKING,
     TYPE_CHECKING,
     BinaryIO,
     BinaryIO,
-    Optional,
     TextIO,
     TextIO,
 )
 )
 
 
@@ -487,8 +486,8 @@ def gen_diff_header(
 # TODO(jelmer): Support writing unicode, rather than bytes.
 # TODO(jelmer): Support writing unicode, rather than bytes.
 def write_blob_diff(
 def write_blob_diff(
     f: IO[bytes],
     f: IO[bytes],
-    old_file: tuple[bytes | None, int | None, Optional["Blob"]],
-    new_file: tuple[bytes | None, int | None, Optional["Blob"]],
+    old_file: tuple[bytes | None, int | None, "Blob | None"],
+    new_file: tuple[bytes | None, int | None, "Blob | None"],
     diff_algorithm: str | None = None,
     diff_algorithm: str | None = None,
 ) -> None:
 ) -> None:
     """Write blob diff.
     """Write blob diff.
@@ -506,7 +505,7 @@ def write_blob_diff(
     patched_old_path = patch_filename(old_path, b"a")
     patched_old_path = patch_filename(old_path, b"a")
     patched_new_path = patch_filename(new_path, b"b")
     patched_new_path = patch_filename(new_path, b"b")
 
 
-    def lines(blob: Optional["Blob"]) -> list[bytes]:
+    def lines(blob: "Blob | None") -> list[bytes]:
         """Split blob content into lines.
         """Split blob content into lines.
 
 
         Args:
         Args:

+ 457 - 163
dulwich/porcelain.py

@@ -105,7 +105,6 @@ from typing import (
     TYPE_CHECKING,
     TYPE_CHECKING,
     Any,
     Any,
     BinaryIO,
     BinaryIO,
-    Optional,
     TextIO,
     TextIO,
     TypedDict,
     TypedDict,
     TypeVar,
     TypeVar,
@@ -114,10 +113,11 @@ from typing import (
 )
 )
 
 
 if sys.version_info >= (3, 12):
 if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
     from typing import override
     from typing import override
 else:
 else:
-    from typing_extensions import Buffer, override
+    from typing_extensions import override
+
+from ._typing import Buffer
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     import urllib3
     import urllib3
@@ -198,6 +198,7 @@ from .refs import (
     LOCAL_REMOTE_PREFIX,
     LOCAL_REMOTE_PREFIX,
     LOCAL_REPLACE_PREFIX,
     LOCAL_REPLACE_PREFIX,
     LOCAL_TAG_PREFIX,
     LOCAL_TAG_PREFIX,
+    DictRefsContainer,
     Ref,
     Ref,
     SymrefLoop,
     SymrefLoop,
     _import_remote_refs,
     _import_remote_refs,
@@ -205,6 +206,7 @@ from .refs import (
     local_branch_name,
     local_branch_name,
     local_replace_name,
     local_replace_name,
     local_tag_name,
     local_tag_name,
+    parse_remote_ref,
     shorten_ref_name,
     shorten_ref_name,
 )
 )
 from .repo import BaseRepo, Repo, get_user_identity
 from .repo import BaseRepo, Repo, get_user_identity
@@ -244,7 +246,7 @@ class TransportKwargs(TypedDict, total=False):
     password: str | None
     password: str | None
     key_filename: str | None
     key_filename: str | None
     ssh_command: str | None
     ssh_command: str | None
-    pool_manager: Optional["urllib3.PoolManager"]
+    pool_manager: "urllib3.PoolManager | None"
 
 
 
 
 @dataclass
 @dataclass
@@ -281,14 +283,25 @@ class NoneStream(RawIOBase):
         """
         """
         return b""
         return b""
 
 
-    @override
-    def readinto(self, b: Buffer) -> int | None:
-        return 0
+    if sys.version_info >= (3, 12):
+
+        @override
+        def readinto(self, b: Buffer) -> int | None:
+            return 0
+
+        @override
+        def write(self, b: Buffer) -> int | None:
+            return len(cast(bytes, b)) if b else 0
+
+    else:
+
+        @override
+        def readinto(self, b: bytearray | memoryview) -> int | None:  # type: ignore[override]
+            return 0
 
 
-    @override
-    def write(self, b: Buffer) -> int | None:
-        # All Buffer implementations (bytes, bytearray, memoryview) support len()
-        return len(b) if b else 0  # type: ignore[arg-type]
+        @override
+        def write(self, b: bytes | bytearray | memoryview) -> int | None:  # type: ignore[override]
+            return len(b) if b else 0
 
 
 
 
 default_bytes_out_stream: BinaryIO = cast(
 default_bytes_out_stream: BinaryIO = cast(
@@ -630,8 +643,6 @@ def _get_variables(repo: RepoPath = ".") -> dict[str, str]:
     Returns:
     Returns:
       A dictionary of all logical variables with values
       A dictionary of all logical variables with values
     """
     """
-    from .repo import get_user_identity
-
     with open_repo_closing(repo) as repo_obj:
     with open_repo_closing(repo) as repo_obj:
         config = repo_obj.get_config_stack()
         config = repo_obj.get_config_stack()
 
 
@@ -827,8 +838,6 @@ def commit(
             if normalizer is not None:
             if normalizer is not None:
 
 
                 def filter_callback(data: bytes, path: bytes) -> bytes:
                 def filter_callback(data: bytes, path: bytes) -> bytes:
-                    from dulwich.objects import Blob
-
                     blob = Blob()
                     blob = Blob()
                     blob.data = data
                     blob.data = data
                     normalized_blob = normalizer.checkin_normalize(blob, path)
                     normalized_blob = normalizer.checkin_normalize(blob, path)
@@ -1066,7 +1075,7 @@ def stripspace(
         >>> stripspace(b"line\\n", comment_lines=True)
         >>> stripspace(b"line\\n", comment_lines=True)
         b'# line\\n'
         b'# line\\n'
     """
     """
-    from dulwich.stripspace import stripspace as _stripspace
+    from .stripspace import stripspace as _stripspace
 
 
     # Convert text to bytes
     # Convert text to bytes
     if isinstance(text, str):
     if isinstance(text, str):
@@ -1290,8 +1299,6 @@ def add(
         if normalizer is not None:
         if normalizer is not None:
 
 
             def filter_callback(data: bytes, path: bytes) -> bytes:
             def filter_callback(data: bytes, path: bytes) -> bytes:
-                from dulwich.objects import Blob
-
                 blob = Blob()
                 blob = Blob()
                 blob.data = data
                 blob.data = data
                 normalized_blob = normalizer.checkin_normalize(blob, path)
                 normalized_blob = normalizer.checkin_normalize(blob, path)
@@ -2838,42 +2845,6 @@ def reset(
 
 
         elif mode == "hard":
         elif mode == "hard":
             # Hard reset: update HEAD, index, and working tree
             # Hard reset: update HEAD, index, and working tree
-            # Get configuration for working directory update
-            config = r.get_config()
-            honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
-
-            if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
-                validate_path_element = validate_path_element_ntfs
-            elif config.get_boolean(
-                b"core", b"core.protectHFS", sys.platform == "darwin"
-            ):
-                validate_path_element = validate_path_element_hfs
-            else:
-                validate_path_element = validate_path_element_default
-
-            if config.get_boolean(b"core", b"symlinks", True):
-
-                def symlink_wrapper(
-                    source: str | bytes | os.PathLike[str],
-                    target: str | bytes | os.PathLike[str],
-                ) -> None:
-                    symlink(source, target)  # type: ignore[arg-type,unused-ignore]
-
-                symlink_fn = symlink_wrapper
-            else:
-
-                def symlink_fallback(
-                    source: str | bytes | os.PathLike[str],
-                    target: str | bytes | os.PathLike[str],
-                ) -> None:
-                    mode = "w" + ("b" if isinstance(source, bytes) else "")
-                    with open(target, mode) as f:
-                        f.write(source)
-
-                symlink_fn = symlink_fallback
-
-            # Update working tree and index
-            blob_normalizer = r.get_blob_normalizer()
             # For reset --hard, use current index tree as old tree to get proper deletions
             # For reset --hard, use current index tree as old tree to get proper deletions
             index = r.open_index()
             index = r.open_index()
             if len(index) > 0:
             if len(index) > 0:
@@ -2882,6 +2853,12 @@ def reset(
                 # Empty index
                 # Empty index
                 index_tree_id = None
                 index_tree_id = None
 
 
+            # Get configuration for working tree updates
+            honor_filemode, validate_path_element, symlink_fn = (
+                _get_worktree_update_config(r)
+            )
+
+            blob_normalizer = r.get_blob_normalizer()
             changes = tree_changes(
             changes = tree_changes(
                 r.object_store, index_tree_id, tree.id, want_unchanged=True
                 r.object_store, index_tree_id, tree.id, want_unchanged=True
             )
             )
@@ -3004,8 +2981,6 @@ def push(
         remote_changed_refs: dict[bytes, bytes | None] = {}
         remote_changed_refs: dict[bytes, bytes | None] = {}
 
 
         def update_refs(refs: dict[bytes, bytes]) -> dict[bytes, bytes]:
         def update_refs(refs: dict[bytes, bytes]) -> dict[bytes, bytes]:
-            from .refs import DictRefsContainer
-
             remote_refs = DictRefsContainer(refs)
             remote_refs = DictRefsContainer(refs)
             selected_refs.extend(
             selected_refs.extend(
                 parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
                 parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
@@ -3074,10 +3049,14 @@ def push(
         for ref, error in (result.ref_status or {}).items():
         for ref, error in (result.ref_status or {}).items():
             if error is not None:
             if error is not None:
                 errstream.write(
                 errstream.write(
-                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
+                    f"Push of ref {ref.decode('utf-8', 'replace')} failed: {error}\n".encode(
+                        err_encoding
+                    )
                 )
                 )
             else:
             else:
-                errstream.write(b"Ref %s updated\n" % ref)
+                errstream.write(
+                    f"Ref {ref.decode('utf-8', 'replace')} updated\n".encode()
+                )
 
 
         if remote_name is not None:
         if remote_name is not None:
             _import_remote_refs(r.refs, remote_name, remote_changed_refs)
             _import_remote_refs(r.refs, remote_name, remote_changed_refs)
@@ -3148,8 +3127,6 @@ def pull(
         def determine_wants(
         def determine_wants(
             remote_refs: dict[bytes, bytes], depth: int | None = None
             remote_refs: dict[bytes, bytes], depth: int | None = None
         ) -> list[bytes]:
         ) -> list[bytes]:
-            from .refs import DictRefsContainer
-
             remote_refs_container = DictRefsContainer(remote_refs)
             remote_refs_container = DictRefsContainer(remote_refs)
             selected_refs.extend(
             selected_refs.extend(
                 parse_reftuples(
                 parse_reftuples(
@@ -3286,18 +3263,17 @@ def status(
         untracked - list of untracked, un-ignored & non-.git paths
         untracked - list of untracked, un-ignored & non-.git paths
     """
     """
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
+        # Open the index once and reuse it for both staged and unstaged checks
+        index = r.open_index()
         # 1. Get status of staged
         # 1. Get status of staged
-        tracked_changes = get_tree_changes(r)
+        tracked_changes = get_tree_changes(r, index)
         # 2. Get status of unstaged
         # 2. Get status of unstaged
-        index = r.open_index()
         normalizer = r.get_blob_normalizer()
         normalizer = r.get_blob_normalizer()
 
 
         # Create a wrapper that handles the bytes -> Blob conversion
         # Create a wrapper that handles the bytes -> Blob conversion
         if normalizer is not None:
         if normalizer is not None:
 
 
             def filter_callback(data: bytes, path: bytes) -> bytes:
             def filter_callback(data: bytes, path: bytes) -> bytes:
-                from dulwich.objects import Blob
-
                 blob = Blob()
                 blob = Blob()
                 blob.data = data
                 blob.data = data
                 normalized_blob = normalizer.checkin_normalize(blob, path)
                 normalized_blob = normalizer.checkin_normalize(blob, path)
@@ -3684,15 +3660,19 @@ def grep(
                         outstream.write(f"{path_str}:{line_str}\n")
                         outstream.write(f"{path_str}:{line_str}\n")
 
 
 
 
-def get_tree_changes(repo: RepoPath) -> dict[str, list[str | bytes]]:
+def get_tree_changes(
+    repo: RepoPath, index: Index | None = None
+) -> dict[str, list[str | bytes]]:
     """Return add/delete/modify changes to tree by comparing index to HEAD.
     """Return add/delete/modify changes to tree by comparing index to HEAD.
 
 
     Args:
     Args:
       repo: repo path or object
       repo: repo path or object
+      index: optional Index object to reuse (avoids re-opening the index)
     Returns: dict with lists for each type of change
     Returns: dict with lists for each type of change
     """
     """
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
-        index = r.open_index()
+        if index is None:
+            index = r.open_index()
 
 
         # Compares the Index to the HEAD & determines changes
         # Compares the Index to the HEAD & determines changes
         # Iterate through the changes and report add/delete/modify
         # Iterate through the changes and report add/delete/modify
@@ -4498,8 +4478,6 @@ def show_ref(
                 try:
                 try:
                     obj = r.get_object(sha)
                     obj = r.get_object(sha)
                     # Peel tag objects to get the underlying commit/object
                     # Peel tag objects to get the underlying commit/object
-                    from .objects import Tag
-
                     while obj.type_name == b"tag":
                     while obj.type_name == b"tag":
                         assert isinstance(obj, Tag)
                         assert isinstance(obj, Tag)
                         _obj_class, sha = obj.object
                         _obj_class, sha = obj.object
@@ -5079,6 +5057,168 @@ def check_ignore(
                 yield _quote_path(output_path) if quote_path else output_path
                 yield _quote_path(output_path) if quote_path else output_path
 
 
 
 
+def _get_current_head_tree(repo: Repo) -> bytes | None:
+    """Get the current HEAD tree ID.
+
+    Args:
+      repo: Repository object
+
+    Returns:
+      Tree ID of current HEAD, or None if no HEAD exists (empty repo)
+    """
+    try:
+        current_head = repo.refs[b"HEAD"]
+        current_commit = repo[current_head]
+        assert isinstance(current_commit, Commit), "Expected a Commit object"
+        tree_id: bytes = current_commit.tree
+        return tree_id
+    except KeyError:
+        # No HEAD yet (empty repo)
+        return None
+
+
+def _check_uncommitted_changes(
+    repo: Repo, target_tree_id: bytes, force: bool = False
+) -> None:
+    """Check for uncommitted changes that would conflict with a checkout/switch.
+
+    Args:
+      repo: Repository object
+      target_tree_id: Tree ID to check conflicts against
+      force: If True, skip the check
+
+    Raises:
+      CheckoutError: If there are conflicting local changes
+    """
+    if force:
+        return
+
+    # Get current HEAD tree for comparison
+    current_tree_id = _get_current_head_tree(repo)
+    if current_tree_id is None:
+        # No HEAD yet (empty repo)
+        return
+
+    status_report = status(repo)
+    changes = []
+    # staged is a dict with 'add', 'delete', 'modify' keys
+    if isinstance(status_report.staged, dict):
+        changes.extend(status_report.staged.get("add", []))
+        changes.extend(status_report.staged.get("delete", []))
+        changes.extend(status_report.staged.get("modify", []))
+    # unstaged is a list
+    changes.extend(status_report.unstaged)
+
+    if changes:
+        # Check if any changes would conflict with checkout
+        target_tree_obj = repo[target_tree_id]
+        assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
+        target_tree = target_tree_obj
+        for change in changes:
+            if isinstance(change, str):
+                change = change.encode(DEFAULT_ENCODING)
+
+            try:
+                target_tree.lookup_path(repo.object_store.__getitem__, change)
+            except KeyError:
+                # File doesn't exist in target tree - change can be preserved
+                pass
+            else:
+                # File exists in target tree - would overwrite local changes
+                raise CheckoutError(
+                    f"Your local changes to '{change.decode()}' would be "
+                    "overwritten. Please commit or stash before switching."
+                )
+
+
+def _get_worktree_update_config(
+    repo: Repo,
+) -> tuple[
+    bool,
+    Callable[[bytes], bool],
+    Callable[[str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None],
+]:
+    """Get configuration for working tree updates.
+
+    Args:
+      repo: Repository object
+
+    Returns:
+      Tuple of (honor_filemode, validate_path_element, symlink_fn)
+    """
+    config = repo.get_config()
+    honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
+
+    if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
+        validate_path_element = validate_path_element_ntfs
+    elif config.get_boolean(b"core", b"core.protectHFS", sys.platform == "darwin"):
+        validate_path_element = validate_path_element_hfs
+    else:
+        validate_path_element = validate_path_element_default
+
+    if config.get_boolean(b"core", b"symlinks", True):
+
+        def symlink_wrapper(
+            source: str | bytes | os.PathLike[str],
+            target: str | bytes | os.PathLike[str],
+        ) -> None:
+            symlink(source, target)  # type: ignore[arg-type,unused-ignore]
+
+        symlink_fn = symlink_wrapper
+    else:
+
+        def symlink_fallback(
+            source: str | bytes | os.PathLike[str],
+            target: str | bytes | os.PathLike[str],
+        ) -> None:
+            mode = "w" + ("b" if isinstance(source, bytes) else "")
+            with open(target, mode) as f:
+                f.write(source)
+
+        symlink_fn = symlink_fallback
+
+    return honor_filemode, validate_path_element, symlink_fn
+
+
+def _perform_tree_switch(
+    repo: Repo,
+    current_tree_id: bytes | None,
+    target_tree_id: bytes,
+    force: bool = False,
+) -> None:
+    """Perform the actual working tree switch.
+
+    Args:
+      repo: Repository object
+      current_tree_id: Current tree ID (or None for empty repo)
+      target_tree_id: Target tree ID to switch to
+      force: If True, force removal of untracked files and allow overwriting modified files
+    """
+    honor_filemode, validate_path_element, symlink_fn = _get_worktree_update_config(
+        repo
+    )
+
+    # Get blob normalizer for line ending conversion
+    blob_normalizer = repo.get_blob_normalizer()
+
+    # Update working tree
+    tree_change_iterator: Iterator[TreeChange] = tree_changes(
+        repo.object_store, current_tree_id, target_tree_id
+    )
+    update_working_tree(
+        repo,
+        current_tree_id,
+        target_tree_id,
+        change_iterator=tree_change_iterator,
+        honor_filemode=honor_filemode,
+        validate_path_element=validate_path_element,
+        symlink_fn=symlink_fn,
+        force_remove_untracked=force,
+        blob_normalizer=blob_normalizer,
+        allow_overwrite_modified=force,
+    )
+
+
 def update_head(
 def update_head(
     repo: RepoPath,
     repo: RepoPath,
     target: str | bytes,
     target: str | bytes,
@@ -5233,102 +5373,257 @@ def checkout(
         target_tree_id = target_commit.tree
         target_tree_id = target_commit.tree
 
 
         # Get current HEAD tree for comparison
         # Get current HEAD tree for comparison
-        try:
-            current_head = r.refs[b"HEAD"]
-            current_commit = r[current_head]
-            assert isinstance(current_commit, Commit), "Expected a Commit object"
-            current_tree_id = current_commit.tree
-        except KeyError:
-            # No HEAD yet (empty repo)
-            current_tree_id = None
+        current_tree_id = _get_current_head_tree(r)
 
 
         # Check for uncommitted changes if not forcing
         # Check for uncommitted changes if not forcing
-        if not force and current_tree_id is not None:
-            status_report = status(r)
-            changes = []
-            # staged is a dict with 'add', 'delete', 'modify' keys
-            if isinstance(status_report.staged, dict):
-                changes.extend(status_report.staged.get("add", []))
-                changes.extend(status_report.staged.get("delete", []))
-                changes.extend(status_report.staged.get("modify", []))
-            # unstaged is a list
-            changes.extend(status_report.unstaged)
-            if changes:
-                # Check if any changes would conflict with checkout
-                target_tree_obj = r[target_tree_id]
-                assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
-                target_tree = target_tree_obj
-                for change in changes:
-                    if isinstance(change, str):
-                        change = change.encode(DEFAULT_ENCODING)
+        if current_tree_id is not None:
+            _check_uncommitted_changes(r, target_tree_id, force)
+
+        # Update working tree
+        _perform_tree_switch(r, current_tree_id, target_tree_id, force)
+
+        # Update HEAD
+        if new_branch:
+            # Create new branch and switch to it
+            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
+            update_head(r, new_branch)
+
+            # Set up tracking if creating from a remote branch
+            if isinstance(original_target, bytes) and target_bytes.startswith(
+                LOCAL_REMOTE_PREFIX
+            ):
+                try:
+                    remote_name, branch_name = parse_remote_ref(target_bytes)
+                    # Set tracking to refs/heads/<branch> on the remote
+                    set_branch_tracking(
+                        r, new_branch, remote_name, local_branch_name(branch_name)
+                    )
+                except ValueError:
+                    # Invalid remote ref format, skip tracking setup
+                    pass
+        else:
+            # Check if target is a branch name (with or without refs/heads/ prefix)
+            branch_ref = None
+            if (
+                isinstance(original_target, (str, bytes))
+                and target_bytes in r.refs.keys()
+            ):
+                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
+                    branch_ref = target_bytes
+            else:
+                # Try adding refs/heads/ prefix
+                potential_branch = (
+                    _make_branch_ref(target_bytes)
+                    if isinstance(original_target, (str, bytes))
+                    else None
+                )
+                if potential_branch in r.refs.keys():
+                    branch_ref = potential_branch
+
+            if branch_ref:
+                # It's a branch - update HEAD symbolically
+                update_head(r, branch_ref)
+            else:
+                # It's a tag, other ref, or commit SHA - detached HEAD
+                update_head(r, target_commit.id.decode("ascii"), detached=True)
+
+
+def restore(
+    repo: str | os.PathLike[str] | Repo,
+    paths: list[bytes | str],
+    source: str | bytes | Commit | Tag | None = None,
+    staged: bool = False,
+    worktree: bool = True,
+) -> None:
+    """Restore working tree files.
+
+    This is similar to 'git restore', allowing you to restore specific files
+    from a commit or the index without changing HEAD.
+
+    Args:
+      repo: Path to repository or repository object
+      paths: List of specific paths to restore
+      source: Branch name, tag, or commit SHA to restore from. If None, restores
+              staged files from HEAD, or worktree files from index
+      staged: Restore files in the index (--staged)
+      worktree: Restore files in the working tree (default: True)
+
+    Raises:
+      CheckoutError: If restore cannot be performed
+      ValueError: If neither staged nor worktree is specified
+      KeyError: If the source reference cannot be found
+    """
+    if not staged and not worktree:
+        raise ValueError("At least one of staged or worktree must be True")
+
+    with open_repo_closing(repo) as r:
+        from .index import _fs_to_tree_path, build_file_from_blob
+
+        # Determine the source tree
+        if source is None:
+            if staged:
+                # Restoring staged files from HEAD
+                try:
+                    source = r.refs[b"HEAD"]
+                except KeyError:
+                    raise CheckoutError("No HEAD reference found")
+            elif worktree:
+                # Restoring worktree files from index
+                from .index import ConflictedIndexEntry, IndexEntry
+
+                index = r.open_index()
+                for path in paths:
+                    if isinstance(path, str):
+                        tree_path = _fs_to_tree_path(path)
+                    else:
+                        tree_path = path
 
 
                     try:
                     try:
-                        target_tree.lookup_path(r.object_store.__getitem__, change)
+                        index_entry = index[tree_path]
+                        if isinstance(index_entry, ConflictedIndexEntry):
+                            raise CheckoutError(
+                                f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' has conflicts"
+                            )
+                        blob = r[index_entry.sha]
+                        assert isinstance(blob, Blob), "Expected a Blob object"
+
+                        full_path = os.path.join(os.fsencode(r.path), tree_path)
+                        mode = index_entry.mode
+
+                        # Use build_file_from_blob to write the file
+                        build_file_from_blob(blob, mode, full_path)
                     except KeyError:
                     except KeyError:
-                        # File doesn't exist in target tree - change can be preserved
-                        pass
-                    else:
-                        # File exists in target tree - would overwrite local changes
+                        # Path doesn't exist in index
                         raise CheckoutError(
                         raise CheckoutError(
-                            f"Your local changes to '{change.decode()}' would be "
-                            "overwritten by checkout. Please commit or stash before switching."
+                            f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not in index"
                         )
                         )
+                return
 
 
-        # Get configuration for working directory update
-        config = r.get_config()
-        honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
+        # source is not None at this point
+        assert source is not None
+        # Get the source tree
+        source_tree = parse_tree(r, treeish=source)
+
+        # Restore specified paths from source tree
+        for path in paths:
+            if isinstance(path, str):
+                tree_path = _fs_to_tree_path(path)
+            else:
+                tree_path = path
+
+            try:
+                # Look up the path in the source tree
+                mode, sha = source_tree.lookup_path(
+                    r.object_store.__getitem__, tree_path
+                )
+                blob = r[sha]
+                assert isinstance(blob, Blob), "Expected a Blob object"
+            except KeyError:
+                # Path doesn't exist in source tree
+                raise CheckoutError(
+                    f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not found in source"
+                )
+
+            full_path = os.path.join(os.fsencode(r.path), tree_path)
+
+            if worktree:
+                # Use build_file_from_blob to restore to working tree
+                build_file_from_blob(blob, mode, full_path)
+
+            if staged:
+                # Update the index with the blob from source
+                from .index import IndexEntry
+
+                index = r.open_index()
+
+                # When only updating staged (not worktree), we want to reset the index
+                # to the source, but invalidate the stat cache so Git knows to check
+                # the worktree file. Use zeros for stat fields.
+                if not worktree:
+                    # Invalidate stat cache by using zeros
+                    new_entry = IndexEntry(
+                        ctime=(0, 0),
+                        mtime=(0, 0),
+                        dev=0,
+                        ino=0,
+                        mode=mode,
+                        uid=0,
+                        gid=0,
+                        size=0,
+                        sha=sha,
+                    )
+                else:
+                    # If we also updated worktree, use actual stat
+                    from .index import index_entry_from_stat
+
+                    st = os.lstat(full_path)
+                    new_entry = index_entry_from_stat(st, sha, mode)
+
+                index[tree_path] = new_entry
+                index.write()
 
 
-        if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
-            validate_path_element = validate_path_element_ntfs
-        else:
-            validate_path_element = validate_path_element_default
 
 
-        if config.get_boolean(b"core", b"symlinks", True):
+def switch(
+    repo: str | os.PathLike[str] | Repo,
+    target: str | bytes | Commit | Tag,
+    create: str | bytes | None = None,
+    force: bool = False,
+    detach: bool = False,
+) -> None:
+    """Switch branches.
+
+    This is similar to 'git switch', allowing you to switch to a different
+    branch or commit, updating both HEAD and the working tree.
+
+    Args:
+      repo: Path to repository or repository object
+      target: Branch name, tag, or commit SHA to switch to
+      create: Create a new branch at target before switching (like git switch -c)
+      force: Force switch even if there are local changes
+      detach: Switch to a commit in detached HEAD state (like git switch --detach)
 
 
-            def symlink_wrapper(
-                source: str | bytes | os.PathLike[str],
-                target: str | bytes | os.PathLike[str],
-            ) -> None:
-                symlink(source, target)  # type: ignore[arg-type,unused-ignore]
+    Raises:
+      CheckoutError: If switch cannot be performed due to conflicts
+      KeyError: If the target reference cannot be found
+      ValueError: If both create and detach are specified
+    """
+    if create and detach:
+        raise ValueError("Cannot use both create and detach options")
+
+    with open_repo_closing(repo) as r:
+        # Store the original target for later reference checks
+        original_target = target
 
 
-            symlink_fn = symlink_wrapper
+        if isinstance(target, str):
+            target_bytes = target.encode(DEFAULT_ENCODING)
+        elif isinstance(target, bytes):
+            target_bytes = target
         else:
         else:
+            # For Commit/Tag objects, we'll use their SHA
+            target_bytes = target.id
 
 
-            def symlink_fallback(
-                source: str | bytes | os.PathLike[str],
-                target: str | bytes | os.PathLike[str],
-            ) -> None:
-                mode = "w" + ("b" if isinstance(source, bytes) else "")
-                with open(target, mode) as f:
-                    f.write(source)
+        if isinstance(create, str):
+            create = create.encode(DEFAULT_ENCODING)
 
 
-            symlink_fn = symlink_fallback
+        # Parse the target to get the commit
+        target_commit = parse_commit(r, original_target)
+        target_tree_id = target_commit.tree
 
 
-        # Get blob normalizer for line ending conversion
-        blob_normalizer = r.get_blob_normalizer()
+        # Get current HEAD tree for comparison
+        current_tree_id = _get_current_head_tree(r)
+
+        # Check for uncommitted changes if not forcing
+        if current_tree_id is not None:
+            _check_uncommitted_changes(r, target_tree_id, force)
 
 
         # Update working tree
         # Update working tree
-        tree_change_iterator: Iterator[TreeChange] = tree_changes(
-            r.object_store, current_tree_id, target_tree_id
-        )
-        update_working_tree(
-            r,
-            current_tree_id,
-            target_tree_id,
-            change_iterator=tree_change_iterator,
-            honor_filemode=honor_filemode,
-            validate_path_element=validate_path_element,
-            symlink_fn=symlink_fn,
-            force_remove_untracked=force,
-            blob_normalizer=blob_normalizer,
-            allow_overwrite_modified=force,
-        )
+        _perform_tree_switch(r, current_tree_id, target_tree_id, force)
 
 
         # Update HEAD
         # Update HEAD
-        if new_branch:
+        if create:
             # Create new branch and switch to it
             # Create new branch and switch to it
-            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
-            update_head(r, new_branch)
+            branch_create(r, create, objectish=target_commit.id.decode("ascii"))
+            update_head(r, create)
 
 
             # Set up tracking if creating from a remote branch
             # Set up tracking if creating from a remote branch
             from .refs import LOCAL_REMOTE_PREFIX, local_branch_name, parse_remote_ref
             from .refs import LOCAL_REMOTE_PREFIX, local_branch_name, parse_remote_ref
@@ -5340,11 +5635,14 @@ def checkout(
                     remote_name, branch_name = parse_remote_ref(target_bytes)
                     remote_name, branch_name = parse_remote_ref(target_bytes)
                     # Set tracking to refs/heads/<branch> on the remote
                     # Set tracking to refs/heads/<branch> on the remote
                     set_branch_tracking(
                     set_branch_tracking(
-                        r, new_branch, remote_name, local_branch_name(branch_name)
+                        r, create, remote_name, local_branch_name(branch_name)
                     )
                     )
                 except ValueError:
                 except ValueError:
                     # Invalid remote ref format, skip tracking setup
                     # Invalid remote ref format, skip tracking setup
                     pass
                     pass
+        elif detach:
+            # Detached HEAD mode
+            update_head(r, target_commit.id.decode("ascii"), detached=True)
         else:
         else:
             # Check if target is a branch name (with or without refs/heads/ prefix)
             # Check if target is a branch name (with or without refs/heads/ prefix)
             branch_ref = None
             branch_ref = None
@@ -5368,8 +5666,12 @@ def checkout(
                 # It's a branch - update HEAD symbolically
                 # It's a branch - update HEAD symbolically
                 update_head(r, branch_ref)
                 update_head(r, branch_ref)
             else:
             else:
-                # It's a tag, other ref, or commit SHA - detached HEAD
-                update_head(r, target_commit.id.decode("ascii"), detached=True)
+                # It's a tag, other ref, or commit SHA
+                # In git switch, this would be an error unless --detach is used
+                raise CheckoutError(
+                    f"'{target_bytes.decode(DEFAULT_ENCODING)}' is not a branch. "
+                    "Use detach=True to switch to a commit in detached HEAD state."
+                )
 
 
 
 
 def reset_file(
 def reset_file(
@@ -6911,6 +7213,7 @@ def rebase(
     Raises:
     Raises:
       Error: If rebase fails or conflicts occur
       Error: If rebase fails or conflicts occur
     """
     """
+    # TODO: Avoid importing from .cli
     from .cli import launch_editor
     from .cli import launch_editor
     from .rebase import (
     from .rebase import (
         RebaseConflict,
         RebaseConflict,
@@ -7038,7 +7341,7 @@ def annotate(
     """
     """
     if committish is None:
     if committish is None:
         committish = "HEAD"
         committish = "HEAD"
-    from dulwich.annotate import annotate_lines
+    from .annotate import annotate_lines
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         commit_id = parse_commit(r, committish).id
         commit_id = parse_commit(r, committish).id
@@ -7055,7 +7358,7 @@ def filter_branch(
     repo: RepoPath = ".",
     repo: RepoPath = ".",
     branch: str | bytes = "HEAD",
     branch: str | bytes = "HEAD",
     *,
     *,
-    filter_fn: Callable[[Commit], Optional["CommitData"]] | None = None,
+    filter_fn: Callable[[Commit], "CommitData | None"] | None = None,
     filter_author: Callable[[bytes], bytes | None] | None = None,
     filter_author: Callable[[bytes], bytes | None] | None = None,
     filter_committer: Callable[[bytes], bytes | None] | None = None,
     filter_committer: Callable[[bytes], bytes | None] | None = None,
     filter_message: Callable[[bytes], bytes | None] | None = None,
     filter_message: Callable[[bytes], bytes | None] | None = None,
@@ -8577,7 +8880,6 @@ def merge_base(
         List of commit IDs that are merge bases
         List of commit IDs that are merge bases
     """
     """
     from .graph import find_merge_base, find_octopus_base
     from .graph import find_merge_base, find_octopus_base
-    from .objects import Commit
     from .objectspec import parse_object
     from .objectspec import parse_object
 
 
     if committishes is None or len(committishes) < 2:
     if committishes is None or len(committishes) < 2:
@@ -8620,7 +8922,6 @@ def is_ancestor(
         True if ancestor is an ancestor of descendant, False otherwise
         True if ancestor is an ancestor of descendant, False otherwise
     """
     """
     from .graph import find_merge_base
     from .graph import find_merge_base
-    from .objects import Commit
     from .objectspec import parse_object
     from .objectspec import parse_object
 
 
     if ancestor is None or descendant is None:
     if ancestor is None or descendant is None:
@@ -8656,7 +8957,6 @@ def independent_commits(
         List of commit IDs that are not ancestors of any other commits in the list
         List of commit IDs that are not ancestors of any other commits in the list
     """
     """
     from .graph import independent
     from .graph import independent
-    from .objects import Commit
     from .objectspec import parse_object
     from .objectspec import parse_object
 
 
     if committishes is None or len(committishes) == 0:
     if committishes is None or len(committishes) == 0:
@@ -8726,8 +9026,6 @@ def mailsplit(
             keep_cr=keep_cr,
             keep_cr=keep_cr,
         )
         )
     else:
     else:
-        from typing import BinaryIO, cast
-
         if input_path is None:
         if input_path is None:
             # Read from stdin
             # Read from stdin
             input_file: str | bytes | BinaryIO = sys.stdin.buffer
             input_file: str | bytes | BinaryIO = sys.stdin.buffer
@@ -8788,8 +9086,6 @@ def mailinfo(
         >>> print(f"Author: {result.author_name} <{result.author_email}>")
         >>> print(f"Author: {result.author_name} <{result.author_email}>")
         >>> print(f"Subject: {result.subject}")
         >>> print(f"Subject: {result.subject}")
     """
     """
-    from typing import BinaryIO, TextIO, cast
-
     from .mbox import mailinfo as mbox_mailinfo
     from .mbox import mailinfo as mbox_mailinfo
 
 
     if input_path is None:
     if input_path is None:
@@ -8848,15 +9144,13 @@ def rerere(repo: RepoPath = ".") -> tuple[list[tuple[bytes, str]], list[bytes]]:
         - List of tuples (path, conflict_id) for recorded conflicts
         - List of tuples (path, conflict_id) for recorded conflicts
         - List of paths where resolutions were automatically applied
         - List of paths where resolutions were automatically applied
     """
     """
-    from dulwich.rerere import _has_conflict_markers, rerere_auto
+    from .rerere import _has_conflict_markers, rerere_auto
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         # Get conflicts from the index (if available)
         # Get conflicts from the index (if available)
         index = r.open_index()
         index = r.open_index()
         conflicts = []
         conflicts = []
 
 
-        from dulwich.index import ConflictedIndexEntry
-
         for path, entry in index.items():
         for path, entry in index.items():
             if isinstance(entry, ConflictedIndexEntry):
             if isinstance(entry, ConflictedIndexEntry):
                 conflicts.append(path)
                 conflicts.append(path)
@@ -8889,7 +9183,7 @@ def rerere_status(repo: RepoPath = ".") -> list[tuple[str, bool]]:
     Returns:
     Returns:
         List of tuples (conflict_id, has_resolution)
         List of tuples (conflict_id, has_resolution)
     """
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
         cache = RerereCache.from_repo(r)
@@ -8908,7 +9202,7 @@ def rerere_diff(
     Returns:
     Returns:
         List of tuples (conflict_id, preimage, postimage)
         List of tuples (conflict_id, preimage, postimage)
     """
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
         cache = RerereCache.from_repo(r)
@@ -8935,7 +9229,7 @@ def rerere_forget(repo: RepoPath = ".", pathspec: str | bytes | None = None) ->
         repo: Path to the repository
         repo: Path to the repository
         pathspec: Path to forget (currently not implemented, forgets all)
         pathspec: Path to forget (currently not implemented, forgets all)
     """
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
         cache = RerereCache.from_repo(r)
@@ -8955,7 +9249,7 @@ def rerere_clear(repo: RepoPath = ".") -> None:
     Args:
     Args:
         repo: Path to the repository
         repo: Path to the repository
     """
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
         cache = RerereCache.from_repo(r)
@@ -8969,7 +9263,7 @@ def rerere_gc(repo: RepoPath = ".", max_age_days: int = 60) -> None:
         repo: Path to the repository
         repo: Path to the repository
         max_age_days: Maximum age in days for keeping resolutions
         max_age_days: Maximum age in days for keeping resolutions
     """
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
 
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
         cache = RerereCache.from_repo(r)

+ 1 - 1
dulwich/protocol.py

@@ -247,7 +247,7 @@ def pkt_line(data: bytes | None) -> bytes:
     """
     """
     if data is None:
     if data is None:
         return b"0000"
         return b"0000"
-    return ("%04x" % (len(data) + 4)).encode("ascii") + data
+    return f"{len(data) + 4:04x}".encode("ascii") + data
 
 
 
 
 def pkt_seq(*seq: bytes | None) -> bytes:
 def pkt_seq(*seq: bytes | None) -> bytes:

+ 2 - 2
dulwich/rebase.py

@@ -27,7 +27,7 @@ import subprocess
 from collections.abc import Callable, Sequence
 from collections.abc import Callable, Sequence
 from dataclasses import dataclass
 from dataclasses import dataclass
 from enum import Enum
 from enum import Enum
-from typing import Optional, Protocol, TypedDict
+from typing import Protocol, TypedDict
 
 
 from dulwich.graph import find_merge_base
 from dulwich.graph import find_merge_base
 from dulwich.merge import three_way_merge
 from dulwich.merge import three_way_merge
@@ -164,7 +164,7 @@ class RebaseTodoEntry:
         return " ".join(parts)
         return " ".join(parts)
 
 
     @classmethod
     @classmethod
-    def from_string(cls, line: str) -> Optional["RebaseTodoEntry"]:
+    def from_string(cls, line: str) -> "RebaseTodoEntry | None":
         """Parse a todo entry from a line.
         """Parse a todo entry from a line.
 
 
         Args:
         Args:

+ 194 - 19
dulwich/repo.py

@@ -41,7 +41,6 @@ from typing import (
     TYPE_CHECKING,
     TYPE_CHECKING,
     Any,
     Any,
     BinaryIO,
     BinaryIO,
-    Optional,
     TypeVar,
     TypeVar,
 )
 )
 
 
@@ -344,6 +343,71 @@ def _set_filesystem_hidden(path: str) -> None:
     # Could implement other platform specific filesystem hiding here
     # Could implement other platform specific filesystem hiding here
 
 
 
 
+def parse_shared_repository(
+    value: str | bytes | bool,
+) -> tuple[int | None, int | None]:
+    """Parse core.sharedRepository configuration value.
+
+    Args:
+      value: Configuration value (string, bytes, or boolean)
+
+    Returns:
+      tuple of (file_mask, directory_mask) or (None, None) if not shared
+
+    The masks are permission bits to apply via chmod.
+    """
+    if isinstance(value, bytes):
+        value = value.decode("utf-8", errors="replace")
+
+    # Handle boolean values
+    if isinstance(value, bool):
+        if value:
+            # true = group (same as "group")
+            return (0o664, 0o2775)
+        else:
+            # false = umask (use system umask, no adjustment)
+            return (None, None)
+
+    # Handle string values
+    value_lower = value.lower()
+
+    if value_lower in ("false", "0", ""):
+        # Use umask (no adjustment)
+        return (None, None)
+
+    if value_lower in ("true", "1", "group"):
+        # Group writable (with setgid bit)
+        return (0o664, 0o2775)
+
+    if value_lower in ("all", "world", "everybody", "2"):
+        # World readable/writable (with setgid bit)
+        return (0o666, 0o2777)
+
+    if value_lower == "umask":
+        # Explicitly use umask
+        return (None, None)
+
+    # Try to parse as octal
+    if value.startswith("0"):
+        try:
+            mode = int(value, 8)
+            # For directories, add execute bits where read bits are set
+            # and add setgid bit for shared repositories
+            dir_mode = mode | 0o2000  # Add setgid bit
+            if mode & 0o004:
+                dir_mode |= 0o001
+            if mode & 0o040:
+                dir_mode |= 0o010
+            if mode & 0o400:
+                dir_mode |= 0o100
+            return (mode, dir_mode)
+        except ValueError:
+            pass
+
+    # Default to umask for unrecognized values
+    return (None, None)
+
+
 class ParentsProvider:
 class ParentsProvider:
     """Provider for commit parent information."""
     """Provider for commit parent information."""
 
 
@@ -441,7 +505,11 @@ class BaseRepo:
         return sys.platform != "win32"
         return sys.platform != "win32"
 
 
     def _init_files(
     def _init_files(
-        self, bare: bool, symlinks: bool | None = None, format: int | None = None
+        self,
+        bare: bool,
+        symlinks: bool | None = None,
+        format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> None:
     ) -> None:
         """Initialize a default set of named files."""
         """Initialize a default set of named files."""
         from .config import ConfigFile
         from .config import ConfigFile
@@ -467,6 +535,14 @@ class BaseRepo:
 
 
         cf.set("core", "bare", bare)
         cf.set("core", "bare", bare)
         cf.set("core", "logallrefupdates", True)
         cf.set("core", "logallrefupdates", True)
+
+        # Set shared repository if specified
+        if shared_repository is not None:
+            if isinstance(shared_repository, bool):
+                cf.set("core", "sharedRepository", shared_repository)
+            else:
+                cf.set("core", "sharedRepository", shared_repository)
+
         cf.write_to_file(f)
         cf.write_to_file(f)
         self._put_named_file("config", f.getvalue())
         self._put_named_file("config", f.getvalue())
         self._put_named_file(os.path.join("info", "exclude"), b"")
         self._put_named_file(os.path.join("info", "exclude"), b"")
@@ -918,7 +994,7 @@ class BaseRepo:
         reverse: bool = False,
         reverse: bool = False,
         max_entries: int | None = None,
         max_entries: int | None = None,
         paths: Sequence[bytes] | None = None,
         paths: Sequence[bytes] | None = None,
-        rename_detector: Optional["RenameDetector"] = None,
+        rename_detector: "RenameDetector | None" = None,
         follow: bool = False,
         follow: bool = False,
         since: int | None = None,
         since: int | None = None,
         until: int | None = None,
         until: int | None = None,
@@ -1208,7 +1284,7 @@ class Repo(BaseRepo):
     path: str
     path: str
     bare: bool
     bare: bool
     object_store: DiskObjectStore
     object_store: DiskObjectStore
-    filter_context: Optional["FilterContext"]
+    filter_context: "FilterContext | None"
 
 
     def __init__(
     def __init__(
         self,
         self,
@@ -1299,8 +1375,18 @@ class Repo(BaseRepo):
                 raise UnsupportedExtension(extension.decode("utf-8"))
                 raise UnsupportedExtension(extension.decode("utf-8"))
 
 
         if object_store is None:
         if object_store is None:
+            # Get shared repository permissions from config
+            try:
+                shared_value = config.get(("core",), "sharedRepository")
+                file_mode, dir_mode = parse_shared_repository(shared_value)
+            except KeyError:
+                file_mode, dir_mode = None, None
+
             object_store = DiskObjectStore.from_config(
             object_store = DiskObjectStore.from_config(
-                os.path.join(self.commondir(), OBJECTDIR), config
+                os.path.join(self.commondir(), OBJECTDIR),
+                config,
+                file_mode=file_mode,
+                dir_mode=dir_mode,
             )
             )
 
 
         # Use reftable if extension is configured
         # Use reftable if extension is configured
@@ -1355,10 +1441,23 @@ class Repo(BaseRepo):
         from .reflog import format_reflog_line
         from .reflog import format_reflog_line
 
 
         path = self._reflog_path(ref)
         path = self._reflog_path(ref)
-        try:
-            os.makedirs(os.path.dirname(path))
-        except FileExistsError:
-            pass
+
+        # Get shared repository permissions
+        file_mode, dir_mode = self._get_shared_repository_permissions()
+
+        # Create directory with appropriate permissions
+        parent_dir = os.path.dirname(path)
+        # Create directory tree, setting permissions on each level if needed
+        parts = []
+        current = parent_dir
+        while current and not os.path.exists(current):
+            parts.append(current)
+            current = os.path.dirname(current)
+        parts.reverse()
+        for part in parts:
+            os.mkdir(part)
+            if dir_mode is not None:
+                os.chmod(part, dir_mode)
         if committer is None:
         if committer is None:
             config = self.get_config_stack()
             config = self.get_config_stack()
             committer = get_user_identity(config)
             committer = get_user_identity(config)
@@ -1375,6 +1474,11 @@ class Repo(BaseRepo):
                 + b"\n"
                 + b"\n"
             )
             )
 
 
+        # Set file permissions (open() respects umask, so we need chmod to set the actual mode)
+        # Always chmod to ensure correct permissions even if file already existed
+        if file_mode is not None:
+            os.chmod(path, file_mode)
+
     def _reflog_path(self, ref: bytes) -> str:
     def _reflog_path(self, ref: bytes) -> str:
         if ref.startswith((b"main-worktree/", b"worktrees/")):
         if ref.startswith((b"main-worktree/", b"worktrees/")):
             raise NotImplementedError(f"refs {ref.decode()} are not supported")
             raise NotImplementedError(f"refs {ref.decode()} are not supported")
@@ -1469,6 +1573,21 @@ class Repo(BaseRepo):
         # TODO(jelmer): Actually probe disk / look at filesystem
         # TODO(jelmer): Actually probe disk / look at filesystem
         return sys.platform != "win32"
         return sys.platform != "win32"
 
 
+    def _get_shared_repository_permissions(
+        self,
+    ) -> tuple[int | None, int | None]:
+        """Get shared repository file and directory permissions from config.
+
+        Returns:
+            tuple of (file_mask, directory_mask) or (None, None) if not shared
+        """
+        try:
+            config = self.get_config()
+            value = config.get(("core",), "sharedRepository")
+            return parse_shared_repository(value)
+        except KeyError:
+            return (None, None)
+
     def _put_named_file(self, path: str, contents: bytes) -> None:
     def _put_named_file(self, path: str, contents: bytes) -> None:
         """Write a file to the control dir with the given name and contents.
         """Write a file to the control dir with the given name and contents.
 
 
@@ -1477,8 +1596,19 @@ class Repo(BaseRepo):
           contents: A string to write to the file.
           contents: A string to write to the file.
         """
         """
         path = path.lstrip(os.path.sep)
         path = path.lstrip(os.path.sep)
-        with GitFile(os.path.join(self.controldir(), path), "wb") as f:
-            f.write(contents)
+
+        # Get shared repository permissions
+        file_mode, _ = self._get_shared_repository_permissions()
+
+        # Create file with appropriate permissions
+        if file_mode is not None:
+            with GitFile(
+                os.path.join(self.controldir(), path), "wb", mask=file_mode
+            ) as f:
+                f.write(contents)
+        else:
+            with GitFile(os.path.join(self.controldir(), path), "wb") as f:
+                f.write(contents)
 
 
     def _del_named_file(self, path: str) -> None:
     def _del_named_file(self, path: str) -> None:
         try:
         try:
@@ -1554,7 +1684,15 @@ class Repo(BaseRepo):
                 index_version = None
                 index_version = None
             skip_hash = config.get_boolean(b"index", b"skipHash", False)
             skip_hash = config.get_boolean(b"index", b"skipHash", False)
 
 
-        return Index(self.index_path(), skip_hash=skip_hash, version=index_version)
+        # Get shared repository permissions for index file
+        file_mode, _ = self._get_shared_repository_permissions()
+
+        return Index(
+            self.index_path(),
+            skip_hash=skip_hash,
+            version=index_version,
+            file_mode=file_mode,
+        )
 
 
     def has_index(self) -> bool:
     def has_index(self) -> bool:
         """Check if an index is present."""
         """Check if an index is present."""
@@ -1857,10 +1995,11 @@ class Repo(BaseRepo):
         controldir: str | bytes | os.PathLike[str],
         controldir: str | bytes | os.PathLike[str],
         bare: bool,
         bare: bool,
         object_store: PackBasedObjectStore | None = None,
         object_store: PackBasedObjectStore | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
         default_branch: bytes | None = None,
         default_branch: bytes | None = None,
         symlinks: bool | None = None,
         symlinks: bool | None = None,
         format: int | None = None,
         format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> "Repo":
     ) -> "Repo":
         path = os.fspath(path)
         path = os.fspath(path)
         if isinstance(path, bytes):
         if isinstance(path, bytes):
@@ -1868,10 +2007,26 @@ class Repo(BaseRepo):
         controldir = os.fspath(controldir)
         controldir = os.fspath(controldir)
         if isinstance(controldir, bytes):
         if isinstance(controldir, bytes):
             controldir = os.fsdecode(controldir)
             controldir = os.fsdecode(controldir)
+
+        # Determine shared repository permissions early
+        file_mode: int | None = None
+        dir_mode: int | None = None
+        if shared_repository is not None:
+            file_mode, dir_mode = parse_shared_repository(shared_repository)
+
+        # Create base directories with appropriate permissions
         for d in BASE_DIRECTORIES:
         for d in BASE_DIRECTORIES:
-            os.mkdir(os.path.join(controldir, *d))
+            dir_path = os.path.join(controldir, *d)
+            os.mkdir(dir_path)
+            if dir_mode is not None:
+                os.chmod(dir_path, dir_mode)
+
         if object_store is None:
         if object_store is None:
-            object_store = DiskObjectStore.init(os.path.join(controldir, OBJECTDIR))
+            object_store = DiskObjectStore.init(
+                os.path.join(controldir, OBJECTDIR),
+                file_mode=file_mode,
+                dir_mode=dir_mode,
+            )
         ret = cls(path, bare=bare, object_store=object_store)
         ret = cls(path, bare=bare, object_store=object_store)
         if default_branch is None:
         if default_branch is None:
             if config is None:
             if config is None:
@@ -1883,7 +2038,12 @@ class Repo(BaseRepo):
             except KeyError:
             except KeyError:
                 default_branch = DEFAULT_BRANCH
                 default_branch = DEFAULT_BRANCH
         ret.refs.set_symbolic_ref(b"HEAD", local_branch_name(default_branch))
         ret.refs.set_symbolic_ref(b"HEAD", local_branch_name(default_branch))
-        ret._init_files(bare=bare, symlinks=symlinks, format=format)
+        ret._init_files(
+            bare=bare,
+            symlinks=symlinks,
+            format=format,
+            shared_repository=shared_repository,
+        )
         return ret
         return ret
 
 
     @classmethod
     @classmethod
@@ -1892,10 +2052,11 @@ class Repo(BaseRepo):
         path: str | bytes | os.PathLike[str],
         path: str | bytes | os.PathLike[str],
         *,
         *,
         mkdir: bool = False,
         mkdir: bool = False,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
         default_branch: bytes | None = None,
         default_branch: bytes | None = None,
         symlinks: bool | None = None,
         symlinks: bool | None = None,
         format: int | None = None,
         format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> "Repo":
     ) -> "Repo":
         """Create a new repository.
         """Create a new repository.
 
 
@@ -1906,6 +2067,7 @@ class Repo(BaseRepo):
           default_branch: Default branch name
           default_branch: Default branch name
           symlinks: Whether to support symlinks
           symlinks: Whether to support symlinks
           format: Repository format version (defaults to 0)
           format: Repository format version (defaults to 0)
+          shared_repository: Shared repository setting (group, all, umask, or octal)
         Returns: `Repo` instance
         Returns: `Repo` instance
         """
         """
         path = os.fspath(path)
         path = os.fspath(path)
@@ -1924,6 +2086,7 @@ class Repo(BaseRepo):
             default_branch=default_branch,
             default_branch=default_branch,
             symlinks=symlinks,
             symlinks=symlinks,
             format=format,
             format=format,
+            shared_repository=shared_repository,
         )
         )
 
 
     @classmethod
     @classmethod
@@ -1957,12 +2120,21 @@ class Repo(BaseRepo):
         gitdirfile = os.path.join(path, CONTROLDIR)
         gitdirfile = os.path.join(path, CONTROLDIR)
         with open(gitdirfile, "wb") as f:
         with open(gitdirfile, "wb") as f:
             f.write(b"gitdir: " + os.fsencode(worktree_controldir) + b"\n")
             f.write(b"gitdir: " + os.fsencode(worktree_controldir) + b"\n")
+
+        # Get shared repository permissions from main repository
+        _, dir_mode = main_repo._get_shared_repository_permissions()
+
+        # Create directories with appropriate permissions
         try:
         try:
             os.mkdir(main_worktreesdir)
             os.mkdir(main_worktreesdir)
+            if dir_mode is not None:
+                os.chmod(main_worktreesdir, dir_mode)
         except FileExistsError:
         except FileExistsError:
             pass
             pass
         try:
         try:
             os.mkdir(worktree_controldir)
             os.mkdir(worktree_controldir)
+            if dir_mode is not None:
+                os.chmod(worktree_controldir, dir_mode)
         except FileExistsError:
         except FileExistsError:
             pass
             pass
         with open(os.path.join(worktree_controldir, GITDIR), "wb") as f:
         with open(os.path.join(worktree_controldir, GITDIR), "wb") as f:
@@ -1982,9 +2154,10 @@ class Repo(BaseRepo):
         *,
         *,
         mkdir: bool = False,
         mkdir: bool = False,
         object_store: PackBasedObjectStore | None = None,
         object_store: PackBasedObjectStore | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
         default_branch: bytes | None = None,
         default_branch: bytes | None = None,
         format: int | None = None,
         format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> "Repo":
     ) -> "Repo":
         """Create a new bare repository.
         """Create a new bare repository.
 
 
@@ -1997,6 +2170,7 @@ class Repo(BaseRepo):
           config: Configuration object
           config: Configuration object
           default_branch: Default branch name
           default_branch: Default branch name
           format: Repository format version (defaults to 0)
           format: Repository format version (defaults to 0)
+          shared_repository: Shared repository setting (group, all, umask, or octal)
         Returns: a `Repo` instance
         Returns: a `Repo` instance
         """
         """
         path = os.fspath(path)
         path = os.fspath(path)
@@ -2012,6 +2186,7 @@ class Repo(BaseRepo):
             config=config,
             config=config,
             default_branch=default_branch,
             default_branch=default_branch,
             format=format,
             format=format,
+            shared_repository=shared_repository,
         )
         )
 
 
     create = init_bare
     create = init_bare
@@ -2217,7 +2392,7 @@ class MemoryRepo(BaseRepo):
     those have a stronger dependency on the filesystem.
     those have a stronger dependency on the filesystem.
     """
     """
 
 
-    filter_context: Optional["FilterContext"]
+    filter_context: "FilterContext | None"
 
 
     def __init__(self) -> None:
     def __init__(self) -> None:
         """Create a new repository in memory."""
         """Create a new repository in memory."""

+ 4 - 9
dulwich/server.py

@@ -43,24 +43,19 @@ Currently supported capabilities:
  * symref
  * symref
 """
 """
 
 
-import collections
 import os
 import os
 import socket
 import socket
 import socketserver
 import socketserver
 import sys
 import sys
 import time
 import time
 import zlib
 import zlib
+from collections import deque
 from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
 from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
 from collections.abc import Set as AbstractSet
 from functools import partial
 from functools import partial
-from typing import IO, TYPE_CHECKING, Optional
+from typing import IO, TYPE_CHECKING
 from typing import Protocol as TypingProtocol
 from typing import Protocol as TypingProtocol
 
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     from .object_store import BaseObjectStore
     from .object_store import BaseObjectStore
     from .repo import BaseRepo
     from .repo import BaseRepo
@@ -181,7 +176,7 @@ class BackendRepo(TypingProtocol):
         *,
         *,
         get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
         get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
         depth: int | None = None,
         depth: int | None = None,
-    ) -> Optional["MissingObjectFinder"]:
+    ) -> "MissingObjectFinder | None":
         """Yield the objects required for a list of commits.
         """Yield the objects required for a list of commits.
 
 
         Args:
         Args:
@@ -629,7 +624,7 @@ def _want_satisfied(
     Returns: True if the want is satisfied by the haves
     Returns: True if the want is satisfied by the haves
     """
     """
     o = store[want]
     o = store[want]
-    pending = collections.deque([o])
+    pending = deque([o])
     known = {want}
     known = {want}
     while pending:
     while pending:
         commit = pending.popleft()
         commit = pending.popleft()

+ 4 - 4
dulwich/walk.py

@@ -21,8 +21,8 @@
 
 
 """General implementation of walking commits and their contents."""
 """General implementation of walking commits and their contents."""
 
 
-import collections
 import heapq
 import heapq
+from collections import defaultdict, deque
 from collections.abc import Callable, Iterator, Sequence
 from collections.abc import Callable, Iterator, Sequence
 from itertools import chain
 from itertools import chain
 from typing import TYPE_CHECKING, Any, cast
 from typing import TYPE_CHECKING, Any, cast
@@ -338,7 +338,7 @@ class Walker:
 
 
         self._num_entries = 0
         self._num_entries = 0
         self._queue = queue_cls(self)
         self._queue = queue_cls(self)
-        self._out_queue: collections.deque[WalkEntry] = collections.deque()
+        self._out_queue: deque[WalkEntry] = deque()
 
 
     def _path_matches(self, changed_path: bytes | None) -> bool:
     def _path_matches(self, changed_path: bytes | None) -> bool:
         if changed_path is None:
         if changed_path is None:
@@ -481,9 +481,9 @@ def _topo_reorder(
     Returns: iterator over WalkEntry objects from entries in FIFO order, except
     Returns: iterator over WalkEntry objects from entries in FIFO order, except
         where a parent would be yielded before any of its children.
         where a parent would be yielded before any of its children.
     """
     """
-    todo: collections.deque[WalkEntry] = collections.deque()
+    todo: deque[WalkEntry] = deque()
     pending: dict[bytes, WalkEntry] = {}
     pending: dict[bytes, WalkEntry] = {}
-    num_children: dict[bytes, int] = collections.defaultdict(int)
+    num_children: dict[bytes, int] = defaultdict(int)
     for entry in entries:
     for entry in entries:
         todo.append(entry)
         todo.append(entry)
         for p in get_parents(entry.commit):
         for p in get_parents(entry.commit):

+ 2 - 5
dulwich/web.py

@@ -59,7 +59,6 @@ from typing import (
     Any,
     Any,
     BinaryIO,
     BinaryIO,
     ClassVar,
     ClassVar,
-    Union,
     cast,
     cast,
 )
 )
 from urllib.parse import parse_qs
 from urllib.parse import parse_qs
@@ -582,8 +581,7 @@ class HTTPGitRequest:
         environ: WSGIEnvironment,
         environ: WSGIEnvironment,
         start_response: StartResponse,
         start_response: StartResponse,
         dumb: bool = False,
         dumb: bool = False,
-        handlers: dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
-        | None = None,
+        handlers: dict[bytes, "HandlerConstructor | Callable[..., Any]"] | None = None,
     ) -> None:
     ) -> None:
         """Initialize HTTPGitRequest.
         """Initialize HTTPGitRequest.
 
 
@@ -687,8 +685,7 @@ class HTTPGitApplication:
         self,
         self,
         backend: Backend,
         backend: Backend,
         dumb: bool = False,
         dumb: bool = False,
-        handlers: dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
-        | None = None,
+        handlers: dict[bytes, "HandlerConstructor | Callable[..., Any]"] | None = None,
         fallback_app: WSGIApplication | None = None,
         fallback_app: WSGIApplication | None = None,
     ) -> None:
     ) -> None:
         """Initialize HTTPGitApplication.
         """Initialize HTTPGitApplication.

+ 1 - 1
tests/compat/test_lfs.py

@@ -384,7 +384,7 @@ class LFSStatusCompatTest(LFSCompatTestCase):
 
 
         # Modify the file
         # Modify the file
         with open(test_file, "wb") as f:
         with open(test_file, "wb") as f:
-            f.write(b"modified content\n")
+            f.write(b"slightly modified content\n")
 
 
         # Check status - should show file as modified
         # Check status - should show file as modified
         status = porcelain.status(repo_dir, untracked_files="no")
         status = porcelain.status(repo_dir, untracked_files="no")

+ 12 - 1
tests/test_cli_merge.py

@@ -21,6 +21,7 @@
 
 
 """Tests for dulwich merge CLI command."""
 """Tests for dulwich merge CLI command."""
 
 
+import importlib.util
 import os
 import os
 import tempfile
 import tempfile
 import unittest
 import unittest
@@ -28,7 +29,7 @@ import unittest
 from dulwich import porcelain
 from dulwich import porcelain
 from dulwich.cli import main
 from dulwich.cli import main
 
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 
 
 class CLIMergeTests(TestCase):
 class CLIMergeTests(TestCase):
@@ -77,6 +78,11 @@ class CLIMergeTests(TestCase):
 
 
     def test_merge_with_conflicts(self):
     def test_merge_with_conflicts(self):
         """Test CLI merge with conflicts."""
         """Test CLI merge with conflicts."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
             porcelain.init(tmpdir)
             porcelain.init(tmpdir)
@@ -334,6 +340,11 @@ class CLIMergeTests(TestCase):
 
 
     def test_octopus_merge_with_conflicts(self):
     def test_octopus_merge_with_conflicts(self):
         """Test CLI octopus merge with conflicts."""
         """Test CLI octopus merge with conflicts."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
             porcelain.init(tmpdir)
             porcelain.init(tmpdir)

+ 156 - 3
tests/test_porcelain.py

@@ -4559,6 +4559,157 @@ class CheckoutTests(PorcelainTestCase):
         remote_repo.close()
         remote_repo.close()
 
 
 
 
+class RestoreTests(PorcelainTestCase):
+    """Tests for the restore command."""
+
+    def setUp(self) -> None:
+        super().setUp()
+        self._sha, self._foo_path = _commit_file_with_content(
+            self.repo, "foo", "original\n"
+        )
+
+    def test_restore_worktree_from_index(self) -> None:
+        # Modify the working tree file
+        with open(self._foo_path, "w") as f:
+            f.write("modified\n")
+
+        # Restore from index (should restore to original)
+        porcelain.restore(self.repo, paths=["foo"])
+
+        with open(self._foo_path) as f:
+            content = f.read()
+        self.assertEqual("original\n", content)
+
+    def test_restore_worktree_from_head(self) -> None:
+        # Modify and stage the file
+        with open(self._foo_path, "w") as f:
+            f.write("staged\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Now modify it again in worktree
+        with open(self._foo_path, "w") as f:
+            f.write("worktree\n")
+
+        # Restore from HEAD (should restore to original, not staged)
+        porcelain.restore(self.repo, paths=["foo"], source="HEAD")
+
+        with open(self._foo_path) as f:
+            content = f.read()
+        self.assertEqual("original\n", content)
+
+    def test_restore_staged_from_head(self) -> None:
+        # Modify and stage the file
+        with open(self._foo_path, "w") as f:
+            f.write("staged\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Verify it's staged
+        status = list(porcelain.status(self.repo))
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
+        )
+
+        # Restore staged from HEAD
+        porcelain.restore(self.repo, paths=["foo"], staged=True, worktree=False)
+
+        # Verify it's no longer staged
+        status = list(porcelain.status(self.repo))
+        # Now it should show as unstaged modification
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+        )
+
+    def test_restore_both_staged_and_worktree(self) -> None:
+        # Modify and stage the file
+        with open(self._foo_path, "w") as f:
+            f.write("staged\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Now modify it again in worktree
+        with open(self._foo_path, "w") as f:
+            f.write("worktree\n")
+
+        # Restore both from HEAD
+        porcelain.restore(self.repo, paths=["foo"], staged=True, worktree=True)
+
+        # Verify content is restored
+        with open(self._foo_path) as f:
+            content = f.read()
+        self.assertEqual("original\n", content)
+
+        # Verify nothing is staged
+        status = list(porcelain.status(self.repo))
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
+
+    def test_restore_nonexistent_path(self) -> None:
+        with self.assertRaises(porcelain.CheckoutError):
+            porcelain.restore(self.repo, paths=["nonexistent"])
+
+
+class SwitchTests(PorcelainTestCase):
+    """Tests for the switch command."""
+
+    def setUp(self) -> None:
+        super().setUp()
+        self._sha, self._foo_path = _commit_file_with_content(
+            self.repo, "foo", "hello\n"
+        )
+        porcelain.branch_create(self.repo, "dev")
+
+    def test_switch_to_existing_branch(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+        porcelain.switch(self.repo, "dev")
+        self.assertEqual(b"dev", porcelain.active_branch(self.repo))
+
+    def test_switch_to_non_existing_branch(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+        with self.assertRaises(KeyError):
+            porcelain.switch(self.repo, "nonexistent")
+
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+    def test_switch_with_create(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+        porcelain.switch(self.repo, "master", create="feature")
+        self.assertEqual(b"feature", porcelain.active_branch(self.repo))
+
+    def test_switch_with_detach(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+        porcelain.switch(self.repo, self._sha.decode(), detach=True)
+        # In detached HEAD state, active_branch raises IndexError
+        with self.assertRaises(IndexError):
+            porcelain.active_branch(self.repo)
+
+    def test_switch_with_uncommitted_changes(self) -> None:
+        # Modify the file
+        with open(self._foo_path, "a") as f:
+            f.write("new content\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Switch should fail due to uncommitted changes
+        with self.assertRaises(porcelain.CheckoutError):
+            porcelain.switch(self.repo, "dev")
+
+        # Should still be on master
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+    def test_switch_with_force(self) -> None:
+        # Modify the file
+        with open(self._foo_path, "a") as f:
+            f.write("new content\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Force switch should work
+        porcelain.switch(self.repo, "dev", force=True)
+        self.assertEqual(b"dev", porcelain.active_branch(self.repo))
+
+    def test_switch_to_commit_without_detach(self) -> None:
+        # Switching to a commit SHA without --detach should fail
+        with self.assertRaises(porcelain.CheckoutError):
+            porcelain.switch(self.repo, self._sha.decode())
+
+
 class GeneralCheckoutTests(PorcelainTestCase):
 class GeneralCheckoutTests(PorcelainTestCase):
     """Tests for the general checkout function that handles branches, tags, and commits."""
     """Tests for the general checkout function that handles branches, tags, and commits."""
 
 
@@ -6071,9 +6222,11 @@ class StatusTests(PorcelainTestCase):
             {"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged
             {"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged
         )
         )
         # File committed with CRLF before autocrlf=input was enabled
         # File committed with CRLF before autocrlf=input was enabled
-        # will appear as unstaged because working tree is normalized to LF
-        # during comparison but index still has CRLF
-        self.assertListEqual(results.unstaged, [b"crlf-exists"])
+        # will NOT appear as unstaged because stat matching optimization
+        # skips filter processing when file hasn't been modified.
+        # This matches Git's behavior, which uses stat matching to avoid
+        # expensive filter operations. Git shows a warning instead.
+        self.assertListEqual(results.unstaged, [])
         self.assertListEqual(results.untracked, [])
         self.assertListEqual(results.untracked, [])
 
 
     def test_status_autocrlf_input_modified(self) -> None:
     def test_status_autocrlf_input_modified(self) -> None:

+ 12 - 1
tests/test_porcelain_cherry_pick.py

@@ -21,12 +21,13 @@
 
 
 """Tests for porcelain cherry-pick functionality."""
 """Tests for porcelain cherry-pick functionality."""
 
 
+import importlib.util
 import os
 import os
 import tempfile
 import tempfile
 
 
 from dulwich import porcelain
 from dulwich import porcelain
 
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 
 
 class PorcelainCherryPickTests(TestCase):
 class PorcelainCherryPickTests(TestCase):
@@ -107,6 +108,11 @@ class PorcelainCherryPickTests(TestCase):
 
 
     def test_cherry_pick_conflict(self):
     def test_cherry_pick_conflict(self):
         """Test cherry-pick with conflicts."""
         """Test cherry-pick with conflicts."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
             porcelain.init(tmpdir)
             porcelain.init(tmpdir)
@@ -164,6 +170,11 @@ class PorcelainCherryPickTests(TestCase):
 
 
     def test_cherry_pick_abort(self):
     def test_cherry_pick_abort(self):
         """Test aborting a cherry-pick."""
         """Test aborting a cherry-pick."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
             porcelain.init(tmpdir)
             porcelain.init(tmpdir)

+ 18 - 1
tests/test_porcelain_merge.py

@@ -21,6 +21,7 @@
 
 
 """Tests for porcelain merge functionality."""
 """Tests for porcelain merge functionality."""
 
 
+import importlib.util
 import os
 import os
 import tempfile
 import tempfile
 import unittest
 import unittest
@@ -28,7 +29,7 @@ import unittest
 from dulwich import porcelain
 from dulwich import porcelain
 from dulwich.repo import Repo
 from dulwich.repo import Repo
 
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 
 
 class PorcelainMergeTests(TestCase):
 class PorcelainMergeTests(TestCase):
@@ -166,6 +167,10 @@ class PorcelainMergeTests(TestCase):
                 self.assertEqual(f.read(), "Master file2\n")
                 self.assertEqual(f.read(), "Master file2\n")
 
 
     def test_merge_with_conflicts(self):
     def test_merge_with_conflicts(self):
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         """Test merge with conflicts."""
         """Test merge with conflicts."""
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
@@ -339,6 +344,10 @@ class PorcelainMergeTests(TestCase):
                 self.assertEqual(f.read(), "Branch3 modified file3\n")
                 self.assertEqual(f.read(), "Branch3 modified file3\n")
 
 
     def test_octopus_merge_with_conflicts(self):
     def test_octopus_merge_with_conflicts(self):
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         """Test that octopus merge refuses to proceed with conflicts."""
         """Test that octopus merge refuses to proceed with conflicts."""
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
@@ -520,6 +529,10 @@ class PorcelainMergeTreeTests(TestCase):
             self.assertIn(b"file3.txt", merged_tree)
             self.assertIn(b"file3.txt", merged_tree)
 
 
     def test_merge_tree_with_conflicts(self):
     def test_merge_tree_with_conflicts(self):
         """Test merge_tree with conflicts."""
         """Test merge_tree with conflicts."""
+        # Skip if the optional merge3 module is not available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo
@@ -607,6 +620,10 @@ class PorcelainMergeTreeTests(TestCase):
             self.assertIn(b"file2.txt", merged_tree)
             self.assertIn(b"file2.txt", merged_tree)
 
 
     def test_merge_tree_with_tree_objects(self):
     def test_merge_tree_with_tree_objects(self):
         """Test merge_tree with tree objects instead of commits."""
         """Test merge_tree with tree objects instead of commits."""
+        # Skip if the optional merge3 module is not available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             # Initialize repo

+ 6 - 1
tests/test_rebase.py

@@ -21,6 +21,7 @@
 
 
 """Tests for dulwich.rebase."""
 """Tests for dulwich.rebase."""
 
 
+import importlib.util
 import os
 import os
 import tempfile
 import tempfile
 
 
@@ -38,7 +39,7 @@ from dulwich.rebase import (
 from dulwich.repo import MemoryRepo, Repo
 from dulwich.repo import MemoryRepo, Repo
 from dulwich.tests.utils import make_commit
 from dulwich.tests.utils import make_commit
 
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 
 
 class RebaserTestCase(TestCase):
 class RebaserTestCase(TestCase):
@@ -163,6 +164,10 @@ class RebaserTestCase(TestCase):
         self.assertIn(b"file.txt", new_tree)
         self.assertIn(b"file.txt", new_tree)
 
 
     def test_rebase_with_conflicts(self):
     def test_rebase_with_conflicts(self):
         """Test rebase with merge conflicts."""
         """Test rebase with merge conflicts."""
+        # Skip if the optional merge3 module is not available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         self._setup_initial_commit()
         self._setup_initial_commit()
         # Create feature branch with conflicting change
         # Create feature branch with conflicting change

+ 342 - 0
tests/test_repository.py

@@ -28,6 +28,7 @@ import shutil
 import stat
 import stat
 import sys
 import sys
 import tempfile
 import tempfile
+import time
 import warnings
 import warnings
 
 
 from dulwich import errors, objects, porcelain
 from dulwich import errors, objects, porcelain
@@ -2080,3 +2081,344 @@ class RepoConfigIncludeIfTests(TestCase):
             config = r.get_config()
             config = r.get_config()
             self.assertEqual(b"true", config.get((b"core",), b"autocrlf"))
             self.assertEqual(b"true", config.get((b"core",), b"autocrlf"))
             r.close()
             r.close()
+
+
+@skipIf(sys.platform == "win32", "Windows does not support Unix file permissions")
+class SharedRepositoryTests(TestCase):
+    """Tests for core.sharedRepository functionality."""
+
+    def setUp(self):
+        super().setUp()
+        self._orig_umask = os.umask(0o022)
+
+    def tearDown(self):
+        os.umask(self._orig_umask)
+        super().tearDown()
+
+    def _get_file_mode(self, path):
+        """Get the file mode bits (without file type bits)."""
+        return stat.S_IMODE(os.stat(path).st_mode)
+
+    def _check_permissions(self, repo, expected_file_mode, expected_dir_mode):
+        """Check that repository files and directories have expected permissions."""
+        objects_dir = os.path.join(repo.commondir(), "objects")
+
+        # Check objects directory
+        actual_dir_mode = self._get_file_mode(objects_dir)
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"objects dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+        # Check pack directory
+        pack_dir = os.path.join(objects_dir, "pack")
+        actual_dir_mode = self._get_file_mode(pack_dir)
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"pack dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+        # Check info directory
+        info_dir = os.path.join(objects_dir, "info")
+        actual_dir_mode = self._get_file_mode(info_dir)
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"info dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+    def test_init_bare_shared_group(self):
+        """Test initializing bare repo with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Expected permissions for group sharing
+        expected_dir_mode = 0o2775  # setgid + rwxrwxr-x
+        expected_file_mode = 0o664  # rw-rw-r--
+
+        self._check_permissions(repo, expected_file_mode, expected_dir_mode)
+
+    def test_init_bare_shared_all(self):
+        """Test initializing bare repo with sharedRepository=all."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="all")
+        self.addCleanup(repo.close)
+
+        # Expected permissions for world sharing
+        expected_dir_mode = 0o2777  # setgid + rwxrwxrwx
+        expected_file_mode = 0o666  # rw-rw-rw-
+
+        self._check_permissions(repo, expected_file_mode, expected_dir_mode)
+
+    def test_init_bare_shared_umask(self):
+        """Test initializing bare repo with sharedRepository=umask (default)."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="umask")
+        self.addCleanup(repo.close)
+
+        # With umask, no special permissions should be set
+        # The actual permissions will depend on the umask, but we can
+        # at least verify that setgid is NOT set
+        objects_dir = os.path.join(repo.commondir(), "objects")
+        actual_mode = os.stat(objects_dir).st_mode
+
+        # Verify setgid bit is NOT set
+        self.assertEqual(0, actual_mode & stat.S_ISGID)
+
+    def test_loose_object_permissions_group(self):
+        """Test that loose objects get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create a blob object
+        blob = objects.Blob.from_string(b"test content")
+        repo.object_store.add_object(blob)
+
+        # Find the object file
+        obj_path = repo.object_store._get_shafile_path(blob.id)
+
+        # Check file permissions
+        actual_mode = self._get_file_mode(obj_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"loose object mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+        # Check directory permissions
+        obj_dir = os.path.dirname(obj_path)
+        actual_dir_mode = self._get_file_mode(obj_dir)
+        expected_dir_mode = 0o2775  # setgid + rwxrwxr-x
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"object dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+    def test_loose_object_permissions_all(self):
+        """Test that loose objects get correct permissions with sharedRepository=all."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="all")
+        self.addCleanup(repo.close)
+
+        # Create a blob object
+        blob = objects.Blob.from_string(b"test content")
+        repo.object_store.add_object(blob)
+
+        # Find the object file
+        obj_path = repo.object_store._get_shafile_path(blob.id)
+
+        # Check file permissions
+        actual_mode = self._get_file_mode(obj_path)
+        expected_mode = 0o666  # rw-rw-rw-
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"loose object mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_pack_file_permissions_group(self):
+        """Test that pack files get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create some objects
+        blobs = [
+            objects.Blob.from_string(f"test content {i}".encode()) for i in range(5)
+        ]
+        repo.object_store.add_objects([(blob, None) for blob in blobs])
+
+        # Find the pack files
+        pack_dir = os.path.join(repo.commondir(), "objects", "pack")
+        pack_files = [f for f in os.listdir(pack_dir) if f.endswith(".pack")]
+        self.assertGreater(len(pack_files), 0, "No pack files created")
+
+        # Check pack file permissions
+        pack_path = os.path.join(pack_dir, pack_files[0])
+        actual_mode = self._get_file_mode(pack_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"pack file mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_pack_index_permissions_group(self):
+        """Test that pack index files get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create some objects
+        blobs = [
+            objects.Blob.from_string(f"test content {i}".encode()) for i in range(5)
+        ]
+        repo.object_store.add_objects([(blob, None) for blob in blobs])
+
+        # Find the pack index files
+        pack_dir = os.path.join(repo.commondir(), "objects", "pack")
+        idx_files = [f for f in os.listdir(pack_dir) if f.endswith(".idx")]
+        self.assertGreater(len(idx_files), 0, "No pack index files created")
+
+        # Check pack index file permissions
+        idx_path = os.path.join(pack_dir, idx_files[0])
+        actual_mode = self._get_file_mode(idx_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"pack index mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_index_file_permissions_group(self):
+        """Test that index file gets correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        # Create non-bare repo (index only exists in non-bare repos)
+        repo = Repo.init(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Make a change to trigger index write
+        blob = objects.Blob.from_string(b"test content")
+        repo.object_store.add_object(blob)
+        test_file = os.path.join(tmp_dir, "test.txt")
+        with open(test_file, "wb") as f:
+            f.write(b"test content")
+        # Stage the file
+        porcelain.add(repo, [test_file])
+
+        # Check index file permissions
+        index_path = repo.index_path()
+        actual_mode = self._get_file_mode(index_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"index file mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_existing_repo_respects_config(self):
+        """Test that opening an existing repo respects core.sharedRepository config."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        # Create repo with shared=group
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        repo.close()
+
+        # Reopen the repo
+        repo = Repo(tmp_dir)
+        self.addCleanup(repo.close)
+
+        # Add an object and check permissions
+        blob = objects.Blob.from_string(b"test content after reopen")
+        repo.object_store.add_object(blob)
+
+        obj_path = repo.object_store._get_shafile_path(blob.id)
+        actual_mode = self._get_file_mode(obj_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"loose object mode after reopen: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_reflog_permissions_group(self):
+        """Test that reflog files get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create a commit to trigger reflog creation
+        blob = objects.Blob.from_string(b"test content")
+        tree = objects.Tree()
+        tree.add(b"test.txt", 0o100644, blob.id)
+
+        c = objects.Commit()
+        c.tree = tree.id
+        c.author = c.committer = b"Test <test@example.com>"
+        c.author_time = c.commit_time = int(time.time())
+        c.author_timezone = c.commit_timezone = 0
+        c.encoding = b"UTF-8"
+        c.message = b"Test commit"
+
+        repo.object_store.add_object(blob)
+        repo.object_store.add_object(tree)
+        repo.object_store.add_object(c)
+
+        # Update ref to trigger reflog creation
+        repo.refs.set_if_equals(
+            b"refs/heads/master", None, c.id, message=b"commit: initial commit"
+        )
+
+        # Check reflog file permissions
+        reflog_path = os.path.join(repo.controldir(), "logs", "refs", "heads", "master")
+        self.assertTrue(os.path.exists(reflog_path), "Reflog file should exist")
+
+        actual_mode = self._get_file_mode(reflog_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"reflog file mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+        # Check reflog directory permissions
+        reflog_dir = os.path.dirname(reflog_path)
+        actual_dir_mode = self._get_file_mode(reflog_dir)
+        expected_dir_mode = 0o2775  # setgid + rwxrwxr-x
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"reflog dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )