Browse Source

Merge branch 'master' into bitmap-support

Jelmer Vernooij 1 month ago
parent
commit
da53eaf766

+ 19 - 3
NEWS

@@ -1,5 +1,18 @@
 0.25.0	UNRELEASED
 
+ * Implement support for ``core.sharedRepository`` configuration option.
+   Repository files and directories now respect shared repository permissions
+   for group-writable or world-writable repositories. Affects loose objects,
+   pack files, pack indexes, index files, and other git metadata files.
+   (Jelmer Vernooij, #1804)
+
+ * Optimize status performance by using stat matching to skip reading
+   and filtering unchanged files. This provides significant performance
+   improvements for repositories with LFS filters, where filter operations can
+   be very expensive. The optimization matches Git's behavior of using mtime
+   and size comparisons to determine if files need processing.
+   (Jelmer Vernooij, #1999)
+
  * Drop support for Python 3.9. (Jelmer Vernooij)
 
  * Add support for pack bitmap indexes for fast reachability queries.
@@ -22,7 +35,10 @@
    Dulwich version, and installed dependencies with their versions.
    (Jelmer Vernooij, #1835)
 
-024.10	2025-11-10
+ * Add basic ``dulwich restore`` and ``dulwich switch``
+   commands. (Jelmer Vernooij, #1777)
+
+0.24.10	2025-11-10
 
  * Fix compatibility with python 3.9. (Jelmer Vernooij, #1991)
 
@@ -134,7 +150,7 @@
    commit metadata. Supports automatic upstream detection from tracking branches
    and verbose mode to display commit messages. (Jelmer Vernooij, #1782)
 
- * Add support for ``git mailsplit`` command to split mbox files and Maildir
+ * Add support for ``dulwich mailsplit`` command to split mbox files and Maildir
    into individual message files. Supports mboxrd format, custom precision,
    and all standard git mailsplit options. (Jelmer Vernooij, #1840)
 
@@ -490,7 +506,7 @@
 
  * Add ``merge-tree`` plumbing command to ``dulwich.porcelain`` and CLI.
    This command performs three-way tree merges without touching the working
-   directory or creating commits, similar to ``git merge-tree``. It outputs
+   directory or creating commits, similar to ``dulwich merge-tree``. It outputs
    the merged tree SHA and lists any conflicted paths. (Jelmer Vernooij)
 
  * Add ``porcelain.count_objects()`` function to count unpacked objects and

+ 3 - 4
SECURITY.md

@@ -2,11 +2,10 @@
 
 ## Supported Versions
 
-| Version  | Supported          |
+| Version  | Supported         |
 | -------- | ------------------ |
-| 0.21.x   | :white_check_mark: |
-| 0.20.x   | :white_check_mark: |
-| < 0.20.x | :x:                |
+| 0.24.x   | :white_check_mark: |
+| < 0.24.x | :x:                |
 
 ## Reporting a Vulnerability
 

+ 10 - 0
dulwich/_typing.py

@@ -0,0 +1,10 @@
+"""Common type definitions for Dulwich."""
+
+import sys
+
+if sys.version_info >= (3, 12):
+    from collections.abc import Buffer
+else:
+    Buffer = bytes | bytearray | memoryview
+
+__all__ = ["Buffer"]

+ 3 - 5
dulwich/bitmap.py

@@ -33,7 +33,7 @@ import struct
 from collections import deque
 from collections.abc import Callable, Iterable, Iterator
 from io import BytesIO
-from typing import IO, TYPE_CHECKING, Optional
+from typing import IO, TYPE_CHECKING
 
 from .file import GitFile
 from .objects import Blob, Commit, Tag, Tree
@@ -466,7 +466,7 @@ class PackBitmap:
 
 def read_bitmap(
     filename: str | os.PathLike[str],
-    pack_index: Optional["PackIndex"] = None,
+    pack_index: "PackIndex | None" = None,
 ) -> PackBitmap:
     """Read a bitmap index file.
 
@@ -485,9 +485,7 @@ def read_bitmap(
         return read_bitmap_file(f, pack_index=pack_index)
 
 
-def read_bitmap_file(
-    f: IO[bytes], pack_index: Optional["PackIndex"] = None
-) -> PackBitmap:
+def read_bitmap_file(f: IO[bytes], pack_index: "PackIndex | None" = None) -> PackBitmap:
     """Read bitmap data from a file object.
 
     Args:

+ 114 - 5
dulwich/cli.py

@@ -51,12 +51,8 @@ from typing import (
     TextIO,
 )
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
 from dulwich import porcelain
+from dulwich._typing import Buffer
 
 from .bundle import Bundle, create_bundle_from_repo, read_bundle, write_bundle
 from .client import get_transport_and_path
@@ -3850,6 +3846,117 @@ class cmd_checkout(Command):
         return 0
 
 
+class cmd_restore(Command):
+    """Restore working tree files."""
+
+    def run(self, args: Sequence[str]) -> int | None:
+        """Execute the restore command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser()
+        parser.add_argument(
+            "paths",
+            nargs="+",
+            type=str,
+            help="Paths to restore",
+        )
+        parser.add_argument(
+            "-s",
+            "--source",
+            type=str,
+            help="Restore from a specific commit (default: HEAD for --staged, index for worktree)",
+        )
+        parser.add_argument(
+            "--staged",
+            action="store_true",
+            help="Restore files in the index",
+        )
+        parser.add_argument(
+            "--worktree",
+            action="store_true",
+            help="Restore files in the working tree",
+        )
+        parsed_args = parser.parse_args(args)
+
+        # If neither --staged nor --worktree is specified, default to --worktree
+        if not parsed_args.staged and not parsed_args.worktree:
+            worktree = True
+            staged = False
+        else:
+            worktree = parsed_args.worktree
+            staged = parsed_args.staged
+
+        try:
+            porcelain.restore(
+                ".",
+                paths=parsed_args.paths,
+                source=parsed_args.source,
+                staged=staged,
+                worktree=worktree,
+            )
+        except porcelain.CheckoutError as e:
+            sys.stderr.write(f"{e}\n")
+            return 1
+        return 0
+
+
+class cmd_switch(Command):
+    """Switch branches."""
+
+    def run(self, args: Sequence[str]) -> int | None:
+        """Execute the switch command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser()
+        parser.add_argument(
+            "target",
+            type=str,
+            help="Branch or commit to switch to",
+        )
+        parser.add_argument(
+            "-c",
+            "--create",
+            type=str,
+            help="Create a new branch at the target and switch to it",
+        )
+        parser.add_argument(
+            "-f",
+            "--force",
+            action="store_true",
+            help="Force switch even if there are local changes",
+        )
+        parser.add_argument(
+            "-d",
+            "--detach",
+            action="store_true",
+            help="Switch to a commit in detached HEAD state",
+        )
+        parsed_args = parser.parse_args(args)
+
+        if not parsed_args.target:
+            logger.error(
+                "Usage: dulwich switch TARGET [-c NEW_BRANCH] [--force] [--detach]"
+            )
+            return 1
+
+        try:
+            porcelain.switch(
+                ".",
+                target=parsed_args.target,
+                create=parsed_args.create,
+                force=parsed_args.force,
+                detach=parsed_args.detach,
+            )
+        except porcelain.CheckoutError as e:
+            sys.stderr.write(f"{e}\n")
+            return 1
+        return 0
+
+
 class cmd_stash_list(Command):
     """List stash entries."""
 
@@ -6559,6 +6666,7 @@ commands = {
     "repack": cmd_repack,
     "replace": cmd_replace,
     "reset": cmd_reset,
+    "restore": cmd_restore,
     "revert": cmd_revert,
     "rev-list": cmd_rev_list,
     "rm": cmd_rm,
@@ -6570,6 +6678,7 @@ commands = {
     "status": cmd_status,
     "stripspace": cmd_stripspace,
     "shortlog": cmd_shortlog,
+    "switch": cmd_switch,
     "symbolic-ref": cmd_symbolic_ref,
     "submodule": cmd_submodule,
     "tag": cmd_tag,

+ 9 - 11
dulwich/client.py

@@ -55,8 +55,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     ClassVar,
-    Optional,
-    Union,
 )
 from urllib.parse import ParseResult, urljoin, urlparse, urlunparse, urlunsplit
 from urllib.parse import quote as urlquote
@@ -1110,7 +1108,7 @@ class GitClient:
         self,
         path: bytes,
         target: BaseRepo,
-        determine_wants: Optional["DetermineWantsFunc"] = None,
+        determine_wants: "DetermineWantsFunc | None" = None,
         progress: Callable[[bytes], None] | None = None,
         depth: int | None = None,
         ref_prefix: Sequence[Ref] | None = None,
@@ -2304,7 +2302,7 @@ class LocalGitClient(GitClient):
         self,
         path: bytes,
         target: BaseRepo,
-        determine_wants: Optional["DetermineWantsFunc"] = None,
+        determine_wants: "DetermineWantsFunc | None" = None,
         progress: Callable[[bytes], None] | None = None,
         depth: int | None = None,
         ref_prefix: Sequence[bytes] | None = None,
@@ -2632,7 +2630,7 @@ class BundleClient(GitClient):
         self,
         path: bytes,
         target: BaseRepo,
-        determine_wants: Optional["DetermineWantsFunc"] = None,
+        determine_wants: "DetermineWantsFunc | None" = None,
         progress: Callable[[bytes], None] | None = None,
         depth: int | None = None,
         ref_prefix: Sequence[Ref] | None = None,
@@ -3221,7 +3219,7 @@ def default_urllib3_manager(
     base_url: str | None = None,
     timeout: float | None = None,
     cert_reqs: str | None = None,
-) -> Union["urllib3.ProxyManager", "urllib3.PoolManager"]:
+) -> "urllib3.ProxyManager | urllib3.PoolManager":
     """Return urllib3 connection pool manager.
 
     Honour detected proxy configurations.
@@ -3989,7 +3987,7 @@ class AbstractHttpGitClient(GitClient):
         username: str | None = None,
         password: str | None = None,
         config: Config | None = None,
-        pool_manager: Optional["urllib3.PoolManager"] = None,
+        pool_manager: "urllib3.PoolManager | None" = None,
     ) -> "AbstractHttpGitClient":
         """Create an AbstractHttpGitClient from a parsed URL.
 
@@ -4084,7 +4082,7 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
         self,
         base_url: str,
         dumb: bool | None = None,
-        pool_manager: Optional["urllib3.PoolManager"] = None,
+        pool_manager: "urllib3.PoolManager | None" = None,
         config: Config | None = None,
         username: str | None = None,
         password: str | None = None,
@@ -4229,7 +4227,7 @@ def get_transport_and_path_from_url(
     password: str | None = None,
     key_filename: str | None = None,
     ssh_command: str | None = None,
-    pool_manager: Optional["urllib3.PoolManager"] = None,
+    pool_manager: "urllib3.PoolManager | None" = None,
 ) -> tuple[GitClient, str]:
     """Obtain a git client from a URL.
 
@@ -4282,7 +4280,7 @@ def _get_transport_and_path_from_url(
     password: str | None = None,
     key_filename: str | None = None,
     ssh_command: str | None = None,
-    pool_manager: Optional["urllib3.PoolManager"] = None,
+    pool_manager: "urllib3.PoolManager | None" = None,
 ) -> tuple[GitClient, str]:
     parsed = urlparse(url)
     if parsed.scheme == "git":
@@ -4377,7 +4375,7 @@ def get_transport_and_path(
     password: str | None = None,
     key_filename: str | None = None,
     ssh_command: str | None = None,
-    pool_manager: Optional["urllib3.PoolManager"] = None,
+    pool_manager: "urllib3.PoolManager | None" = None,
 ) -> tuple[GitClient, str]:
     """Obtain a git client from a URL.
 

+ 1 - 3
dulwich/config.py

@@ -45,7 +45,6 @@ from typing import (
     IO,
     Generic,
     TypeVar,
-    Union,
     overload,
 )
 
@@ -197,8 +196,7 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping[K, V], Generic[K, V]):
     @classmethod
     def make(
         cls,
-        dict_in: Union[MutableMapping[K, V], "CaseInsensitiveOrderedMultiDict[K, V]"]
-        | None = None,
+        dict_in: "MutableMapping[K, V] | CaseInsensitiveOrderedMultiDict[K, V] | None" = None,
         default_factory: Callable[[], V] | None = None,
     ) -> "CaseInsensitiveOrderedMultiDict[K, V]":
         """Create a CaseInsensitiveOrderedMultiDict from an existing mapping.

+ 3 - 3
dulwich/contrib/requests_vendor.py

@@ -33,7 +33,7 @@ This implementation is experimental and does not have any tests.
 
 from collections.abc import Callable, Iterator
 from io import BytesIO
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any
 
 if TYPE_CHECKING:
     from ..config import ConfigFile
@@ -56,7 +56,7 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
         self,
         base_url: str,
         dumb: bool | None = None,
-        config: Optional["ConfigFile"] = None,
+        config: "ConfigFile | None" = None,
         username: str | None = None,
         password: str | None = None,
         thin_packs: bool = True,
@@ -133,7 +133,7 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
         return resp, read
 
 
-def get_session(config: Optional["ConfigFile"]) -> Session:
+def get_session(config: "ConfigFile | None") -> Session:
     """Create a requests session with Git configuration.
 
     Args:

+ 3 - 3
dulwich/contrib/swift.py

@@ -39,7 +39,7 @@ import zlib
 from collections.abc import Callable, Iterator, Mapping
 from configparser import ConfigParser
 from io import BytesIO
-from typing import Any, BinaryIO, Optional, cast
+from typing import Any, BinaryIO, cast
 
 from geventhttpclient import HTTPClient
 
@@ -231,7 +231,7 @@ def pack_info_create(pack_data: "PackData", pack_index: "PackIndex") -> bytes:
 
 def load_pack_info(
     filename: str,
-    scon: Optional["SwiftConnector"] = None,
+    scon: "SwiftConnector | None" = None,
     file: BinaryIO | None = None,
 ) -> dict[str, Any] | None:
     """Load pack info from Swift or file.
@@ -821,7 +821,7 @@ class SwiftObjectStore(PackBasedObjectStore):
         """
         f = BytesIO()
 
-        def commit() -> Optional["SwiftPack"]:
+        def commit() -> "SwiftPack | None":
             """Commit the pack to Swift storage.
 
             Returns:

+ 1 - 6
dulwich/diff.py

@@ -48,15 +48,10 @@ import io
 import logging
 import os
 import stat
-import sys
 from collections.abc import Iterable, Sequence
 from typing import BinaryIO
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
+from ._typing import Buffer
 from .index import ConflictedIndexEntry, commit_index
 from .object_store import iter_tree_contents
 from .objects import S_ISGITLINK, Blob, Commit

+ 3 - 3
dulwich/diff_tree.py

@@ -27,7 +27,7 @@ from collections.abc import Callable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
 from io import BytesIO
 from itertools import chain
-from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypeVar
+from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar
 
 from .object_store import BaseObjectStore
 from .objects import S_ISGITLINK, ObjectID, ShaFile, Tree, TreeEntry
@@ -260,7 +260,7 @@ def tree_changes(
     tree1_id: ObjectID | None,
     tree2_id: ObjectID | None,
     want_unchanged: bool = False,
-    rename_detector: Optional["RenameDetector"] = None,
+    rename_detector: "RenameDetector | None" = None,
     include_trees: bool = False,
     change_type_same: bool = False,
     paths: Sequence[bytes] | None = None,
@@ -347,7 +347,7 @@ def tree_changes_for_merge(
     store: BaseObjectStore,
     parent_tree_ids: Sequence[ObjectID],
     tree_id: ObjectID,
-    rename_detector: Optional["RenameDetector"] = None,
+    rename_detector: "RenameDetector | None" = None,
 ) -> Iterator[list[TreeChange | None]]:
     """Get the tree changes for a merge tree relative to all its parents.
 

+ 2 - 2
dulwich/dumb.py

@@ -26,7 +26,7 @@ import tempfile
 import zlib
 from collections.abc import Callable, Iterator, Mapping, Sequence
 from io import BytesIO
-from typing import Any, Optional
+from typing import Any
 from urllib.parse import urljoin
 
 from .errors import NotGitRepository, ObjectFormatException
@@ -340,7 +340,7 @@ class DumbHTTPObjectStore(BaseObjectStore):
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[[str], None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store."""
         raise NotImplementedError("Cannot add objects to dumb HTTP repository")
 

+ 1 - 4
dulwich/file.py

@@ -28,10 +28,7 @@ from collections.abc import Iterable, Iterator
 from types import TracebackType
 from typing import IO, Any, ClassVar, Literal, overload
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
+from ._typing import Buffer
 
 
 def ensure_dir_exists(

+ 6 - 6
dulwich/filters.py

@@ -25,7 +25,7 @@ import logging
 import subprocess
 import threading
 from collections.abc import Callable
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 from typing import Protocol as TypingProtocol
 
 from .attrs import GitAttributes
@@ -140,7 +140,7 @@ class ProcessFilterDriver:
         self._capabilities: set[bytes] = set()
         self._process_lock = threading.Lock()
 
-    def _get_or_start_process(self) -> Optional["Protocol"]:
+    def _get_or_start_process(self) -> "Protocol | None":
         """Get or start the long-running process filter."""
         if self._process is None and self.process_cmd:
             from .errors import GitProtocolError, HangupException
@@ -602,8 +602,8 @@ class FilterRegistry:
 
     def __init__(
         self,
-        config: Optional["StackedConfig"] = None,
-        repo: Optional["BaseRepo"] = None,
+        config: "StackedConfig | None" = None,
+        repo: "BaseRepo | None" = None,
     ) -> None:
         """Initialize FilterRegistry.
 
@@ -879,10 +879,10 @@ class FilterBlobNormalizer:
 
     def __init__(
         self,
-        config_stack: Optional["StackedConfig"],
+        config_stack: "StackedConfig | None",
         gitattributes: GitAttributes,
         filter_registry: FilterRegistry | None = None,
-        repo: Optional["BaseRepo"] = None,
+        repo: "BaseRepo | None" = None,
         filter_context: FilterContext | None = None,
     ) -> None:
         """Initialize FilterBlobNormalizer.

+ 5 - 5
dulwich/gc.py

@@ -1,12 +1,12 @@
 """Git garbage collection implementation."""
 
-import collections
 import logging
 import os
 import time
+from collections import deque
 from collections.abc import Callable
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 from dulwich.object_store import (
     BaseObjectStore,
@@ -54,7 +54,7 @@ def find_reachable_objects(
         Set of reachable object SHAs
     """
     reachable = set()
-    pending: collections.deque[ObjectID] = collections.deque()
+    pending: deque[ObjectID] = deque()
 
     # Start with all refs
     for ref in refs_container.allkeys():
@@ -313,7 +313,7 @@ def garbage_collect(
     return stats
 
 
-def should_run_gc(repo: "BaseRepo", config: Optional["Config"] = None) -> bool:
+def should_run_gc(repo: "BaseRepo", config: "Config | None" = None) -> bool:
     """Check if automatic garbage collection should run.
 
     Args:
@@ -372,7 +372,7 @@ def should_run_gc(repo: "BaseRepo", config: Optional["Config"] = None) -> bool:
 
 def maybe_auto_gc(
     repo: "Repo",
-    config: Optional["Config"] = None,
+    config: "Config | None" = None,
     progress: Callable[[str], None] | None = None,
 ) -> bool:
     """Run automatic garbage collection if needed.

+ 6 - 3
dulwich/hooks.py

@@ -226,9 +226,12 @@ class PostReceiveShellHook(ShellHook):
             out_data, err_data = p.communicate(in_data)
 
             if (p.returncode != 0) or err_data:
-                err_fmt = b"post-receive exit code: %d\n" + b"stdout:\n%s\nstderr:\n%s"
-                err_msg = err_fmt % (p.returncode, out_data, err_data)
-                raise HookError(err_msg.decode("utf-8", "backslashreplace"))
+                err_msg = (
+                    f"post-receive exit code: {p.returncode}\n"
+                    f"stdout:\n{out_data.decode('utf-8', 'backslashreplace')}\n"
+                    f"stderr:\n{err_data.decode('utf-8', 'backslashreplace')}"
+                )
+                raise HookError(err_msg)
             return out_data
         except OSError as err:
             raise HookError(repr(err)) from err

+ 2 - 2
dulwich/ignore.py

@@ -30,7 +30,7 @@ import os.path
 import re
 from collections.abc import Iterable, Sequence
 from contextlib import suppress
-from typing import TYPE_CHECKING, BinaryIO, Union
+from typing import TYPE_CHECKING, BinaryIO
 
 if TYPE_CHECKING:
     from .repo import Repo
@@ -38,7 +38,7 @@ if TYPE_CHECKING:
 from .config import Config, get_xdg_config_home_path
 
 
-def _pattern_to_str(pattern: Union["Pattern", bytes, str]) -> str:
+def _pattern_to_str(pattern: "Pattern | bytes | str") -> str:
     """Convert a pattern to string, handling both Pattern objects and raw patterns."""
     if isinstance(pattern, Pattern):
         pattern_data: bytes | str = pattern.pattern

+ 55 - 11
dulwich/index.py

@@ -44,8 +44,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Optional,
-    Union,
 )
 
 if TYPE_CHECKING:
@@ -71,7 +69,7 @@ from .objects import (
 from .pack import ObjectContainer, SHA1Reader, SHA1Writer
 
 # Type alias for recursive tree structure used in commit_tree
-TreeDict = dict[bytes, Union["TreeDict", tuple[int, bytes]]]
+TreeDict = dict[bytes, "TreeDict | tuple[int, bytes]"]
 
 # 2-bit stage (during merge)
 FLAG_STAGEMASK = 0x3000
@@ -1062,6 +1060,8 @@ class Index:
         read: bool = True,
         skip_hash: bool = False,
         version: int | None = None,
+        *,
+        file_mode: int | None = None,
     ) -> None:
         """Create an index object associated with the given filename.
 
@@ -1070,11 +1070,13 @@ class Index:
           read: Whether to initialize the index from the given file, should it exist.
           skip_hash: Whether to skip SHA1 hash when writing (for manyfiles feature)
           version: Index format version to use (None = auto-detect from file or use default)
+          file_mode: Optional file permission mask for shared repository
         """
         self._filename = os.fspath(filename)
         # TODO(jelmer): Store the version returned by read_index
         self._version = version
         self._skip_hash = skip_hash
+        self._file_mode = file_mode
         self._extensions: list[IndexExtension] = []
         self.clear()
         if read:
@@ -1095,7 +1097,8 @@ class Index:
 
     def write(self) -> None:
         """Write current contents of index to disk."""
-        f = GitFile(self._filename, "wb")
+        mask = self._file_mode if self._file_mode is not None else 0o644
+        f = GitFile(self._filename, "wb", mask=mask)
         try:
             # Filter out extensions with no meaningful data
             meaningful_extensions = []
@@ -1653,7 +1656,7 @@ if sys.platform == "win32":
 
         def __init__(self, errno: int, msg: str, filename: str | None) -> None:
             """Initialize WindowsSymlinkPermissionError."""
-            super(PermissionError, self).__init__(
+            super().__init__(
                 errno,
                 f"Unable to create symlink; do you have developer mode enabled? {msg}",
                 filename,
@@ -1888,7 +1891,7 @@ def build_index_from_tree(
         [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
     ]
     | None = None,
-    blob_normalizer: Optional["FilterBlobNormalizer"] = None,
+    blob_normalizer: "FilterBlobNormalizer | None" = None,
     tree_encoding: str = "utf-8",
 ) -> None:
     """Generate and materialize index from a tree.
@@ -2122,7 +2125,7 @@ def _remove_empty_parents(path: bytes, stop_at: bytes) -> None:
             # Directory doesn't exist - stop trying
             break
         except OSError as e:
-            if e.errno == errno.ENOTEMPTY:
+            if e.errno in (errno.ENOTEMPTY, errno.EEXIST):
                 # Directory not empty - stop trying
                 break
             raise
@@ -2159,7 +2162,7 @@ def _check_file_matches(
     entry_mode: int,
     current_stat: os.stat_result,
     honor_filemode: bool,
-    blob_normalizer: Optional["FilterBlobNormalizer"] = None,
+    blob_normalizer: "FilterBlobNormalizer | None" = None,
     tree_path: bytes | None = None,
 ) -> bool:
     """Check if a file on disk matches the expected git object.
@@ -2255,7 +2258,7 @@ def _transition_to_file(
         [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
     ]
     | None,
-    blob_normalizer: Optional["FilterBlobNormalizer"],
+    blob_normalizer: "FilterBlobNormalizer | None",
     tree_encoding: str = "utf-8",
 ) -> None:
     """Transition any type to regular file or symlink."""
@@ -2311,7 +2314,7 @@ def _transition_to_file(
             try:
                 os.rmdir(full_path)
             except OSError as e:
-                if e.errno == errno.ENOTEMPTY:
+                if e.errno in (errno.ENOTEMPTY, errno.EEXIST):
                     raise IsADirectoryError(
                         f"Cannot replace non-empty directory with file: {full_path!r}"
                     )
@@ -2515,7 +2518,7 @@ def update_working_tree(
     ]
     | None = None,
     force_remove_untracked: bool = False,
-    blob_normalizer: Optional["FilterBlobNormalizer"] = None,
+    blob_normalizer: "FilterBlobNormalizer | None" = None,
     tree_encoding: str = "utf-8",
     allow_overwrite_modified: bool = False,
 ) -> None:
@@ -2755,6 +2758,37 @@ def update_working_tree(
     index.write()
 
 
+def _stat_matches_entry(st: os.stat_result, entry: IndexEntry) -> bool:
+    """Check if filesystem stat matches index entry stat.
+
+    This is used to determine if a file might have changed without reading its content.
+    Git uses this optimization to avoid expensive filter operations on unchanged files.
+
+    Args:
+      st: Filesystem stat result
+      entry: Index entry to compare against
+    Returns: True if stat matches and file is likely unchanged
+    """
+    # Get entry mtime
+    if isinstance(entry.mtime, tuple):
+        entry_mtime_sec = entry.mtime[0]
+    else:
+        entry_mtime_sec = int(entry.mtime)
+
+    # Compare modification time (seconds only for now)
+    # Note: We use int() to compare only seconds, as nanosecond precision
+    # can vary across filesystems
+    if int(st.st_mtime) != entry_mtime_sec:
+        return False
+
+    # Compare file size
+    if st.st_size != entry.size:
+        return False
+
+    # If both mtime and size match, file is likely unchanged
+    return True
+
+
 def _check_entry_for_changes(
     tree_path: bytes,
     entry: IndexEntry | ConflictedIndexEntry,
@@ -2785,6 +2819,16 @@ def _check_entry_for_changes(
         if not stat.S_ISREG(st.st_mode) and not stat.S_ISLNK(st.st_mode):
             return None
 
+        # Optimization: If stat matches index entry (mtime and size unchanged),
+        # we can skip reading and filtering the file entirely. This is a significant
+        # performance improvement for repositories with many unchanged files.
+        # Even with filters (e.g., LFS), if the file hasn't been modified (stat unchanged),
+        # the filter output would be the same, so we can safely skip the expensive
+        # filter operation. This addresses performance issues with LFS repositories
+        # where filter operations can be very slow.
+        if _stat_matches_entry(st, entry):
+            return None
+
         blob = blob_from_path_and_stat(full_path, st)
 
         if filter_blob_callback is not None:

+ 9 - 11
dulwich/lfs.py

@@ -39,7 +39,7 @@ import os
 import tempfile
 from collections.abc import Iterable, Mapping
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, BinaryIO, Optional
+from typing import TYPE_CHECKING, Any, BinaryIO
 from urllib.parse import urljoin, urlparse
 from urllib.request import Request, urlopen
 
@@ -182,7 +182,7 @@ class LFSPointer:
         self.size = size
 
     @classmethod
-    def from_bytes(cls, data: bytes) -> Optional["LFSPointer"]:
+    def from_bytes(cls, data: bytes) -> "LFSPointer | None":
         """Parse LFS pointer from bytes.
 
         Returns None if data is not a valid LFS pointer.
@@ -243,9 +243,7 @@ class LFSPointer:
 class LFSFilterDriver:
     """LFS filter driver implementation."""
 
-    def __init__(
-        self, lfs_store: "LFSStore", config: Optional["Config"] = None
-    ) -> None:
+    def __init__(self, lfs_store: "LFSStore", config: "Config | None" = None) -> None:
         """Initialize LFSFilterDriver."""
         self.lfs_store = lfs_store
         self.config = config
@@ -328,13 +326,13 @@ class LFSFilterDriver:
         """Clean up any resources held by this filter driver."""
         # LFSFilterDriver doesn't hold any resources that need cleanup
 
-    def reuse(self, config: Optional["Config"], filter_name: str) -> bool:
+    def reuse(self, config: "Config | None", filter_name: str) -> bool:
         """Check if this filter driver should be reused with the given configuration."""
         # LFSFilterDriver is stateless and lightweight, no need to cache
         return False
 
 
-def _get_lfs_user_agent(config: Optional["Config"]) -> str:
+def _get_lfs_user_agent(config: "Config | None") -> str:
     """Get User-Agent string for LFS requests, respecting git config."""
     try:
         if config:
@@ -385,7 +383,7 @@ def _is_valid_lfs_url(url: str) -> bool:
 class LFSClient:
     """Base class for LFS client operations."""
 
-    def __init__(self, url: str, config: Optional["Config"] = None) -> None:
+    def __init__(self, url: str, config: "Config | None" = None) -> None:
         """Initialize LFS client.
 
         Args:
@@ -427,7 +425,7 @@ class LFSClient:
         raise NotImplementedError
 
     @classmethod
-    def from_config(cls, config: "Config") -> Optional["LFSClient"]:
+    def from_config(cls, config: "Config") -> "LFSClient | None":
         """Create LFS client from git config.
 
         Returns the appropriate subclass (HTTPLFSClient or FileLFSClient)
@@ -491,7 +489,7 @@ class LFSClient:
 class HTTPLFSClient(LFSClient):
     """LFS client for HTTP/HTTPS operations."""
 
-    def __init__(self, url: str, config: Optional["Config"] = None) -> None:
+    def __init__(self, url: str, config: "Config | None" = None) -> None:
         """Initialize HTTP LFS client.
 
         Args:
@@ -711,7 +709,7 @@ class HTTPLFSClient(LFSClient):
 class FileLFSClient(LFSClient):
     """LFS client for file:// URLs that accesses local filesystem."""
 
-    def __init__(self, url: str, config: Optional["Config"] = None) -> None:
+    def __init__(self, url: str, config: "Config | None" = None) -> None:
         """Initialize File LFS client.
 
         Args:

+ 2 - 2
dulwich/line_ending.py

@@ -139,7 +139,7 @@ Sources:
 
 import logging
 from collections.abc import Callable, Mapping
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any
 
 if TYPE_CHECKING:
     from .config import StackedConfig
@@ -176,7 +176,7 @@ class LineEndingFilter(FilterDriver):
 
     @classmethod
     def from_config(
-        cls, config: Optional["StackedConfig"], for_text_attr: bool = False
+        cls, config: "StackedConfig | None", for_text_attr: bool = False
     ) -> "LineEndingFilter":
         """Create a LineEndingFilter from git configuration.
 

+ 2 - 2
dulwich/lru_cache.py

@@ -23,7 +23,7 @@
 """A simple least-recently-used (LRU) cache."""
 
 from collections.abc import Callable, Iterable, Iterator
-from typing import Generic, Optional, TypeVar, cast
+from typing import Generic, TypeVar, cast
 
 _null_key = object()
 
@@ -37,7 +37,7 @@ class _LRUNode(Generic[K, V]):
 
     __slots__ = ("cleanup", "key", "next_key", "prev", "size", "value")
 
-    prev: Optional["_LRUNode[K, V]"]
+    prev: "_LRUNode[K, V] | None"
     next_key: K | object
     size: int | None
 

+ 1 - 1
dulwich/merge.py

@@ -24,7 +24,7 @@ def make_merge3(
     a: Sequence[bytes],
     b: Sequence[bytes],
     is_cherrypick: bool = False,
-    sequence_matcher: type["SequenceMatcherProtocol[bytes]"] | None = None,
+    sequence_matcher: "type[SequenceMatcherProtocol[bytes]] | None" = None,
 ) -> "merge3.Merge3[bytes]":
     """Return a Merge3 object, or raise ImportError if merge3 is not installed."""
     if merge3 is None:

+ 6 - 6
dulwich/notes.py

@@ -22,7 +22,7 @@
 
 import stat
 from collections.abc import Iterator, Sequence
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING
 
 from .objects import Blob, Tree
 
@@ -609,7 +609,7 @@ class Notes:
     def get_notes_ref(
         self,
         notes_ref: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes:
         """Get the notes reference to use.
 
@@ -631,7 +631,7 @@ class Notes:
         self,
         object_sha: bytes,
         notes_ref: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes | None:
         """Get the note for an object.
 
@@ -675,7 +675,7 @@ class Notes:
         author: bytes | None = None,
         committer: bytes | None = None,
         message: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes:
         """Set or update a note for an object.
 
@@ -759,7 +759,7 @@ class Notes:
         author: bytes | None = None,
         committer: bytes | None = None,
         message: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> bytes | None:
         """Remove a note for an object.
 
@@ -837,7 +837,7 @@ class Notes:
     def list_notes(
         self,
         notes_ref: bytes | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
     ) -> list[tuple[bytes, bytes]]:
         """List all notes in a notes ref.
 

+ 80 - 35
dulwich/object_store.py

@@ -36,7 +36,6 @@ from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     BinaryIO,
-    Optional,
     Protocol,
 )
 
@@ -353,7 +352,7 @@ class BaseObjectStore:
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store.
 
         Args:
@@ -387,7 +386,7 @@ class BaseObjectStore:
         want_unchanged: bool = False,
         include_trees: bool = False,
         change_type_same: bool = False,
-        rename_detector: Optional["RenameDetector"] = None,
+        rename_detector: "RenameDetector | None" = None,
         paths: Sequence[bytes] | None = None,
     ) -> Iterator[
         tuple[
@@ -659,7 +658,7 @@ class BaseObjectStore:
             if sha.startswith(prefix):
                 yield sha
 
-    def get_commit_graph(self) -> Optional["CommitGraph"]:
+    def get_commit_graph(self) -> "CommitGraph | None":
         """Get the commit graph for this object store.
 
         Returns:
@@ -719,7 +718,7 @@ class PackCapableObjectStore(BaseObjectStore, PackedObjectContainer):
         count: int,
         unpacked_objects: Iterator["UnpackedObject"],
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add pack data to this object store.
 
         Args:
@@ -849,7 +848,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         count: int,
         unpacked_objects: Iterator[UnpackedObject],
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add pack data to this object store.
 
         Args:
@@ -1310,7 +1309,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[[str], None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store.
 
         Args:
@@ -1329,12 +1328,13 @@ class DiskObjectStore(PackBasedObjectStore):
 
     path: str | os.PathLike[str]
     pack_dir: str | os.PathLike[str]
-    _alternates: list["BaseObjectStore"] | None
-    _commit_graph: Optional["CommitGraph"]
+    _alternates: "list[BaseObjectStore] | None"
+    _commit_graph: "CommitGraph | None"
 
     def __init__(
         self,
         path: str | os.PathLike[str],
+        *,
         loose_compression_level: int = -1,
         pack_compression_level: int = -1,
         pack_index_version: int | None = None,
@@ -1348,6 +1348,8 @@ class DiskObjectStore(PackBasedObjectStore):
         pack_write_bitmaps: bool = False,
         pack_write_bitmap_hash_cache: bool = True,
         pack_write_bitmap_lookup_table: bool = True,
+        file_mode: int | None = None,
+        dir_mode: int | None = None,
     ) -> None:
         """Open an object store.
 
@@ -1366,6 +1368,8 @@ class DiskObjectStore(PackBasedObjectStore):
           pack_write_bitmaps: whether to write bitmap indexes for packs
           pack_write_bitmap_hash_cache: whether to include name-hash cache in bitmaps
           pack_write_bitmap_lookup_table: whether to include lookup table in bitmaps
+          file_mode: File permission mask for shared repository
+          dir_mode: Directory permission mask for shared repository
         """
         super().__init__(
             pack_compression_level=pack_compression_level,
@@ -1387,6 +1391,8 @@ class DiskObjectStore(PackBasedObjectStore):
         self.pack_write_bitmaps = pack_write_bitmaps
         self.pack_write_bitmap_hash_cache = pack_write_bitmap_hash_cache
         self.pack_write_bitmap_lookup_table = pack_write_bitmap_lookup_table
+        self.file_mode = file_mode
+        self.dir_mode = dir_mode
 
         # Commit graph support - lazy loaded
         self._commit_graph = None
@@ -1402,13 +1408,20 @@ class DiskObjectStore(PackBasedObjectStore):
 
     @classmethod
     def from_config(
-        cls, path: str | os.PathLike[str], config: "Config"
+        cls,
+        path: str | os.PathLike[str],
+        config: "Config",
+        *,
+        file_mode: int | None = None,
+        dir_mode: int | None = None,
     ) -> "DiskObjectStore":
         """Create a DiskObjectStore from a configuration object.
 
         Args:
           path: Path to the object store directory
           config: Configuration object to read settings from
+          file_mode: Optional file permission mask for shared repository
+          dir_mode: Optional directory permission mask for shared repository
 
         Returns:
           New DiskObjectStore instance configured according to config
@@ -1490,19 +1503,21 @@ class DiskObjectStore(PackBasedObjectStore):
 
         instance = cls(
             path,
-            loose_compression_level,
-            pack_compression_level,
-            pack_index_version,
-            pack_delta_window_size,
-            pack_window_memory,
-            pack_delta_cache_size,
-            pack_depth,
-            pack_threads,
-            pack_big_file_threshold,
-            fsync_object_files,
-            pack_write_bitmaps,
-            pack_write_bitmap_hash_cache,
-            pack_write_bitmap_lookup_table,
+            loose_compression_level=loose_compression_level,
+            pack_compression_level=pack_compression_level,
+            pack_index_version=pack_index_version,
+            pack_delta_window_size=pack_delta_window_size,
+            pack_window_memory=pack_window_memory,
+            pack_delta_cache_size=pack_delta_cache_size,
+            pack_depth=pack_depth,
+            pack_threads=pack_threads,
+            pack_big_file_threshold=pack_big_file_threshold,
+            fsync_object_files=fsync_object_files,
+            pack_write_bitmaps=pack_write_bitmaps,
+            pack_write_bitmap_hash_cache=pack_write_bitmap_hash_cache,
+            pack_write_bitmap_lookup_table=pack_write_bitmap_lookup_table,
+            file_mode=file_mode,
+            dir_mode=dir_mode,
         )
         instance._use_commit_graph = use_commit_graph
         return instance
@@ -1540,12 +1555,16 @@ class DiskObjectStore(PackBasedObjectStore):
 
     def add_alternate_path(self, path: str | os.PathLike[str]) -> None:
         """Add an alternate path to this object store."""
+        info_dir = os.path.join(self.path, INFODIR)
         try:
-            os.mkdir(os.path.join(self.path, INFODIR))
+            os.mkdir(info_dir)
+            if self.dir_mode is not None:
+                os.chmod(info_dir, self.dir_mode)
         except FileExistsError:
             pass
         alternates_path = os.path.join(self.path, INFODIR, "alternates")
-        with GitFile(alternates_path, "wb") as f:
+        mask = self.file_mode if self.file_mode is not None else 0o644
+        with GitFile(alternates_path, "wb", mask=mask) as f:
             try:
                 orig_f = open(alternates_path, "rb")
             except FileNotFoundError:
@@ -1772,8 +1791,12 @@ class DiskObjectStore(PackBasedObjectStore):
         os.rename(path, target_pack_path)
 
         # Write the index.
+        mask = self.file_mode if self.file_mode is not None else PACK_MODE
         with GitFile(
-            target_index_path, "wb", mask=PACK_MODE, fsync=self.fsync_object_files
+            target_index_path,
+            "wb",
+            mask=mask,
+            fsync=self.fsync_object_files,
         ) as index_file:
             write_pack_index(
                 index_file, entries, pack_sha, version=self.pack_index_version
@@ -1871,9 +1894,10 @@ class DiskObjectStore(PackBasedObjectStore):
 
         fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
         f = os.fdopen(fd, "w+b")
-        os.chmod(path, PACK_MODE)
+        mask = self.file_mode if self.file_mode is not None else PACK_MODE
+        os.chmod(path, mask)
 
-        def commit() -> Optional["Pack"]:
+        def commit() -> "Pack | None":
             if f.tell() > 0:
                 f.seek(0)
 
@@ -1904,34 +1928,52 @@ class DiskObjectStore(PackBasedObjectStore):
         dir = os.path.dirname(path)
         try:
             os.mkdir(dir)
+            if self.dir_mode is not None:
+                os.chmod(dir, self.dir_mode)
         except FileExistsError:
             pass
         if os.path.exists(path):
             return  # Already there, no need to write again
-        with GitFile(path, "wb", mask=PACK_MODE, fsync=self.fsync_object_files) as f:
+        mask = self.file_mode if self.file_mode is not None else PACK_MODE
+        with GitFile(path, "wb", mask=mask, fsync=self.fsync_object_files) as f:
             f.write(
                 obj.as_legacy_object(compression_level=self.loose_compression_level)
             )
 
     @classmethod
-    def init(cls, path: str | os.PathLike[str]) -> "DiskObjectStore":
+    def init(
+        cls,
+        path: str | os.PathLike[str],
+        *,
+        file_mode: int | None = None,
+        dir_mode: int | None = None,
+    ) -> "DiskObjectStore":
         """Initialize a new disk object store.
 
         Creates the necessary directory structure for a Git object store.
 
         Args:
           path: Path where the object store should be created
+          file_mode: Optional file permission mask for shared repository
+          dir_mode: Optional directory permission mask for shared repository
 
         Returns:
           New DiskObjectStore instance
         """
         try:
             os.mkdir(path)
+            if dir_mode is not None:
+                os.chmod(path, dir_mode)
         except FileExistsError:
             pass
-        os.mkdir(os.path.join(path, "info"))
-        os.mkdir(os.path.join(path, PACKDIR))
-        return cls(path)
+        info_path = os.path.join(path, "info")
+        pack_path = os.path.join(path, PACKDIR)
+        os.mkdir(info_path)
+        os.mkdir(pack_path)
+        if dir_mode is not None:
+            os.chmod(info_path, dir_mode)
+            os.chmod(pack_path, dir_mode)
+        return cls(path, file_mode=file_mode, dir_mode=dir_mode)
 
     def iter_prefix(self, prefix: bytes) -> Iterator[bytes]:
         """Iterate over all object SHAs with the given prefix.
@@ -1975,7 +2017,7 @@ class DiskObjectStore(PackBasedObjectStore):
                     seen.add(sha)
                     yield sha
 
-    def get_commit_graph(self) -> Optional["CommitGraph"]:
+    def get_commit_graph(self) -> "CommitGraph | None":
         """Get the commit graph for this object store.
 
         Returns:
@@ -2053,10 +2095,13 @@ class DiskObjectStore(PackBasedObjectStore):
                 # Ensure the info directory exists
                 info_dir = os.path.join(self.path, "info")
                 os.makedirs(info_dir, exist_ok=True)
+                if self.dir_mode is not None:
+                    os.chmod(info_dir, self.dir_mode)
 
                 # Write using GitFile for atomic operation
                 graph_path = os.path.join(info_dir, "commit-graph")
-                with GitFile(graph_path, "wb") as f:
+                mask = self.file_mode if self.file_mode is not None else 0o644
+                with GitFile(graph_path, "wb", mask=mask) as f:
                     assert isinstance(
                         f, _GitFile
                     )  # GitFile in write mode always returns _GitFile

+ 7 - 8
dulwich/objects.py

@@ -36,7 +36,6 @@ from typing import (
     TYPE_CHECKING,
     NamedTuple,
     TypeVar,
-    Union,
 )
 
 if sys.version_info >= (3, 11):
@@ -395,11 +394,11 @@ class ShaFile:
     type_name: bytes
     type_num: int
     _chunked_text: list[bytes] | None
-    _sha: Union[FixedSha, None, "HASH"]
+    _sha: "FixedSha | None | HASH"
 
     @staticmethod
     def _parse_legacy_object_header(
-        magic: bytes, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]
+        magic: bytes, f: BufferedIOBase | IO[bytes] | "_GitFile"
     ) -> "ShaFile":
         """Parse a legacy object, creating it but not reading the file."""
         bufsize = 1024
@@ -500,7 +499,7 @@ class ShaFile:
 
     @staticmethod
     def _parse_object_header(
-        magic: bytes, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]
+        magic: bytes, f: BufferedIOBase | IO[bytes] | "_GitFile"
     ) -> "ShaFile":
         """Parse a new style object, creating it but not reading the file."""
         num_type = (ord(magic[0:1]) >> 4) & 7
@@ -529,7 +528,7 @@ class ShaFile:
         return (b0 & 0x8F) == 0x08 and (word % 31) == 0
 
     @classmethod
-    def _parse_file(cls, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]) -> "ShaFile":
+    def _parse_file(cls, f: BufferedIOBase | IO[bytes] | "_GitFile") -> "ShaFile":
         map = f.read()
         if not map:
             raise EmptyFileException("Corrupted empty file detected")
@@ -561,7 +560,7 @@ class ShaFile:
             return cls.from_file(f)
 
     @classmethod
-    def from_file(cls, f: Union[BufferedIOBase, IO[bytes], "_GitFile"]) -> "ShaFile":
+    def from_file(cls, f: BufferedIOBase | IO[bytes] | "_GitFile") -> "ShaFile":
         """Get the contents of a SHA file on disk."""
         try:
             obj = cls._parse_file(f)
@@ -655,7 +654,7 @@ class ShaFile:
         """Returns the length of the raw string of this object."""
         return sum(map(len, self.as_raw_chunks()))
 
-    def sha(self) -> Union[FixedSha, "HASH"]:
+    def sha(self) -> "FixedSha | HASH":
         """The SHA1 object that is the name of this object."""
         if self._sha is None or self._needs_serialization:
             # this is a local because as_raw_chunks() overwrites self._sha
@@ -891,7 +890,7 @@ class Tag(ShaFile):
 
     _message: bytes | None
     _name: bytes | None
-    _object_class: type["ShaFile"] | None
+    _object_class: "type[ShaFile] | None"
     _object_sha: bytes | None
     _signature: bytes | None
     _tag_time: int | None

+ 9 - 9
dulwich/objectspec.py

@@ -22,7 +22,7 @@
 """Object specification."""
 
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING
 
 from .objects import Commit, ShaFile, Tag, Tree
 from .refs import local_branch_name, local_tag_name
@@ -290,7 +290,7 @@ def parse_tree(repo: "BaseRepo", treeish: bytes | str | Tree | Commit | Tag) ->
     return o
 
 
-def parse_ref(container: Union["Repo", "RefsContainer"], refspec: str | bytes) -> "Ref":
+def parse_ref(container: "Repo | RefsContainer", refspec: str | bytes) -> "Ref":
     """Parse a string referring to a reference.
 
     Args:
@@ -316,11 +316,11 @@ def parse_ref(container: Union["Repo", "RefsContainer"], refspec: str | bytes) -
 
 
 def parse_reftuple(
-    lh_container: Union["Repo", "RefsContainer"],
-    rh_container: Union["Repo", "RefsContainer"],
+    lh_container: "Repo | RefsContainer",
+    rh_container: "Repo | RefsContainer",
     refspec: str | bytes,
     force: bool = False,
-) -> tuple[Optional["Ref"], Optional["Ref"], bool]:
+) -> tuple["Ref | None", "Ref | None", bool]:
     """Parse a reftuple spec.
 
     Args:
@@ -359,11 +359,11 @@ def parse_reftuple(
 
 
 def parse_reftuples(
-    lh_container: Union["Repo", "RefsContainer"],
-    rh_container: Union["Repo", "RefsContainer"],
+    lh_container: "Repo | RefsContainer",
+    rh_container: "Repo | RefsContainer",
     refspecs: bytes | Sequence[bytes],
     force: bool = False,
-) -> list[tuple[Optional["Ref"], Optional["Ref"], bool]]:
+) -> list[tuple["Ref | None", "Ref | None", bool]]:
     """Parse a list of reftuple specs to a list of reftuples.
 
     Args:
@@ -385,7 +385,7 @@ def parse_reftuples(
 
 
 def parse_refs(
-    container: Union["Repo", "RefsContainer"],
+    container: "Repo | RefsContainer",
     refspecs: bytes | str | Sequence[bytes | str],
 ) -> list["Ref"]:
     """Parse a list of refspecs to a list of refs.

+ 6 - 13
dulwich/pack.py

@@ -60,10 +60,8 @@ from typing import (
     Any,
     BinaryIO,
     Generic,
-    Optional,
     Protocol,
     TypeVar,
-    Union,
 )
 
 try:
@@ -73,11 +71,6 @@ except ImportError:
 else:
     has_mmap = True
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
 if TYPE_CHECKING:
     from _hashlib import HASH as HashObject
 
@@ -138,7 +131,7 @@ class ObjectContainer(Protocol):
         self,
         objects: Sequence[tuple[ShaFile, str | None]],
         progress: Callable[..., None] | None = None,
-    ) -> Optional["Pack"]:
+    ) -> "Pack | None":
         """Add a set of objects to this object store.
 
         Args:
@@ -153,7 +146,7 @@ class ObjectContainer(Protocol):
     def __getitem__(self, sha1: bytes) -> ShaFile:
         """Retrieve an object."""
 
-    def get_commit_graph(self) -> Optional["CommitGraph"]:
+    def get_commit_graph(self) -> "CommitGraph | None":
         """Get the commit graph for this object store.
 
         Returns:
@@ -759,7 +752,7 @@ class FilePackIndex(PackIndex):
         self,
         filename: str | os.PathLike[str],
         file: IO[bytes] | _GitFile | None = None,
-        contents: Union[bytes, "mmap.mmap"] | None = None,
+        contents: "bytes | mmap.mmap | None" = None,
         size: int | None = None,
     ) -> None:
         """Create a pack index object.
@@ -1412,7 +1405,7 @@ class PackStreamCopier(PackStreamReader):
         read_all: Callable[[int], bytes],
         read_some: Callable[[int], bytes] | None,
         outfile: IO[bytes],
-        delta_iter: Optional["DeltaChainIterator[UnpackedObject]"] = None,
+        delta_iter: "DeltaChainIterator[UnpackedObject] | None" = None,
     ) -> None:
         """Initialize the copier.
 
@@ -2521,7 +2514,7 @@ def write_pack_object(
     write: Callable[[bytes], int],
     type: int,
     object: list[bytes] | tuple[bytes | int, list[bytes]],
-    sha: Optional["HashObject"] = None,
+    sha: "HashObject | None" = None,
     compression_level: int = -1,
 ) -> int:
     """Write pack object to a file.
@@ -3524,7 +3517,7 @@ class Pack:
         return self._idx
 
     @property
-    def bitmap(self) -> Optional["PackBitmap"]:
+    def bitmap(self) -> "PackBitmap | None":
         """The bitmap being used, if available.
 
         Returns:

+ 3 - 4
dulwich/patch.py

@@ -37,7 +37,6 @@ from typing import (
     IO,
     TYPE_CHECKING,
     BinaryIO,
-    Optional,
     TextIO,
 )
 
@@ -487,8 +486,8 @@ def gen_diff_header(
 # TODO(jelmer): Support writing unicode, rather than bytes.
 def write_blob_diff(
     f: IO[bytes],
-    old_file: tuple[bytes | None, int | None, Optional["Blob"]],
-    new_file: tuple[bytes | None, int | None, Optional["Blob"]],
+    old_file: tuple[bytes | None, int | None, "Blob | None"],
+    new_file: tuple[bytes | None, int | None, "Blob | None"],
     diff_algorithm: str | None = None,
 ) -> None:
     """Write blob diff.
@@ -506,7 +505,7 @@ def write_blob_diff(
     patched_old_path = patch_filename(old_path, b"a")
     patched_new_path = patch_filename(new_path, b"b")
 
-    def lines(blob: Optional["Blob"]) -> list[bytes]:
+    def lines(blob: "Blob | None") -> list[bytes]:
         """Split blob content into lines.
 
         Args:

+ 457 - 163
dulwich/porcelain.py

@@ -105,7 +105,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Optional,
     TextIO,
     TypedDict,
     TypeVar,
@@ -114,10 +113,11 @@ from typing import (
 )
 
 if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
     from typing import override
 else:
-    from typing_extensions import Buffer, override
+    from typing_extensions import override
+
+from ._typing import Buffer
 
 if TYPE_CHECKING:
     import urllib3
@@ -198,6 +198,7 @@ from .refs import (
     LOCAL_REMOTE_PREFIX,
     LOCAL_REPLACE_PREFIX,
     LOCAL_TAG_PREFIX,
+    DictRefsContainer,
     Ref,
     SymrefLoop,
     _import_remote_refs,
@@ -205,6 +206,7 @@ from .refs import (
     local_branch_name,
     local_replace_name,
     local_tag_name,
+    parse_remote_ref,
     shorten_ref_name,
 )
 from .repo import BaseRepo, Repo, get_user_identity
@@ -244,7 +246,7 @@ class TransportKwargs(TypedDict, total=False):
     password: str | None
     key_filename: str | None
     ssh_command: str | None
-    pool_manager: Optional["urllib3.PoolManager"]
+    pool_manager: "urllib3.PoolManager | None"
 
 
 @dataclass
@@ -281,14 +283,25 @@ class NoneStream(RawIOBase):
         """
         return b""
 
-    @override
-    def readinto(self, b: Buffer) -> int | None:
-        return 0
+    if sys.version_info >= (3, 12):
+
+        @override
+        def readinto(self, b: Buffer) -> int | None:
+            return 0
+
+        @override
+        def write(self, b: Buffer) -> int | None:
+            return len(cast(bytes, b)) if b else 0
+
+    else:
+
+        @override
+        def readinto(self, b: bytearray | memoryview) -> int | None:  # type: ignore[override]
+            return 0
 
-    @override
-    def write(self, b: Buffer) -> int | None:
-        # All Buffer implementations (bytes, bytearray, memoryview) support len()
-        return len(b) if b else 0  # type: ignore[arg-type]
+        @override
+        def write(self, b: bytes | bytearray | memoryview) -> int | None:  # type: ignore[override]
+            return len(b) if b else 0
 
 
 default_bytes_out_stream: BinaryIO = cast(
@@ -630,8 +643,6 @@ def _get_variables(repo: RepoPath = ".") -> dict[str, str]:
     Returns:
       A dictionary of all logical variables with values
     """
-    from .repo import get_user_identity
-
     with open_repo_closing(repo) as repo_obj:
         config = repo_obj.get_config_stack()
 
@@ -827,8 +838,6 @@ def commit(
             if normalizer is not None:
 
                 def filter_callback(data: bytes, path: bytes) -> bytes:
-                    from dulwich.objects import Blob
-
                     blob = Blob()
                     blob.data = data
                     normalized_blob = normalizer.checkin_normalize(blob, path)
@@ -1066,7 +1075,7 @@ def stripspace(
         >>> stripspace(b"line\\n", comment_lines=True)
         b'# line\\n'
     """
-    from dulwich.stripspace import stripspace as _stripspace
+    from .stripspace import stripspace as _stripspace
 
     # Convert text to bytes
     if isinstance(text, str):
@@ -1290,8 +1299,6 @@ def add(
         if normalizer is not None:
 
             def filter_callback(data: bytes, path: bytes) -> bytes:
-                from dulwich.objects import Blob
-
                 blob = Blob()
                 blob.data = data
                 normalized_blob = normalizer.checkin_normalize(blob, path)
@@ -2838,42 +2845,6 @@ def reset(
 
         elif mode == "hard":
             # Hard reset: update HEAD, index, and working tree
-            # Get configuration for working directory update
-            config = r.get_config()
-            honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
-
-            if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
-                validate_path_element = validate_path_element_ntfs
-            elif config.get_boolean(
-                b"core", b"core.protectHFS", sys.platform == "darwin"
-            ):
-                validate_path_element = validate_path_element_hfs
-            else:
-                validate_path_element = validate_path_element_default
-
-            if config.get_boolean(b"core", b"symlinks", True):
-
-                def symlink_wrapper(
-                    source: str | bytes | os.PathLike[str],
-                    target: str | bytes | os.PathLike[str],
-                ) -> None:
-                    symlink(source, target)  # type: ignore[arg-type,unused-ignore]
-
-                symlink_fn = symlink_wrapper
-            else:
-
-                def symlink_fallback(
-                    source: str | bytes | os.PathLike[str],
-                    target: str | bytes | os.PathLike[str],
-                ) -> None:
-                    mode = "w" + ("b" if isinstance(source, bytes) else "")
-                    with open(target, mode) as f:
-                        f.write(source)
-
-                symlink_fn = symlink_fallback
-
-            # Update working tree and index
-            blob_normalizer = r.get_blob_normalizer()
             # For reset --hard, use current index tree as old tree to get proper deletions
             index = r.open_index()
             if len(index) > 0:
@@ -2882,6 +2853,12 @@ def reset(
                 # Empty index
                 index_tree_id = None
 
+            # Get configuration for working tree updates
+            honor_filemode, validate_path_element, symlink_fn = (
+                _get_worktree_update_config(r)
+            )
+
+            blob_normalizer = r.get_blob_normalizer()
             changes = tree_changes(
                 r.object_store, index_tree_id, tree.id, want_unchanged=True
             )
@@ -3004,8 +2981,6 @@ def push(
         remote_changed_refs: dict[bytes, bytes | None] = {}
 
         def update_refs(refs: dict[bytes, bytes]) -> dict[bytes, bytes]:
-            from .refs import DictRefsContainer
-
             remote_refs = DictRefsContainer(refs)
             selected_refs.extend(
                 parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
@@ -3074,10 +3049,14 @@ def push(
         for ref, error in (result.ref_status or {}).items():
             if error is not None:
                 errstream.write(
-                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
+                    f"Push of ref {ref.decode('utf-8', 'replace')} failed: {error}\n".encode(
+                        err_encoding
+                    )
                 )
             else:
-                errstream.write(b"Ref %s updated\n" % ref)
+                errstream.write(
+                    f"Ref {ref.decode('utf-8', 'replace')} updated\n".encode()
+                )
 
         if remote_name is not None:
             _import_remote_refs(r.refs, remote_name, remote_changed_refs)
@@ -3148,8 +3127,6 @@ def pull(
         def determine_wants(
             remote_refs: dict[bytes, bytes], depth: int | None = None
         ) -> list[bytes]:
-            from .refs import DictRefsContainer
-
             remote_refs_container = DictRefsContainer(remote_refs)
             selected_refs.extend(
                 parse_reftuples(
@@ -3286,18 +3263,17 @@ def status(
         untracked - list of untracked, un-ignored & non-.git paths
     """
     with open_repo_closing(repo) as r:
+        # Open the index once and reuse it for both staged and unstaged checks
+        index = r.open_index()
         # 1. Get status of staged
-        tracked_changes = get_tree_changes(r)
+        tracked_changes = get_tree_changes(r, index)
         # 2. Get status of unstaged
-        index = r.open_index()
         normalizer = r.get_blob_normalizer()
 
         # Create a wrapper that handles the bytes -> Blob conversion
         if normalizer is not None:
 
             def filter_callback(data: bytes, path: bytes) -> bytes:
-                from dulwich.objects import Blob
-
                 blob = Blob()
                 blob.data = data
                 normalized_blob = normalizer.checkin_normalize(blob, path)
@@ -3684,15 +3660,19 @@ def grep(
                         outstream.write(f"{path_str}:{line_str}\n")
 
 
-def get_tree_changes(repo: RepoPath) -> dict[str, list[str | bytes]]:
+def get_tree_changes(
+    repo: RepoPath, index: Index | None = None
+) -> dict[str, list[str | bytes]]:
     """Return add/delete/modify changes to tree by comparing index to HEAD.
 
     Args:
       repo: repo path or object
+      index: optional Index object to reuse (avoids re-opening the index)
     Returns: dict with lists for each type of change
     """
     with open_repo_closing(repo) as r:
-        index = r.open_index()
+        if index is None:
+            index = r.open_index()
 
         # Compares the Index to the HEAD & determines changes
         # Iterate through the changes and report add/delete/modify
@@ -4498,8 +4478,6 @@ def show_ref(
                 try:
                     obj = r.get_object(sha)
                     # Peel tag objects to get the underlying commit/object
-                    from .objects import Tag
-
                     while obj.type_name == b"tag":
                         assert isinstance(obj, Tag)
                         _obj_class, sha = obj.object
@@ -5079,6 +5057,168 @@ def check_ignore(
                 yield _quote_path(output_path) if quote_path else output_path
 
 
def _get_current_head_tree(repo: Repo) -> bytes | None:
    """Resolve HEAD to the tree ID of the commit it points at.

    Args:
      repo: Repository object

    Returns:
      Tree ID of the current HEAD commit, or None when the repository has
      no resolvable HEAD yet (e.g. a freshly initialized, empty repository).
    """
    try:
        # Both the ref lookup and the object lookup may raise KeyError on an
        # unborn branch / missing object; either way there is no tree yet.
        head_commit = repo[repo.refs[b"HEAD"]]
    except KeyError:
        return None
    assert isinstance(head_commit, Commit), "Expected a Commit object"
    tree: bytes = head_commit.tree
    return tree
+
+
+def _check_uncommitted_changes(
+    repo: Repo, target_tree_id: bytes, force: bool = False
+) -> None:
+    """Check for uncommitted changes that would conflict with a checkout/switch.
+
+    Args:
+      repo: Repository object
+      target_tree_id: Tree ID to check conflicts against
+      force: If True, skip the check
+
+    Raises:
+      CheckoutError: If there are conflicting local changes
+    """
+    if force:
+        return
+
+    # Get current HEAD tree for comparison
+    current_tree_id = _get_current_head_tree(repo)
+    if current_tree_id is None:
+        # No HEAD yet (empty repo)
+        return
+
+    # NOTE(review): status() re-scans the index and working tree here;
+    # presumably this is the expensive part of switch/checkout — confirm
+    # before adding further work to this path.
+    status_report = status(repo)
+    changes = []
+    # staged is a dict with 'add', 'delete', 'modify' keys
+    if isinstance(status_report.staged, dict):
+        changes.extend(status_report.staged.get("add", []))
+        changes.extend(status_report.staged.get("delete", []))
+        changes.extend(status_report.staged.get("modify", []))
+    # unstaged is a list
+    changes.extend(status_report.unstaged)
+
+    if changes:
+        # Check if any changes would conflict with checkout
+        target_tree_obj = repo[target_tree_id]
+        assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
+        target_tree = target_tree_obj
+        # Only paths that also exist in the target tree count as conflicts;
+        # local changes to paths absent from the target are preserved.
+        for change in changes:
+            if isinstance(change, str):
+                change = change.encode(DEFAULT_ENCODING)
+
+            try:
+                target_tree.lookup_path(repo.object_store.__getitem__, change)
+            except KeyError:
+                # File doesn't exist in target tree - change can be preserved
+                pass
+            else:
+                # File exists in target tree - would overwrite local changes
+                # NOTE(review): .decode() below uses UTF-8 by default, while
+                # the encode above uses DEFAULT_ENCODING — confirm these match.
+                raise CheckoutError(
+                    f"Your local changes to '{change.decode()}' would be "
+                    "overwritten. Please commit or stash before switching."
+                )
+
+
def _get_worktree_update_config(
    repo: Repo,
) -> tuple[
    bool,
    Callable[[bytes], bool],
    Callable[[str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None],
]:
    """Get configuration for working tree updates.

    Reads ``core.filemode``, ``core.protectNTFS``, ``core.protectHFS`` and
    ``core.symlinks`` from the repository configuration and derives the
    callbacks used when materializing files in the working tree.

    Args:
      repo: Repository object

    Returns:
      Tuple of (honor_filemode, validate_path_element, symlink_fn)
    """
    config = repo.get_config()
    # Executable-bit tracking is unreliable on Windows, hence the default.
    honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")

    # BUG FIX: get_boolean() takes (section, name); the name must not repeat
    # the "core." section prefix (compare the core.filemode lookup above).
    # With the prefixed name the lookup always missed and the platform
    # default was silently used regardless of user configuration.
    if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
        validate_path_element = validate_path_element_ntfs
    elif config.get_boolean(b"core", b"protectHFS", sys.platform == "darwin"):
        validate_path_element = validate_path_element_hfs
    else:
        validate_path_element = validate_path_element_default

    if config.get_boolean(b"core", b"symlinks", True):

        def symlink_wrapper(
            source: str | bytes | os.PathLike[str],
            target: str | bytes | os.PathLike[str],
        ) -> None:
            symlink(source, target)  # type: ignore[arg-type,unused-ignore]

        symlink_fn = symlink_wrapper
    else:
        # When symlinks are disabled, write the link target as the contents
        # of a regular file instead (matching git's behaviour).
        def symlink_fallback(
            source: str | bytes | os.PathLike[str],
            target: str | bytes | os.PathLike[str],
        ) -> None:
            mode = "w" + ("b" if isinstance(source, bytes) else "")
            with open(target, mode) as f:
                f.write(source)

        symlink_fn = symlink_fallback

    return honor_filemode, validate_path_element, symlink_fn
+
+
+def _perform_tree_switch(
+    repo: Repo,
+    current_tree_id: bytes | None,
+    target_tree_id: bytes,
+    force: bool = False,
+) -> None:
+    """Perform the actual working tree switch.
+
+    Args:
+      repo: Repository object
+      current_tree_id: Current tree ID (or None for empty repo)
+      target_tree_id: Target tree ID to switch to
+      force: If True, force removal of untracked files and allow overwriting modified files
+    """
+    honor_filemode, validate_path_element, symlink_fn = _get_worktree_update_config(
+        repo
+    )
+
+    # Get blob normalizer for line ending conversion
+    blob_normalizer = repo.get_blob_normalizer()
+
+    # Update working tree
+    tree_change_iterator: Iterator[TreeChange] = tree_changes(
+        repo.object_store, current_tree_id, target_tree_id
+    )
+    # A single `force` flag drives both destructive options: removing
+    # untracked files and overwriting locally modified ones.
+    update_working_tree(
+        repo,
+        current_tree_id,
+        target_tree_id,
+        change_iterator=tree_change_iterator,
+        honor_filemode=honor_filemode,
+        validate_path_element=validate_path_element,
+        symlink_fn=symlink_fn,
+        force_remove_untracked=force,
+        blob_normalizer=blob_normalizer,
+        allow_overwrite_modified=force,
+    )
+
+
 def update_head(
     repo: RepoPath,
     target: str | bytes,
@@ -5233,102 +5373,257 @@ def checkout(
         target_tree_id = target_commit.tree
 
         # Get current HEAD tree for comparison
-        try:
-            current_head = r.refs[b"HEAD"]
-            current_commit = r[current_head]
-            assert isinstance(current_commit, Commit), "Expected a Commit object"
-            current_tree_id = current_commit.tree
-        except KeyError:
-            # No HEAD yet (empty repo)
-            current_tree_id = None
+        current_tree_id = _get_current_head_tree(r)
 
         # Check for uncommitted changes if not forcing
-        if not force and current_tree_id is not None:
-            status_report = status(r)
-            changes = []
-            # staged is a dict with 'add', 'delete', 'modify' keys
-            if isinstance(status_report.staged, dict):
-                changes.extend(status_report.staged.get("add", []))
-                changes.extend(status_report.staged.get("delete", []))
-                changes.extend(status_report.staged.get("modify", []))
-            # unstaged is a list
-            changes.extend(status_report.unstaged)
-            if changes:
-                # Check if any changes would conflict with checkout
-                target_tree_obj = r[target_tree_id]
-                assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
-                target_tree = target_tree_obj
-                for change in changes:
-                    if isinstance(change, str):
-                        change = change.encode(DEFAULT_ENCODING)
+        if current_tree_id is not None:
+            _check_uncommitted_changes(r, target_tree_id, force)
+
+        # Update working tree
+        _perform_tree_switch(r, current_tree_id, target_tree_id, force)
+
+        # Update HEAD
+        if new_branch:
+            # Create new branch and switch to it
+            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
+            update_head(r, new_branch)
+
+            # Set up tracking if creating from a remote branch
+            if isinstance(original_target, bytes) and target_bytes.startswith(
+                LOCAL_REMOTE_PREFIX
+            ):
+                try:
+                    remote_name, branch_name = parse_remote_ref(target_bytes)
+                    # Set tracking to refs/heads/<branch> on the remote
+                    set_branch_tracking(
+                        r, new_branch, remote_name, local_branch_name(branch_name)
+                    )
+                except ValueError:
+                    # Invalid remote ref format, skip tracking setup
+                    pass
+        else:
+            # Check if target is a branch name (with or without refs/heads/ prefix)
+            branch_ref = None
+            if (
+                isinstance(original_target, (str, bytes))
+                and target_bytes in r.refs.keys()
+            ):
+                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
+                    branch_ref = target_bytes
+            else:
+                # Try adding refs/heads/ prefix
+                potential_branch = (
+                    _make_branch_ref(target_bytes)
+                    if isinstance(original_target, (str, bytes))
+                    else None
+                )
+                if potential_branch in r.refs.keys():
+                    branch_ref = potential_branch
+
+            if branch_ref:
+                # It's a branch - update HEAD symbolically
+                update_head(r, branch_ref)
+            else:
+                # It's a tag, other ref, or commit SHA - detached HEAD
+                update_head(r, target_commit.id.decode("ascii"), detached=True)
+
+
+def restore(
+    repo: str | os.PathLike[str] | Repo,
+    paths: list[bytes | str],
+    source: str | bytes | Commit | Tag | None = None,
+    staged: bool = False,
+    worktree: bool = True,
+) -> None:
+    """Restore working tree files.
+
+    This is similar to 'git restore', allowing you to restore specific files
+    from a commit or the index without changing HEAD.
+
+    Args:
+      repo: Path to repository or repository object
+      paths: List of specific paths to restore
+      source: Branch name, tag, or commit SHA to restore from. If None, restores
+              staged files from HEAD, or worktree files from index
+      staged: Restore files in the index (--staged)
+      worktree: Restore files in the working tree (default: True)
+
+    Raises:
+      CheckoutError: If restore cannot be performed
+      ValueError: If neither staged nor worktree is specified
+      KeyError: If the source reference cannot be found
+    """
+    if not staged and not worktree:
+        raise ValueError("At least one of staged or worktree must be True")
+
+    with open_repo_closing(repo) as r:
+        from .index import _fs_to_tree_path, build_file_from_blob
+
+        # Determine the source tree
+        if source is None:
+            if staged:
+                # Restoring staged files from HEAD
+                try:
+                    source = r.refs[b"HEAD"]
+                except KeyError:
+                    raise CheckoutError("No HEAD reference found")
+            elif worktree:
+                # Restoring worktree files from index
+                from .index import ConflictedIndexEntry, IndexEntry
+
+                index = r.open_index()
+                for path in paths:
+                    if isinstance(path, str):
+                        tree_path = _fs_to_tree_path(path)
+                    else:
+                        tree_path = path
 
                     try:
-                        target_tree.lookup_path(r.object_store.__getitem__, change)
+                        index_entry = index[tree_path]
+                        if isinstance(index_entry, ConflictedIndexEntry):
+                            raise CheckoutError(
+                                f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' has conflicts"
+                            )
+                        blob = r[index_entry.sha]
+                        assert isinstance(blob, Blob), "Expected a Blob object"
+
+                        full_path = os.path.join(os.fsencode(r.path), tree_path)
+                        mode = index_entry.mode
+
+                        # Use build_file_from_blob to write the file
+                        build_file_from_blob(blob, mode, full_path)
                     except KeyError:
-                        # File doesn't exist in target tree - change can be preserved
-                        pass
-                    else:
-                        # File exists in target tree - would overwrite local changes
+                        # Path doesn't exist in index
                         raise CheckoutError(
-                            f"Your local changes to '{change.decode()}' would be "
-                            "overwritten by checkout. Please commit or stash before switching."
+                            f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not in index"
                         )
+                return
 
-        # Get configuration for working directory update
-        config = r.get_config()
-        honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
+        # source is not None at this point
+        assert source is not None
+        # Get the source tree
+        source_tree = parse_tree(r, treeish=source)
+
+        # Restore specified paths from source tree
+        for path in paths:
+            if isinstance(path, str):
+                tree_path = _fs_to_tree_path(path)
+            else:
+                tree_path = path
+
+            try:
+                # Look up the path in the source tree
+                mode, sha = source_tree.lookup_path(
+                    r.object_store.__getitem__, tree_path
+                )
+                blob = r[sha]
+                assert isinstance(blob, Blob), "Expected a Blob object"
+            except KeyError:
+                # Path doesn't exist in source tree
+                raise CheckoutError(
+                    f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not found in source"
+                )
+
+            full_path = os.path.join(os.fsencode(r.path), tree_path)
+
+            if worktree:
+                # Use build_file_from_blob to restore to working tree
+                build_file_from_blob(blob, mode, full_path)
+
+            if staged:
+                # Update the index with the blob from source
+                from .index import IndexEntry
+
+                index = r.open_index()
+
+                # When only updating staged (not worktree), we want to reset the index
+                # to the source, but invalidate the stat cache so Git knows to check
+                # the worktree file. Use zeros for stat fields.
+                if not worktree:
+                    # Invalidate stat cache by using zeros
+                    new_entry = IndexEntry(
+                        ctime=(0, 0),
+                        mtime=(0, 0),
+                        dev=0,
+                        ino=0,
+                        mode=mode,
+                        uid=0,
+                        gid=0,
+                        size=0,
+                        sha=sha,
+                    )
+                else:
+                    # If we also updated worktree, use actual stat
+                    from .index import index_entry_from_stat
+
+                    st = os.lstat(full_path)
+                    new_entry = index_entry_from_stat(st, sha, mode)
+
+                index[tree_path] = new_entry
+                index.write()
 
-        if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
-            validate_path_element = validate_path_element_ntfs
-        else:
-            validate_path_element = validate_path_element_default
 
-        if config.get_boolean(b"core", b"symlinks", True):
+def switch(
+    repo: str | os.PathLike[str] | Repo,
+    target: str | bytes | Commit | Tag,
+    create: str | bytes | None = None,
+    force: bool = False,
+    detach: bool = False,
+) -> None:
+    """Switch branches.
+
+    This is similar to 'git switch', allowing you to switch to a different
+    branch or commit, updating both HEAD and the working tree.
+
+    Args:
+      repo: Path to repository or repository object
+      target: Branch name, tag, or commit SHA to switch to
+      create: Create a new branch at target before switching (like git switch -c)
+      force: Force switch even if there are local changes
+      detach: Switch to a commit in detached HEAD state (like git switch --detach)
 
-            def symlink_wrapper(
-                source: str | bytes | os.PathLike[str],
-                target: str | bytes | os.PathLike[str],
-            ) -> None:
-                symlink(source, target)  # type: ignore[arg-type,unused-ignore]
+    Raises:
+      CheckoutError: If switch cannot be performed due to conflicts
+      KeyError: If the target reference cannot be found
+      ValueError: If both create and detach are specified
+    """
+    if create and detach:
+        raise ValueError("Cannot use both create and detach options")
+
+    with open_repo_closing(repo) as r:
+        # Store the original target for later reference checks
+        original_target = target
 
-            symlink_fn = symlink_wrapper
+        if isinstance(target, str):
+            target_bytes = target.encode(DEFAULT_ENCODING)
+        elif isinstance(target, bytes):
+            target_bytes = target
         else:
+            # For Commit/Tag objects, we'll use their SHA
+            target_bytes = target.id
 
-            def symlink_fallback(
-                source: str | bytes | os.PathLike[str],
-                target: str | bytes | os.PathLike[str],
-            ) -> None:
-                mode = "w" + ("b" if isinstance(source, bytes) else "")
-                with open(target, mode) as f:
-                    f.write(source)
+        if isinstance(create, str):
+            create = create.encode(DEFAULT_ENCODING)
 
-            symlink_fn = symlink_fallback
+        # Parse the target to get the commit
+        target_commit = parse_commit(r, original_target)
+        target_tree_id = target_commit.tree
 
-        # Get blob normalizer for line ending conversion
-        blob_normalizer = r.get_blob_normalizer()
+        # Get current HEAD tree for comparison
+        current_tree_id = _get_current_head_tree(r)
+
+        # Check for uncommitted changes if not forcing
+        if current_tree_id is not None:
+            _check_uncommitted_changes(r, target_tree_id, force)
 
         # Update working tree
-        tree_change_iterator: Iterator[TreeChange] = tree_changes(
-            r.object_store, current_tree_id, target_tree_id
-        )
-        update_working_tree(
-            r,
-            current_tree_id,
-            target_tree_id,
-            change_iterator=tree_change_iterator,
-            honor_filemode=honor_filemode,
-            validate_path_element=validate_path_element,
-            symlink_fn=symlink_fn,
-            force_remove_untracked=force,
-            blob_normalizer=blob_normalizer,
-            allow_overwrite_modified=force,
-        )
+        _perform_tree_switch(r, current_tree_id, target_tree_id, force)
 
         # Update HEAD
-        if new_branch:
+        if create:
             # Create new branch and switch to it
-            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
-            update_head(r, new_branch)
+            branch_create(r, create, objectish=target_commit.id.decode("ascii"))
+            update_head(r, create)
 
             # Set up tracking if creating from a remote branch
             from .refs import LOCAL_REMOTE_PREFIX, local_branch_name, parse_remote_ref
@@ -5340,11 +5635,14 @@ def checkout(
                     remote_name, branch_name = parse_remote_ref(target_bytes)
                     # Set tracking to refs/heads/<branch> on the remote
                     set_branch_tracking(
-                        r, new_branch, remote_name, local_branch_name(branch_name)
+                        r, create, remote_name, local_branch_name(branch_name)
                     )
                 except ValueError:
                     # Invalid remote ref format, skip tracking setup
                     pass
+        elif detach:
+            # Detached HEAD mode
+            update_head(r, target_commit.id.decode("ascii"), detached=True)
         else:
             # Check if target is a branch name (with or without refs/heads/ prefix)
             branch_ref = None
@@ -5368,8 +5666,12 @@ def checkout(
                 # It's a branch - update HEAD symbolically
                 update_head(r, branch_ref)
             else:
-                # It's a tag, other ref, or commit SHA - detached HEAD
-                update_head(r, target_commit.id.decode("ascii"), detached=True)
+                # It's a tag, other ref, or commit SHA
+                # In git switch, this would be an error unless --detach is used
+                raise CheckoutError(
+                    f"'{target_bytes.decode(DEFAULT_ENCODING)}' is not a branch. "
+                    "Use detach=True to switch to a commit in detached HEAD state."
+                )
 
 
 def reset_file(
@@ -6911,6 +7213,7 @@ def rebase(
     Raises:
       Error: If rebase fails or conflicts occur
     """
+    # TODO: Avoid importing from .cli
     from .cli import launch_editor
     from .rebase import (
         RebaseConflict,
@@ -7038,7 +7341,7 @@ def annotate(
     """
     if committish is None:
         committish = "HEAD"
-    from dulwich.annotate import annotate_lines
+    from .annotate import annotate_lines
 
     with open_repo_closing(repo) as r:
         commit_id = parse_commit(r, committish).id
@@ -7055,7 +7358,7 @@ def filter_branch(
     repo: RepoPath = ".",
     branch: str | bytes = "HEAD",
     *,
-    filter_fn: Callable[[Commit], Optional["CommitData"]] | None = None,
+    filter_fn: Callable[[Commit], "CommitData | None"] | None = None,
     filter_author: Callable[[bytes], bytes | None] | None = None,
     filter_committer: Callable[[bytes], bytes | None] | None = None,
     filter_message: Callable[[bytes], bytes | None] | None = None,
@@ -8577,7 +8880,6 @@ def merge_base(
         List of commit IDs that are merge bases
     """
     from .graph import find_merge_base, find_octopus_base
-    from .objects import Commit
     from .objectspec import parse_object
 
     if committishes is None or len(committishes) < 2:
@@ -8620,7 +8922,6 @@ def is_ancestor(
         True if ancestor is an ancestor of descendant, False otherwise
     """
     from .graph import find_merge_base
-    from .objects import Commit
     from .objectspec import parse_object
 
     if ancestor is None or descendant is None:
@@ -8656,7 +8957,6 @@ def independent_commits(
         List of commit IDs that are not ancestors of any other commits in the list
     """
     from .graph import independent
-    from .objects import Commit
     from .objectspec import parse_object
 
     if committishes is None or len(committishes) == 0:
@@ -8726,8 +9026,6 @@ def mailsplit(
             keep_cr=keep_cr,
         )
     else:
-        from typing import BinaryIO, cast
-
         if input_path is None:
             # Read from stdin
             input_file: str | bytes | BinaryIO = sys.stdin.buffer
@@ -8788,8 +9086,6 @@ def mailinfo(
         >>> print(f"Author: {result.author_name} <{result.author_email}>")
         >>> print(f"Subject: {result.subject}")
     """
-    from typing import BinaryIO, TextIO, cast
-
     from .mbox import mailinfo as mbox_mailinfo
 
     if input_path is None:
@@ -8848,15 +9144,13 @@ def rerere(repo: RepoPath = ".") -> tuple[list[tuple[bytes, str]], list[bytes]]:
         - List of tuples (path, conflict_id) for recorded conflicts
         - List of paths where resolutions were automatically applied
     """
-    from dulwich.rerere import _has_conflict_markers, rerere_auto
+    from .rerere import _has_conflict_markers, rerere_auto
 
     with open_repo_closing(repo) as r:
         # Get conflicts from the index (if available)
         index = r.open_index()
         conflicts = []
 
-        from dulwich.index import ConflictedIndexEntry
-
         for path, entry in index.items():
             if isinstance(entry, ConflictedIndexEntry):
                 conflicts.append(path)
@@ -8889,7 +9183,7 @@ def rerere_status(repo: RepoPath = ".") -> list[tuple[str, bool]]:
     Returns:
         List of tuples (conflict_id, has_resolution)
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
@@ -8908,7 +9202,7 @@ def rerere_diff(
     Returns:
         List of tuples (conflict_id, preimage, postimage)
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
@@ -8935,7 +9229,7 @@ def rerere_forget(repo: RepoPath = ".", pathspec: str | bytes | None = None) ->
         repo: Path to the repository
         pathspec: Path to forget (currently not implemented, forgets all)
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
@@ -8955,7 +9249,7 @@ def rerere_clear(repo: RepoPath = ".") -> None:
     Args:
         repo: Path to the repository
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)
@@ -8969,7 +9263,7 @@ def rerere_gc(repo: RepoPath = ".", max_age_days: int = 60) -> None:
         repo: Path to the repository
         max_age_days: Maximum age in days for keeping resolutions
     """
-    from dulwich.rerere import RerereCache
+    from .rerere import RerereCache
 
     with open_repo_closing(repo) as r:
         cache = RerereCache.from_repo(r)

+ 1 - 1
dulwich/protocol.py

@@ -247,7 +247,7 @@ def pkt_line(data: bytes | None) -> bytes:
     """
     if data is None:
         return b"0000"
-    return ("%04x" % (len(data) + 4)).encode("ascii") + data
+    return f"{len(data) + 4:04x}".encode("ascii") + data
 
 
 def pkt_seq(*seq: bytes | None) -> bytes:

+ 2 - 2
dulwich/rebase.py

@@ -27,7 +27,7 @@ import subprocess
 from collections.abc import Callable, Sequence
 from dataclasses import dataclass
 from enum import Enum
-from typing import Optional, Protocol, TypedDict
+from typing import Protocol, TypedDict
 
 from dulwich.graph import find_merge_base
 from dulwich.merge import three_way_merge
@@ -164,7 +164,7 @@ class RebaseTodoEntry:
         return " ".join(parts)
 
     @classmethod
-    def from_string(cls, line: str) -> Optional["RebaseTodoEntry"]:
+    def from_string(cls, line: str) -> "RebaseTodoEntry | None":
         """Parse a todo entry from a line.
 
         Args:

+ 194 - 19
dulwich/repo.py

@@ -41,7 +41,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
-    Optional,
     TypeVar,
 )
 
@@ -344,6 +343,71 @@ def _set_filesystem_hidden(path: str) -> None:
     # Could implement other platform specific filesystem hiding here
 
 
+def parse_shared_repository(
+    value: str | bytes | bool,
+) -> tuple[int | None, int | None]:
+    """Parse core.sharedRepository configuration value.
+
+    Args:
+      value: Configuration value (string, bytes, or boolean)
+
+    Returns:
+      tuple of (file_mask, directory_mask) or (None, None) if not shared
+
+    The masks are permission bits to apply via chmod.
+    """
+    if isinstance(value, bytes):
+        value = value.decode("utf-8", errors="replace")
+
+    # Handle boolean values
+    if isinstance(value, bool):
+        if value:
+            # true = group (same as "group")
+            return (0o664, 0o2775)
+        else:
+            # false = umask (use system umask, no adjustment)
+            return (None, None)
+
+    # Handle string values
+    value_lower = value.lower()
+
+    if value_lower in ("false", "0", ""):
+        # Use umask (no adjustment)
+        return (None, None)
+
+    if value_lower in ("true", "1", "group"):
+        # Group writable (with setgid bit)
+        return (0o664, 0o2775)
+
+    if value_lower in ("all", "world", "everybody", "2"):
+        # World readable/writable (with setgid bit)
+        return (0o666, 0o2777)
+
+    if value_lower == "umask":
+        # Explicitly use umask
+        return (None, None)
+
+    # Try to parse as octal
+    if value.startswith("0"):
+        try:
+            mode = int(value, 8)
+            # For directories, add execute bits where read bits are set
+            # and add setgid bit for shared repositories
+            dir_mode = mode | 0o2000  # Add setgid bit
+            if mode & 0o004:
+                dir_mode |= 0o001
+            if mode & 0o040:
+                dir_mode |= 0o010
+            if mode & 0o400:
+                dir_mode |= 0o100
+            return (mode, dir_mode)
+        except ValueError:
+            pass
+
+    # Default to umask for unrecognized values
+    return (None, None)
+
+
 class ParentsProvider:
     """Provider for commit parent information."""
 
@@ -441,7 +505,11 @@ class BaseRepo:
         return sys.platform != "win32"
 
     def _init_files(
-        self, bare: bool, symlinks: bool | None = None, format: int | None = None
+        self,
+        bare: bool,
+        symlinks: bool | None = None,
+        format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> None:
         """Initialize a default set of named files."""
         from .config import ConfigFile
@@ -467,6 +535,14 @@ class BaseRepo:
 
         cf.set("core", "bare", bare)
         cf.set("core", "logallrefupdates", True)
+
+        # Set shared repository if specified
+        if shared_repository is not None:
+            if isinstance(shared_repository, bool):
+                cf.set("core", "sharedRepository", shared_repository)
+            else:
+                cf.set("core", "sharedRepository", shared_repository)
+
         cf.write_to_file(f)
         self._put_named_file("config", f.getvalue())
         self._put_named_file(os.path.join("info", "exclude"), b"")
@@ -918,7 +994,7 @@ class BaseRepo:
         reverse: bool = False,
         max_entries: int | None = None,
         paths: Sequence[bytes] | None = None,
-        rename_detector: Optional["RenameDetector"] = None,
+        rename_detector: "RenameDetector | None" = None,
         follow: bool = False,
         since: int | None = None,
         until: int | None = None,
@@ -1208,7 +1284,7 @@ class Repo(BaseRepo):
     path: str
     bare: bool
     object_store: DiskObjectStore
-    filter_context: Optional["FilterContext"]
+    filter_context: "FilterContext | None"
 
     def __init__(
         self,
@@ -1299,8 +1375,18 @@ class Repo(BaseRepo):
                 raise UnsupportedExtension(extension.decode("utf-8"))
 
         if object_store is None:
+            # Get shared repository permissions from config
+            try:
+                shared_value = config.get(("core",), "sharedRepository")
+                file_mode, dir_mode = parse_shared_repository(shared_value)
+            except KeyError:
+                file_mode, dir_mode = None, None
+
             object_store = DiskObjectStore.from_config(
-                os.path.join(self.commondir(), OBJECTDIR), config
+                os.path.join(self.commondir(), OBJECTDIR),
+                config,
+                file_mode=file_mode,
+                dir_mode=dir_mode,
             )
 
         # Use reftable if extension is configured
@@ -1355,10 +1441,23 @@ class Repo(BaseRepo):
         from .reflog import format_reflog_line
 
         path = self._reflog_path(ref)
-        try:
-            os.makedirs(os.path.dirname(path))
-        except FileExistsError:
-            pass
+
+        # Get shared repository permissions
+        file_mode, dir_mode = self._get_shared_repository_permissions()
+
+        # Create directory with appropriate permissions
+        parent_dir = os.path.dirname(path)
+        # Create directory tree, setting permissions on each level if needed
+        parts = []
+        current = parent_dir
+        while current and not os.path.exists(current):
+            parts.append(current)
+            current = os.path.dirname(current)
+        parts.reverse()
+        for part in parts:
+            os.mkdir(part)
+            if dir_mode is not None:
+                os.chmod(part, dir_mode)
         if committer is None:
             config = self.get_config_stack()
             committer = get_user_identity(config)
@@ -1375,6 +1474,11 @@ class Repo(BaseRepo):
                 + b"\n"
             )
 
+        # Set file permissions (open() respects umask, so we need chmod to set the actual mode)
+        # Always chmod to ensure correct permissions even if file already existed
+        if file_mode is not None:
+            os.chmod(path, file_mode)
+
     def _reflog_path(self, ref: bytes) -> str:
         if ref.startswith((b"main-worktree/", b"worktrees/")):
             raise NotImplementedError(f"refs {ref.decode()} are not supported")
@@ -1469,6 +1573,21 @@ class Repo(BaseRepo):
         # TODO(jelmer): Actually probe disk / look at filesystem
         return sys.platform != "win32"
 
+    def _get_shared_repository_permissions(
+        self,
+    ) -> tuple[int | None, int | None]:
+        """Get shared repository file and directory permissions from config.
+
+        Returns:
+            tuple of (file_mode, dir_mode) or (None, None) if not shared
+        """
+        try:
+            config = self.get_config()
+            value = config.get(("core",), "sharedRepository")
+            return parse_shared_repository(value)
+        except KeyError:
+            return (None, None)
+
     def _put_named_file(self, path: str, contents: bytes) -> None:
         """Write a file to the control dir with the given name and contents.
 
@@ -1477,8 +1596,19 @@ class Repo(BaseRepo):
           contents: A string to write to the file.
         """
         path = path.lstrip(os.path.sep)
-        with GitFile(os.path.join(self.controldir(), path), "wb") as f:
-            f.write(contents)
+
+        # Get shared repository permissions
+        file_mode, _ = self._get_shared_repository_permissions()
+
+        # Create file with appropriate permissions
+        if file_mode is not None:
+            with GitFile(
+                os.path.join(self.controldir(), path), "wb", mask=file_mode
+            ) as f:
+                f.write(contents)
+        else:
+            with GitFile(os.path.join(self.controldir(), path), "wb") as f:
+                f.write(contents)
 
     def _del_named_file(self, path: str) -> None:
         try:
@@ -1554,7 +1684,15 @@ class Repo(BaseRepo):
                 index_version = None
             skip_hash = config.get_boolean(b"index", b"skipHash", False)
 
-        return Index(self.index_path(), skip_hash=skip_hash, version=index_version)
+        # Get shared repository permissions for index file
+        file_mode, _ = self._get_shared_repository_permissions()
+
+        return Index(
+            self.index_path(),
+            skip_hash=skip_hash,
+            version=index_version,
+            file_mode=file_mode,
+        )
 
     def has_index(self) -> bool:
         """Check if an index is present."""
@@ -1857,10 +1995,11 @@ class Repo(BaseRepo):
         controldir: str | bytes | os.PathLike[str],
         bare: bool,
         object_store: PackBasedObjectStore | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
         default_branch: bytes | None = None,
         symlinks: bool | None = None,
         format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> "Repo":
         path = os.fspath(path)
         if isinstance(path, bytes):
@@ -1868,10 +2007,26 @@ class Repo(BaseRepo):
         controldir = os.fspath(controldir)
         if isinstance(controldir, bytes):
             controldir = os.fsdecode(controldir)
+
+        # Determine shared repository permissions early
+        file_mode: int | None = None
+        dir_mode: int | None = None
+        if shared_repository is not None:
+            file_mode, dir_mode = parse_shared_repository(shared_repository)
+
+        # Create base directories with appropriate permissions
         for d in BASE_DIRECTORIES:
-            os.mkdir(os.path.join(controldir, *d))
+            dir_path = os.path.join(controldir, *d)
+            os.mkdir(dir_path)
+            if dir_mode is not None:
+                os.chmod(dir_path, dir_mode)
+
         if object_store is None:
-            object_store = DiskObjectStore.init(os.path.join(controldir, OBJECTDIR))
+            object_store = DiskObjectStore.init(
+                os.path.join(controldir, OBJECTDIR),
+                file_mode=file_mode,
+                dir_mode=dir_mode,
+            )
         ret = cls(path, bare=bare, object_store=object_store)
         if default_branch is None:
             if config is None:
@@ -1883,7 +2038,12 @@ class Repo(BaseRepo):
             except KeyError:
                 default_branch = DEFAULT_BRANCH
         ret.refs.set_symbolic_ref(b"HEAD", local_branch_name(default_branch))
-        ret._init_files(bare=bare, symlinks=symlinks, format=format)
+        ret._init_files(
+            bare=bare,
+            symlinks=symlinks,
+            format=format,
+            shared_repository=shared_repository,
+        )
         return ret
 
     @classmethod
@@ -1892,10 +2052,11 @@ class Repo(BaseRepo):
         path: str | bytes | os.PathLike[str],
         *,
         mkdir: bool = False,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
         default_branch: bytes | None = None,
         symlinks: bool | None = None,
         format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> "Repo":
         """Create a new repository.
 
@@ -1906,6 +2067,7 @@ class Repo(BaseRepo):
           default_branch: Default branch name
           symlinks: Whether to support symlinks
           format: Repository format version (defaults to 0)
+          shared_repository: Shared repository setting (group, all, umask, or octal)
         Returns: `Repo` instance
         """
         path = os.fspath(path)
@@ -1924,6 +2086,7 @@ class Repo(BaseRepo):
             default_branch=default_branch,
             symlinks=symlinks,
             format=format,
+            shared_repository=shared_repository,
         )
 
     @classmethod
@@ -1957,12 +2120,21 @@ class Repo(BaseRepo):
         gitdirfile = os.path.join(path, CONTROLDIR)
         with open(gitdirfile, "wb") as f:
             f.write(b"gitdir: " + os.fsencode(worktree_controldir) + b"\n")
+
+        # Get shared repository permissions from main repository
+        _, dir_mode = main_repo._get_shared_repository_permissions()
+
+        # Create directories with appropriate permissions
         try:
             os.mkdir(main_worktreesdir)
+            if dir_mode is not None:
+                os.chmod(main_worktreesdir, dir_mode)
         except FileExistsError:
             pass
         try:
             os.mkdir(worktree_controldir)
+            if dir_mode is not None:
+                os.chmod(worktree_controldir, dir_mode)
         except FileExistsError:
             pass
         with open(os.path.join(worktree_controldir, GITDIR), "wb") as f:
@@ -1982,9 +2154,10 @@ class Repo(BaseRepo):
         *,
         mkdir: bool = False,
         object_store: PackBasedObjectStore | None = None,
-        config: Optional["StackedConfig"] = None,
+        config: "StackedConfig | None" = None,
         default_branch: bytes | None = None,
         format: int | None = None,
+        shared_repository: str | bool | None = None,
     ) -> "Repo":
         """Create a new bare repository.
 
@@ -1997,6 +2170,7 @@ class Repo(BaseRepo):
           config: Configuration object
           default_branch: Default branch name
           format: Repository format version (defaults to 0)
+          shared_repository: Shared repository setting (group, all, umask, or octal)
         Returns: a `Repo` instance
         """
         path = os.fspath(path)
@@ -2012,6 +2186,7 @@ class Repo(BaseRepo):
             config=config,
             default_branch=default_branch,
             format=format,
+            shared_repository=shared_repository,
         )
 
     create = init_bare
@@ -2217,7 +2392,7 @@ class MemoryRepo(BaseRepo):
     those have a stronger dependency on the filesystem.
     """
 
-    filter_context: Optional["FilterContext"]
+    filter_context: "FilterContext | None"
 
     def __init__(self) -> None:
         """Create a new repository in memory."""

+ 4 - 9
dulwich/server.py

@@ -43,24 +43,19 @@ Currently supported capabilities:
  * symref
 """
 
-import collections
 import os
 import socket
 import socketserver
 import sys
 import time
 import zlib
+from collections import deque
 from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
 from functools import partial
-from typing import IO, TYPE_CHECKING, Optional
+from typing import IO, TYPE_CHECKING
 from typing import Protocol as TypingProtocol
 
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    Buffer = bytes | bytearray | memoryview
-
 if TYPE_CHECKING:
     from .object_store import BaseObjectStore
     from .repo import BaseRepo
@@ -181,7 +176,7 @@ class BackendRepo(TypingProtocol):
         *,
         get_tagged: Callable[[], dict[bytes, bytes]] | None = None,
         depth: int | None = None,
-    ) -> Optional["MissingObjectFinder"]:
+    ) -> "MissingObjectFinder | None":
         """Yield the objects required for a list of commits.
 
         Args:
@@ -629,7 +624,7 @@ def _want_satisfied(
     Returns: True if the want is satisfied by the haves
     """
     o = store[want]
-    pending = collections.deque([o])
+    pending = deque([o])
     known = {want}
     while pending:
         commit = pending.popleft()

+ 4 - 4
dulwich/walk.py

@@ -21,8 +21,8 @@
 
 """General implementation of walking commits and their contents."""
 
-import collections
 import heapq
+from collections import defaultdict, deque
 from collections.abc import Callable, Iterator, Sequence
 from itertools import chain
 from typing import TYPE_CHECKING, Any, cast
@@ -338,7 +338,7 @@ class Walker:
 
         self._num_entries = 0
         self._queue = queue_cls(self)
-        self._out_queue: collections.deque[WalkEntry] = collections.deque()
+        self._out_queue: deque[WalkEntry] = deque()
 
     def _path_matches(self, changed_path: bytes | None) -> bool:
         if changed_path is None:
@@ -481,9 +481,9 @@ def _topo_reorder(
     Returns: iterator over WalkEntry objects from entries in FIFO order, except
         where a parent would be yielded before any of its children.
     """
-    todo: collections.deque[WalkEntry] = collections.deque()
+    todo: deque[WalkEntry] = deque()
     pending: dict[bytes, WalkEntry] = {}
-    num_children: dict[bytes, int] = collections.defaultdict(int)
+    num_children: dict[bytes, int] = defaultdict(int)
     for entry in entries:
         todo.append(entry)
         for p in get_parents(entry.commit):

+ 2 - 5
dulwich/web.py

@@ -59,7 +59,6 @@ from typing import (
     Any,
     BinaryIO,
     ClassVar,
-    Union,
     cast,
 )
 from urllib.parse import parse_qs
@@ -582,8 +581,7 @@ class HTTPGitRequest:
         environ: WSGIEnvironment,
         start_response: StartResponse,
         dumb: bool = False,
-        handlers: dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
-        | None = None,
+        handlers: dict[bytes, "HandlerConstructor | Callable[..., Any]"] | None = None,
     ) -> None:
         """Initialize HTTPGitRequest.
 
@@ -687,8 +685,7 @@ class HTTPGitApplication:
         self,
         backend: Backend,
         dumb: bool = False,
-        handlers: dict[bytes, Union["HandlerConstructor", Callable[..., Any]]]
-        | None = None,
+        handlers: dict[bytes, "HandlerConstructor | Callable[..., Any]"] | None = None,
         fallback_app: WSGIApplication | None = None,
     ) -> None:
         """Initialize HTTPGitApplication.

+ 1 - 1
tests/compat/test_lfs.py

@@ -384,7 +384,7 @@ class LFSStatusCompatTest(LFSCompatTestCase):
 
         # Modify the file
         with open(test_file, "wb") as f:
-            f.write(b"modified content\n")
+            f.write(b"slightly modified content\n")
 
         # Check status - should show file as modified
         status = porcelain.status(repo_dir, untracked_files="no")

+ 12 - 1
tests/test_cli_merge.py

@@ -21,6 +21,7 @@
 
 """Tests for dulwich merge CLI command."""
 
+import importlib.util
 import os
 import tempfile
 import unittest
@@ -28,7 +29,7 @@ import unittest
 from dulwich import porcelain
 from dulwich.cli import main
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 class CLIMergeTests(TestCase):
@@ -77,6 +78,11 @@ class CLIMergeTests(TestCase):
 
     def test_merge_with_conflicts(self):
         """Test CLI merge with conflicts."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             porcelain.init(tmpdir)
@@ -334,6 +340,11 @@ class CLIMergeTests(TestCase):
 
     def test_octopus_merge_with_conflicts(self):
         """Test CLI octopus merge with conflicts."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             porcelain.init(tmpdir)

+ 156 - 3
tests/test_porcelain.py

@@ -4559,6 +4559,157 @@ class CheckoutTests(PorcelainTestCase):
         remote_repo.close()
 
 
+class RestoreTests(PorcelainTestCase):
+    """Tests for the restore command."""
+
+    def setUp(self) -> None:
+        super().setUp()
+        self._sha, self._foo_path = _commit_file_with_content(
+            self.repo, "foo", "original\n"
+        )
+
+    def test_restore_worktree_from_index(self) -> None:
+        # Modify the working tree file
+        with open(self._foo_path, "w") as f:
+            f.write("modified\n")
+
+        # Restore from index (should restore to original)
+        porcelain.restore(self.repo, paths=["foo"])
+
+        with open(self._foo_path) as f:
+            content = f.read()
+        self.assertEqual("original\n", content)
+
+    def test_restore_worktree_from_head(self) -> None:
+        # Modify and stage the file
+        with open(self._foo_path, "w") as f:
+            f.write("staged\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Now modify it again in worktree
+        with open(self._foo_path, "w") as f:
+            f.write("worktree\n")
+
+        # Restore from HEAD (should restore to original, not staged)
+        porcelain.restore(self.repo, paths=["foo"], source="HEAD")
+
+        with open(self._foo_path) as f:
+            content = f.read()
+        self.assertEqual("original\n", content)
+
+    def test_restore_staged_from_head(self) -> None:
+        # Modify and stage the file
+        with open(self._foo_path, "w") as f:
+            f.write("staged\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Verify it's staged
+        status = list(porcelain.status(self.repo))
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
+        )
+
+        # Restore staged from HEAD
+        porcelain.restore(self.repo, paths=["foo"], staged=True, worktree=False)
+
+        # Verify it's no longer staged
+        status = list(porcelain.status(self.repo))
+        # Now it should show as unstaged modification
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+        )
+
+    def test_restore_both_staged_and_worktree(self) -> None:
+        # Modify and stage the file
+        with open(self._foo_path, "w") as f:
+            f.write("staged\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Now modify it again in worktree
+        with open(self._foo_path, "w") as f:
+            f.write("worktree\n")
+
+        # Restore both from HEAD
+        porcelain.restore(self.repo, paths=["foo"], staged=True, worktree=True)
+
+        # Verify content is restored
+        with open(self._foo_path) as f:
+            content = f.read()
+        self.assertEqual("original\n", content)
+
+        # Verify nothing is staged
+        status = list(porcelain.status(self.repo))
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
+
+    def test_restore_nonexistent_path(self) -> None:
+        with self.assertRaises(porcelain.CheckoutError):
+            porcelain.restore(self.repo, paths=["nonexistent"])
+
+
+class SwitchTests(PorcelainTestCase):
+    """Tests for the switch command."""
+
+    def setUp(self) -> None:
+        super().setUp()
+        self._sha, self._foo_path = _commit_file_with_content(
+            self.repo, "foo", "hello\n"
+        )
+        porcelain.branch_create(self.repo, "dev")
+
+    def test_switch_to_existing_branch(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+        porcelain.switch(self.repo, "dev")
+        self.assertEqual(b"dev", porcelain.active_branch(self.repo))
+
+    def test_switch_to_non_existing_branch(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+        with self.assertRaises(KeyError):
+            porcelain.switch(self.repo, "nonexistent")
+
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+    def test_switch_with_create(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+        porcelain.switch(self.repo, "master", create="feature")
+        self.assertEqual(b"feature", porcelain.active_branch(self.repo))
+
+    def test_switch_with_detach(self) -> None:
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+        porcelain.switch(self.repo, self._sha.decode(), detach=True)
+        # In detached HEAD state, active_branch raises IndexError
+        with self.assertRaises(IndexError):
+            porcelain.active_branch(self.repo)
+
+    def test_switch_with_uncommitted_changes(self) -> None:
+        # Modify the file
+        with open(self._foo_path, "a") as f:
+            f.write("new content\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Switch should fail due to uncommitted changes
+        with self.assertRaises(porcelain.CheckoutError):
+            porcelain.switch(self.repo, "dev")
+
+        # Should still be on master
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
+
+    def test_switch_with_force(self) -> None:
+        # Modify the file
+        with open(self._foo_path, "a") as f:
+            f.write("new content\n")
+        porcelain.add(self.repo, paths=[self._foo_path])
+
+        # Force switch should work
+        porcelain.switch(self.repo, "dev", force=True)
+        self.assertEqual(b"dev", porcelain.active_branch(self.repo))
+
+    def test_switch_to_commit_without_detach(self) -> None:
+        # Switching to a commit SHA without --detach should fail
+        with self.assertRaises(porcelain.CheckoutError):
+            porcelain.switch(self.repo, self._sha.decode())
+
+
 class GeneralCheckoutTests(PorcelainTestCase):
     """Tests for the general checkout function that handles branches, tags, and commits."""
 
@@ -6071,9 +6222,11 @@ class StatusTests(PorcelainTestCase):
             {"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged
         )
         # File committed with CRLF before autocrlf=input was enabled
-        # will appear as unstaged because working tree is normalized to LF
-        # during comparison but index still has CRLF
-        self.assertListEqual(results.unstaged, [b"crlf-exists"])
+        # will NOT appear as unstaged because stat matching optimization
+        # skips filter processing when file hasn't been modified.
+        # This matches Git's behavior, which uses stat matching to avoid
+        # expensive filter operations. Git shows a warning instead.
+        self.assertListEqual(results.unstaged, [])
         self.assertListEqual(results.untracked, [])
 
     def test_status_autocrlf_input_modified(self) -> None:

+ 12 - 1
tests/test_porcelain_cherry_pick.py

@@ -21,12 +21,13 @@
 
 """Tests for porcelain cherry-pick functionality."""
 
+import importlib.util
 import os
 import tempfile
 
 from dulwich import porcelain
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 class PorcelainCherryPickTests(TestCase):
@@ -107,6 +108,11 @@ class PorcelainCherryPickTests(TestCase):
 
     def test_cherry_pick_conflict(self):
         """Test cherry-pick with conflicts."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             porcelain.init(tmpdir)
@@ -164,6 +170,11 @@ class PorcelainCherryPickTests(TestCase):
 
     def test_cherry_pick_abort(self):
         """Test aborting a cherry-pick."""
+
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
             porcelain.init(tmpdir)

+ 18 - 1
tests/test_porcelain_merge.py

@@ -21,6 +21,7 @@
 
 """Tests for porcelain merge functionality."""
 
+import importlib.util
 import os
 import tempfile
 import unittest
@@ -28,7 +29,7 @@ import unittest
 from dulwich import porcelain
 from dulwich.repo import Repo
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 class PorcelainMergeTests(TestCase):
@@ -166,6 +167,10 @@ class PorcelainMergeTests(TestCase):
                 self.assertEqual(f.read(), "Master file2\n")
 
     def test_merge_with_conflicts(self):
         """Test merge with conflicts."""
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
@@ -339,6 +344,10 @@ class PorcelainMergeTests(TestCase):
                 self.assertEqual(f.read(), "Branch3 modified file3\n")
 
     def test_octopus_merge_with_conflicts(self):
         """Test that octopus merge refuses to proceed with conflicts."""
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
@@ -520,6 +529,10 @@ class PorcelainMergeTreeTests(TestCase):
             self.assertIn(b"file3.txt", merged_tree)
 
     def test_merge_tree_with_conflicts(self):
         """Test merge_tree with conflicts."""
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo
@@ -607,6 +620,10 @@ class PorcelainMergeTreeTests(TestCase):
             self.assertIn(b"file2.txt", merged_tree)
 
     def test_merge_tree_with_tree_objects(self):
         """Test merge_tree with tree objects instead of commits."""
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         with tempfile.TemporaryDirectory() as tmpdir:
             # Initialize repo

+ 6 - 1
tests/test_rebase.py

@@ -21,6 +21,7 @@
 
 """Tests for dulwich.rebase."""
 
+import importlib.util
 import os
 import tempfile
 
@@ -38,7 +39,7 @@ from dulwich.rebase import (
 from dulwich.repo import MemoryRepo, Repo
 from dulwich.tests.utils import make_commit
 
-from . import TestCase
+from . import DependencyMissing, TestCase
 
 
 class RebaserTestCase(TestCase):
@@ -163,6 +164,10 @@ class RebaserTestCase(TestCase):
         self.assertIn(b"file.txt", new_tree)
 
     def test_rebase_with_conflicts(self):
         """Test rebase with merge conflicts."""
+        # Check if merge3 module is available
+        if importlib.util.find_spec("merge3") is None:
+            raise DependencyMissing("merge3")
+
         self._setup_initial_commit()
         # Create feature branch with conflicting change

+ 342 - 0
tests/test_repository.py

@@ -28,6 +28,7 @@ import shutil
 import stat
 import sys
 import tempfile
+import time
 import warnings
 
 from dulwich import errors, objects, porcelain
@@ -2080,3 +2081,344 @@ class RepoConfigIncludeIfTests(TestCase):
             config = r.get_config()
             self.assertEqual(b"true", config.get((b"core",), b"autocrlf"))
             r.close()
+
+
+@skipIf(sys.platform == "win32", "Windows does not support Unix file permissions")
+class SharedRepositoryTests(TestCase):
+    """Tests for core.sharedRepository functionality."""
+
+    def setUp(self):
+        super().setUp()
+        self._orig_umask = os.umask(0o022)
+
+    def tearDown(self):
+        os.umask(self._orig_umask)
+        super().tearDown()
+
+    def _get_file_mode(self, path):
+        """Get the file mode bits (without file type bits)."""
+        return stat.S_IMODE(os.stat(path).st_mode)
+
+    def _check_permissions(self, repo, expected_file_mode, expected_dir_mode):
+        """Check that repository files and directories have expected permissions."""
+        objects_dir = os.path.join(repo.commondir(), "objects")
+
+        # Check objects directory
+        actual_dir_mode = self._get_file_mode(objects_dir)
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"objects dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+        # Check pack directory
+        pack_dir = os.path.join(objects_dir, "pack")
+        actual_dir_mode = self._get_file_mode(pack_dir)
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"pack dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+        # Check info directory
+        info_dir = os.path.join(objects_dir, "info")
+        actual_dir_mode = self._get_file_mode(info_dir)
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"info dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+    def test_init_bare_shared_group(self):
+        """Test initializing bare repo with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Expected permissions for group sharing
+        expected_dir_mode = 0o2775  # setgid + rwxrwxr-x
+        expected_file_mode = 0o664  # rw-rw-r--
+
+        self._check_permissions(repo, expected_file_mode, expected_dir_mode)
+
+    def test_init_bare_shared_all(self):
+        """Test initializing bare repo with sharedRepository=all."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="all")
+        self.addCleanup(repo.close)
+
+        # Expected permissions for world sharing
+        expected_dir_mode = 0o2777  # setgid + rwxrwxrwx
+        expected_file_mode = 0o666  # rw-rw-rw-
+
+        self._check_permissions(repo, expected_file_mode, expected_dir_mode)
+
+    def test_init_bare_shared_umask(self):
+        """Test initializing bare repo with sharedRepository=umask (default)."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="umask")
+        self.addCleanup(repo.close)
+
+        # With umask, no special permissions should be set
+        # The actual permissions will depend on the umask, but we can
+        # at least verify that setgid is NOT set
+        objects_dir = os.path.join(repo.commondir(), "objects")
+        actual_mode = os.stat(objects_dir).st_mode
+
+        # Verify setgid bit is NOT set
+        self.assertEqual(0, actual_mode & stat.S_ISGID)
+
+    def test_loose_object_permissions_group(self):
+        """Test that loose objects get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create a blob object
+        blob = objects.Blob.from_string(b"test content")
+        repo.object_store.add_object(blob)
+
+        # Find the object file
+        obj_path = repo.object_store._get_shafile_path(blob.id)
+
+        # Check file permissions
+        actual_mode = self._get_file_mode(obj_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"loose object mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+        # Check directory permissions
+        obj_dir = os.path.dirname(obj_path)
+        actual_dir_mode = self._get_file_mode(obj_dir)
+        expected_dir_mode = 0o2775  # setgid + rwxrwxr-x
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"object dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )
+
+    def test_loose_object_permissions_all(self):
+        """Test that loose objects get correct permissions with sharedRepository=all."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="all")
+        self.addCleanup(repo.close)
+
+        # Create a blob object
+        blob = objects.Blob.from_string(b"test content")
+        repo.object_store.add_object(blob)
+
+        # Find the object file
+        obj_path = repo.object_store._get_shafile_path(blob.id)
+
+        # Check file permissions
+        actual_mode = self._get_file_mode(obj_path)
+        expected_mode = 0o666  # rw-rw-rw-
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"loose object mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_pack_file_permissions_group(self):
+        """Test that pack files get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create some objects
+        blobs = [
+            objects.Blob.from_string(f"test content {i}".encode()) for i in range(5)
+        ]
+        repo.object_store.add_objects([(blob, None) for blob in blobs])
+
+        # Find the pack files
+        pack_dir = os.path.join(repo.commondir(), "objects", "pack")
+        pack_files = [f for f in os.listdir(pack_dir) if f.endswith(".pack")]
+        self.assertGreater(len(pack_files), 0, "No pack files created")
+
+        # Check pack file permissions
+        pack_path = os.path.join(pack_dir, pack_files[0])
+        actual_mode = self._get_file_mode(pack_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"pack file mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_pack_index_permissions_group(self):
+        """Test that pack index files get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create some objects
+        blobs = [
+            objects.Blob.from_string(f"test content {i}".encode()) for i in range(5)
+        ]
+        repo.object_store.add_objects([(blob, None) for blob in blobs])
+
+        # Find the pack index files
+        pack_dir = os.path.join(repo.commondir(), "objects", "pack")
+        idx_files = [f for f in os.listdir(pack_dir) if f.endswith(".idx")]
+        self.assertGreater(len(idx_files), 0, "No pack index files created")
+
+        # Check pack index file permissions
+        idx_path = os.path.join(pack_dir, idx_files[0])
+        actual_mode = self._get_file_mode(idx_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"pack index mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_index_file_permissions_group(self):
+        """Test that index file gets correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        # Create non-bare repo (index only exists in non-bare repos)
+        repo = Repo.init(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Make a change to trigger index write
+        blob = objects.Blob.from_string(b"test content")
+        repo.object_store.add_object(blob)
+        test_file = os.path.join(tmp_dir, "test.txt")
+        with open(test_file, "wb") as f:
+            f.write(b"test content")
+        # Stage the file
+        porcelain.add(repo, [test_file])
+
+        # Check index file permissions
+        index_path = repo.index_path()
+        actual_mode = self._get_file_mode(index_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"index file mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_existing_repo_respects_config(self):
+        """Test that opening an existing repo respects core.sharedRepository config."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        # Create repo with shared=group
+        repo = Repo.init_bare(tmp_dir, shared_repository="group")
+        repo.close()
+
+        # Reopen the repo
+        repo = Repo(tmp_dir)
+        self.addCleanup(repo.close)
+
+        # Add an object and check permissions
+        blob = objects.Blob.from_string(b"test content after reopen")
+        repo.object_store.add_object(blob)
+
+        obj_path = repo.object_store._get_shafile_path(blob.id)
+        actual_mode = self._get_file_mode(obj_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"loose object mode after reopen: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+    def test_reflog_permissions_group(self):
+        """Test that reflog files get correct permissions with sharedRepository=group."""
+        tmp_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, tmp_dir)
+
+        # Set umask to 0 to see what permissions are actually set
+        os.umask(0)
+
+        repo = Repo.init(tmp_dir, shared_repository="group")
+        self.addCleanup(repo.close)
+
+        # Create a commit to trigger reflog creation
+        blob = objects.Blob.from_string(b"test content")
+        tree = objects.Tree()
+        tree.add(b"test.txt", 0o100644, blob.id)
+
+        c = objects.Commit()
+        c.tree = tree.id
+        c.author = c.committer = b"Test <test@example.com>"
+        c.author_time = c.commit_time = int(time.time())
+        c.author_timezone = c.commit_timezone = 0
+        c.encoding = b"UTF-8"
+        c.message = b"Test commit"
+
+        repo.object_store.add_object(blob)
+        repo.object_store.add_object(tree)
+        repo.object_store.add_object(c)
+
+        # Update ref to trigger reflog creation
+        repo.refs.set_if_equals(
+            b"refs/heads/master", None, c.id, message=b"commit: initial commit"
+        )
+
+        # Check reflog file permissions
+        reflog_path = os.path.join(repo.controldir(), "logs", "refs", "heads", "master")
+        self.assertTrue(os.path.exists(reflog_path), "Reflog file should exist")
+
+        actual_mode = self._get_file_mode(reflog_path)
+        expected_mode = 0o664  # rw-rw-r--
+        self.assertEqual(
+            expected_mode,
+            actual_mode,
+            f"reflog file mode: expected {oct(expected_mode)}, got {oct(actual_mode)}",
+        )
+
+        # Check reflog directory permissions
+        reflog_dir = os.path.dirname(reflog_path)
+        actual_dir_mode = self._get_file_mode(reflog_dir)
+        expected_dir_mode = 0o2775  # setgid + rwxrwxr-x
+        self.assertEqual(
+            expected_dir_mode,
+            actual_dir_mode,
+            f"reflog dir mode: expected {oct(expected_dir_mode)}, got {oct(actual_dir_mode)}",
+        )