
Merge branch 'master' into rs-create-delta

Jelmer Vernooij, 2 months ago
Parent
Commit
54acfffd9c

+ 32 - 0
NEWS

@@ -4,6 +4,38 @@
    implementation uses the similar crate for efficient diff computation.
    (Jelmer Vernooij)
 
+ * Add support for namespace isolation via ``NamespacedRefsContainer``.
+   Implements Git's namespace feature for isolating refs within a single
+   repository using the ``refs/namespaces/`` prefix. (Jelmer Vernooij, #1809)
+
+ * Add support for GIT_FLUSH environment variable to control output buffering
+   in CLI commands. When GIT_FLUSH=1, output is flushed after each write for
+   real-time visibility. (Jelmer Vernooij, #1810)
+
+ * Implement ``dulwich interpret-trailers`` functionality to parse and manipulate
+   structured metadata (trailers) in commit messages. Adds ``porcelain.interpret_trailers()``
+   with support for parsing, adding, replacing, and formatting trailers. Also fixes
+   the ``signoff`` parameter in ``porcelain.commit()`` to add ``Signed-off-by`` trailers.
+   (Jelmer Vernooij, #1826)
+
+ * Add support for recursive submodule updates via ``--recursive`` flag in
+   ``dulwich submodule update`` command and ``recursive`` parameter in
+   ``porcelain.submodule_update()``.
+   (Jelmer Vernooij, #1813)
+
+ * Add support for ``git maintenance`` command to optimize Git repository data.
+   Implements gc, commit-graph, loose-objects, incremental-repack, pack-refs, and
+   prefetch tasks. Supports automatic maintenance with ``--auto`` flag and task-specific
+   configuration. (Jelmer Vernooij)
+
+ * Add support for ``dulwich replace`` command to create refs that replace objects.
+   (Jelmer Vernooij, #1834)
+
+ * Implement advanced Git object specification support: index path lookup (``:``, ``:0:``,
+   ``:1:``, ``:2:``, ``:3:``) for accessing files from the index and merge stages, and
+   reflog time specifications (``@{time}``) using Git's approxidate format (e.g.,
+   ``HEAD@{yesterday}``, ``master@{2.weeks.ago}``). (Jelmer Vernooij, #1783)
+
 0.24.7	2025-10-23
 
  * Add sparse index support for improved performance with large repositories.

+ 161 - 0
dulwich/approxidate.py

@@ -0,0 +1,161 @@
+# approxidate.py -- Parsing of Git's "approxidate" time specifications
+# Copyright (C) 2025 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Parsing of Git's "approxidate" time specifications.
+
+Git uses a flexible date parser called "approxidate" that accepts various
+formats for specifying dates and times, including:
+- Relative times: "yesterday", "2 days ago", "2.weeks.ago"
+- Absolute dates: "2005-04-07", "2005-04-07 22:13:13"
+- Unix timestamps: "1234567890"
+- Special keywords: "now", "today", "yesterday"
+"""
+
+import time
+from datetime import datetime
+from typing import Union
+
+
+def parse_approxidate(time_spec: Union[str, bytes]) -> int:
+    """Parse a Git approxidate specification and return a Unix timestamp.
+
+    Args:
+        time_spec: Time specification string. Can be:
+            - A Unix timestamp (integer as string)
+            - A relative time like "2 weeks ago" or "2.weeks.ago"
+            - Special keywords: "now", "today", "yesterday"
+            - Absolute date: "2005-04-07" or "2005-04-07 22:13:13"
+
+    Returns:
+        Unix timestamp (seconds since epoch)
+
+    Raises:
+        ValueError: If the time specification cannot be parsed
+    """
+    if isinstance(time_spec, bytes):
+        time_spec = time_spec.decode("utf-8")
+
+    time_spec = time_spec.strip()
+
+    # Get current time
+    now = time.time()
+
+    # Handle special keywords
+    if time_spec == "yesterday":
+        return int(now - 86400)
+    elif time_spec == "today":
+        # Start of today (midnight)
+        dt = datetime.fromtimestamp(now)
+        dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
+        return int(dt.timestamp())
+    elif time_spec == "now":
+        return int(now)
+
+    # Try parsing as Unix timestamp
+    try:
+        return int(time_spec)
+    except ValueError:
+        pass
+
+    # Handle relative time specifications
+    # Supports both "2 weeks ago" and "2.weeks.ago" formats
+    if " ago" in time_spec or ".ago" in time_spec:
+        seconds_ago = parse_relative_time(time_spec)
+        return int(now - seconds_ago)
+
+    # Try parsing as absolute timestamp formats
+    # Git supports various formats like:
+    # - "2005-04-07" (ISO date)
+    # - "2005-04-07 22:13:13" (ISO datetime)
+    # - "2005-04-07T22:13:13" (ISO 8601)
+    formats = [
+        "%Y-%m-%d %H:%M:%S",
+        "%Y-%m-%dT%H:%M:%S",
+        "%Y-%m-%d",
+        "%Y/%m/%d %H:%M:%S",
+        "%Y/%m/%d",
+    ]
+
+    for fmt in formats:
+        try:
+            dt = datetime.strptime(time_spec, fmt)
+            return int(dt.timestamp())
+        except ValueError:
+            continue
+
+    raise ValueError(f"Unable to parse time specification: {time_spec!r}")
+
+
+def parse_relative_time(time_str: str) -> int:
+    """Parse a relative time string like '2 weeks ago' into seconds.
+
+    Args:
+        time_str: String like '2 weeks ago', '2.weeks.ago', or 'now'
+
+    Returns:
+        Number of seconds (relative to current time)
+
+    Raises:
+        ValueError: If the time string cannot be parsed
+    """
+    if time_str == "now":
+        return 0
+
+    # Normalize dot-separated format to space-separated
+    # "2.weeks.ago" -> "2 weeks ago"
+    normalized = time_str.replace(".ago", " ago").replace(".", " ")
+
+    if not normalized.endswith(" ago"):
+        raise ValueError(f"Invalid relative time format: {time_str}")
+
+    parts = normalized[:-4].split()
+    if len(parts) != 2:
+        raise ValueError(f"Invalid relative time format: {time_str}")
+
+    try:
+        num = int(parts[0])
+        unit = parts[1]
+
+        multipliers = {
+            "second": 1,
+            "seconds": 1,
+            "minute": 60,
+            "minutes": 60,
+            "hour": 3600,
+            "hours": 3600,
+            "day": 86400,
+            "days": 86400,
+            "week": 604800,
+            "weeks": 604800,
+            "month": 2592000,  # 30 days
+            "months": 2592000,
+            "year": 31536000,  # 365 days
+            "years": 31536000,
+        }
+
+        if unit in multipliers:
+            return num * multipliers[unit]
+        else:
+            raise ValueError(f"Unknown time unit: {unit}")
+    except ValueError as e:
+        if "invalid literal" in str(e):
+            raise ValueError(f"Invalid number in relative time: {parts[0]}")
+        raise

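A quick sketch of how the new parser behaves for the formats listed in the module docstring (illustrative only; the clock-relative assertion assumes the two calls run within a second of each other):

    import time

    from dulwich.approxidate import parse_approxidate, parse_relative_time

    # Both relative spellings normalize to the same offset in seconds.
    assert parse_relative_time("2 weeks ago") == 2 * 604800
    assert parse_relative_time("2.weeks.ago") == 2 * 604800

    # Raw Unix timestamps pass straight through; keywords use the clock.
    assert parse_approxidate("1234567890") == 1234567890
    assert abs(parse_approxidate("now") - time.time()) <= 1

    # Absolute dates are tried against the fixed strptime format list.
    parse_approxidate("2005-04-07 22:13:13")
    parse_approxidate("2005/04/07")

    # Anything unrecognized raises ValueError.
    try:
        parse_approxidate("a fortnight hence")
    except ValueError as e:
        print(e)
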
+ 16 - 26
dulwich/bundle.py

@@ -272,32 +272,22 @@ def create_bundle_from_repo(
     bundle_prerequisites = []
     have_objects = set()
     for prereq in prerequisites:
-        if isinstance(prereq, str):
-            prereq = prereq.encode("utf-8")
-        if isinstance(prereq, bytes):
-            if len(prereq) == 40:  # SHA1 hex string
-                try:
-                    # Validate it's actually hex
-                    bytes.fromhex(prereq.decode("utf-8"))
-                    # Store hex in bundle and for pack generation
-                    bundle_prerequisites.append((prereq, b""))
-                    have_objects.add(prereq)
-                except ValueError:
-                    # Not a valid hex string, invalid prerequisite
-                    raise ValueError(f"Invalid prerequisite format: {prereq!r}")
-            elif len(prereq) == 20:
-                # Binary SHA, convert to hex for both bundle and pack generation
-                hex_prereq = prereq.hex().encode("ascii")
-                bundle_prerequisites.append((hex_prereq, b""))
-                have_objects.add(hex_prereq)
-            else:
-                # Invalid length
-                raise ValueError(f"Invalid prerequisite SHA length: {len(prereq)}")
-        else:
-            # Assume it's already a binary SHA
-            hex_prereq = prereq.hex().encode("ascii")
-            bundle_prerequisites.append((hex_prereq, b""))
-            have_objects.append(hex_prereq)
+        if not isinstance(prereq, bytes):
+            raise TypeError(
+                f"Invalid prerequisite type: {type(prereq)}, expected bytes"
+            )
+        if len(prereq) != 40:
+            raise ValueError(
+                f"Invalid prerequisite SHA length: {len(prereq)}, expected 40 hex characters"
+            )
+        try:
+            # Validate it's actually hex
+            bytes.fromhex(prereq.decode("utf-8"))
+        except ValueError:
+            raise ValueError(f"Invalid prerequisite format: {prereq!r}")
+        # Store hex in bundle and for pack generation
+        bundle_prerequisites.append((prereq, b""))
+        have_objects.add(prereq)
 
     # Generate pack data containing all objects needed for the refs
     pack_count, pack_objects = repo.generate_pack_data(

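With the stricter validation above, prerequisites must now be 40-character hex SHAs as bytes; the old str and 20-byte-binary conversions are rejected instead of coerced. A minimal sketch, assuming `create_bundle_from_repo` keeps the `repo` and `prerequisites` parameters visible in this hunk:

    from dulwich.bundle import create_bundle_from_repo
    from dulwich.repo import Repo

    repo = Repo(".")
    head = repo.refs[b"HEAD"]  # 40-byte hex SHA, the only accepted form

    # Signature assumed from the hunk above; other parameters omitted.
    bundle = create_bundle_from_repo(repo, prerequisites=[head])

    # These now fail fast instead of being converted:
    #   prerequisites=[head.decode("ascii")]          -> TypeError (str)
    #   prerequisites=[bytes.fromhex(head.decode())]  -> ValueError (20 bytes)
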
+ 527 - 64
dulwich/cli.py

@@ -28,6 +28,10 @@ no means intended to be a full-blown Git command-line interface but just
 a way to test Dulwich.
 """
 
+# TODO: Add support for GIT_NAMESPACE environment variable by wrapping
+# repository refs with NamespacedRefsContainer when the environment
+# variable is set. See issue #1809 and dulwich.refs.NamespacedRefsContainer.
+
 import argparse
 import io
 import logging
@@ -91,6 +95,196 @@ def to_display_str(value: Union[bytes, str]) -> str:
     return value
 
 
+def _should_auto_flush(
+    stream: Union[TextIO, BinaryIO], env: Optional[Mapping[str, str]] = None
+) -> bool:
+    """Determine if output should be auto-flushed based on GIT_FLUSH environment variable.
+
+    Args:
+        stream: The output stream to check
+        env: Environment variables dict (defaults to os.environ)
+
+    Returns:
+        True if output should be flushed after each write, False otherwise
+    """
+    if env is None:
+        env = os.environ
+    git_flush = env.get("GIT_FLUSH", "").strip()
+    if git_flush == "1":
+        return True
+    elif git_flush == "0":
+        return False
+    else:
+        # Auto-detect: flush when output is not an interactive terminal
+        # (e.g. piped or redirected), for real-time visibility
+        return hasattr(stream, "isatty") and not stream.isatty()
+
+
+class AutoFlushTextIOWrapper:
+    """Wrapper that automatically flushes a TextIO stream based on configuration.
+
+    This wrapper can be configured to flush after each write operation,
+    which is useful for real-time output monitoring in CI/CD systems.
+    """
+
+    def __init__(self, stream: TextIO) -> None:
+        """Initialize the wrapper.
+
+        Args:
+            stream: The stream to wrap
+        """
+        self._stream = stream
+
+    @classmethod
+    def env(
+        cls, stream: TextIO, env: Optional[Mapping[str, str]] = None
+    ) -> "AutoFlushTextIOWrapper | TextIO":
+        """Create wrapper respecting the GIT_FLUSH environment variable.
+
+        Respects the GIT_FLUSH environment variable:
+        - GIT_FLUSH=1: Always flush after each write
+        - GIT_FLUSH=0: Never auto-flush (use buffered I/O)
+        - Not set: Auto-detect based on whether output is redirected
+
+        Args:
+            stream: The stream to wrap
+            env: Environment variables dict (defaults to os.environ)
+
+        Returns:
+            An AutoFlushTextIOWrapper when flushing is enabled, otherwise the original stream
+        """
+        if _should_auto_flush(stream, env):
+            return cls(stream)
+        else:
+            return stream
+
+    def write(self, data: str) -> int:
+        """Write data to the stream and optionally flush.
+
+        Args:
+            data: Data to write
+
+        Returns:
+            Number of characters written
+        """
+        result = self._stream.write(data)
+        self._stream.flush()
+        return result
+
+    def writelines(self, lines: Iterable[str]) -> None:
+        """Write multiple lines to the stream and optionally flush.
+
+        Args:
+            lines: Lines to write
+        """
+        self._stream.writelines(lines)
+        self._stream.flush()
+
+    def flush(self) -> None:
+        """Flush the underlying stream."""
+        self._stream.flush()
+
+    def __getattr__(self, name: str) -> object:
+        """Delegate all other attributes to the underlying stream."""
+        return getattr(self._stream, name)
+
+    def __enter__(self) -> "AutoFlushTextIOWrapper":
+        """Support context manager protocol."""
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        """Support context manager protocol."""
+        if hasattr(self._stream, "__exit__"):
+            self._stream.__exit__(exc_type, exc_val, exc_tb)
+
+
+class AutoFlushBinaryIOWrapper:
+    """Wrapper that automatically flushes a BinaryIO stream based on configuration.
+
+    This wrapper can be configured to flush after each write operation,
+    which is useful for real-time output monitoring in CI/CD systems.
+    """
+
+    def __init__(self, stream: BinaryIO) -> None:
+        """Initialize the wrapper.
+
+        Args:
+            stream: The stream to wrap
+        """
+        self._stream = stream
+
+    @classmethod
+    def env(
+        cls, stream: BinaryIO, env: Optional[Mapping[str, str]] = None
+    ) -> "AutoFlushBinaryIOWrapper | BinaryIO":
+        """Create wrapper respecting the GIT_FLUSH environment variable.
+
+        Respects the GIT_FLUSH environment variable:
+        - GIT_FLUSH=1: Always flush after each write
+        - GIT_FLUSH=0: Never auto-flush (use buffered I/O)
+        - Not set: Auto-detect based on whether output is redirected
+
+        Args:
+            stream: The stream to wrap
+            env: Environment variables dict (defaults to os.environ)
+
+        Returns:
+            An AutoFlushBinaryIOWrapper when flushing is enabled, otherwise the original stream
+        """
+        if _should_auto_flush(stream, env):
+            return cls(stream)
+        else:
+            return stream
+
+    def write(self, data: Buffer) -> int:
+        """Write data to the stream and optionally flush.
+
+        Args:
+            data: Data to write
+
+        Returns:
+            Number of bytes written
+        """
+        result = self._stream.write(data)
+        self._stream.flush()
+        return result
+
+    def writelines(self, lines: Iterable[Buffer]) -> None:
+        """Write multiple lines to the stream and optionally flush.
+
+        Args:
+            lines: Lines to write
+        """
+        self._stream.writelines(lines)
+        self._stream.flush()
+
+    def flush(self) -> None:
+        """Flush the underlying stream."""
+        self._stream.flush()
+
+    def __getattr__(self, name: str) -> object:
+        """Delegate all other attributes to the underlying stream."""
+        return getattr(self._stream, name)
+
+    def __enter__(self) -> "AutoFlushBinaryIOWrapper":
+        """Support context manager protocol."""
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        """Support context manager protocol."""
+        if hasattr(self._stream, "__exit__"):
+            self._stream.__exit__(exc_type, exc_val, exc_tb)
+
+
 class CommitMessageError(Exception):
     """Raised when there's an issue with the commit message."""
 
@@ -117,59 +311,6 @@ def signal_quit(signal: int, frame: Optional[types.FrameType]) -> None:
     pdb.set_trace()
 
 
-def parse_relative_time(time_str: str) -> int:
-    """Parse a relative time string like '2 weeks ago' into seconds.
-
-    Args:
-        time_str: String like '2 weeks ago' or 'now'
-
-    Returns:
-        Number of seconds
-
-    Raises:
-        ValueError: If the time string cannot be parsed
-    """
-    if time_str == "now":
-        return 0
-
-    if not time_str.endswith(" ago"):
-        raise ValueError(f"Invalid relative time format: {time_str}")
-
-    parts = time_str[:-4].split()
-    if len(parts) != 2:
-        raise ValueError(f"Invalid relative time format: {time_str}")
-
-    try:
-        num = int(parts[0])
-        unit = parts[1]
-
-        multipliers = {
-            "second": 1,
-            "seconds": 1,
-            "minute": 60,
-            "minutes": 60,
-            "hour": 3600,
-            "hours": 3600,
-            "day": 86400,
-            "days": 86400,
-            "week": 604800,
-            "weeks": 604800,
-            "month": 2592000,  # 30 days
-            "months": 2592000,
-            "year": 31536000,  # 365 days
-            "years": 31536000,
-        }
-
-        if unit in multipliers:
-            return num * multipliers[unit]
-        else:
-            raise ValueError(f"Unknown time unit: {unit}")
-    except ValueError as e:
-        if "invalid literal" in str(e):
-            raise ValueError(f"Invalid number in relative time: {parts[0]}")
-        raise
-
-
 def parse_time_to_timestamp(time_spec: str) -> int:
     """Parse a time specification and return a Unix timestamp.
 
@@ -189,7 +330,9 @@ def parse_time_to_timestamp(time_spec: str) -> int:
     """
     import time
 
-    # Handle special cases
+    from .approxidate import parse_approxidate
+
+    # Handle special cases specific to CLI
     if time_spec == "all":
         # Expire all entries - set to future time so everything is "older"
         return int(time.time()) + (100 * 365 * 24 * 60 * 60)  # 100 years in future
@@ -197,15 +340,8 @@ def parse_time_to_timestamp(time_spec: str) -> int:
         # Never expire - set to epoch start so nothing is older
         return 0
 
-    # Try parsing as direct Unix timestamp
-    try:
-        return int(time_spec)
-    except ValueError:
-        pass
-
-    # Parse relative time and convert to timestamp
-    seconds_ago = parse_relative_time(time_spec)
-    return int(time.time()) - seconds_ago
+    # Use approxidate parser for everything else
+    return parse_approxidate(time_spec)
 
 
 def format_bytes(bytes: float) -> str:
@@ -1320,6 +1456,116 @@ class cmd_dump_index(Command):
             logger.info("%s %s", o, idx[o])
 
 
+class cmd_interpret_trailers(Command):
+    """Add or parse structured information in commit messages."""
+
+    def run(self, args: Sequence[str]) -> None:
+        """Execute the interpret-trailers command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser()
+        parser.add_argument(
+            "file",
+            nargs="?",
+            help="File to read message from. If not specified, reads from stdin.",
+        )
+        parser.add_argument(
+            "--trailer",
+            action="append",
+            dest="trailers",
+            metavar="<token>[(=|:)<value>]",
+            help="Trailer to add. Can be specified multiple times.",
+        )
+        parser.add_argument(
+            "--trim-empty",
+            action="store_true",
+            help="Remove trailers with empty values",
+        )
+        parser.add_argument(
+            "--only-trailers",
+            action="store_true",
+            help="Output only the trailers, not the message body",
+        )
+        parser.add_argument(
+            "--only-input",
+            action="store_true",
+            help="Don't add new trailers, only parse existing ones",
+        )
+        parser.add_argument(
+            "--unfold", action="store_true", help="Join multiline values into one line"
+        )
+        parser.add_argument(
+            "--parse",
+            action="store_true",
+            help="Shorthand for --only-trailers --only-input --unfold",
+        )
+        parser.add_argument(
+            "--where",
+            choices=["end", "start", "after", "before"],
+            default="end",
+            help="Where to place new trailers",
+        )
+        parser.add_argument(
+            "--if-exists",
+            choices=[
+                "add",
+                "replace",
+                "addIfDifferent",
+                "addIfDifferentNeighbor",
+                "doNothing",
+            ],
+            default="addIfDifferentNeighbor",
+            help="Action if trailer already exists",
+        )
+        parser.add_argument(
+            "--if-missing",
+            choices=["add", "doNothing"],
+            default="add",
+            help="Action if trailer is missing",
+        )
+        parsed_args = parser.parse_args(args)
+
+        # Read message from file or stdin
+        if parsed_args.file:
+            with open(parsed_args.file, "rb") as f:
+                message = f.read()
+        else:
+            message = sys.stdin.buffer.read()
+
+        # Parse trailer arguments
+        trailer_list = []
+        if parsed_args.trailers:
+            for trailer_spec in parsed_args.trailers:
+                # Parse "key:value" or "key=value" or just "key"
+                if ":" in trailer_spec:
+                    key, value = trailer_spec.split(":", 1)
+                elif "=" in trailer_spec:
+                    key, value = trailer_spec.split("=", 1)
+                else:
+                    key = trailer_spec
+                    value = ""
+                trailer_list.append((key.strip(), value.strip()))
+
+        # Call interpret_trailers
+        result = porcelain.interpret_trailers(
+            message,
+            trailers=trailer_list if trailer_list else None,
+            trim_empty=parsed_args.trim_empty,
+            only_trailers=parsed_args.only_trailers,
+            only_input=parsed_args.only_input,
+            unfold=parsed_args.unfold,
+            parse=parsed_args.parse,
+            where=parsed_args.where,
+            if_exists=parsed_args.if_exists,
+            if_missing=parsed_args.if_missing,
+        )
+
+        # Output result
+        sys.stdout.buffer.write(result)
+
+
 class cmd_init(Command):
     """Create an empty Git repository or reinitialize an existing one."""
 
@@ -2917,6 +3163,8 @@ class cmd_prune(Command):
         # Parse expire grace period
         grace_period = DEFAULT_TEMPFILE_GRACE_PERIOD
         if parsed_args.expire:
+            from .approxidate import parse_relative_time
+
             try:
                 grace_period = parse_relative_time(parsed_args.expire)
             except ValueError:
@@ -3112,12 +3360,19 @@ class cmd_submodule_update(Command):
             action="store_true",
             help="Force update even if local changes exist",
         )
+        parser.add_argument(
+            "--recursive",
+            action="store_true",
+            help="Recursively update nested submodules",
+        )
         parser.add_argument(
             "paths", nargs="*", help="Specific submodule paths to update"
         )
         args = parser.parse_args(argv)
         paths = args.paths if args.paths else None
-        porcelain.submodule_update(".", paths=paths, init=args.init, force=args.force)
+        porcelain.submodule_update(
+            ".", paths=paths, init=args.init, force=args.force, recursive=args.recursive
+        )
 
 
 class cmd_submodule(SuperCommand):
@@ -3801,6 +4056,89 @@ class cmd_notes(SuperCommand):
     default_command = cmd_notes_list
 
 
+class cmd_replace_list(Command):
+    """List all replacement refs."""
+
+    def run(self, args: Sequence[str]) -> None:
+        """Execute the replace-list command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser()
+        parser.parse_args(args)
+
+        replacements = porcelain.replace_list(".")
+        for object_sha, replacement_sha in replacements:
+            sys.stdout.write(
+                f"{object_sha.decode('ascii')} -> {replacement_sha.decode('ascii')}\n"
+            )
+
+
+class cmd_replace_delete(Command):
+    """Delete a replacement ref."""
+
+    def run(self, args: Sequence[str]) -> Optional[int]:
+        """Execute the replace-delete command.
+
+        Args:
+            args: Command line arguments
+
+        Returns:
+            Exit code (0 for success, 1 for error)
+        """
+        parser = argparse.ArgumentParser()
+        parser.add_argument("object", help="Object whose replacement should be removed")
+        parsed_args = parser.parse_args(args)
+
+        try:
+            porcelain.replace_delete(".", parsed_args.object)
+            logger.info("Deleted replacement for %s", parsed_args.object)
+            return None
+        except KeyError as e:
+            logger.error(str(e))
+            return 1
+
+
+class cmd_replace(SuperCommand):
+    """Create, list, and delete replacement refs."""
+
+    subcommands: ClassVar[dict[str, type[Command]]] = {
+        "list": cmd_replace_list,
+        "delete": cmd_replace_delete,
+    }
+
+    default_command = cmd_replace_list
+
+    def run(self, args: Sequence[str]) -> Optional[int]:
+        """Execute the replace command.
+
+        Args:
+            args: Command line arguments
+
+        Returns:
+            Exit code (0 for success, 1 for error)
+        """
+        # Special case: if we have exactly 2 args and no subcommand, treat as create
+        if len(args) == 2 and args[0] not in self.subcommands:
+            # This is the create form: git replace <object> <replacement>
+            parser = argparse.ArgumentParser()
+            parser.add_argument("object", help="Object to replace")
+            parser.add_argument("replacement", help="Replacement object")
+            parsed_args = parser.parse_args(args)
+
+            porcelain.replace_create(".", parsed_args.object, parsed_args.replacement)
+            logger.info(
+                "Created replacement: %s -> %s",
+                parsed_args.object,
+                parsed_args.replacement,
+            )
+            return None
+
+        # Otherwise, delegate to supercommand handling
+        return super().run(args)
+
+
 class cmd_cherry(Command):
     """Find commits not merged upstream."""
 
@@ -4061,6 +4399,8 @@ class cmd_gc(Command):
         # Parse prune grace period
         grace_period = None
         if parsed_args.prune:
+            from .approxidate import parse_relative_time
+
             try:
                 grace_period = parse_relative_time(parsed_args.prune)
             except ValueError:
@@ -4120,6 +4460,122 @@ class cmd_gc(Command):
         return None
 
 
+class cmd_maintenance(Command):
+    """Run tasks to optimize Git repository data."""
+
+    def run(self, args: Sequence[str]) -> Optional[int]:
+        """Execute the maintenance command.
+
+        Args:
+            args: Command line arguments
+        """
+        parser = argparse.ArgumentParser(
+            description="Run tasks to optimize Git repository data"
+        )
+        subparsers = parser.add_subparsers(
+            dest="subcommand", help="Maintenance subcommand"
+        )
+
+        # maintenance run subcommand
+        run_parser = subparsers.add_parser("run", help="Run maintenance tasks")
+        run_parser.add_argument(
+            "--task",
+            action="append",
+            dest="tasks",
+            help="Run a specific task (can be specified multiple times)",
+        )
+        run_parser.add_argument(
+            "--auto",
+            action="store_true",
+            help="Only run tasks if needed",
+        )
+        run_parser.add_argument(
+            "--quiet",
+            "-q",
+            action="store_true",
+            help="Only report errors",
+        )
+
+        # maintenance start subcommand (placeholder)
+        subparsers.add_parser("start", help="Start background maintenance")
+
+        # maintenance stop subcommand (placeholder)
+        subparsers.add_parser("stop", help="Stop background maintenance")
+
+        # maintenance register subcommand
+        subparsers.add_parser("register", help="Register repository for maintenance")
+
+        # maintenance unregister subcommand
+        unregister_parser = subparsers.add_parser(
+            "unregister", help="Unregister repository from maintenance"
+        )
+        unregister_parser.add_argument(
+            "--force",
+            action="store_true",
+            help="Don't error if repository is not registered",
+        )
+
+        parsed_args = parser.parse_args(args)
+
+        if not parsed_args.subcommand:
+            parser.print_help()
+            return 1
+
+        if parsed_args.subcommand == "run":
+            # Progress callback
+            def progress(msg: str) -> None:
+                if not parsed_args.quiet:
+                    logger.info(msg)
+
+            try:
+                result = porcelain.maintenance_run(
+                    ".",
+                    tasks=parsed_args.tasks,
+                    auto=parsed_args.auto,
+                    progress=progress if not parsed_args.quiet else None,
+                )
+
+                # Report results
+                if not parsed_args.quiet:
+                    if result.tasks_succeeded:
+                        logger.info("\nSuccessfully completed tasks:")
+                        for task in result.tasks_succeeded:
+                            logger.info(f"  - {task}")
+
+                    if result.tasks_failed:
+                        logger.error("\nFailed tasks:")
+                        for task in result.tasks_failed:
+                            error_msg = result.errors.get(task, "Unknown error")
+                            logger.error(f"  - {task}: {error_msg}")
+                        return 1
+
+            except porcelain.Error as e:
+                logger.error("%s", e)
+                return 1
+        elif parsed_args.subcommand == "register":
+            porcelain.maintenance_register(".")
+            logger.info("Repository registered for background maintenance")
+        elif parsed_args.subcommand == "unregister":
+            try:
+                force = getattr(parsed_args, "force", False)
+                porcelain.maintenance_unregister(".", force=force)
+            except ValueError as e:
+                logger.error(str(e))
+                return 1
+            logger.info("Repository unregistered from background maintenance")
+        elif parsed_args.subcommand in ("start", "stop"):
+            # TODO: Implement background maintenance scheduling
+            logger.error(
+                f"The '{parsed_args.subcommand}' subcommand is not yet implemented"
+            )
+            return 1
+        else:
+            parser.print_help()
+            return 1
+
+        return None
+
+
 class cmd_grep(Command):
     """Search for patterns in tracked files."""
 
@@ -5581,11 +6037,13 @@ commands = {
     "grep": cmd_grep,
     "help": cmd_help,
     "init": cmd_init,
+    "interpret-trailers": cmd_interpret_trailers,
     "lfs": cmd_lfs,
     "log": cmd_log,
     "ls-files": cmd_ls_files,
     "ls-remote": cmd_ls_remote,
     "ls-tree": cmd_ls_tree,
+    "maintenance": cmd_maintenance,
     "mailsplit": cmd_mailsplit,
     "merge": cmd_merge,
     "merge-base": cmd_merge_base,
@@ -5601,6 +6059,7 @@ commands = {
     "reflog": cmd_reflog,
     "remote": cmd_remote,
     "repack": cmd_repack,
+    "replace": cmd_replace,
     "reset": cmd_reset,
     "revert": cmd_revert,
     "rev-list": cmd_rev_list,
@@ -5636,6 +6095,10 @@ def main(argv: Optional[Sequence[str]] = None) -> Optional[int]:
     Returns:
         Exit code or None
     """
+    # Wrap stdout and stderr to respect GIT_FLUSH environment variable
+    sys.stdout = AutoFlushTextIOWrapper.env(sys.stdout)
+    sys.stderr = AutoFlushTextIOWrapper.env(sys.stderr)
+
     if argv is None:
         argv = sys.argv[1:]
 

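A sketch of the flushing decision above with an explicit `env` mapping instead of `os.environ` (`_should_auto_flush` is internal; `io.StringIO` stands in for stdout and reports `isatty()` as False):

    import io

    from dulwich.cli import AutoFlushTextIOWrapper, _should_auto_flush

    buf = io.StringIO()

    assert _should_auto_flush(buf, {"GIT_FLUSH": "1"}) is True
    assert _should_auto_flush(buf, {"GIT_FLUSH": "0"}) is False
    # Unset: a non-terminal stream (pipe, file, CI log) gets eager flushing.
    assert _should_auto_flush(buf, {}) is True

    # env() wraps only when flushing is wanted; otherwise it returns the
    # original stream unchanged.
    wrapped = AutoFlushTextIOWrapper.env(buf, {"GIT_FLUSH": "1"})
    assert isinstance(wrapped, AutoFlushTextIOWrapper)
    wrapped.write("progress line\n")  # flushed immediately

    assert AutoFlushTextIOWrapper.env(buf, {"GIT_FLUSH": "0"}) is buf
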
+ 511 - 0
dulwich/maintenance.py

@@ -0,0 +1,511 @@
+"""Git maintenance implementation.
+
+This module provides the git maintenance functionality for optimizing
+and maintaining Git repositories.
+"""
+
+import logging
+import os
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import TYPE_CHECKING, Callable, Optional
+
+if TYPE_CHECKING:
+    from .repo import BaseRepo, Repo
+
+logger = logging.getLogger(__name__)
+
+
+class MaintenanceSchedule(str, Enum):
+    """Maintenance schedule types."""
+
+    HOURLY = "hourly"
+    DAILY = "daily"
+    WEEKLY = "weekly"
+
+
+@dataclass
+class MaintenanceResult:
+    """Result from running maintenance tasks."""
+
+    tasks_run: list[str] = field(default_factory=list)
+    tasks_succeeded: list[str] = field(default_factory=list)
+    tasks_failed: list[str] = field(default_factory=list)
+    errors: dict[str, str] = field(default_factory=dict)
+
+
+class MaintenanceTask(ABC):
+    """Base class for maintenance tasks."""
+
+    name: str = ""
+
+    def __init__(
+        self,
+        repo: "BaseRepo",
+        auto: bool = False,
+        progress: Optional[Callable[[str], None]] = None,
+    ) -> None:
+        """Initialize maintenance task.
+
+        Args:
+            repo: Repository object
+            auto: If True, only run if needed
+            progress: Optional progress callback
+        """
+        self.repo = repo
+        self.auto = auto
+        self.progress = progress
+
+    @abstractmethod
+    def run(self) -> bool:
+        """Run the maintenance task.
+
+        Returns:
+            True if successful, False otherwise
+        """
+
+    def is_enabled(self) -> bool:
+        """Check if task is enabled in repository configuration.
+
+        Returns:
+            True if task is enabled
+        """
+        if not self.name:
+            return False
+
+        config = self.repo.get_config()
+
+        try:
+            enabled = config.get_boolean(
+                (b"maintenance", self.name.encode()), b"enabled"
+            )
+            return enabled if enabled is not None else self.default_enabled()
+        except KeyError:
+            # Return default enabled state
+            return self.default_enabled()
+
+    def default_enabled(self) -> bool:
+        """Return default enabled state for this task.
+
+        Returns:
+            True if task should be enabled by default
+        """
+        return False
+
+
+class GcTask(MaintenanceTask):
+    """Garbage collection maintenance task."""
+
+    name = "gc"
+
+    def default_enabled(self) -> bool:
+        """GC is enabled by default."""
+        return True
+
+    def run(self) -> bool:
+        """Run garbage collection.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        from .gc import garbage_collect
+        from .repo import Repo
+
+        if self.progress:
+            self.progress("Running gc task")
+        assert isinstance(self.repo, Repo)
+        garbage_collect(self.repo, auto=self.auto, progress=self.progress)
+        return True
+
+
+class CommitGraphTask(MaintenanceTask):
+    """Commit-graph maintenance task."""
+
+    name = "commit-graph"
+
+    def default_enabled(self) -> bool:
+        """Commit-graph is enabled by default."""
+        return True
+
+    def run(self) -> bool:
+        """Update commit-graph file.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if self.progress:
+            self.progress("Running commit-graph task")
+
+        # Get all refs
+        refs = list(self.repo.refs.as_dict().values())
+        if refs:
+            self.repo.object_store.write_commit_graph(refs, reachable=True)
+        return True
+
+
+class LooseObjectsTask(MaintenanceTask):
+    """Loose-objects maintenance task.
+
+    This packs loose objects that are not already packed.
+    """
+
+    name = "loose-objects"
+
+    def run(self) -> bool:
+        """Pack loose objects.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        from .object_store import PackBasedObjectStore
+
+        if self.progress:
+            self.progress("Running loose-objects task")
+
+        # Pack loose objects using the object store's method
+        assert isinstance(self.repo.object_store, PackBasedObjectStore)
+        count = self.repo.object_store.pack_loose_objects(progress=self.progress)
+
+        if self.progress and count > 0:
+            self.progress(f"Packed {count} loose objects")
+
+        return True
+
+
+class IncrementalRepackTask(MaintenanceTask):
+    """Incremental-repack maintenance task.
+
+    This consolidates pack files incrementally.
+    """
+
+    name = "incremental-repack"
+
+    def run(self) -> bool:
+        """Consolidate pack files incrementally.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        from .object_store import PackBasedObjectStore
+
+        if self.progress:
+            self.progress("Running incremental-repack task")
+
+        # Get all packs sorted by size
+        assert isinstance(self.repo.object_store, PackBasedObjectStore)
+        packs = self.repo.object_store.packs
+        if len(packs) <= 1:
+            # Nothing to consolidate
+            if self.progress:
+                self.progress("No packs to consolidate")
+            return True
+
+        # In auto mode, only repack if there are many small packs
+        # This is a heuristic similar to git's auto gc behavior
+        if self.auto:
+            # Only repack if we have at least 50 packs
+            # (matching git's gc.autoPackLimit default of 50)
+            if len(packs) < 50:
+                if self.progress:
+                    self.progress(
+                        f"Skipping incremental repack: only {len(packs)} packs"
+                    )
+                return True
+
+        # Perform a full repack to consolidate all packs
+        if self.progress:
+            self.progress(f"Consolidating {len(packs)} pack files")
+
+        count = self.repo.object_store.repack(progress=self.progress)
+
+        if self.progress:
+            self.progress(f"Repacked {count} objects")
+
+        return True
+
+
+class PackRefsTask(MaintenanceTask):
+    """Pack-refs maintenance task."""
+
+    name = "pack-refs"
+
+    def run(self) -> bool:
+        """Pack loose references.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if self.progress:
+            self.progress("Running pack-refs task")
+
+        self.repo.refs.pack_refs(all=True)
+        return True
+
+
+class PrefetchTask(MaintenanceTask):
+    """Prefetch maintenance task.
+
+    This prefetches remote refs to keep the object database up-to-date.
+    """
+
+    name = "prefetch"
+
+    def run(self) -> bool:
+        """Prefetch remote refs.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        from .porcelain import fetch
+        from .repo import Repo
+
+        if self.progress:
+            self.progress("Running prefetch task")
+
+        config = self.repo.get_config()
+
+        # Get all configured remotes
+        remotes = set()
+        for section in config.sections():
+            if len(section) == 2 and section[0] == b"remote":
+                remotes.add(section[1].decode())
+
+        if not remotes:
+            if self.progress:
+                self.progress("No remotes configured, skipping prefetch")
+            return True
+
+        # Fetch from each remote
+        success = True
+        for remote_name in sorted(remotes):
+            try:
+                if self.progress:
+                    self.progress(f"Fetching from {remote_name}")
+
+                # Fetch quietly without updating working tree
+                # The fetch operation will update refs under refs/remotes/
+                assert isinstance(self.repo, Repo)
+                fetch(
+                    self.repo,
+                    remote_location=remote_name,
+                    quiet=True,
+                )
+            except Exception as e:
+                # Log error and mark as failed
+                logger.error(f"Failed to fetch from {remote_name}: {e}")
+                success = False
+
+        return success
+
+
+# Registry of available maintenance tasks
+MAINTENANCE_TASKS: dict[str, type[MaintenanceTask]] = {
+    "gc": GcTask,
+    "commit-graph": CommitGraphTask,
+    "loose-objects": LooseObjectsTask,
+    "incremental-repack": IncrementalRepackTask,
+    "pack-refs": PackRefsTask,
+    "prefetch": PrefetchTask,
+}
+
+
+def get_enabled_tasks(
+    repo: "BaseRepo",
+    task_filter: Optional[list[str]] = None,
+) -> list[str]:
+    """Get list of enabled maintenance tasks.
+
+    Args:
+        repo: Repository object
+        task_filter: Optional list of specific task names to run
+
+    Returns:
+        List of enabled task names
+    """
+    if task_filter:
+        # Validate requested tasks exist
+        return [name for name in task_filter if name in MAINTENANCE_TASKS]
+
+    enabled_tasks = []
+
+    # Check each task to see if it's enabled
+    for task_name, task_class in MAINTENANCE_TASKS.items():
+        # Create temporary task instance to check if enabled
+        task = task_class(repo, auto=False, progress=None)
+        if task.is_enabled():
+            enabled_tasks.append(task_name)
+
+    return enabled_tasks
+
+
+def run_maintenance(
+    repo: "BaseRepo",
+    tasks: Optional[list[str]] = None,
+    auto: bool = False,
+    progress: Optional[Callable[[str], None]] = None,
+) -> MaintenanceResult:
+    """Run maintenance tasks on a repository.
+
+    Args:
+        repo: Repository object
+        tasks: Optional list of specific task names to run
+        auto: If True, only run tasks if needed
+        progress: Optional progress callback
+
+    Returns:
+        MaintenanceResult with task execution results
+    """
+    result = MaintenanceResult()
+
+    enabled_tasks = get_enabled_tasks(repo, tasks)
+
+    for task_name in enabled_tasks:
+        result.tasks_run.append(task_name)
+
+        task_class = MAINTENANCE_TASKS.get(task_name)
+        if not task_class:
+            result.tasks_failed.append(task_name)
+            result.errors[task_name] = "Unknown task"
+            continue
+
+        try:
+            task = task_class(repo, auto=auto, progress=progress)
+            success = task.run()
+
+            if success:
+                result.tasks_succeeded.append(task_name)
+            else:
+                result.tasks_failed.append(task_name)
+        except Exception as e:
+            result.tasks_failed.append(task_name)
+            result.errors[task_name] = str(e)
+            logger.error(f"Task {task_name} failed: {e}")
+
+    return result
+
+
+def register_repository(repo: "Repo") -> None:
+    """Register a repository for background maintenance.
+
+    This adds the repository to the global maintenance.repo config and sets
+    up recommended configuration for scheduled maintenance.
+
+    Args:
+        repo: Repository to register
+    """
+    from .config import ConfigFile
+
+    repo_path = os.path.abspath(repo.path)
+
+    # Get global config path
+    global_config_path = os.path.expanduser("~/.gitconfig")
+    try:
+        global_config = ConfigFile.from_path(global_config_path)
+    except FileNotFoundError:
+        # Create new config file if it doesn't exist
+        global_config = ConfigFile()
+        global_config.path = global_config_path
+
+    # Add repository to maintenance.repo list
+    # Check if already registered
+    repo_path_bytes = repo_path.encode()
+    try:
+        existing_repos = list(global_config.get_multivar((b"maintenance",), b"repo"))
+    except KeyError:
+        existing_repos = []
+
+    if repo_path_bytes in existing_repos:
+        # Already registered
+        return
+
+    # Add to global config
+    global_config.set((b"maintenance",), b"repo", repo_path_bytes)
+
+    # Set up incremental strategy in global config if not already set
+    try:
+        global_config.get((b"maintenance",), b"strategy")
+    except KeyError:
+        global_config.set((b"maintenance",), b"strategy", b"incremental")
+
+    # Configure task schedules for incremental strategy
+    schedule_config = {
+        b"commit-graph": b"hourly",
+        b"prefetch": b"hourly",
+        b"loose-objects": b"daily",
+        b"incremental-repack": b"daily",
+    }
+
+    for task, schedule in schedule_config.items():
+        try:
+            global_config.get((b"maintenance", task), b"schedule")
+        except KeyError:
+            global_config.set((b"maintenance", task), b"schedule", schedule)
+
+    global_config.write_to_path()
+
+    # Disable foreground auto maintenance in the repository
+    repo_config = repo.get_config()
+    repo_config.set((b"maintenance",), b"auto", False)
+    repo_config.write_to_path()
+
+
+def unregister_repository(repo: "Repo", force: bool = False) -> None:
+    """Unregister a repository from background maintenance.
+
+    This removes the repository from the global maintenance.repo config.
+
+    Args:
+        repo: Repository to unregister
+        force: If True, don't error if repository is not registered
+
+    Raises:
+        ValueError: If repository is not registered and force is False
+    """
+    from .config import ConfigFile
+
+    repo_path = os.path.abspath(repo.path)
+
+    # Get global config
+    global_config_path = os.path.expanduser("~/.gitconfig")
+    try:
+        global_config = ConfigFile.from_path(global_config_path)
+    except FileNotFoundError:
+        if not force:
+            raise ValueError(
+                f"Repository {repo_path} is not registered for maintenance"
+            )
+        return
+
+    # Check if repository is registered
+    repo_path_bytes = repo_path.encode()
+    try:
+        existing_repos = list(global_config.get_multivar((b"maintenance",), b"repo"))
+    except KeyError:
+        if not force:
+            raise ValueError(
+                f"Repository {repo_path} is not registered for maintenance"
+            )
+        return
+
+    if repo_path_bytes not in existing_repos:
+        if not force:
+            raise ValueError(
+                f"Repository {repo_path} is not registered for maintenance"
+            )
+        return
+
+    # Remove from list
+    existing_repos.remove(repo_path_bytes)
+
+    # Preserve other [maintenance] keys (e.g. strategy) before recreating
+    # the section, so unregistering does not drop them
+    try:
+        strategy = global_config.get((b"maintenance",), b"strategy")
+    except KeyError:
+        strategy = None
+
+    # Delete the maintenance section and recreate it with remaining repos
+    try:
+        del global_config[(b"maintenance",)]
+    except KeyError:
+        pass
+
+    if strategy is not None:
+        global_config.set((b"maintenance",), b"strategy", strategy)
+
+    # Re-add remaining repos
+    for remaining_repo in existing_repos:
+        global_config.set((b"maintenance",), b"repo", remaining_repo)
+
+    global_config.write_to_path()

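A sketch of driving the new module directly; `porcelain.maintenance_run` (used by `cmd_maintenance` above) is a thin wrapper over this. Task names come from the `MAINTENANCE_TASKS` registry:

    from dulwich.maintenance import MAINTENANCE_TASKS, run_maintenance
    from dulwich.repo import Repo

    repo = Repo(".")
    print("available tasks:", sorted(MAINTENANCE_TASKS))

    # Explicit task list: unknown names are filtered out by
    # get_enabled_tasks() rather than raising.
    result = run_maintenance(repo, tasks=["gc", "pack-refs"], progress=print)
    print("ran:", result.tasks_run)
    for task in result.tasks_failed:
        print("failed:", task, result.errors.get(task, "Unknown error"))

    # With tasks=None, only enabled tasks run: those with
    # maintenance.<task>.enabled set, plus gc and commit-graph by default.
    result = run_maintenance(repo, auto=True)
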
+ 84 - 9
dulwich/objectspec.py

@@ -87,31 +87,106 @@ def parse_object(repo: "Repo", objectish: Union[bytes, str]) -> "ShaFile":
     """
     objectish = to_bytes(objectish)
 
-    # Handle :<path> - lookup path in tree
+    # Handle :<path> - lookup path in tree or index
     if b":" in objectish:
         rev, path = objectish.split(b":", 1)
         if not rev:
-            raise NotImplementedError("Index path lookup (:path) not yet supported")
+            # Index path lookup: :path or :N:path where N is stage 0-3
+            stage = 0
+            if path and path[0:1].isdigit() and len(path) > 2 and path[1:2] == b":":
+                stage = int(path[0:1])
+                if stage > 3:
+                    raise ValueError(f"Invalid stage number: {stage}. Must be 0-3.")
+                path = path[2:]
+
+            # Open the index and look up the path
+            try:
+                index = repo.open_index()
+            except AttributeError:
+                raise NotImplementedError(
+                    "Index path lookup requires a non-bare repository"
+                )
+
+            if path not in index:
+                raise KeyError(f"Path {path!r} not found in index")
+
+            entry = index[path]
+            # Handle ConflictedIndexEntry (merge stages)
+            from .index import ConflictedIndexEntry
+
+            if isinstance(entry, ConflictedIndexEntry):
+                if stage == 0:
+                    raise ValueError(
+                        f"Path {path!r} has unresolved conflicts. "
+                        "Use :1:path, :2:path, or :3:path to access specific stages."
+                    )
+                elif stage == 1:
+                    if entry.ancestor is None:
+                        raise KeyError(f"Path {path!r} has no ancestor (stage 1)")
+                    return repo[entry.ancestor.sha]
+                elif stage == 2:
+                    if entry.this is None:
+                        raise KeyError(f"Path {path!r} has no 'this' version (stage 2)")
+                    return repo[entry.this.sha]
+                elif stage == 3:
+                    if entry.other is None:
+                        raise KeyError(
+                            f"Path {path!r} has no 'other' version (stage 3)"
+                        )
+                    return repo[entry.other.sha]
+            else:
+                # Regular IndexEntry - only stage 0 is valid
+                if stage != 0:
+                    raise ValueError(
+                        f"Path {path!r} has no conflicts. Only :0:{path!r} or :{path!r} is valid."
+                    )
+                return repo[entry.sha]
+
+        # Regular tree lookup: rev:path
         tree = parse_tree(repo, rev)
         _mode, sha = tree.lookup_path(repo.object_store.__getitem__, path)
         return repo[sha]
 
-    # Handle @{N} - reflog lookup
+    # Handle @{N} or @{time} - reflog lookup
     if b"@{" in objectish:
         base, rest = objectish.split(b"@{", 1)
         if not rest.endswith(b"}"):
             raise ValueError("Invalid @{} syntax")
         spec = rest[:-1]
-        if not spec.isdigit():
-            raise NotImplementedError(f"Only @{{N}} supported, not @{{{spec!r}}}")
 
         ref = base if base else b"HEAD"
         entries = list(repo.read_reflog(ref))
         entries.reverse()  # Git uses reverse chronological order
-        index = int(spec)
-        if index >= len(entries):
-            raise ValueError(f"Reflog for {ref!r} has only {len(entries)} entries")
-        return repo[entries[index].new_sha]
+
+        if spec.isdigit():
+            # Check if it's a small index or a timestamp
+            # Git treats values < number of entries as indices, larger values as timestamps
+            num = int(spec)
+            if num < len(entries):
+                # Treat as numeric index: @{N}
+                return repo[entries[num].new_sha]
+            # Otherwise fall through to treat as timestamp
+
+        # Time specification: @{time} (includes large numeric values)
+        from .approxidate import parse_approxidate
+
+        target_time = parse_approxidate(spec)
+
+        # Find the most recent entry at or before the target time
+        for reflog_entry in entries:
+            if reflog_entry.timestamp <= target_time:
+                return repo[reflog_entry.new_sha]
+
+        # If no entry is old enough, raise an error
+        if entries:
+            oldest_time = entries[-1].timestamp
+            raise ValueError(
+                f"Reflog for {ref!r} has no entries at or before {spec!r}. "
+                f"Oldest entry is at timestamp {oldest_time}"
+            )
+        else:
+            raise ValueError(f"Reflog for {ref!r} is empty")
 
     # Handle ^{} - tag dereferencing
     if objectish.endswith(b"^{}"):

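A sketch of the object specs `parse_object` now resolves (paths and ref names are hypothetical):

    from dulwich.objectspec import parse_object
    from dulwich.repo import Repo

    repo = Repo(".")

    # Index lookups: stage 0 by default, stages 1-3 for merge conflicts.
    blob = parse_object(repo, b":README.md")           # same as b":0:README.md"
    base = parse_object(repo, b":1:conflicted.txt")    # common ancestor
    ours = parse_object(repo, b":2:conflicted.txt")    # "this" side
    theirs = parse_object(repo, b":3:conflicted.txt")  # "other" side

    # Reflog lookups: small numbers index entries; everything else goes
    # through approxidate as a time specification.
    prev = parse_object(repo, b"HEAD@{1}")
    old = parse_object(repo, b"HEAD@{2.weeks.ago}")
    yday = parse_object(repo, b"master@{yesterday}")
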
+ 259 - 4
dulwich/porcelain.py

@@ -41,6 +41,7 @@ Currently implemented:
  * for_each_ref
  * grep
  * init
+ * interpret_trailers
  * ls_files
  * ls_remote
  * ls_tree
@@ -54,6 +55,7 @@ Currently implemented:
  * rm
  * remote{_add}
  * receive_pack
+ * replace{_create,_delete,_list}
  * reset
  * revert
  * sparse_checkout
@@ -120,6 +122,7 @@ else:
 if TYPE_CHECKING:
     from .filter_branch import CommitData
     from .gc import GCStats
+    from .maintenance import MaintenanceResult
 
 from . import replace_me
 from .archive import tar_stream
@@ -190,12 +193,14 @@ from .refs import (
     LOCAL_BRANCH_PREFIX,
     LOCAL_NOTES_PREFIX,
     LOCAL_REMOTE_PREFIX,
+    LOCAL_REPLACE_PREFIX,
     LOCAL_TAG_PREFIX,
     Ref,
     SymrefLoop,
     _import_remote_refs,
     filter_ref_prefix,
     local_branch_name,
+    local_replace_name,
     local_tag_name,
     shorten_ref_name,
 )
@@ -212,6 +217,7 @@ from .sparse_patterns import (
     apply_included_paths,
     determine_included_paths,
 )
+from .trailers import add_trailer_to_message, format_trailers, parse_trailers
 
 # Module level tuple definition for status output
 GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
@@ -816,6 +822,7 @@ def commit(
                 encoding=encoding,
                 no_verify=no_verify,
                 sign=sign,
+                signoff=signoff,
                 merge_heads=merge_heads,
                 ref=None,
             )
@@ -832,6 +839,7 @@ def commit(
                 encoding=encoding,
                 no_verify=no_verify,
                 sign=sign,
+                signoff=signoff,
                 merge_heads=merge_heads,
             )
 
@@ -860,6 +868,108 @@ def commit_tree(
         )
 
 
+def interpret_trailers(
+    message: Union[str, bytes],
+    *,
+    trailers: Optional[list[tuple[str, str]]] = None,
+    trim_empty: bool = False,
+    only_trailers: bool = False,
+    only_input: bool = False,
+    unfold: bool = False,
+    parse: bool = False,
+    where: str = "end",
+    if_exists: str = "addIfDifferentNeighbor",
+    if_missing: str = "add",
+    separators: str = ":",
+) -> bytes:
+    r"""Parse and manipulate trailers in a commit message.
+
+    This function implements the functionality of `git interpret-trailers`,
+    allowing parsing and manipulation of structured metadata (trailers) in
+    commit messages.
+
+    Trailers are key-value pairs at the end of commit messages, formatted like:
+        Signed-off-by: Alice <alice@example.com>
+        Reviewed-by: Bob <bob@example.com>
+
+    Args:
+        message: The commit message (string or bytes)
+        trailers: List of (key, value) tuples to add as new trailers
+        trim_empty: Remove trailers with empty values
+        only_trailers: Output only the trailers, not the message body
+        only_input: Don't add new trailers, only parse existing ones
+        unfold: Join multiline trailer values into a single line
+        parse: Shorthand for --only-trailers --only-input --unfold
+        where: Where to add new trailers ('end', 'start', 'after', 'before')
+        if_exists: How to handle duplicate keys
+            - 'add': Always add
+            - 'replace': Replace all existing
+            - 'addIfDifferent': Add only if value differs from all existing
+            - 'addIfDifferentNeighbor': Add only if value differs from neighbors
+            - 'doNothing': Don't add if key exists
+        if_missing: What to do if key doesn't exist ('add' or 'doNothing')
+        separators: Valid separator characters (default ':')
+
+    Returns:
+        The processed message as bytes
+
+    Examples:
+        >>> msg = b"Subject\\n\\nBody text\\n"
+        >>> interpret_trailers(msg, trailers=[("Signed-off-by", "Alice <alice@example.com>")])
+        b'Subject\\n\\nBody text\\n\\nSigned-off-by: Alice <alice@example.com>\\n'
+
+        >>> msg = b"Subject\\n\\nSigned-off-by: Alice\\n"
+        >>> interpret_trailers(msg, only_trailers=True)
+        b'Signed-off-by: Alice\\n'
+    """
+    # Handle --parse shorthand
+    if parse:
+        only_trailers = True
+        only_input = True
+        unfold = True
+
+    # Convert message to bytes
+    if isinstance(message, str):
+        message_bytes = message.encode("utf-8")
+    else:
+        message_bytes = message
+
+    # Parse existing trailers
+    _message_body, parsed_trailers = parse_trailers(message_bytes, separators)
+
+    # Apply unfold if requested
+    if unfold:
+        for trailer in parsed_trailers:
+            # Replace newlines and multiple spaces with single space
+            trailer.value = " ".join(trailer.value.split())
+
+    # Apply trim_empty if requested
+    if trim_empty:
+        parsed_trailers = [t for t in parsed_trailers if t.value.strip()]
+
+    # Add new trailers if requested and not only_input
+    if not only_input and trailers:
+        for key, value in trailers:
+            message_bytes = add_trailer_to_message(
+                message_bytes,
+                key,
+                value,
+                separators[0],  # Use first separator as default
+                where=where,
+                if_exists=if_exists,
+                if_missing=if_missing,
+            )
+        # Re-parse to get updated trailers for output
+        if only_trailers:
+            _message_body, parsed_trailers = parse_trailers(message_bytes, separators)
+
+    # Return based on only_trailers flag
+    if only_trailers:
+        return format_trailers(parsed_trailers)
+    else:
+        return message_bytes
+
+
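A quick usage sketch for the ``parse`` shorthand (the message bytes are invented for illustration):

    from dulwich import porcelain

    msg = b"Subject\n\nSigned-off-by: Alice <alice@example.com>\n"
    # parse=True implies only_trailers=True, only_input=True and unfold=True,
    # so existing trailers are extracted without adding any new ones.
    print(porcelain.interpret_trailers(msg, parse=True))
    # b'Signed-off-by: Alice <alice@example.com>\n'
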
 def init(
     path: Union[str, os.PathLike[str]] = ".",
     *,
@@ -987,7 +1097,7 @@ def clone(
     if recurse_submodules and not bare:
         try:
             submodule_init(repo)
-            submodule_update(repo, init=True)
+            submodule_update(repo, init=True, recursive=True)
         except FileNotFoundError as e:
             # .gitmodules file doesn't exist - no submodules to process
             logging.debug("No .gitmodules file found: %s", e)
@@ -1962,6 +2072,7 @@ def submodule_update(
     paths: Optional[Sequence[Union[str, bytes, os.PathLike[str]]]] = None,
     init: bool = False,
     force: bool = False,
+    recursive: bool = False,
     errstream: Optional[BinaryIO] = None,
 ) -> None:
     """Update submodules.
@@ -1971,6 +2082,7 @@ def submodule_update(
       paths: Optional list of specific submodule paths to update. If None, updates all.
       init: If True, initialize submodules first
       force: Force update even if local changes exist
+      recursive: If True, recursively update nested submodules
       errstream: Error stream for error messages
     """
     from .submodule import iter_cached_submodules
@@ -2028,7 +2140,7 @@ def submodule_update(
 
             # Get or create the submodule repository paths
             submodule_path = os.path.join(r.path, path_str)
-            submodule_git_dir = os.path.join(r.path, ".git", "modules", path_str)
+            submodule_git_dir = os.path.join(r.controldir(), "modules", path_str)
 
             # Clone or fetch the submodule
             if not os.path.exists(submodule_git_dir):
@@ -2044,8 +2156,7 @@ def submodule_update(
                     os.makedirs(submodule_path)
 
                 # Create .git file in the submodule directory
-                depth = path_str.count("/") + 1
-                relative_git_dir = "../" * depth + ".git/modules/" + path_str
+                relative_git_dir = os.path.relpath(submodule_git_dir, submodule_path)
                 git_file_path = os.path.join(submodule_path, ".git")
                 with open(git_file_path, "w") as f:
                     f.write(f"gitdir: {relative_git_dir}\n")
@@ -2089,6 +2200,19 @@ def submodule_update(
                     # Reset the working directory
                     reset(sub_repo, "hard", target_sha)
 
+            # Recursively update nested submodules if requested
+            if recursive:
+                submodule_gitmodules = os.path.join(submodule_path, ".gitmodules")
+                if os.path.exists(submodule_gitmodules):
+                    submodule_update(
+                        submodule_path,
+                        paths=None,
+                        init=True,  # Always initialize nested submodules
+                        force=force,
+                        recursive=True,
+                        errstream=errstream,
+                    )
+
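A minimal sketch of the new flag at the porcelain level (the repository path is hypothetical):

    from dulwich import porcelain

    # init=True initializes any uninitialized submodules first; recursive=True
    # then descends into every checked-out submodule that has its own .gitmodules.
    porcelain.submodule_update("/path/to/repo", init=True, recursive=True)
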
 
 def tag_create(
     repo: RepoPath,
@@ -2411,6 +2535,77 @@ def notes_list(repo: RepoPath, ref: bytes = b"commits") -> list[tuple[bytes, byt
         return r.notes.list_notes(notes_ref, config=config)
 
 
+def replace_list(repo: RepoPath) -> list[tuple[bytes, bytes]]:
+    """List all replacement refs.
+
+    Args:
+      repo: Path to repository
+
+    Returns:
+      List of tuples of (object_sha, replacement_sha), where object_sha is the
+      object being replaced and replacement_sha is the object that replaces it
+    """
+    with open_repo_closing(repo) as r:
+        replacements = []
+        for ref in r.refs.keys():
+            if ref.startswith(LOCAL_REPLACE_PREFIX):
+                object_sha = ref[len(LOCAL_REPLACE_PREFIX) :]
+                replacement_sha = r.refs[ref]
+                replacements.append((object_sha, replacement_sha))
+        return replacements
+
+
+def replace_delete(repo: RepoPath, object_sha: Union[str, bytes]) -> None:
+    """Delete a replacement ref.
+
+    Args:
+      repo: Path to repository
+      object_sha: SHA of the object whose replacement should be removed
+    """
+    with open_repo_closing(repo) as r:
+        # Convert to bytes if string
+        if isinstance(object_sha, str):
+            object_sha_hex = object_sha.encode("ascii")
+        else:
+            object_sha_hex = object_sha
+
+        replace_ref = _make_replace_ref(object_sha_hex)
+        if replace_ref not in r.refs:
+            raise KeyError(
+                f"No replacement ref found for {object_sha_hex.decode('ascii')}"
+            )
+        del r.refs[replace_ref]
+
+
+def replace_create(
+    repo: RepoPath,
+    object_sha: Union[str, bytes],
+    replacement_sha: Union[str, bytes],
+) -> None:
+    """Create a replacement ref to replace one object with another.
+
+    Args:
+      repo: Path to repository
+      object_sha: SHA of the object to replace
+      replacement_sha: SHA of the replacement object
+    """
+    with open_repo_closing(repo) as r:
+        # Convert to bytes if string
+        if isinstance(object_sha, str):
+            object_sha_hex = object_sha.encode("ascii")
+        else:
+            object_sha_hex = object_sha
+
+        if isinstance(replacement_sha, str):
+            replacement_sha_hex = replacement_sha.encode("ascii")
+        else:
+            replacement_sha_hex = replacement_sha
+
+        # Create the replacement ref
+        replace_ref = _make_replace_ref(object_sha_hex)
+        r.refs[replace_ref] = replacement_sha_hex
+
+
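A short usage sketch for the three replace-ref helpers above (the SHAs and path are placeholders):

    from dulwich import porcelain

    old_sha, new_sha = "aa" * 20, "bb" * 20  # hypothetical object ids
    porcelain.replace_create("/path/to/repo", old_sha, new_sha)
    for object_sha, replacement_sha in porcelain.replace_list("/path/to/repo"):
        print(object_sha.decode("ascii"), "->", replacement_sha.decode("ascii"))
    porcelain.replace_delete("/path/to/repo", old_sha)
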
 def reset(
     repo: Union[str, os.PathLike[str], Repo],
     mode: str,
@@ -3490,6 +3685,12 @@ def _make_tag_ref(name: Union[str, bytes]) -> Ref:
     return local_tag_name(name)
 
 
+def _make_replace_ref(name: Union[str, bytes]) -> Ref:
+    if isinstance(name, str):
+        name = name.encode(DEFAULT_ENCODING)
+    return local_replace_name(name)
+
+
 def branch_delete(
     repo: RepoPath, name: Union[str, bytes, Sequence[Union[str, bytes]]]
 ) -> None:
@@ -6372,6 +6573,60 @@ def prune(
             r.object_store.prune(grace_period=grace_period)
 
 
+def maintenance_run(
+    repo: RepoPath,
+    tasks: Optional[list[str]] = None,
+    auto: bool = False,
+    progress: Optional[Callable[[str], None]] = None,
+) -> "MaintenanceResult":
+    """Run maintenance tasks on a repository.
+
+    Args:
+      repo: Path to the repository or a Repo object
+      tasks: Optional list of specific task names to run
+             (e.g., ['gc', 'commit-graph', 'pack-refs'])
+      auto: If True, only run tasks if needed
+      progress: Optional progress callback
+
+    Returns:
+      MaintenanceResult object with task execution results
+    """
+    from .maintenance import run_maintenance
+
+    with open_repo_closing(repo) as r:
+        return run_maintenance(r, tasks=tasks, auto=auto, progress=progress)
+
+
+def maintenance_register(repo: RepoPath) -> None:
+    """Register a repository for background maintenance.
+
+    This adds the repository to the global maintenance.repo config and sets
+    up recommended configuration for scheduled maintenance.
+
+    Args:
+      repo: Path to the repository or repository object
+    """
+    from .maintenance import register_repository
+
+    with open_repo_closing(repo) as r:
+        register_repository(r)
+
+
+def maintenance_unregister(repo: RepoPath, force: bool = False) -> None:
+    """Unregister a repository from background maintenance.
+
+    This removes the repository from the global maintenance.repo config.
+
+    Args:
+      repo: Path to the repository or repository object
+      force: If True, don't error if repository is not registered
+    """
+    from .maintenance import unregister_repository
+
+    with open_repo_closing(repo) as r:
+        unregister_repository(r, force=force)
+
+
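A brief sketch of how the three maintenance entry points fit together (the path is hypothetical; task names as exercised in the tests below):

    from dulwich import porcelain

    # Run two specific tasks; with auto=True each task would first check
    # whether it actually has work to do.
    result = porcelain.maintenance_run("/path/to/repo", tasks=["pack-refs", "gc"])
    # result is a MaintenanceResult carrying per-task execution results.

    # Opt the repository in to, and back out of, background maintenance.
    porcelain.maintenance_register("/path/to/repo")
    porcelain.maintenance_unregister("/path/to/repo", force=True)
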
 def count_objects(repo: RepoPath = ".", verbose: bool = False) -> CountObjectsResult:
     """Count unpacked objects and their disk usage.
 

+ 188 - 0
dulwich/refs.py

@@ -54,6 +54,7 @@ LOCAL_BRANCH_PREFIX = b"refs/heads/"
 LOCAL_TAG_PREFIX = b"refs/tags/"
 LOCAL_REMOTE_PREFIX = b"refs/remotes/"
 LOCAL_NOTES_PREFIX = b"refs/notes/"
+LOCAL_REPLACE_PREFIX = b"refs/replace/"
 BAD_REF_CHARS = set(b"\177 ~^:?*[")
 PEELED_TAG_SUFFIX = b"^{}"
 
@@ -1508,6 +1509,26 @@ def local_tag_name(name: bytes) -> bytes:
     return LOCAL_TAG_PREFIX + name
 
 
+def local_replace_name(name: bytes) -> bytes:
+    """Build a full replace ref from a short name.
+
+    Args:
+      name: Short replace name (object SHA) or full ref
+
+    Returns:
+      Full replace ref name (e.g., b"refs/replace/<sha>")
+
+    Examples:
+      >>> local_replace_name(b"abc123")
+      b'refs/replace/abc123'
+      >>> local_replace_name(b"refs/replace/abc123")
+      b'refs/replace/abc123'
+    """
+    if name.startswith(LOCAL_REPLACE_PREFIX):
+        return name
+    return LOCAL_REPLACE_PREFIX + name
+
+
 def extract_branch_name(ref: bytes) -> bytes:
     """Extract branch name from a full branch ref.
 
@@ -1872,6 +1893,173 @@ class locked_ref:
         self._deleted = True
 
 
+class NamespacedRefsContainer(RefsContainer):
+    """Wrapper that adds namespace prefix to all ref operations.
+
+    This implements Git's GIT_NAMESPACE feature, which stores refs under
+    refs/namespaces/<namespace>/ and filters operations to only show refs
+    within that namespace.
+
+    Example:
+        With namespace "foo", a ref "refs/heads/master" is stored as
+        "refs/namespaces/foo/refs/heads/master" in the underlying container.
+    """
+
+    def __init__(self, refs: RefsContainer, namespace: bytes) -> None:
+        """Initialize NamespacedRefsContainer.
+
+        Args:
+          refs: The underlying refs container to wrap
+          namespace: The namespace prefix (e.g., b"foo" or b"foo/bar")
+        """
+        super().__init__(logger=refs._logger)
+        self._refs = refs
+        # Build namespace prefix: refs/namespaces/<namespace>/
+        # Support nested namespaces: foo/bar -> refs/namespaces/foo/refs/namespaces/bar/
+        namespace_parts = namespace.split(b"/")
+        self._namespace_prefix = b""
+        for part in namespace_parts:
+            self._namespace_prefix += b"refs/namespaces/" + part + b"/"
+
+    def _apply_namespace(self, name: bytes) -> bytes:
+        """Apply namespace prefix to a ref name."""
+        # HEAD and other special refs are not namespaced
+        if name == HEADREF or not name.startswith(b"refs/"):
+            return name
+        return self._namespace_prefix + name
+
+    def _strip_namespace(self, name: bytes) -> Optional[bytes]:
+        """Remove namespace prefix from a ref name.
+
+        Returns None if the ref is not in our namespace.
+        """
+        # HEAD and other special refs are not namespaced
+        if name == HEADREF or not name.startswith(b"refs/"):
+            return name
+        if name.startswith(self._namespace_prefix):
+            return name[len(self._namespace_prefix) :]
+        return None
+
+    def allkeys(self) -> set[bytes]:
+        """Return all reference keys in this namespace."""
+        keys = set()
+        for key in self._refs.allkeys():
+            stripped = self._strip_namespace(key)
+            if stripped is not None:
+                keys.add(stripped)
+        return keys
+
+    def read_loose_ref(self, name: bytes) -> Optional[bytes]:
+        """Read a loose reference."""
+        return self._refs.read_loose_ref(self._apply_namespace(name))
+
+    def get_packed_refs(self) -> dict[Ref, ObjectID]:
+        """Get packed refs within this namespace."""
+        packed = {}
+        for name, value in self._refs.get_packed_refs().items():
+            stripped = self._strip_namespace(name)
+            if stripped is not None:
+                packed[stripped] = value
+        return packed
+
+    def add_packed_refs(self, new_refs: Mapping[Ref, Optional[ObjectID]]) -> None:
+        """Add packed refs with namespace prefix."""
+        namespaced_refs = {
+            self._apply_namespace(name): value for name, value in new_refs.items()
+        }
+        self._refs.add_packed_refs(namespaced_refs)
+
+    def get_peeled(self, name: bytes) -> Optional[ObjectID]:
+        """Return the cached peeled value of a ref."""
+        return self._refs.get_peeled(self._apply_namespace(name))
+
+    def set_symbolic_ref(
+        self,
+        name: bytes,
+        other: bytes,
+        committer: Optional[bytes] = None,
+        timestamp: Optional[int] = None,
+        timezone: Optional[int] = None,
+        message: Optional[bytes] = None,
+    ) -> None:
+        """Make a ref point at another ref."""
+        self._refs.set_symbolic_ref(
+            self._apply_namespace(name),
+            self._apply_namespace(other),
+            committer=committer,
+            timestamp=timestamp,
+            timezone=timezone,
+            message=message,
+        )
+
+    def set_if_equals(
+        self,
+        name: bytes,
+        old_ref: Optional[bytes],
+        new_ref: bytes,
+        committer: Optional[bytes] = None,
+        timestamp: Optional[int] = None,
+        timezone: Optional[int] = None,
+        message: Optional[bytes] = None,
+    ) -> bool:
+        """Set a refname to new_ref only if it currently equals old_ref."""
+        return self._refs.set_if_equals(
+            self._apply_namespace(name),
+            old_ref,
+            new_ref,
+            committer=committer,
+            timestamp=timestamp,
+            timezone=timezone,
+            message=message,
+        )
+
+    def add_if_new(
+        self,
+        name: bytes,
+        ref: bytes,
+        committer: Optional[bytes] = None,
+        timestamp: Optional[int] = None,
+        timezone: Optional[int] = None,
+        message: Optional[bytes] = None,
+    ) -> bool:
+        """Add a new reference only if it does not already exist."""
+        return self._refs.add_if_new(
+            self._apply_namespace(name),
+            ref,
+            committer=committer,
+            timestamp=timestamp,
+            timezone=timezone,
+            message=message,
+        )
+
+    def remove_if_equals(
+        self,
+        name: bytes,
+        old_ref: Optional[bytes],
+        committer: Optional[bytes] = None,
+        timestamp: Optional[int] = None,
+        timezone: Optional[int] = None,
+        message: Optional[bytes] = None,
+    ) -> bool:
+        """Remove a refname only if it currently equals old_ref."""
+        return self._refs.remove_if_equals(
+            self._apply_namespace(name),
+            old_ref,
+            committer=committer,
+            timestamp=timestamp,
+            timezone=timezone,
+            message=message,
+        )
+
+    def pack_refs(self, all: bool = False) -> None:
+        """Pack loose refs into packed-refs file.
+
+        Note: This packs all refs in the underlying container, not just
+        those in the namespace.
+        """
+        self._refs.pack_refs(all=all)
+
+
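A small sketch of the wrapper in use, relying on the generic item access inherited from RefsContainer (repository path and branches hypothetical):

    from dulwich.refs import NamespacedRefsContainer
    from dulwich.repo import Repo

    repo = Repo("/path/to/repo")
    ns_refs = NamespacedRefsContainer(repo.refs, b"foo")

    # A write through the wrapper lands under refs/namespaces/foo/ ...
    ns_refs[b"refs/heads/topic"] = repo.refs[b"refs/heads/master"]
    assert b"refs/namespaces/foo/refs/heads/topic" in repo.refs
    # ... and reads through the wrapper see only the namespaced view.
    assert b"refs/heads/topic" in ns_refs
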
 def filter_ref_prefix(refs: T, prefixes: Iterable[bytes]) -> T:
     """Filter refs to only include those with a given prefix.
 

+ 1 - 1
dulwich/repo.py

@@ -1158,7 +1158,7 @@ def read_gitfile(f: BinaryIO) -> str:
     cs = f.read()
     if not cs.startswith(b"gitdir: "):
         raise ValueError("Expected file to start with 'gitdir: '")
-    return cs[len(b"gitdir: ") :].rstrip(b"\n").decode("utf-8")
+    return cs[len(b"gitdir: ") :].rstrip(b"\r\n").decode("utf-8")
 
 
 class UnsupportedVersion(Exception):
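A tiny sketch of what the widened ``rstrip`` buys: a ``.git`` file written with Windows (CRLF) line endings now parses cleanly (file contents invented):

    from io import BytesIO

    from dulwich.repo import read_gitfile

    # The trailing "\r" of a CRLF-terminated gitfile is now stripped as well.
    assert read_gitfile(BytesIO(b"gitdir: ../.git/modules/sub\r\n")) == "../.git/modules/sub"
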

+ 448 - 0
dulwich/trailers.py

@@ -0,0 +1,448 @@
+# trailers.py -- Git trailers parsing and manipulation
+# Copyright (C) 2025 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Git trailers parsing and manipulation.
+
+This module provides functionality for parsing and manipulating Git trailers,
+which are structured information blocks appended to commit messages.
+
+Trailers follow the format:
+    Token: value
+    Token: value
+
+They are similar to RFC 822 email headers and appear at the end of commit
+messages after free-form content.
+"""
+
+from typing import Optional
+
+
+class Trailer:
+    """Represents a single Git trailer.
+
+    Args:
+        key: The trailer key/token (e.g., "Signed-off-by")
+        value: The trailer value
+        separator: The separator character used (default ':')
+    """
+
+    def __init__(self, key: str, value: str, separator: str = ":") -> None:
+        """Initialize a Trailer instance.
+
+        Args:
+            key: The trailer key/token
+            value: The trailer value
+            separator: The separator character (default ':')
+        """
+        self.key = key
+        self.value = value
+        self.separator = separator
+
+    def __eq__(self, other: object) -> bool:
+        """Compare two Trailer instances for equality.
+
+        Args:
+            other: The object to compare with
+
+        Returns:
+            True if trailers have the same key, value, and separator
+        """
+        if not isinstance(other, Trailer):
+            return NotImplemented
+        return (
+            self.key == other.key
+            and self.value == other.value
+            and self.separator == other.separator
+        )
+
+    def __repr__(self) -> str:
+        """Return a string representation suitable for debugging.
+
+        Returns:
+            A string showing the trailer's key, value, and separator
+        """
+        return f"Trailer(key={self.key!r}, value={self.value!r}, separator={self.separator!r})"
+
+    def __str__(self) -> str:
+        """Return the trailer formatted as it would appear in a commit message.
+
+        Returns:
+            The trailer in the format "key: value"
+        """
+        return f"{self.key}{self.separator} {self.value}"
+
+
+def parse_trailers(
+    message: bytes,
+    separators: str = ":",
+) -> tuple[bytes, list[Trailer]]:
+    """Parse trailers from a commit message.
+
+    Trailers are extracted from the input by looking for a group of one or more
+    lines that (i) is all trailers, or (ii) contains at least one Git-generated
+    or user-configured trailer and consists of at least 25% trailers.
+
+    The group must be preceded by one or more empty (or whitespace-only) lines.
+    The group must either be at the end of the input or be the last non-whitespace
+    lines before a line that starts with '---'.
+
+    Args:
+        message: The commit message as bytes
+        separators: Characters to recognize as trailer separators (default ':')
+
+    Returns:
+        A tuple of (message_without_trailers, list_of_trailers)
+    """
+    if not message:
+        return (b"", [])
+
+    # Decode message
+    try:
+        text = message.decode("utf-8")
+    except UnicodeDecodeError:
+        text = message.decode("latin-1")
+
+    lines = text.splitlines(keepends=True)
+
+    # Find the trailer block by searching backwards
+    # Look for a blank line followed by trailer-like lines
+    trailer_start = None
+    cutoff_line = None
+
+    # First, check if there's a "---" line that marks the end of the message
+    for i in range(len(lines) - 1, -1, -1):
+        if lines[i].lstrip().startswith("---"):
+            cutoff_line = i
+            break
+
+    # Determine the search range
+    search_end = cutoff_line if cutoff_line is not None else len(lines)
+
+    # Search backwards for the trailer block
+    # A trailer block must be preceded by a blank line and extend to the end
+    for i in range(search_end - 1, -1, -1):
+        line = lines[i].rstrip()
+
+        # Check if this is a blank line
+        if not line:
+            # Check if the lines after this blank line are trailers
+            potential_trailers = lines[i + 1 : search_end]
+
+            # Remove trailing blank lines from potential trailers
+            while potential_trailers and not potential_trailers[-1].strip():
+                potential_trailers = potential_trailers[:-1]
+
+            # Check if these lines form a trailer block and extend to search_end
+            if potential_trailers and _is_trailer_block(potential_trailers, separators):
+                # Verify these trailers extend to the end (search_end)
+                # by checking there are no non-blank lines after them
+                last_trailer_index = i + 1 + len(potential_trailers)
+                has_content_after = False
+                for j in range(last_trailer_index, search_end):
+                    if lines[j].strip():
+                        has_content_after = True
+                        break
+
+                if not has_content_after:
+                    trailer_start = i + 1
+                    break
+
+    if trailer_start is None:
+        # No trailer block found
+        return (message, [])
+
+    # Parse the trailers
+    trailer_lines = lines[trailer_start:search_end]
+    trailers = _parse_trailer_lines(trailer_lines, separators)
+
+    # Reconstruct the message without trailers
+    # Keep everything before the blank line that precedes the trailers
+    message_lines = lines[:trailer_start]
+
+    # Remove trailing blank lines from the message
+    while message_lines and not message_lines[-1].strip():
+        message_lines.pop()
+
+    message_without_trailers = "".join(message_lines)
+    if message_without_trailers and not message_without_trailers.endswith("\n"):
+        message_without_trailers += "\n"
+
+    return (message_without_trailers.encode("utf-8"), trailers)
+
+
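For orientation, a minimal round trip through the parser (message bytes invented):

    from dulwich.trailers import Trailer, parse_trailers

    body, trailers = parse_trailers(b"Subject\n\nBody\n\nSigned-off-by: Alice\n")
    assert body == b"Subject\n\nBody\n"
    assert trailers == [Trailer("Signed-off-by", "Alice", ":")]
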
+def _is_trailer_block(lines: list[str], separators: str) -> bool:
+    """Check if a group of lines forms a valid trailer block.
+
+    A trailer block must be composed entirely of trailer lines (with possible
+    blank lines and continuation lines). A single non-trailer line invalidates
+    the entire block.
+
+    Args:
+        lines: The lines to check
+        separators: Valid separator characters
+
+    Returns:
+        True if the lines form a valid trailer block
+    """
+    if not lines:
+        return False
+
+    # Remove empty lines at the end
+    while lines and not lines[-1].strip():
+        lines = lines[:-1]
+
+    if not lines:
+        return False
+
+    has_any_trailer = False
+
+    i = 0
+    while i < len(lines):
+        line = lines[i].rstrip()
+
+        if not line:
+            # Empty lines are allowed within the trailer block
+            i += 1
+            continue
+
+        # Check if this line is a continuation (starts with whitespace)
+        if line and line[0].isspace():
+            # This is a continuation of the previous line
+            i += 1
+            continue
+
+        # Check if this is a trailer line
+        is_trailer = False
+        for sep in separators:
+            if sep in line:
+                key_part = line.split(sep, 1)[0]
+                # Key must not contain whitespace
+                if key_part and not any(c.isspace() for c in key_part):
+                    is_trailer = True
+                    has_any_trailer = True
+                    break
+
+        # If this is not a trailer line, the block is invalid
+        if not is_trailer:
+            return False
+
+        i += 1
+
+    # Must have at least one trailer
+    return has_any_trailer
+
+
+def _parse_trailer_lines(lines: list[str], separators: str) -> list[Trailer]:
+    """Parse individual trailer lines.
+
+    Args:
+        lines: The trailer lines to parse
+        separators: Valid separator characters
+
+    Returns:
+        List of parsed Trailer objects
+    """
+    trailers: list[Trailer] = []
+    current_trailer: Optional[Trailer] = None
+
+    for line in lines:
+        stripped = line.rstrip()
+
+        if not stripped:
+            # Empty line - finalize current trailer if any
+            if current_trailer:
+                trailers.append(current_trailer)
+                current_trailer = None
+            continue
+
+        # Check if this is a continuation line (starts with whitespace)
+        if stripped[0].isspace():
+            if current_trailer:
+                # Append to the current trailer value
+                continuation = stripped.lstrip()
+                current_trailer.value += " " + continuation
+            continue
+
+        # Finalize the previous trailer if any
+        if current_trailer:
+            trailers.append(current_trailer)
+            current_trailer = None
+
+        # Try to parse as a new trailer
+        for sep in separators:
+            if sep in stripped:
+                parts = stripped.split(sep, 1)
+                key = parts[0]
+
+                # Key must not contain whitespace
+                if key and not any(c.isspace() for c in key):
+                    value = parts[1].strip() if len(parts) > 1 else ""
+                    current_trailer = Trailer(key, value, sep)
+                    break
+
+    # Don't forget the last trailer
+    if current_trailer:
+        trailers.append(current_trailer)
+
+    return trailers
+
+
+def format_trailers(trailers: list[Trailer]) -> bytes:
+    """Format a list of trailers as bytes.
+
+    Args:
+        trailers: List of Trailer objects
+
+    Returns:
+        Formatted trailers as bytes
+    """
+    if not trailers:
+        return b""
+
+    lines = [str(trailer) for trailer in trailers]
+    return "\n".join(lines).encode("utf-8") + b"\n"
+
+
+def add_trailer_to_message(
+    message: bytes,
+    key: str,
+    value: str,
+    separator: str = ":",
+    where: str = "end",
+    if_exists: str = "addIfDifferentNeighbor",
+    if_missing: str = "add",
+) -> bytes:
+    """Add a trailer to a commit message.
+
+    Args:
+        message: The original commit message
+        key: The trailer key
+        value: The trailer value
+        separator: The separator to use
+        where: Where to add the trailer ('end', 'start', 'after', 'before')
+        if_exists: How to handle existing trailers with the same key
+            - 'add': Always add
+            - 'replace': Replace all existing
+            - 'addIfDifferent': Add only if value is different from all existing
+            - 'addIfDifferentNeighbor': Add only if value differs from neighbors
+            - 'doNothing': Don't add if key exists
+        if_missing: What to do if the key doesn't exist
+            - 'add': Add the trailer
+            - 'doNothing': Don't add the trailer
+
+    Returns:
+        The message with the trailer added
+    """
+    message_body, existing_trailers = parse_trailers(message, separator)
+
+    new_trailer = Trailer(key, value, separator)
+
+    # Check if the key exists
+    key_exists = any(t.key == key for t in existing_trailers)
+
+    if not key_exists:
+        if if_missing == "doNothing":
+            return message
+        # Add the new trailer
+        updated_trailers = [*existing_trailers, new_trailer]
+    else:
+        # Key exists - apply if_exists logic
+        if if_exists == "doNothing":
+            return message
+        elif if_exists == "replace":
+            # Replace all trailers with this key
+            updated_trailers = [t for t in existing_trailers if t.key != key]
+            updated_trailers.append(new_trailer)
+        elif if_exists == "addIfDifferent":
+            # Add only if no existing trailer has the same value
+            has_same_value = any(
+                t.key == key and t.value == value for t in existing_trailers
+            )
+            if has_same_value:
+                return message
+            updated_trailers = [*existing_trailers, new_trailer]
+        elif if_exists == "addIfDifferentNeighbor":
+            # Add only if adjacent trailers with same key have different values
+            should_add = True
+
+            # Check if there's a neighboring trailer with the same key and value
+            for i, t in enumerate(existing_trailers):
+                if t.key == key and t.value == value:
+                    # Check if it's a neighbor (last trailer with this key)
+                    is_neighbor = True
+                    for j in range(i + 1, len(existing_trailers)):
+                        if existing_trailers[j].key == key:
+                            is_neighbor = False
+                            break
+                    if is_neighbor:
+                        should_add = False
+                        break
+
+            if not should_add:
+                return message
+            updated_trailers = [*existing_trailers, new_trailer]
+        else:  # 'add'
+            updated_trailers = [*existing_trailers, new_trailer]
+
+    # Apply where logic
+    if where == "start":
+        updated_trailers = [new_trailer] + [
+            t for t in updated_trailers if t != new_trailer
+        ]
+    elif where == "before":
+        # Insert before the first trailer with the same key
+        result = []
+        inserted = False
+        for t in updated_trailers:
+            if not inserted and t.key == key and t != new_trailer:
+                result.append(new_trailer)
+                inserted = True
+            if t != new_trailer:
+                result.append(t)
+        if not inserted:
+            result.append(new_trailer)
+        updated_trailers = result
+    elif where == "after":
+        # Insert after the last trailer with the same key
+        result = []
+        last_key_index = -1
+        for i, t in enumerate(updated_trailers):
+            if t.key == key and t != new_trailer:
+                last_key_index = len(result)
+            if t != new_trailer:
+                result.append(t)
+
+        if last_key_index >= 0:
+            result.insert(last_key_index + 1, new_trailer)
+        else:
+            result.append(new_trailer)
+        updated_trailers = result
+    # 'end' is the default - trailer is already at the end
+
+    # Reconstruct the message
+    result_message = message_body
+    if result_message and not result_message.endswith(b"\n"):
+        result_message += b"\n"
+
+    if updated_trailers:
+        result_message += b"\n"
+        result_message += format_trailers(updated_trailers)
+
+    return result_message
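The ``addIfDifferentNeighbor`` default is the subtle case, so a small sketch (inputs invented):

    from dulwich.trailers import add_trailer_to_message

    msg = b"Subject\n\nSigned-off-by: Alice\n"
    # Re-adding an identical key/value next to itself is a no-op ...
    assert add_trailer_to_message(msg, "Signed-off-by", "Alice") == msg
    # ... while a different value for the same key is appended.
    out = add_trailer_to_message(msg, "Signed-off-by", "Bob")
    assert out == b"Subject\n\nSigned-off-by: Alice\nSigned-off-by: Bob\n"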

+ 37 - 0
dulwich/worktree.py

@@ -46,6 +46,7 @@ from .repo import (
     check_user_identity,
     get_user_identity,
 )
+from .trailers import add_trailer_to_message
 
 
 class WorkTreeInfo:
@@ -428,6 +429,7 @@ class WorkTree:
         merge_heads: Sequence[ObjectID] | None = None,
         no_verify: bool = False,
         sign: bool | None = None,
+        signoff: bool | None = None,
     ) -> ObjectID:
         """Create a new commit.
 
@@ -456,6 +458,8 @@ class WorkTree:
           sign: GPG Sign the commit (bool, defaults to False,
             pass True to use default GPG key,
             pass a str containing Key ID to use a specific GPG key)
+          signoff: Add Signed-off-by line (DCO) to commit message.
+            If None, uses format.signoff config.
 
         Returns:
           New commit SHA1
@@ -557,6 +561,39 @@ class WorkTree:
             # FIXME: Try to read commit message from .git/MERGE_MSG
             raise ValueError("No commit message specified")
 
+        # Handle signoff
+        should_signoff = signoff
+        if should_signoff is None:
+            # Check format.signOff configuration
+            try:
+                should_signoff = config.get_boolean(
+                    (b"format",), b"signoff", default=False
+                )
+            except KeyError:
+                should_signoff = False
+
+        if should_signoff:
+            # Add Signed-off-by trailer
+            # Get the committer identity for the signoff
+            signoff_identity = committer
+            if isinstance(message, bytes):
+                message_bytes = message
+            else:
+                message_bytes = message.encode("utf-8")
+
+            message_bytes = add_trailer_to_message(
+                message_bytes,
+                "Signed-off-by",
+                signoff_identity.decode("utf-8")
+                if isinstance(signoff_identity, bytes)
+                else signoff_identity,
+                separator=":",
+                where="end",
+                if_exists="addIfDifferentNeighbor",
+                if_missing="add",
+            )
+            message = message_bytes
+
         try:
             if no_verify:
                 c.message = message
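At the porcelain level the new parameter is a one-liner; a hedged sketch (path and message invented):

    from dulwich import porcelain

    # signoff=True appends a Signed-off-by trailer for the committer identity;
    # leaving it as None falls back to the format.signoff configuration.
    porcelain.commit("/path/to/repo", message="Fix the frobnicator", signoff=True)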

+ 2 - 0
tests/__init__.py

@@ -124,6 +124,7 @@ class BlackboxTestCase(TestCase):
 def self_test_suite() -> unittest.TestSuite:
     names = [
         "annotate",
+        "approxidate",
         "archive",
         "attrs",
         "bisect",
@@ -184,6 +185,7 @@ def self_test_suite() -> unittest.TestSuite:
         "sparse_patterns",
         "stash",
         "submodule",
+        "trailers",
         "utils",
         "walk",
         "web",

+ 47 - 0
tests/compat/test_bundle.py

@@ -283,3 +283,50 @@ class CompatBundleTestCase(CompatTestCase):
         # Verify the cloned repository exists and has content
         self.assertTrue(os.path.exists(clone_path))
         self.assertTrue(os.path.exists(os.path.join(clone_path, "test.txt")))
+
+    def test_unbundle_git_bundle(self) -> None:
+        """Test unbundling a bundle created by git using dulwich CLI."""
+        # Create a repository with commits using git
+        run_git_or_fail(["config", "user.name", "Test User"], cwd=self.repo_path)
+        run_git_or_fail(
+            ["config", "user.email", "test@example.com"], cwd=self.repo_path
+        )
+
+        # Create commits
+        test_file = os.path.join(self.repo_path, "test.txt")
+        with open(test_file, "w") as f:
+            f.write("content 1\n")
+        run_git_or_fail(["add", "test.txt"], cwd=self.repo_path)
+        run_git_or_fail(["commit", "-m", "Commit 1"], cwd=self.repo_path)
+
+        with open(test_file, "a") as f:
+            f.write("content 2\n")
+        run_git_or_fail(["add", "test.txt"], cwd=self.repo_path)
+        run_git_or_fail(["commit", "-m", "Commit 2"], cwd=self.repo_path)
+
+        # Get commit SHA for verification
+        head_sha = run_git_or_fail(["rev-parse", "HEAD"], cwd=self.repo_path).strip()
+
+        # Create bundle using git
+        bundle_path = os.path.join(self.test_dir, "unbundle_test.bundle")
+        run_git_or_fail(["bundle", "create", bundle_path, "master"], cwd=self.repo_path)
+
+        # Create a new empty repository to unbundle into
+        unbundle_repo_path = os.path.join(self.test_dir, "unbundle_repo")
+        unbundle_repo = Repo.init(unbundle_repo_path, mkdir=True)
+        self.addCleanup(unbundle_repo.close)
+
+        # Read the bundle and store objects using dulwich
+        with open(bundle_path, "rb") as f:
+            bundle = read_bundle(f)
+
+        # Use the bundle's store_objects method to unbundle
+        bundle.store_objects(unbundle_repo.object_store)
+
+        # Verify objects are now in the repository
+        # Check that the HEAD commit exists
+        self.assertIn(head_sha, unbundle_repo.object_store)
+
+        # Verify we can retrieve the commit
+        commit = unbundle_repo.object_store[head_sha]
+        self.assertEqual(b"Commit 2\n", commit.message)

+ 7 - 1
tests/compat/utils.py

@@ -140,6 +140,12 @@ def run_git(
     env["LC_ALL"] = env["LANG"] = "C"
     env["PATH"] = os.getenv("PATH")
 
+    # Isolate from system and user git config to prevent config leakage
+    # This prevents issues like Apple Git's system-wide init.defaultBranch=main
+    # from affecting test behavior (see issue #1188)
+    env["GIT_CONFIG_NOSYSTEM"] = "1"
+    env["GIT_CONFIG_GLOBAL"] = "/dev/null"
+
     # Preserve Git identity environment variables if they exist, otherwise set dummy values
     git_env_defaults = {
         "GIT_AUTHOR_NAME": "Test User",
@@ -247,7 +253,7 @@ class CompatTestCase(TestCase):
     min_git_version.
     """
 
-    min_git_version: tuple[int, ...] = (1, 5, 0)
+    min_git_version: tuple[int, ...] = (2, 32, 0)
 
     def setUp(self) -> None:
         super().setUp()

+ 161 - 0
tests/test_approxidate.py

@@ -0,0 +1,161 @@
+# test_approxidate.py -- tests for approxidate.py
+# Copyright (C) 2025 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for approxidate parsing."""
+
+import time
+
+from dulwich.approxidate import parse_approxidate, parse_relative_time
+
+from . import TestCase
+
+
+class ParseRelativeTimeTests(TestCase):
+    """Test parse_relative_time."""
+
+    def test_now(self) -> None:
+        self.assertEqual(0, parse_relative_time("now"))
+
+    def test_seconds_ago(self) -> None:
+        self.assertEqual(5, parse_relative_time("5 seconds ago"))
+        self.assertEqual(1, parse_relative_time("1 second ago"))
+
+    def test_minutes_ago(self) -> None:
+        self.assertEqual(5 * 60, parse_relative_time("5 minutes ago"))
+        self.assertEqual(1 * 60, parse_relative_time("1 minute ago"))
+
+    def test_hours_ago(self) -> None:
+        self.assertEqual(5 * 3600, parse_relative_time("5 hours ago"))
+        self.assertEqual(1 * 3600, parse_relative_time("1 hour ago"))
+
+    def test_days_ago(self) -> None:
+        self.assertEqual(5 * 86400, parse_relative_time("5 days ago"))
+        self.assertEqual(1 * 86400, parse_relative_time("1 day ago"))
+
+    def test_weeks_ago(self) -> None:
+        self.assertEqual(2 * 604800, parse_relative_time("2 weeks ago"))
+        self.assertEqual(1 * 604800, parse_relative_time("1 week ago"))
+
+    def test_months_ago(self) -> None:
+        self.assertEqual(2 * 2592000, parse_relative_time("2 months ago"))
+        self.assertEqual(1 * 2592000, parse_relative_time("1 month ago"))
+
+    def test_years_ago(self) -> None:
+        self.assertEqual(2 * 31536000, parse_relative_time("2 years ago"))
+        self.assertEqual(1 * 31536000, parse_relative_time("1 year ago"))
+
+    def test_dot_separated_format(self) -> None:
+        # Git supports both "2 weeks ago" and "2.weeks.ago"
+        self.assertEqual(2 * 604800, parse_relative_time("2.weeks.ago"))
+        self.assertEqual(5 * 86400, parse_relative_time("5.days.ago"))
+
+    def test_invalid_format(self) -> None:
+        self.assertRaises(ValueError, parse_relative_time, "not a time")
+        self.assertRaises(ValueError, parse_relative_time, "5 weeks")  # Missing "ago"
+
+    def test_invalid_unit(self) -> None:
+        self.assertRaises(ValueError, parse_relative_time, "5 fortnights ago")
+
+    def test_invalid_number(self) -> None:
+        self.assertRaises(ValueError, parse_relative_time, "abc weeks ago")
+
+
+class ParseApproxidateTests(TestCase):
+    """Test parse_approxidate."""
+
+    def test_now(self) -> None:
+        result = parse_approxidate("now")
+        # Should be close to current time
+        self.assertAlmostEqual(result, time.time(), delta=2)
+
+    def test_yesterday(self) -> None:
+        result = parse_approxidate("yesterday")
+        expected = time.time() - 86400
+        self.assertAlmostEqual(result, expected, delta=2)
+
+    def test_today(self) -> None:
+        result = parse_approxidate("today")
+        # Should be midnight of current day
+        from datetime import datetime
+
+        now = datetime.fromtimestamp(time.time())
+        expected_dt = now.replace(hour=0, minute=0, second=0, microsecond=0)
+        expected = int(expected_dt.timestamp())
+        self.assertEqual(result, expected)
+
+    def test_unix_timestamp(self) -> None:
+        self.assertEqual(1234567890, parse_approxidate("1234567890"))
+        self.assertEqual(0, parse_approxidate("0"))
+
+    def test_relative_times(self) -> None:
+        # Test relative time parsing
+        result = parse_approxidate("2 weeks ago")
+        expected = time.time() - (2 * 604800)
+        self.assertAlmostEqual(result, expected, delta=2)
+
+        result = parse_approxidate("5.days.ago")
+        expected = time.time() - (5 * 86400)
+        self.assertAlmostEqual(result, expected, delta=2)
+
+    def test_absolute_date_iso(self) -> None:
+        # Test ISO format date
+        result = parse_approxidate("2009-02-13")
+        # 2009-02-13 00:00:00 UTC
+        from datetime import datetime
+
+        expected = int(datetime(2009, 2, 13, 0, 0, 0).timestamp())
+        self.assertEqual(result, expected)
+
+    def test_absolute_datetime_iso(self) -> None:
+        # Test ISO format datetime
+        result = parse_approxidate("2009-02-13 23:31:30")
+        from datetime import datetime
+
+        expected = int(datetime(2009, 2, 13, 23, 31, 30).timestamp())
+        self.assertEqual(result, expected)
+
+    def test_absolute_datetime_iso8601(self) -> None:
+        # Test ISO 8601 format
+        result = parse_approxidate("2009-02-13T23:31:30")
+        from datetime import datetime
+
+        expected = int(datetime(2009, 2, 13, 23, 31, 30).timestamp())
+        self.assertEqual(result, expected)
+
+    def test_bytes_input(self) -> None:
+        # Test that bytes input works
+        result = parse_approxidate(b"1234567890")
+        self.assertEqual(1234567890, result)
+
+        result = parse_approxidate(b"yesterday")
+        expected = time.time() - 86400
+        self.assertAlmostEqual(result, expected, delta=2)
+
+    def test_whitespace_handling(self) -> None:
+        # Test that leading/trailing whitespace is handled
+        self.assertEqual(1234567890, parse_approxidate("  1234567890  "))
+        result = parse_approxidate("  yesterday  ")
+        expected = time.time() - 86400
+        self.assertAlmostEqual(result, expected, delta=2)
+
+    def test_invalid_spec(self) -> None:
+        self.assertRaises(ValueError, parse_approxidate, "not a valid time")
+        self.assertRaises(ValueError, parse_approxidate, "abc123")

+ 63 - 0
tests/test_bundle.py

@@ -449,3 +449,66 @@ class BundleTests(TestCase):
         # Verify capabilities are included
         self.assertEqual(bundle.capabilities, capabilities)
         self.assertEqual(bundle.version, 3)
+
+    def test_create_bundle_with_hex_bytestring_prerequisite(self) -> None:
+        """Test creating a bundle with prerequisite as 40-byte hex bytestring."""
+        repo = MemoryRepo()
+
+        # Create minimal objects
+        blob = Blob.from_string(b"Hello world")
+        repo.object_store.add_object(blob)
+
+        tree = Tree()
+        tree.add(b"hello.txt", 0o100644, blob.id)
+        repo.object_store.add_object(tree)
+
+        commit = Commit()
+        commit.tree = tree.id
+        commit.message = b"Initial commit"
+        commit.author = commit.committer = b"Test User <test@example.com>"
+        commit.commit_time = commit.author_time = 1234567890
+        commit.commit_timezone = commit.author_timezone = 0
+        repo.object_store.add_object(commit)
+
+        repo.refs[b"refs/heads/master"] = commit.id
+
+        # Create another blob to use as prerequisite
+        prereq_blob = Blob.from_string(b"prerequisite")
+
+        # Use blob.id directly (40-byte hex bytestring)
+        bundle = create_bundle_from_repo(repo, prerequisites=[prereq_blob.id])
+
+        # Verify the prerequisite was added correctly
+        self.assertEqual(len(bundle.prerequisites), 1)
+        self.assertEqual(bundle.prerequisites[0][0], prereq_blob.id)
+
+    def test_create_bundle_with_hex_bytestring_prerequisite_simple(self) -> None:
+        """Test creating a bundle with prerequisite as 40-byte hex bytestring."""
+        repo = MemoryRepo()
+
+        # Create minimal objects
+        blob = Blob.from_string(b"Hello world")
+        repo.object_store.add_object(blob)
+
+        tree = Tree()
+        tree.add(b"hello.txt", 0o100644, blob.id)
+        repo.object_store.add_object(tree)
+
+        commit = Commit()
+        commit.tree = tree.id
+        commit.message = b"Initial commit"
+        commit.author = commit.committer = b"Test User <test@example.com>"
+        commit.commit_time = commit.author_time = 1234567890
+        commit.commit_timezone = commit.author_timezone = 0
+        repo.object_store.add_object(commit)
+
+        repo.refs[b"refs/heads/master"] = commit.id
+
+        # Use a 40-byte hex bytestring as prerequisite
+        prereq_hex = b"aa" * 20
+
+        bundle = create_bundle_from_repo(repo, prerequisites=[prereq_hex])
+
+        # Verify the prerequisite was added correctly
+        self.assertEqual(len(bundle.prerequisites), 1)
+        self.assertEqual(bundle.prerequisites[0][0], prereq_hex)

+ 375 - 111
tests/test_cli.py

@@ -34,10 +34,12 @@ from unittest.mock import MagicMock, patch
 
 from dulwich import cli
 from dulwich.cli import (
+    AutoFlushBinaryIOWrapper,
+    AutoFlushTextIOWrapper,
+    _should_auto_flush,
     detect_terminal_width,
     format_bytes,
     launch_editor,
-    parse_relative_time,
     write_columns,
 )
 from dulwich.repo import Repo
@@ -143,30 +145,6 @@ class HelperFunctionsTest(TestCase):
         result = launch_editor(b"Test template content")
         self.assertEqual(b"Test template content", result)
 
-    def test_parse_relative_time(self):
-        """Test parsing relative time strings."""
-        from dulwich.cli import parse_relative_time
-
-        self.assertEqual(0, parse_relative_time("now"))
-        self.assertEqual(60, parse_relative_time("1 minute ago"))
-        self.assertEqual(120, parse_relative_time("2 minutes ago"))
-        self.assertEqual(3600, parse_relative_time("1 hour ago"))
-        self.assertEqual(7200, parse_relative_time("2 hours ago"))
-        self.assertEqual(86400, parse_relative_time("1 day ago"))
-        self.assertEqual(172800, parse_relative_time("2 days ago"))
-        self.assertEqual(604800, parse_relative_time("1 week ago"))
-        self.assertEqual(1209600, parse_relative_time("2 weeks ago"))
-        self.assertEqual(2592000, parse_relative_time("1 month ago"))
-        self.assertEqual(31536000, parse_relative_time("1 year ago"))
-
-        # Test invalid formats
-        with self.assertRaises(ValueError):
-            parse_relative_time("invalid")
-        with self.assertRaises(ValueError):
-            parse_relative_time("2 days")  # Missing "ago"
-        with self.assertRaises(ValueError):
-            parse_relative_time("two days ago")  # Not a number
-
     def test_parse_time_to_timestamp(self):
         """Test parsing time specifications to Unix timestamps."""
         import time
@@ -3061,92 +3039,6 @@ class FormatBytesTestCase(TestCase):
         self.assertEqual("1000.0 TB", format_bytes(1024 * 1024 * 1024 * 1024 * 1000))
 
 
-class ParseRelativeTimeTestCase(TestCase):
-    """Tests for parse_relative_time function."""
-
-    def test_now(self):
-        """Test parsing 'now'."""
-        self.assertEqual(0, parse_relative_time("now"))
-
-    def test_seconds(self):
-        """Test parsing seconds."""
-        self.assertEqual(1, parse_relative_time("1 second ago"))
-        self.assertEqual(5, parse_relative_time("5 seconds ago"))
-        self.assertEqual(30, parse_relative_time("30 seconds ago"))
-
-    def test_minutes(self):
-        """Test parsing minutes."""
-        self.assertEqual(60, parse_relative_time("1 minute ago"))
-        self.assertEqual(300, parse_relative_time("5 minutes ago"))
-        self.assertEqual(1800, parse_relative_time("30 minutes ago"))
-
-    def test_hours(self):
-        """Test parsing hours."""
-        self.assertEqual(3600, parse_relative_time("1 hour ago"))
-        self.assertEqual(7200, parse_relative_time("2 hours ago"))
-        self.assertEqual(86400, parse_relative_time("24 hours ago"))
-
-    def test_days(self):
-        """Test parsing days."""
-        self.assertEqual(86400, parse_relative_time("1 day ago"))
-        self.assertEqual(604800, parse_relative_time("7 days ago"))
-        self.assertEqual(2592000, parse_relative_time("30 days ago"))
-
-    def test_weeks(self):
-        """Test parsing weeks."""
-        self.assertEqual(604800, parse_relative_time("1 week ago"))
-        self.assertEqual(1209600, parse_relative_time("2 weeks ago"))
-        self.assertEqual(
-            36288000, parse_relative_time("60 weeks ago")
-        )  # 60 * 7 * 24 * 60 * 60
-
-    def test_invalid_format(self):
-        """Test invalid time formats."""
-        with self.assertRaises(ValueError) as cm:
-            parse_relative_time("invalid")
-        self.assertIn("Invalid relative time format", str(cm.exception))
-
-        with self.assertRaises(ValueError) as cm:
-            parse_relative_time("2 weeks")
-        self.assertIn("Invalid relative time format", str(cm.exception))
-
-        with self.assertRaises(ValueError) as cm:
-            parse_relative_time("ago")
-        self.assertIn("Invalid relative time format", str(cm.exception))
-
-        with self.assertRaises(ValueError) as cm:
-            parse_relative_time("two weeks ago")
-        self.assertIn("Invalid number in relative time", str(cm.exception))
-
-    def test_invalid_unit(self):
-        """Test invalid time units."""
-        with self.assertRaises(ValueError) as cm:
-            parse_relative_time("5 fortnights ago")
-        self.assertIn("Unknown time unit: fortnights", str(cm.exception))
-
-        with self.assertRaises(ValueError) as cm:
-            parse_relative_time("2 decades ago")
-        self.assertIn("Unknown time unit: decades", str(cm.exception))
-
-    def test_singular_plural(self):
-        """Test that both singular and plural forms work."""
-        self.assertEqual(
-            parse_relative_time("1 second ago"), parse_relative_time("1 seconds ago")
-        )
-        self.assertEqual(
-            parse_relative_time("1 minute ago"), parse_relative_time("1 minutes ago")
-        )
-        self.assertEqual(
-            parse_relative_time("1 hour ago"), parse_relative_time("1 hours ago")
-        )
-        self.assertEqual(
-            parse_relative_time("1 day ago"), parse_relative_time("1 days ago")
-        )
-        self.assertEqual(
-            parse_relative_time("1 week ago"), parse_relative_time("1 weeks ago")
-        )
-
-
 class GetPagerTest(TestCase):
     """Tests for get_pager function."""
 
@@ -3672,5 +3564,377 @@ class ConfigCommandTest(DulwichCliTestCase):
         self.assertEqual(stdout, "value1\nvalue2\nvalue3\n")
 
 
+class GitFlushTest(TestCase):
+    """Tests for GIT_FLUSH environment variable support."""
+
+    def test_should_auto_flush_with_git_flush_1(self):
+
+        mock_stream = MagicMock()
+        mock_stream.isatty.return_value = True
+
+        self.assertTrue(_should_auto_flush(mock_stream, env={"GIT_FLUSH": "1"}))
+
+    def test_should_auto_flush_with_git_flush_0(self):
+        """Test that GIT_FLUSH=0 disables auto-flushing."""
+        mock_stream = MagicMock()
+        mock_stream.isatty.return_value = True
+
+        self.assertFalse(_should_auto_flush(mock_stream, env={"GIT_FLUSH": "0"}))
+
+    def test_should_auto_flush_auto_detect_tty(self):
+        """Test that auto-detect returns False for TTY (no flush needed)."""
+        mock_stream = MagicMock()
+        mock_stream.isatty.return_value = True
+
+        self.assertFalse(_should_auto_flush(mock_stream, env={}))
+
+    def test_should_auto_flush_auto_detect_pipe(self):
+        """Test that auto-detect returns True for pipes (flush needed)."""
+        mock_stream = MagicMock()
+        mock_stream.isatty.return_value = False
+
+        self.assertTrue(_should_auto_flush(mock_stream, env={}))
+
+    def test_text_wrapper_flushes_on_write(self):
+        """Test that AutoFlushTextIOWrapper flushes after write."""
+        mock_stream = MagicMock()
+        wrapper = AutoFlushTextIOWrapper(mock_stream)
+
+        wrapper.write("test")
+        mock_stream.write.assert_called_once_with("test")
+        mock_stream.flush.assert_called_once()
+
+    def test_text_wrapper_flushes_on_writelines(self):
+        """Test that AutoFlushTextIOWrapper flushes after writelines."""
+        mock_stream = MagicMock()
+        wrapper = AutoFlushTextIOWrapper(mock_stream)
+
+        wrapper.writelines(["line1\n", "line2\n"])
+        mock_stream.writelines.assert_called_once()
+        mock_stream.flush.assert_called_once()
+
+    def test_binary_wrapper_flushes_on_write(self):
+        """Test that AutoFlushBinaryIOWrapper flushes after write."""
+        mock_stream = MagicMock()
+        wrapper = AutoFlushBinaryIOWrapper(mock_stream)
+
+        wrapper.write(b"test")
+        mock_stream.write.assert_called_once_with(b"test")
+        mock_stream.flush.assert_called_once()
+
+    def test_text_wrapper_env_classmethod(self):
+        """Test that AutoFlushTextIOWrapper.env() respects GIT_FLUSH."""
+        mock_stream = MagicMock()
+        mock_stream.isatty.return_value = False
+
+        wrapper = AutoFlushTextIOWrapper.env(mock_stream, env={"GIT_FLUSH": "1"})
+        self.assertIsInstance(wrapper, AutoFlushTextIOWrapper)
+
+        wrapper = AutoFlushTextIOWrapper.env(mock_stream, env={"GIT_FLUSH": "0"})
+        self.assertIs(mock_stream, wrapper)
+
+    def test_binary_wrapper_env_classmethod(self):
+        """Test that AutoFlushBinaryIOWrapper.env() respects GIT_FLUSH."""
+        mock_stream = MagicMock()
+        mock_stream.isatty.return_value = False
+
+        wrapper = AutoFlushBinaryIOWrapper.env(mock_stream, env={"GIT_FLUSH": "1"})
+        self.assertIsInstance(wrapper, AutoFlushBinaryIOWrapper)
+
+        wrapper = AutoFlushBinaryIOWrapper.env(mock_stream, env={"GIT_FLUSH": "0"})
+        self.assertIs(wrapper, mock_stream)
+
+    def test_wrapper_delegates_attributes(self):
+        """Test that wrapper delegates unknown attributes to stream."""
+        mock_stream = MagicMock()
+        mock_stream.encoding = "utf-8"
+        wrapper = AutoFlushTextIOWrapper(mock_stream)
+
+        self.assertEqual(wrapper.encoding, "utf-8")
+
+    def test_wrapper_context_manager(self):
+        """Test that wrapper supports context manager protocol."""
+        mock_stream = MagicMock()
+        wrapper = AutoFlushTextIOWrapper(mock_stream)
+
+        with wrapper as w:
+            self.assertIs(w, wrapper)
+
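+# The wrapper tests above assume a thin write-through proxy along these lines
+# (an illustrative sketch only, not the actual dulwich.cli implementation):
+#
+#   class AutoFlushTextIOWrapper:
+#       def __init__(self, stream):
+#           self._stream = stream
+#
+#       def write(self, s):
+#           n = self._stream.write(s)
+#           self._stream.flush()                   # flush after every write
+#           return n
+#
+#       def __getattr__(self, name):
+#           return getattr(self._stream, name)     # delegate everything else
+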
+
+class MaintenanceCommandTest(DulwichCliTestCase):
+    """Tests for maintenance command."""
+
+    def setUp(self):
+        super().setUp()
+        # Set up a temporary HOME for testing global config
+        self.temp_home = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.temp_home)
+        self.overrideEnv("HOME", self.temp_home)
+
+    def test_maintenance_run_default(self):
+        """Test maintenance run with default tasks."""
+        result, _stdout, _stderr = self._run_cli("maintenance", "run")
+        self.assertIsNone(result)
+
+    def test_maintenance_run_specific_task(self):
+        """Test maintenance run with a specific task."""
+        result, _stdout, _stderr = self._run_cli(
+            "maintenance", "run", "--task", "pack-refs"
+        )
+        self.assertIsNone(result)
+
+    def test_maintenance_run_multiple_tasks(self):
+        """Test maintenance run with multiple specific tasks."""
+        result, _stdout, _stderr = self._run_cli(
+            "maintenance", "run", "--task", "pack-refs", "--task", "gc"
+        )
+        self.assertIsNone(result)
+
+    def test_maintenance_run_quiet(self):
+        """Test maintenance run with quiet flag."""
+        result, _stdout, _stderr = self._run_cli("maintenance", "run", "--quiet")
+        self.assertIsNone(result)
+
+    def test_maintenance_run_auto(self):
+        """Test maintenance run with auto flag."""
+        result, _stdout, _stderr = self._run_cli("maintenance", "run", "--auto")
+        self.assertIsNone(result)
+
+    def test_maintenance_no_subcommand(self):
+        """Test maintenance command without subcommand shows help."""
+        result, _stdout, _stderr = self._run_cli("maintenance")
+        self.assertEqual(result, 1)
+
+    def test_maintenance_register(self):
+        """Test maintenance register subcommand."""
+        result, _stdout, _stderr = self._run_cli("maintenance", "register")
+        self.assertIsNone(result)
+
+    def test_maintenance_unregister(self):
+        """Test maintenance unregister subcommand."""
+        # First register
+        _result, _stdout, _stderr = self._run_cli("maintenance", "register")
+
+        # Then unregister
+        result, _stdout, _stderr = self._run_cli("maintenance", "unregister")
+        self.assertIsNone(result)
+
+    def test_maintenance_unregister_not_registered(self):
+        """Test unregistering a repository that is not registered."""
+        result, _stdout, _stderr = self._run_cli("maintenance", "unregister")
+        self.assertEqual(result, 1)
+
+    def test_maintenance_unregister_force(self):
+        """Test unregistering with --force flag."""
+        result, _stdout, _stderr = self._run_cli("maintenance", "unregister", "--force")
+        self.assertIsNone(result)
+
+    def test_maintenance_unimplemented_subcommand(self):
+        """Test unimplemented maintenance subcommands."""
+        for subcommand in ["start", "stop"]:
+            with self.subTest(subcommand=subcommand):
+                result, _stdout, _stderr = self._run_cli("maintenance", subcommand)
+                self.assertEqual(result, 1)
+
+
+class InterpretTrailersCommandTest(DulwichCliTestCase):
+    """Tests for interpret-trailers command."""
+
+    def test_parse_trailers_from_file(self):
+        """Test parsing trailers from a file."""
+        # Create a message file with trailers
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody\n\nSigned-off-by: Alice <alice@example.com>\n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers", "--only-trailers", msg_file
+        )
+        self.assertIsNone(result)
+        self.assertIn("Signed-off-by: Alice <alice@example.com>", stdout)
+
+    def test_add_trailer_to_message(self):
+        """Test adding a trailer to a message."""
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody text\n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers",
+            "--trailer",
+            "Signed-off-by:Alice <alice@example.com>",
+            msg_file,
+        )
+        self.assertIsNone(result)
+        self.assertIn("Signed-off-by: Alice <alice@example.com>", stdout)
+        self.assertIn("Subject", stdout)
+        self.assertIn("Body text", stdout)
+
+    def test_add_multiple_trailers(self):
+        """Test adding multiple trailers."""
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody\n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers",
+            "--trailer",
+            "Signed-off-by:Alice",
+            "--trailer",
+            "Reviewed-by:Bob",
+            msg_file,
+        )
+        self.assertIsNone(result)
+        self.assertIn("Signed-off-by: Alice", stdout)
+        self.assertIn("Reviewed-by: Bob", stdout)
+
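+    # The --trailer values above ("Key:value" or "Key=value") are assumed to be
+    # split on the first ':' or '=', with whitespace trimmed from both halves
+    # (hypothetical sketch using the re module):
+    #
+    #   m = re.match(r"([^:=]+)[:=](.*)", arg)
+    #   key, value = m.group(1).strip(), m.group(2).strip()
+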
+    def test_parse_shorthand(self):
+        """Test --parse shorthand option."""
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody\n\nSigned-off-by: Alice\n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers", "--parse", msg_file
+        )
+        self.assertIsNone(result)
+        # --parse is shorthand for --only-trailers --only-input --unfold
+        self.assertEqual(stdout, "Signed-off-by: Alice\n")
+
+    def test_trim_empty(self):
+        """Test --trim-empty option."""
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody\n\nSigned-off-by: Alice\nReviewed-by: \n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers", "--trim-empty", "--only-trailers", msg_file
+        )
+        self.assertIsNone(result)
+        self.assertIn("Signed-off-by: Alice", stdout)
+        self.assertNotIn("Reviewed-by:", stdout)
+
+    def test_if_exists_replace(self):
+        """Test --if-exists replace option."""
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody\n\nSigned-off-by: Alice\n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers",
+            "--if-exists",
+            "replace",
+            "--trailer",
+            "Signed-off-by:Bob",
+            msg_file,
+        )
+        self.assertIsNone(result)
+        self.assertIn("Signed-off-by: Bob", stdout)
+        self.assertNotIn("Alice", stdout)
+
+    def test_trailer_with_equals(self):
+        """Test trailer with equals separator."""
+        msg_file = os.path.join(self.test_dir, "message.txt")
+        with open(msg_file, "wb") as f:
+            f.write(b"Subject\n\nBody\n")
+
+        result, stdout, _stderr = self._run_cli(
+            "interpret-trailers", "--trailer", "Bug=12345", msg_file
+        )
+        self.assertIsNone(result)
+        self.assertIn("Bug: 12345", stdout)
+
+
+class ReplaceCommandTest(DulwichCliTestCase):
+    """Tests for replace command."""
+
+    def test_replace_create(self):
+        """Test creating a replacement ref."""
+        # Create two commits
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create replacement using the create form (decode to string for CLI)
+        c1_str = c1.id.decode("ascii")
+        c2_str = c2.id.decode("ascii")
+        _result, _stdout, _stderr = self._run_cli("replace", c1_str, c2_str)
+
+        # Verify the replacement ref was created
+        replace_ref = b"refs/replace/" + c1.id
+        self.assertIn(replace_ref, self.repo.refs.keys())
+        self.assertEqual(c2.id, self.repo.refs[replace_ref])
+
+    def test_replace_list_empty(self):
+        """Test listing replacements when there are none."""
+        _result, stdout, _stderr = self._run_cli("replace", "list")
+        self.assertEqual("", stdout)
+
+    def test_replace_list(self):
+        """Test listing replacement refs."""
+        # Create two commits
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create replacement
+        c1_str = c1.id.decode("ascii")
+        c2_str = c2.id.decode("ascii")
+        self._run_cli("replace", c1_str, c2_str)
+
+        # List replacements
+        _result, stdout, _stderr = self._run_cli("replace", "list")
+        self.assertIn(c1_str, stdout)
+        self.assertIn(c2_str, stdout)
+
+    def test_replace_default_list(self):
+        """Test that replace without subcommand defaults to list."""
+        # Create two commits
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create replacement
+        c1_str = c1.id.decode("ascii")
+        c2_str = c2.id.decode("ascii")
+        self._run_cli("replace", c1_str, c2_str)
+
+        # Call replace without subcommand (should list)
+        _result, stdout, _stderr = self._run_cli("replace")
+        self.assertIn(c1_str, stdout)
+        self.assertIn(c2_str, stdout)
+
+    def test_replace_delete(self):
+        """Test deleting a replacement ref."""
+        # Create two commits
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create replacement
+        c1_str = c1.id.decode("ascii")
+        c2_str = c2.id.decode("ascii")
+        self._run_cli("replace", c1_str, c2_str)
+
+        # Verify it exists
+        replace_ref = b"refs/replace/" + c1.id
+        self.assertIn(replace_ref, self.repo.refs.keys())
+
+        # Delete the replacement
+        _result, _stdout, _stderr = self._run_cli("replace", "delete", c1_str)
+
+        # Verify it's gone
+        self.assertNotIn(replace_ref, self.repo.refs.keys())
+
+    def test_replace_delete_nonexistent(self):
+        """Test deleting a nonexistent replacement ref fails."""
+        # Create a commit
+        [c1] = build_commit_graph(self.repo.object_store, [[1]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Try to delete a non-existent replacement
+        c1_str = c1.id.decode("ascii")
+        result, _stdout, _stderr = self._run_cli("replace", "delete", c1_str)
+        self.assertEqual(result, 1)
+
+
 if __name__ == "__main__":
     unittest.main()

+ 314 - 0
tests/test_maintenance.py

@@ -0,0 +1,314 @@
+# test_maintenance.py -- tests for maintenance functionality
+# Copyright (C) 2024 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.maintenance."""
+
+import os
+import shutil
+import tempfile
+
+from dulwich import porcelain
+from dulwich.config import ConfigFile
+from dulwich.maintenance import (
+    CommitGraphTask,
+    GcTask,
+    IncrementalRepackTask,
+    LooseObjectsTask,
+    PackRefsTask,
+    PrefetchTask,
+    get_enabled_tasks,
+    run_maintenance,
+)
+from dulwich.objects import Blob
+from dulwich.repo import Repo
+
+from . import TestCase
+
+
+class MaintenanceTaskTestCase(TestCase):
+    """Base class for maintenance task tests."""
+
+    def setUp(self):
+        super().setUp()
+        self.test_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.test_dir)
+        self.repo = Repo.init(self.test_dir)
+        self.addCleanup(self.repo.close)
+
+    def _add_loose_object(self):
+        """Add a loose blob object for maintenance tasks to process."""
+        blob = Blob.from_string(b"test content")
+        self.repo.object_store.add_object(blob)
+        return blob
+
+
+class GcTaskTest(MaintenanceTaskTestCase):
+    """Tests for GcTask."""
+
+    def test_default_enabled(self):
+        """Test that GC task is enabled by default."""
+        task = GcTask(self.repo)
+        self.assertTrue(task.default_enabled())
+        self.assertTrue(task.is_enabled())
+
+    def test_run(self):
+        """Test running GC task."""
+        self._add_loose_object()
+        task = GcTask(self.repo)
+        result = task.run()
+        self.assertTrue(result)
+
+
+class CommitGraphTaskTest(MaintenanceTaskTestCase):
+    """Tests for CommitGraphTask."""
+
+    def test_default_enabled(self):
+        """Test that commit-graph task is enabled by default."""
+        task = CommitGraphTask(self.repo)
+        self.assertTrue(task.default_enabled())
+        self.assertTrue(task.is_enabled())
+
+    def test_run(self):
+        """Test running commit-graph task."""
+        self._add_loose_object()
+        task = CommitGraphTask(self.repo)
+        result = task.run()
+        self.assertTrue(result)
+
+
+class LooseObjectsTaskTest(MaintenanceTaskTestCase):
+    """Tests for LooseObjectsTask."""
+
+    def test_default_enabled(self):
+        """Test that loose-objects task is disabled by default."""
+        task = LooseObjectsTask(self.repo)
+        self.assertFalse(task.default_enabled())
+
+    def test_run(self):
+        """Test running loose-objects task."""
+        self._add_loose_object()
+        task = LooseObjectsTask(self.repo)
+        result = task.run()
+        self.assertTrue(result)
+
+
+class IncrementalRepackTaskTest(MaintenanceTaskTestCase):
+    """Tests for IncrementalRepackTask."""
+
+    def test_default_enabled(self):
+        """Test that incremental-repack task is disabled by default."""
+        task = IncrementalRepackTask(self.repo)
+        self.assertFalse(task.default_enabled())
+
+    def test_run_no_packs(self):
+        """Test running incremental-repack with no packs."""
+        task = IncrementalRepackTask(self.repo)
+        result = task.run()
+        self.assertTrue(result)
+
+    def test_run_auto_few_packs(self):
+        """Test that auto mode skips repacking when there are few packs."""
+        self._add_loose_object()
+        task = IncrementalRepackTask(self.repo, auto=True)
+        result = task.run()
+        self.assertTrue(result)
+
+
+class PackRefsTaskTest(MaintenanceTaskTestCase):
+    """Tests for PackRefsTask."""
+
+    def test_default_enabled(self):
+        """Test that pack-refs task is disabled by default."""
+        task = PackRefsTask(self.repo)
+        self.assertFalse(task.default_enabled())
+
+    def test_run(self):
+        """Test running pack-refs task."""
+        task = PackRefsTask(self.repo)
+        result = task.run()
+        self.assertTrue(result)
+
+
+class PrefetchTaskTest(MaintenanceTaskTestCase):
+    """Tests for PrefetchTask."""
+
+    def test_default_enabled(self):
+        """Test that prefetch task is disabled by default."""
+        task = PrefetchTask(self.repo)
+        self.assertFalse(task.default_enabled())
+
+    def test_run_no_remotes(self):
+        """Test running prefetch with no remotes configured."""
+        task = PrefetchTask(self.repo)
+        result = task.run()
+        self.assertTrue(result)
+
+
+class MaintenanceFunctionsTest(MaintenanceTaskTestCase):
+    """Tests for maintenance module functions."""
+
+    def test_get_enabled_tasks_default(self):
+        """Test getting enabled tasks with defaults."""
+        enabled = get_enabled_tasks(self.repo)
+        # By default, only gc and commit-graph are enabled
+        self.assertIn("gc", enabled)
+        self.assertIn("commit-graph", enabled)
+        self.assertNotIn("loose-objects", enabled)
+        self.assertNotIn("incremental-repack", enabled)
+        self.assertNotIn("pack-refs", enabled)
+        self.assertNotIn("prefetch", enabled)
+
+    def test_get_enabled_tasks_with_filter(self):
+        """Test getting enabled tasks with a filter."""
+        enabled = get_enabled_tasks(self.repo, ["gc", "pack-refs"])
+        self.assertEqual(set(enabled), {"gc", "pack-refs"})
+
+    def test_get_enabled_tasks_invalid(self):
+        """Test that invalid task names are ignored."""
+        enabled = get_enabled_tasks(self.repo, ["gc", "invalid-task"])
+        self.assertEqual(enabled, ["gc"])
+
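+    # Assumed selection logic for get_enabled_tasks (a sketch; task_enabled is
+    # a hypothetical helper standing in for per-task config/default lookup):
+    #
+    #   def get_enabled_tasks(repo, tasks=None):
+    #       known = ["gc", "commit-graph", "loose-objects",
+    #                "incremental-repack", "pack-refs", "prefetch"]
+    #       if tasks is not None:
+    #           return [t for t in tasks if t in known]   # unknown names dropped
+    #       return [t for t in known if task_enabled(repo, t)]
+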
+    def test_run_maintenance(self):
+        """Test running maintenance tasks."""
+        self._add_loose_object()
+        result = run_maintenance(self.repo)
+        self.assertIn("gc", result.tasks_run)
+        self.assertIn("commit-graph", result.tasks_run)
+        self.assertIn("gc", result.tasks_succeeded)
+        self.assertIn("commit-graph", result.tasks_succeeded)
+        self.assertEqual(len(result.tasks_failed), 0)
+
+    def test_run_maintenance_specific_tasks(self):
+        """Test running specific maintenance tasks."""
+        result = run_maintenance(self.repo, tasks=["pack-refs"])
+        self.assertEqual(result.tasks_run, ["pack-refs"])
+        self.assertEqual(result.tasks_succeeded, ["pack-refs"])
+        self.assertEqual(len(result.tasks_failed), 0)
+
+    def test_run_maintenance_with_progress(self):
+        """Test running maintenance with progress callback."""
+        messages = []
+
+        def progress(msg):
+            messages.append(msg)
+
+        self._add_loose_object()
+        result = run_maintenance(self.repo, progress=progress)
+        self.assertGreater(len(messages), 0)
+        self.assertIn("gc", result.tasks_succeeded)
+
+
+class PorcelainMaintenanceTest(MaintenanceTaskTestCase):
+    """Tests for porcelain.maintenance_run function."""
+
+    def test_maintenance_run(self):
+        """Test porcelain maintenance_run function."""
+        self._add_loose_object()
+        result = porcelain.maintenance_run(self.test_dir)
+        self.assertIn("gc", result.tasks_succeeded)
+        self.assertIn("commit-graph", result.tasks_succeeded)
+
+    def test_maintenance_run_with_tasks(self):
+        """Test porcelain maintenance_run with specific tasks."""
+        result = porcelain.maintenance_run(self.test_dir, tasks=["pack-refs"])
+        self.assertEqual(result.tasks_run, ["pack-refs"])
+        self.assertEqual(result.tasks_succeeded, ["pack-refs"])
+
+
+class MaintenanceRegisterTest(MaintenanceTaskTestCase):
+    """Tests for maintenance register/unregister."""
+
+    def setUp(self):
+        super().setUp()
+        # Set up a temporary HOME for testing global config
+        self.temp_home = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.temp_home)
+        self.overrideEnv("HOME", self.temp_home)
+
+    def test_register_repository(self):
+        """Test registering a repository for maintenance."""
+        porcelain.maintenance_register(self.test_dir)
+
+        # Verify repository was added to global config
+        global_config_path = os.path.expanduser("~/.gitconfig")
+        global_config = ConfigFile.from_path(global_config_path)
+
+        repos = list(global_config.get_multivar((b"maintenance",), b"repo"))
+        self.assertIn(self.test_dir.encode(), repos)
+
+        # Verify strategy was set
+        strategy = global_config.get((b"maintenance",), b"strategy")
+        self.assertEqual(strategy, b"incremental")
+
+        # Verify auto maintenance was disabled in repo
+        repo_config = self.repo.get_config()
+        auto = repo_config.get_boolean((b"maintenance",), b"auto")
+        self.assertFalse(auto)
+
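+    # Summarising the on-disk effect the assertions above check:
+    #   ~/.gitconfig : [maintenance] repo = <repo path>, strategy = incremental
+    #   .git/config  : [maintenance] auto = false
+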
+    def test_register_already_registered(self):
+        """Test registering an already registered repository."""
+        porcelain.maintenance_register(self.test_dir)
+        # Should not error when registering again
+        porcelain.maintenance_register(self.test_dir)
+
+    def test_unregister_repository(self):
+        """Test unregistering a repository."""
+        # First register
+        porcelain.maintenance_register(self.test_dir)
+
+        # Then unregister
+        porcelain.maintenance_unregister(self.test_dir)
+
+        # Verify repository was removed from global config
+        global_config_path = os.path.expanduser("~/.gitconfig")
+        global_config = ConfigFile.from_path(global_config_path)
+
+        try:
+            repos = list(global_config.get_multivar((b"maintenance",), b"repo"))
+            self.assertNotIn(self.test_dir.encode(), repos)
+        except KeyError:
+            # No repos registered, which is fine
+            pass
+
+    def test_unregister_not_registered(self):
+        """Test unregistering a repository that is not registered."""
+        with self.assertRaises(ValueError):
+            porcelain.maintenance_unregister(self.test_dir)
+
+    def test_unregister_not_registered_force(self):
+        """Test unregistering with force flag."""
+        # Should not error with force=True
+        porcelain.maintenance_unregister(self.test_dir, force=True)

+ 171 - 0
tests/test_objectspec.py

@@ -246,6 +246,177 @@ class ParseObjectTests(TestCase):
             # HEAD@{2} is the third/oldest (c1)
             self.assertEqual(c1, parse_object(r, b"HEAD@{2}"))
 
+    def test_reflog_time_lookup(self) -> None:
+        # Use a real repo for reflog testing with time specifications
+        import tempfile
+
+        from dulwich.repo import Repo
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            r = Repo.init_bare(tmpdir)
+            c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 2]])
+
+            # Write reflog entries with specific timestamps
+            # 1234567890 = 2009-02-13 23:31:30 UTC
+            r._write_reflog(
+                b"HEAD",
+                None,
+                c1.id,
+                b"Test User <test@example.com>",
+                1234567890,
+                0,
+                b"commit: Initial commit",
+            )
+            # 1234657890 = 2009-02-15 00:31:30 UTC (25 hours later)
+            r._write_reflog(
+                b"HEAD",
+                c1.id,
+                c2.id,
+                b"Test User <test@example.com>",
+                1234657890,
+                0,
+                b"commit: Second commit",
+            )
+            # 1235000000 = 2009-02-18 23:33:20 UTC
+            r._write_reflog(
+                b"HEAD",
+                c2.id,
+                c3.id,
+                b"Test User <test@example.com>",
+                1235000000,
+                0,
+                b"commit: Third commit",
+            )
+
+            # Lookup by timestamp - should get the most recent entry at or before time
+            self.assertEqual(c1, parse_object(r, b"HEAD@{1234567890}"))
+            self.assertEqual(c2, parse_object(r, b"HEAD@{1234657890}"))
+            self.assertEqual(c3, parse_object(r, b"HEAD@{1235000000}"))
+            # Future timestamp should get latest entry
+            self.assertEqual(c3, parse_object(r, b"HEAD@{9999999999}"))
+
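+    # Resolution rule assumed for HEAD@{<timestamp>}: scan the reflog from
+    # newest to oldest and return the first entry written at or before the
+    # requested time; a time newer than the whole log yields the latest entry
+    # (sketch over hypothetical entry objects):
+    #
+    #   for entry in reversed(entries):          # entries ordered oldest -> newest
+    #       if entry.timestamp <= when:
+    #           return entry.new_sha
+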
+    def test_index_path_lookup_stage0(self) -> None:
+        # Test index path lookup for stage 0 (normal files)
+        import tempfile
+
+        from dulwich.index import IndexEntry
+        from dulwich.repo import Repo
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            r = Repo.init(tmpdir)
+
+            # Create a blob and add it to the index
+            b = Blob.from_string(b"Test content")
+            r.object_store.add_object(b)
+
+            # Add to index
+            index = r.open_index()
+
+            index[b"test.txt"] = IndexEntry(
+                ctime=(0, 0),
+                mtime=(0, 0),
+                dev=0,
+                ino=0,
+                mode=0o100644,
+                uid=0,
+                gid=0,
+                size=len(b.data),
+                sha=b.id,
+            )
+            index.write()
+
+            # Test :path syntax (defaults to stage 0)
+            result = parse_object(r, b":test.txt")
+            self.assertEqual(b"Test content", result.data)
+
+            # Test :0:path syntax (explicit stage 0)
+            result = parse_object(r, b":0:test.txt")
+            self.assertEqual(b"Test content", result.data)
+
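+    # Index-path grammar relied on above (see gitrevisions(7)): ":<path>"
+    # reads stage 0 of <path> from the index, and ":<n>:<path>" with n in 0..3
+    # selects an explicit stage (1 = common ancestor, 2 = ours, 3 = theirs
+    # during a merge).
+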
+    def test_index_path_lookup_conflicts(self) -> None:
+        # Test index path lookup with merge conflicts (stages 1-3)
+        import tempfile
+
+        from dulwich.index import ConflictedIndexEntry, IndexEntry
+        from dulwich.repo import Repo
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            r = Repo.init(tmpdir)
+
+            # Create three different versions of a file
+            b_ancestor = Blob.from_string(b"Ancestor content")
+            b_this = Blob.from_string(b"This content")
+            b_other = Blob.from_string(b"Other content")
+            r.object_store.add_object(b_ancestor)
+            r.object_store.add_object(b_this)
+            r.object_store.add_object(b_other)
+
+            # Add conflicted entry to index
+            index = r.open_index()
+            index[b"conflict.txt"] = ConflictedIndexEntry(
+                ancestor=IndexEntry(
+                    ctime=(0, 0),
+                    mtime=(0, 0),
+                    dev=0,
+                    ino=0,
+                    mode=0o100644,
+                    uid=0,
+                    gid=0,
+                    size=len(b_ancestor.data),
+                    sha=b_ancestor.id,
+                ),
+                this=IndexEntry(
+                    ctime=(0, 0),
+                    mtime=(0, 0),
+                    dev=0,
+                    ino=0,
+                    mode=0o100644,
+                    uid=0,
+                    gid=0,
+                    size=len(b_this.data),
+                    sha=b_this.id,
+                ),
+                other=IndexEntry(
+                    ctime=(0, 0),
+                    mtime=(0, 0),
+                    dev=0,
+                    ino=0,
+                    mode=0o100644,
+                    uid=0,
+                    gid=0,
+                    size=len(b_other.data),
+                    sha=b_other.id,
+                ),
+            )
+            index.write()
+
+            # Test stage 1 (ancestor)
+            result = parse_object(r, b":1:conflict.txt")
+            self.assertEqual(b"Ancestor content", result.data)
+
+            # Test stage 2 (this)
+            result = parse_object(r, b":2:conflict.txt")
+            self.assertEqual(b"This content", result.data)
+
+            # Test stage 3 (other)
+            result = parse_object(r, b":3:conflict.txt")
+            self.assertEqual(b"Other content", result.data)
+
+            # Test that :conflict.txt raises an error for conflicted files
+            self.assertRaises(ValueError, parse_object, r, b":conflict.txt")
+
+    def test_index_path_not_found(self) -> None:
+        # Test error when path not in index
+        import tempfile
+
+        from dulwich.repo import Repo
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            r = Repo.init(tmpdir)
+
+            # Try to lookup non-existent path
+            self.assertRaises(KeyError, parse_object, r, b":nonexistent.txt")
+
 
 class ParseCommitRangeTests(TestCase):
     """Test parse_commit_range."""

+ 205 - 0
tests/test_porcelain.py

@@ -4859,6 +4859,118 @@ class SubmoduleTests(PorcelainTestCase):
         with open(submodule_file) as f:
             self.assertEqual(f.read(), "submodule content")
 
+    def test_update_recursive(self) -> None:
+        # Create a nested (innermost) submodule repository
+        nested_repo_path = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, nested_repo_path)
+        nested_repo = Repo.init(nested_repo_path)
+        self.addCleanup(nested_repo.close)
+
+        # Add a file to the nested repo
+        nested_file = os.path.join(nested_repo_path, "nested.txt")
+        with open(nested_file, "w") as f:
+            f.write("nested submodule content")
+
+        porcelain.add(nested_repo, paths=[nested_file])
+        nested_commit = porcelain.commit(
+            nested_repo,
+            message=b"Initial nested commit",
+            author=b"Test Author <test@example.com>",
+            committer=b"Test Committer <test@example.com>",
+        )
+
+        # Create a middle submodule repository
+        middle_repo_path = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, middle_repo_path)
+        middle_repo = Repo.init(middle_repo_path)
+        self.addCleanup(middle_repo.close)
+
+        # Add a file to the middle repo
+        middle_file = os.path.join(middle_repo_path, "middle.txt")
+        with open(middle_file, "w") as f:
+            f.write("middle submodule content")
+
+        porcelain.add(middle_repo, paths=[middle_file])
+
+        # Add the nested submodule to the middle repository
+        porcelain.submodule_add(middle_repo, nested_repo_path, "nested")
+
+        # Manually add the nested submodule to the index
+        from dulwich.index import IndexEntry
+        from dulwich.objects import S_IFGITLINK
+
+        middle_index = middle_repo.open_index()
+        middle_index[b"nested"] = IndexEntry(
+            ctime=0,
+            mtime=0,
+            dev=0,
+            ino=0,
+            mode=S_IFGITLINK,
+            uid=0,
+            gid=0,
+            size=0,
+            sha=nested_commit,
+            flags=0,
+        )
+        middle_index.write()
+
+        porcelain.add(middle_repo, paths=[".gitmodules"])
+        middle_commit = porcelain.commit(
+            middle_repo,
+            message=b"Add nested submodule",
+            author=b"Test Author <test@example.com>",
+            committer=b"Test Committer <test@example.com>",
+        )
+
+        # Add the middle submodule to the main repository
+        porcelain.submodule_add(self.repo, middle_repo_path, "middle")
+
+        # Manually add the middle submodule to the index
+        main_index = self.repo.open_index()
+        main_index[b"middle"] = IndexEntry(
+            ctime=0,
+            mtime=0,
+            dev=0,
+            ino=0,
+            mode=S_IFGITLINK,
+            uid=0,
+            gid=0,
+            size=0,
+            sha=middle_commit,
+            flags=0,
+        )
+        main_index.write()
+
+        porcelain.add(self.repo, paths=[".gitmodules"])
+        porcelain.commit(
+            self.repo,
+            message=b"Add middle submodule",
+            author=b"Test Author <test@example.com>",
+            committer=b"Test Committer <test@example.com>",
+        )
+
+        # Initialize and recursively update the submodules
+        porcelain.submodule_init(self.repo)
+        porcelain.submodule_update(self.repo, recursive=True)
+
+        # Check that the middle submodule directory and file exist
+        middle_submodule_path = os.path.join(self.repo.path, "middle")
+        self.assertTrue(os.path.exists(middle_submodule_path))
+
+        middle_submodule_file = os.path.join(middle_submodule_path, "middle.txt")
+        self.assertTrue(os.path.exists(middle_submodule_file))
+        with open(middle_submodule_file) as f:
+            self.assertEqual(f.read(), "middle submodule content")
+
+        # Check that the nested submodule directory and file exist
+        nested_submodule_path = os.path.join(self.repo.path, "middle", "nested")
+        self.assertTrue(os.path.exists(nested_submodule_path))
+
+        nested_submodule_file = os.path.join(nested_submodule_path, "nested.txt")
+        self.assertTrue(os.path.exists(nested_submodule_file))
+        with open(nested_submodule_file) as f:
+            self.assertEqual(f.read(), "nested submodule content")
+
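+    # Recursive update is assumed to walk depth-first: check out this repo's
+    # submodules, then recurse into each of them (a sketch with hypothetical
+    # helpers, not the actual porcelain internals):
+    #
+    #   def submodule_update(repo, recursive=False):
+    #       for sub in read_gitmodules(repo):
+    #           checkout_submodule(repo, sub)
+    #           if recursive:
+    #               submodule_update(open_submodule(repo, sub), recursive=True)
+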
 
 class PushTests(PorcelainTestCase):
     def test_simple(self) -> None:
@@ -10594,3 +10706,96 @@ class GrepTests(PorcelainTestCase):
         outstream = StringIO()
         with self.assertRaises(ValueError):
             porcelain.grep(empty_repo, "pattern", outstream=outstream)
+
+
+class ReplaceListTests(PorcelainTestCase):
+    def test_empty(self) -> None:
+        """Test listing replacements when there are none."""
+        replacements = porcelain.replace_list(self.repo)
+        self.assertEqual([], replacements)
+
+    def test_list_replacements(self) -> None:
+        """Test listing replacement refs."""
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create a replacement
+        porcelain.replace_create(self.repo, c1.id, c2.id)
+
+        # List replacements
+        replacements = porcelain.replace_list(self.repo)
+        self.assertEqual(1, len(replacements))
+        self.assertEqual((c1.id, c2.id), replacements[0])
+
+
+class ReplaceCreateTests(PorcelainTestCase):
+    def test_create_replacement(self) -> None:
+        """Test creating a replacement ref."""
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create a replacement
+        porcelain.replace_create(self.repo, c1.id, c2.id)
+
+        # Verify the replacement ref was created (c1.id is already 40-char hex bytes)
+        replace_ref = b"refs/replace/" + c1.id
+        self.assertIn(replace_ref, self.repo.refs)
+        self.assertEqual(c2.id, self.repo.refs[replace_ref])
+
+    def test_create_replacement_with_bytes(self) -> None:
+        """Test creating a replacement ref with bytes arguments."""
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create a replacement using bytes arguments
+        porcelain.replace_create(self.repo, c1.id, c2.id)
+
+        # Verify the replacement ref was created
+        replace_ref = b"refs/replace/" + c1.id
+        self.assertIn(replace_ref, self.repo.refs)
+        self.assertEqual(c2.id, self.repo.refs[replace_ref])
+
+
+class ReplaceDeleteTests(PorcelainTestCase):
+    def test_delete_replacement(self) -> None:
+        """Test deleting a replacement ref."""
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create a replacement
+        porcelain.replace_create(self.repo, c1.id, c2.id)
+
+        # Verify it exists
+        replacements = porcelain.replace_list(self.repo)
+        self.assertEqual(1, len(replacements))
+
+        # Delete the replacement
+        porcelain.replace_delete(self.repo, c1.id)
+
+        # Verify it's gone
+        replacements = porcelain.replace_list(self.repo)
+        self.assertEqual(0, len(replacements))
+
+    def test_delete_replacement_with_bytes(self) -> None:
+        """Test deleting a replacement ref with bytes argument."""
+        [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Create a replacement
+        porcelain.replace_create(self.repo, c1.id, c2.id)
+
+        # Delete using bytes argument
+        porcelain.replace_delete(self.repo, c1.id)
+
+        # Verify it's gone
+        replacements = porcelain.replace_list(self.repo)
+        self.assertEqual(0, len(replacements))
+
+    def test_delete_nonexistent_replacement(self) -> None:
+        """Test deleting a replacement ref that doesn't exist raises KeyError."""
+        [c1] = build_commit_graph(self.repo.object_store, [[1]])
+        self.repo[b"HEAD"] = c1.id
+
+        # Try to delete a non-existent replacement
+        with self.assertRaises(KeyError):
+            porcelain.replace_delete(self.repo, c1.id)

+ 149 - 0
tests/test_refs.py

@@ -33,6 +33,7 @@ from dulwich.objects import ZERO_SHA
 from dulwich.refs import (
     DictRefsContainer,
     InfoRefsContainer,
+    NamespacedRefsContainer,
     SymrefLoop,
     _split_ref_line,
     check_ref_format,
@@ -1301,3 +1302,151 @@ class RefUtilityFunctionsTests(TestCase):
 
         with self.assertRaises(ValueError):
             extract_tag_name(b"v1.0")
+
+
+class NamespacedRefsContainerTests(TestCase):
+    """Tests for NamespacedRefsContainer."""
+
+    def setUp(self) -> None:
+        TestCase.setUp(self)
+        # Create an underlying refs container
+        self._underlying_refs = DictRefsContainer(dict(_TEST_REFS))
+        # Create a namespaced view
+        self._refs = NamespacedRefsContainer(self._underlying_refs, b"foo")
+
+    def test_namespace_prefix_simple(self) -> None:
+        """Test simple namespace prefix."""
+        refs = NamespacedRefsContainer(self._underlying_refs, b"foo")
+        self.assertEqual(b"refs/namespaces/foo/", refs._namespace_prefix)
+
+    def test_namespace_prefix_nested(self) -> None:
+        """Test nested namespace prefix."""
+        refs = NamespacedRefsContainer(self._underlying_refs, b"foo/bar")
+        self.assertEqual(
+            b"refs/namespaces/foo/refs/namespaces/bar/", refs._namespace_prefix
+        )
+
+    def test_allkeys_empty_namespace(self) -> None:
+        """Test that newly created namespace has no refs except HEAD."""
+        # HEAD is shared across namespaces, so it appears even in empty namespace
+        self.assertEqual({b"HEAD"}, self._refs.allkeys())
+
+    def test_setitem_and_getitem(self) -> None:
+        """Test setting and getting refs in namespace."""
+        sha = b"9" * 40
+        self._refs[b"refs/heads/master"] = sha
+        self.assertEqual(sha, self._refs[b"refs/heads/master"])
+
+        # Verify it's stored with the namespace prefix in underlying container
+        self.assertIn(
+            b"refs/namespaces/foo/refs/heads/master", self._underlying_refs.allkeys()
+        )
+        self.assertEqual(
+            sha, self._underlying_refs[b"refs/namespaces/foo/refs/heads/master"]
+        )
+
+    def test_head_not_namespaced(self) -> None:
+        """Test that HEAD is not namespaced."""
+        sha = b"a" * 40
+        self._refs[b"HEAD"] = sha
+        self.assertEqual(sha, self._refs[b"HEAD"])
+
+        # HEAD should be directly in the underlying container, not namespaced
+        self.assertIn(b"HEAD", self._underlying_refs.allkeys())
+        self.assertNotIn(b"refs/namespaces/foo/HEAD", self._underlying_refs.allkeys())
+
+    def test_isolation_between_namespaces(self) -> None:
+        """Test that different namespaces are isolated."""
+        sha1 = b"a" * 40
+        sha2 = b"b" * 40
+
+        # Create two different namespaces
+        refs_foo = NamespacedRefsContainer(self._underlying_refs, b"foo")
+        refs_bar = NamespacedRefsContainer(self._underlying_refs, b"bar")
+
+        # Set ref in foo namespace
+        refs_foo[b"refs/heads/master"] = sha1
+
+        # Set ref in bar namespace
+        refs_bar[b"refs/heads/master"] = sha2
+
+        # Each namespace should only see its own refs (plus shared HEAD)
+        self.assertEqual(sha1, refs_foo[b"refs/heads/master"])
+        self.assertEqual(sha2, refs_bar[b"refs/heads/master"])
+        self.assertEqual({b"HEAD", b"refs/heads/master"}, refs_foo.allkeys())
+        self.assertEqual({b"HEAD", b"refs/heads/master"}, refs_bar.allkeys())
+
+    def test_allkeys_filters_namespace(self) -> None:
+        """Test that allkeys only returns refs in the namespace."""
+        # Add refs in multiple namespaces
+        self._underlying_refs[b"refs/namespaces/foo/refs/heads/master"] = b"a" * 40
+        self._underlying_refs[b"refs/namespaces/foo/refs/heads/develop"] = b"b" * 40
+        self._underlying_refs[b"refs/namespaces/bar/refs/heads/feature"] = b"c" * 40
+        self._underlying_refs[b"refs/heads/global"] = b"d" * 40
+
+        # Only refs in 'foo' namespace should be visible (plus HEAD which is shared)
+        foo_refs = NamespacedRefsContainer(self._underlying_refs, b"foo")
+        self.assertEqual(
+            {b"HEAD", b"refs/heads/master", b"refs/heads/develop"}, foo_refs.allkeys()
+        )
+
+    def test_set_symbolic_ref(self) -> None:
+        """Test symbolic ref creation in namespace."""
+        sha = b"e" * 40
+        self._refs[b"refs/heads/develop"] = sha
+        self._refs.set_symbolic_ref(b"refs/heads/main", b"refs/heads/develop")
+
+        # Both target and link should be namespaced
+        self.assertIn(
+            b"refs/namespaces/foo/refs/heads/main", self._underlying_refs.allkeys()
+        )
+        self.assertEqual(
+            b"ref: refs/namespaces/foo/refs/heads/develop",
+            self._underlying_refs.read_loose_ref(
+                b"refs/namespaces/foo/refs/heads/main"
+            ),
+        )
+
+    def test_remove_if_equals(self) -> None:
+        """Test removing refs from namespace."""
+        sha = b"f" * 40
+        self._refs[b"refs/heads/temp"] = sha
+
+        # Remove the ref
+        self.assertTrue(self._refs.remove_if_equals(b"refs/heads/temp", sha))
+        self.assertNotIn(b"refs/heads/temp", self._refs.allkeys())
+        self.assertNotIn(
+            b"refs/namespaces/foo/refs/heads/temp", self._underlying_refs.allkeys()
+        )
+
+    def test_get_packed_refs(self) -> None:
+        """Test get_packed_refs returns empty dict for DictRefsContainer."""
+        # DictRefsContainer doesn't support packed refs, so just verify
+        # the wrapper returns an empty dict
+        packed = self._refs.get_packed_refs()
+        self.assertEqual({}, packed)
+
+    def test_add_if_new(self) -> None:
+        """Test add_if_new in namespace."""
+        sha = b"1" * 40
+        # Should succeed - ref doesn't exist
+        self.assertTrue(self._refs.add_if_new(b"refs/heads/new", sha))
+        self.assertEqual(sha, self._refs[b"refs/heads/new"])
+
+        # Should fail - ref already exists
+        self.assertFalse(self._refs.add_if_new(b"refs/heads/new", b"2" * 40))
+        self.assertEqual(sha, self._refs[b"refs/heads/new"])
+
+    def test_set_if_equals(self) -> None:
+        """Test set_if_equals in namespace."""
+        sha1 = b"a" * 40
+        sha2 = b"b" * 40
+        self._refs[b"refs/heads/test"] = sha1
+
+        # Should fail with wrong old value
+        self.assertFalse(self._refs.set_if_equals(b"refs/heads/test", b"c" * 40, sha2))
+        self.assertEqual(sha1, self._refs[b"refs/heads/test"])
+
+        # Should succeed with correct old value
+        self.assertTrue(self._refs.set_if_equals(b"refs/heads/test", sha1, sha2))
+        self.assertEqual(sha2, self._refs[b"refs/heads/test"])

+ 274 - 0
tests/test_trailers.py

@@ -0,0 +1,274 @@
+# test_trailers.py -- tests for git trailers
+# Copyright (C) 2025 Jelmer Vernooij <jelmer@jelmer.uk>
+#
+# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
+# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
+# General Public License as published by the Free Software Foundation; version 2.0
+# or (at your option) any later version. You can redistribute it and/or
+# modify it under the terms of either of these two licenses.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# You should have received a copy of the licenses; if not, see
+# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
+# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
+# License, Version 2.0.
+#
+
+"""Tests for dulwich.trailers."""
+
+import unittest
+
+from dulwich.trailers import (
+    Trailer,
+    add_trailer_to_message,
+    format_trailers,
+    parse_trailers,
+)
+
+
+class TestTrailer(unittest.TestCase):
+    """Tests for the Trailer class."""
+
+    def test_init(self) -> None:
+        """Test Trailer initialization."""
+        trailer = Trailer("Signed-off-by", "Alice <alice@example.com>")
+        self.assertEqual(trailer.key, "Signed-off-by")
+        self.assertEqual(trailer.value, "Alice <alice@example.com>")
+        self.assertEqual(trailer.separator, ":")
+
+    def test_str(self) -> None:
+        """Test Trailer string representation."""
+        trailer = Trailer("Signed-off-by", "Alice <alice@example.com>")
+        self.assertEqual(str(trailer), "Signed-off-by: Alice <alice@example.com>")
+
+    def test_equality(self) -> None:
+        """Test Trailer equality."""
+        t1 = Trailer("Signed-off-by", "Alice")
+        t2 = Trailer("Signed-off-by", "Alice")
+        t3 = Trailer("Signed-off-by", "Bob")
+        self.assertEqual(t1, t2)
+        self.assertNotEqual(t1, t3)
+
+
+class TestParseTrailers(unittest.TestCase):
+    """Tests for parse_trailers function."""
+
+    def test_no_trailers(self) -> None:
+        """Test parsing a message with no trailers."""
+        message = b"Subject\n\nBody text\n"
+        body, trailers = parse_trailers(message)
+        self.assertEqual(body, b"Subject\n\nBody text\n")
+        self.assertEqual(trailers, [])
+
+    def test_simple_trailer(self) -> None:
+        """Test parsing a message with a single trailer."""
+        message = b"Subject\n\nBody text\n\nSigned-off-by: Alice <alice@example.com>\n"
+        body, trailers = parse_trailers(message)
+        self.assertEqual(body, b"Subject\n\nBody text\n")
+        self.assertEqual(len(trailers), 1)
+        self.assertEqual(trailers[0].key, "Signed-off-by")
+        self.assertEqual(trailers[0].value, "Alice <alice@example.com>")
+
+    def test_multiple_trailers(self) -> None:
+        """Test parsing a message with multiple trailers."""
+        message = b"Subject\n\nBody text\n\nSigned-off-by: Alice <alice@example.com>\nReviewed-by: Bob <bob@example.com>\n"
+        body, trailers = parse_trailers(message)
+        self.assertEqual(body, b"Subject\n\nBody text\n")
+        self.assertEqual(len(trailers), 2)
+        self.assertEqual(trailers[0].key, "Signed-off-by")
+        self.assertEqual(trailers[0].value, "Alice <alice@example.com>")
+        self.assertEqual(trailers[1].key, "Reviewed-by")
+        self.assertEqual(trailers[1].value, "Bob <bob@example.com>")
+
+    def test_trailer_with_multiline_value(self) -> None:
+        """Test parsing a trailer with multiline value."""
+        message = b"Subject\n\nBody\n\nTrailer: line1\n line2\n line3\n"
+        _body, trailers = parse_trailers(message)
+        self.assertEqual(len(trailers), 1)
+        self.assertEqual(trailers[0].key, "Trailer")
+        self.assertEqual(trailers[0].value, "line1 line2 line3")
+
+    def test_no_blank_line_before_trailer(self) -> None:
+        """Test that trailers without preceding blank line are not parsed."""
+        message = b"Subject\nBody\nSigned-off-by: Alice\n"
+        body, trailers = parse_trailers(message)
+        self.assertEqual(body, message)
+        self.assertEqual(trailers, [])
+
+    def test_trailer_at_end_only(self) -> None:
+        """Test that trailers must be at the end of the message."""
+        message = b"Subject\n\nSigned-off-by: Alice\n\nMore body text\n"
+        body, trailers = parse_trailers(message)
+        # The "Signed-off-by" is not at the end, so it shouldn't be parsed as a trailer
+        self.assertEqual(body, message)
+        self.assertEqual(trailers, [])
+
+    def test_different_separators(self) -> None:
+        """Test parsing trailers with different separators."""
+        message = b"Subject\n\nBody\n\nKey= value\n"
+        _body, trailers = parse_trailers(message, separators="=")
+        self.assertEqual(len(trailers), 1)
+        self.assertEqual(trailers[0].key, "Key")
+        self.assertEqual(trailers[0].value, "value")
+        self.assertEqual(trailers[0].separator, "=")
+
+    def test_empty_message(self) -> None:
+        """Test parsing an empty message."""
+        body, trailers = parse_trailers(b"")
+        self.assertEqual(body, b"")
+        self.assertEqual(trailers, [])
+
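+# The parsing rules the tests above encode, following git-interpret-trailers(1):
+# the trailer block is the final paragraph of the message, separated from the
+# body by a blank line, and continuation lines that start with whitespace are
+# unfolded into the preceding trailer's value with single spaces.
+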
+
+class TestFormatTrailers(unittest.TestCase):
+    """Tests for format_trailers function."""
+
+    def test_empty_list(self) -> None:
+        """Test formatting an empty list of trailers."""
+        result = format_trailers([])
+        self.assertEqual(result, b"")
+
+    def test_single_trailer(self) -> None:
+        """Test formatting a single trailer."""
+        trailers = [Trailer("Signed-off-by", "Alice <alice@example.com>")]
+        result = format_trailers(trailers)
+        self.assertEqual(result, b"Signed-off-by: Alice <alice@example.com>\n")
+
+    def test_multiple_trailers(self) -> None:
+        """Test formatting multiple trailers."""
+        trailers = [
+            Trailer("Signed-off-by", "Alice <alice@example.com>"),
+            Trailer("Reviewed-by", "Bob <bob@example.com>"),
+        ]
+        result = format_trailers(trailers)
+        expected = b"Signed-off-by: Alice <alice@example.com>\nReviewed-by: Bob <bob@example.com>\n"
+        self.assertEqual(result, expected)
+
+
+class TestAddTrailerToMessage(unittest.TestCase):
+    """Tests for add_trailer_to_message function."""
+
+    def test_add_to_empty_message(self) -> None:
+        """Test adding a trailer to an empty message."""
+        message = b""
+        result = add_trailer_to_message(message, "Signed-off-by", "Alice")
+        # Empty messages should get a trailer added
+        self.assertIn(b"Signed-off-by: Alice", result)
+
+    def test_add_to_message_without_trailers(self) -> None:
+        """Test adding a trailer to a message without existing trailers."""
+        message = b"Subject\n\nBody text\n"
+        result = add_trailer_to_message(message, "Signed-off-by", "Alice")
+        expected = b"Subject\n\nBody text\n\nSigned-off-by: Alice\n"
+        self.assertEqual(result, expected)
+
+    def test_add_to_message_with_existing_trailers(self) -> None:
+        """Test adding a trailer to a message with existing trailers."""
+        message = b"Subject\n\nBody\n\nSigned-off-by: Alice\n"
+        result = add_trailer_to_message(message, "Reviewed-by", "Bob")
+        self.assertIn(b"Signed-off-by: Alice", result)
+        self.assertIn(b"Reviewed-by: Bob", result)
+
+    def test_add_duplicate_trailer_default(self) -> None:
+        """Test adding a duplicate trailer with default if_exists."""
+        message = b"Subject\n\nBody\n\nSigned-off-by: Alice\n"
+        result = add_trailer_to_message(
+            message, "Signed-off-by", "Alice", if_exists="addIfDifferentNeighbor"
+        )
+        # Should not add duplicate
+        self.assertEqual(result, message)
+
+    def test_add_duplicate_trailer_add(self) -> None:
+        """Test adding a duplicate trailer with if_exists=add."""
+        message = b"Subject\n\nBody\n\nSigned-off-by: Alice\n"
+        result = add_trailer_to_message(
+            message, "Signed-off-by", "Alice", if_exists="add"
+        )
+        # Should add duplicate
+        self.assertEqual(result.count(b"Signed-off-by: Alice"), 2)
+
+    def test_add_different_value(self) -> None:
+        """Test adding a trailer with same key but different value."""
+        message = b"Subject\n\nBody\n\nSigned-off-by: Alice\n"
+        result = add_trailer_to_message(message, "Signed-off-by", "Bob")
+        self.assertIn(b"Signed-off-by: Alice", result)
+        self.assertIn(b"Signed-off-by: Bob", result)
+
+    def test_replace_existing(self) -> None:
+        """Test replacing existing trailers with if_exists=replace."""
+        message = b"Subject\n\nBody\n\nSigned-off-by: Alice\nSigned-off-by: Bob\n"
+        result = add_trailer_to_message(
+            message, "Signed-off-by", "Charlie", if_exists="replace"
+        )
+        self.assertNotIn(b"Alice", result)
+        self.assertNotIn(b"Bob", result)
+        self.assertIn(b"Signed-off-by: Charlie", result)
+
+    def test_do_nothing_if_exists(self) -> None:
+        """Test if_exists=doNothing."""
+        message = b"Subject\n\nBody\n\nSigned-off-by: Alice\n"
+        result = add_trailer_to_message(
+            message, "Signed-off-by", "Bob", if_exists="doNothing"
+        )
+        # Should not modify the message
+        self.assertEqual(result, message)
+
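+    # if_exists modes exercised in this class, mirroring git's trailer.ifexists
+    # values:
+    #   addIfDifferentNeighbor - skip an exact duplicate of the neighbouring
+    #                            trailer (the default used above)
+    #   add                    - always append, duplicates included
+    #   replace                - drop existing same-key trailers, then add
+    #   doNothing              - leave the message unchanged
+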
+    def test_if_missing_do_nothing(self) -> None:
+        """Test if_missing=doNothing."""
+        message = b"Subject\n\nBody\n"
+        result = add_trailer_to_message(
+            message, "Signed-off-by", "Alice", if_missing="doNothing"
+        )
+        # Should not add the trailer
+        self.assertNotIn(b"Signed-off-by", result)
+
+    def test_where_start(self) -> None:
+        """Test adding trailer at start."""
+        message = b"Subject\n\nBody\n\nReviewed-by: Bob\n"
+        result = add_trailer_to_message(
+            message, "Signed-off-by", "Alice", where="start"
+        )
+        # Parse to check order
+        _, trailers = parse_trailers(result)
+        self.assertEqual(len(trailers), 2)
+        self.assertEqual(trailers[0].key, "Signed-off-by")
+        self.assertEqual(trailers[1].key, "Reviewed-by")
+
+    def test_custom_separator(self) -> None:
+        """Test adding trailer with custom separator."""
+        message = b"Subject\n\nBody\n"
+        result = add_trailer_to_message(message, "Key", "value", separator="=")
+        self.assertIn(b"Key= value", result)
+
+
+class TestIntegration(unittest.TestCase):
+    """Integration tests for trailers."""
+
+    def test_parse_and_format_roundtrip(self) -> None:
+        """Test that parse and format are inverse operations."""
+        original = b"Subject\n\nBody\n\nSigned-off-by: Alice\nReviewed-by: Bob\n"
+        body, trailers = parse_trailers(original)
+        formatted = body
+        if body and not body.endswith(b"\n"):
+            formatted += b"\n"
+        if trailers:
+            formatted += b"\n"
+            formatted += format_trailers(trailers)
+        self.assertEqual(formatted, original)
+
+    def test_add_multiple_trailers(self) -> None:
+        """Test adding multiple trailers in sequence."""
+        message = b"Subject\n\nBody\n"
+        message = add_trailer_to_message(message, "Signed-off-by", "Alice")
+        message = add_trailer_to_message(message, "Reviewed-by", "Bob")
+        message = add_trailer_to_message(message, "Tested-by", "Charlie")
+
+        _, trailers = parse_trailers(message)
+        self.assertEqual(len(trailers), 3)
+        self.assertEqual(trailers[0].key, "Signed-off-by")
+        self.assertEqual(trailers[1].key, "Reviewed-by")
+        self.assertEqual(trailers[2].key, "Tested-by")