Ver código fonte

Add more docstrings

Jelmer Vernooij 5 meses atrás
pai
commit
d122cc1386

+ 9 - 0
dulwich/__init__.py

@@ -48,6 +48,15 @@ except ImportError:
         since: Optional[Union[str, tuple[int, ...]]] = None,
         remove_in: Optional[Union[str, tuple[int, ...]]] = None,
     ) -> Callable[[F], F]:
+        """Decorator to mark functions as deprecated.
+        
+        Args:
+            since: Version when the function was deprecated
+            remove_in: Version when the function will be removed
+        
+        Returns:
+            Decorator function
+        """
         def decorator(func: F) -> F:
             import functools
             import warnings

+ 16 - 2
dulwich/archive.py

@@ -51,10 +51,23 @@ class ChunkedBytesIO:
     """
 
     def __init__(self, contents: list[bytes]) -> None:
+        """Initialize ChunkedBytesIO.
+        
+        Args:
+            contents: List of byte chunks
+        """
         self.contents = contents
         self.pos = (0, 0)
 
     def read(self, maxbytes: Optional[int] = None) -> bytes:
+        """Read bytes from the chunked stream.
+        
+        Args:
+            maxbytes: Maximum number of bytes to read (None for all)
+        
+        Returns:
+            Bytes read
+        """
         if maxbytes is None or maxbytes < 0:
             remaining = None
         else:
@@ -98,6 +111,7 @@ def tar_stream(
       tree: Tree object for the tree root
       mtime: UNIX timestamp that is assigned as the modification time for
         all files, and the gzip header modification time if format='gz'
+      prefix: Optional prefix to prepend to all paths in the archive
       format: Optional compression format for tarball
     Returns:
       Bytestrings
@@ -150,8 +164,8 @@ def tar_stream(
 def _walk_tree(
     store: "BaseObjectStore", tree: "Tree", root: bytes = b""
 ) -> Generator[tuple[bytes, "TreeEntry"], None, None]:
-    """Recursively walk a dulwich Tree, yielding tuples of
-    (absolute path, TreeEntry) along the way.
+    """Recursively walk a dulwich Tree, yielding tuples of (absolute path, TreeEntry) along the way.
+
     """
     for entry in tree.iteritems():
         entry_abspath = posixpath.join(root, entry.path)

+ 5 - 0
dulwich/attrs.py

@@ -164,6 +164,11 @@ class Pattern:
     """A single gitattributes pattern."""
 
     def __init__(self, pattern: bytes):
+        """Initialize GitAttributesPattern.
+        
+        Args:
+            pattern: Attribute pattern as bytes
+        """
         self.pattern = pattern
         self._regex: Optional[re.Pattern[bytes]] = None
         self._compile()

+ 5 - 0
dulwich/bisect.py

@@ -32,6 +32,11 @@ class BisectState:
     """Manages the state of a bisect session."""
 
     def __init__(self, repo: Repo) -> None:
+        """Initialize BisectState.
+        
+        Args:
+            repo: Repository to perform bisect on
+        """
         self.repo = repo
         self._bisect_dir = os.path.join(repo.controldir(), "BISECT_START")
 

+ 10 - 0
dulwich/bundle.py

@@ -32,6 +32,8 @@ if TYPE_CHECKING:
 
 
 class Bundle:
+    """Git bundle object representation."""
+    
     version: Optional[int]
 
     capabilities: dict[str, Optional[str]]
@@ -40,6 +42,7 @@ class Bundle:
     pack_data: PackData
 
     def __repr__(self) -> str:
+        """Return string representation of Bundle."""
         return (
             f"<{type(self).__name__}(version={self.version}, "
             f"capabilities={self.capabilities}, "
@@ -48,6 +51,7 @@ class Bundle:
         )
 
     def __eq__(self, other: object) -> bool:
+        """Check equality with another Bundle."""
         if not isinstance(other, type(self)):
             return False
         if self.version != other.version:
@@ -154,6 +158,12 @@ def read_bundle(f: BinaryIO) -> Bundle:
 
 
 def write_bundle(f: BinaryIO, bundle: Bundle) -> None:
+    """Write a bundle to a file.
+    
+    Args:
+        f: File-like object to write to
+        bundle: Bundle object to write
+    """
     version = bundle.version
     if version is None:
         if bundle.capabilities:

Diferenças do arquivo suprimidas por serem muito extensas
+ 422 - 0
dulwich/cli.py


+ 52 - 0
dulwich/client.py

@@ -150,6 +150,11 @@ class InvalidWants(Exception):
     """Invalid wants."""
 
     def __init__(self, wants) -> None:
+        """Initialize InvalidWants exception.
+
+        Args:
+            wants: List of invalid wants
+        """
         Exception.__init__(
             self, f"requested wants not in server provided refs: {wants!r}"
         )
@@ -159,6 +164,12 @@ class HTTPUnauthorized(Exception):
     """Raised when authentication fails."""
 
     def __init__(self, www_authenticate, url) -> None:
+        """Initialize HTTPUnauthorized exception.
+
+        Args:
+            www_authenticate: WWW-Authenticate header value
+            url: URL that requires authentication
+        """
         Exception.__init__(self, "No valid credentials provided")
         self.www_authenticate = www_authenticate
         self.url = url
@@ -168,6 +179,12 @@ class HTTPProxyUnauthorized(Exception):
     """Raised when proxy authentication fails."""
 
     def __init__(self, proxy_authenticate, url) -> None:
+        """Initialize HTTPProxyUnauthorized exception.
+
+        Args:
+            proxy_authenticate: Proxy-Authenticate header value
+            url: URL that requires proxy authentication
+        """
         Exception.__init__(self, "No valid proxy credentials provided")
         self.proxy_authenticate = proxy_authenticate
         self.url = url
@@ -211,6 +228,7 @@ class ReportStatusParser:
     """Handle status as reported by servers with 'report-status' capability."""
 
     def __init__(self) -> None:
+        """Initialize ReportStatusParser."""
         self._done = False
         self._pack_status = None
         self._ref_statuses: list[bytes] = []
@@ -409,6 +427,15 @@ class FetchPackResult(_DeprecatedDictProxy):
     def __init__(
         self, refs, symrefs, agent, new_shallow=None, new_unshallow=None
     ) -> None:
+        """Initialize FetchPackResult.
+
+        Args:
+            refs: Dictionary with all remote refs
+            symrefs: Dictionary with remote symrefs
+            agent: User agent string
+            new_shallow: New shallow commits
+            new_unshallow: New unshallow commits
+        """
         self.refs = refs
         self.symrefs = symrefs
         self.agent = agent
@@ -440,6 +467,12 @@ class LsRemoteResult(_DeprecatedDictProxy):
     """
 
     def __init__(self, refs, symrefs) -> None:
+        """Initialize LsRemoteResult.
+
+        Args:
+            refs: Dictionary with all remote refs
+            symrefs: Dictionary with remote symrefs
+        """
         self.refs = refs
         self.symrefs = symrefs
 
@@ -476,6 +509,13 @@ class SendPackResult(_DeprecatedDictProxy):
     """
 
     def __init__(self, refs, agent=None, ref_status=None) -> None:
+        """Initialize SendPackResult.
+
+        Args:
+            refs: Dictionary with all remote refs
+            agent: User agent string
+            ref_status: Optional dictionary mapping ref name to error message
+        """
         self.refs = refs
         self.agent = agent
         self.ref_status = ref_status
@@ -1559,6 +1599,18 @@ class TraditionalGitClient(GitClient):
         subdirs=None,
         prefix=None,
     ) -> None:
+        """Request an archive of a specific commit.
+
+        Args:
+            path: Repository path
+            committish: Commit ID or ref to archive
+            write_data: Function to write archive data
+            progress: Optional progress callback
+            write_error: Optional error callback
+            format: Optional archive format
+            subdirs: Optional subdirectories to include
+            prefix: Optional prefix for archived files
+        """
         proto, can_read, stderr = self._connect(b"upload-archive", path)
         with proto:
             if format is not None:

+ 1 - 2
dulwich/contrib/diffstat.py

@@ -122,8 +122,7 @@ def _parse_patch(
 # note must all done using bytes not string because on linux filenames
 # may not be encodable even to utf-8
 def diffstat(lines: list[bytes], max_width: int = 80) -> bytes:
-    """Generate summary statistics from a git style diff ala
-       (git diff tag1 tag2 --stat).
+    """Generate summary statistics from a git style diff ala (git diff tag1 tag2 --stat).
 
     Args:
       lines: list of byte string "lines" from the diff to be parsed

+ 72 - 0
dulwich/errors.py

@@ -39,6 +39,13 @@ class ChecksumMismatch(Exception):
         got: Union[bytes, str],
         extra: Optional[str] = None,
     ) -> None:
+        """Initialize a ChecksumMismatch exception.
+
+        Args:
+            expected: The expected checksum value (bytes or hex string).
+            got: The actual checksum value (bytes or hex string).
+            extra: Optional additional error information.
+        """
         if isinstance(expected, bytes) and len(expected) == 20:
             expected_str = binascii.hexlify(expected).decode("ascii")
         else:
@@ -70,6 +77,13 @@ class WrongObjectException(Exception):
     type_name: str
 
     def __init__(self, sha: bytes, *args: object, **kwargs: object) -> None:
+        """Initialize a WrongObjectException.
+
+        Args:
+            sha: The SHA of the object that was not of the expected type.
+            *args: Additional positional arguments.
+            **kwargs: Additional keyword arguments.
+        """
         Exception.__init__(self, f"{sha.decode('ascii')} is not a {self.type_name}")
 
 
@@ -101,6 +115,13 @@ class MissingCommitError(Exception):
     """Indicates that a commit was not found in the repository."""
 
     def __init__(self, sha: bytes, *args: object, **kwargs: object) -> None:
+        """Initialize a MissingCommitError.
+
+        Args:
+            sha: The SHA of the missing commit.
+            *args: Additional positional arguments.
+            **kwargs: Additional keyword arguments.
+        """
         self.sha = sha
         Exception.__init__(self, f"{sha.decode('ascii')} is not in the revision store")
 
@@ -109,6 +130,13 @@ class ObjectMissing(Exception):
     """Indicates that a requested object is missing."""
 
     def __init__(self, sha: bytes, *args: object, **kwargs: object) -> None:
+        """Initialize an ObjectMissing exception.
+
+        Args:
+            sha: The SHA of the missing object.
+            *args: Additional positional arguments.
+            **kwargs: Additional keyword arguments.
+        """
         Exception.__init__(self, f"{sha.decode('ascii')} is not in the pack")
 
 
@@ -116,6 +144,12 @@ class ApplyDeltaError(Exception):
     """Indicates that applying a delta failed."""
 
     def __init__(self, *args: object, **kwargs: object) -> None:
+        """Initialize an ApplyDeltaError.
+
+        Args:
+            *args: Error message and additional positional arguments.
+            **kwargs: Additional keyword arguments.
+        """
         Exception.__init__(self, *args, **kwargs)
 
 
@@ -123,6 +157,12 @@ class NotGitRepository(Exception):
     """Indicates that no Git repository was found."""
 
     def __init__(self, *args: object, **kwargs: object) -> None:
+        """Initialize a NotGitRepository exception.
+
+        Args:
+            *args: Error message and additional positional arguments.
+            **kwargs: Additional keyword arguments.
+        """
         Exception.__init__(self, *args, **kwargs)
 
 
@@ -130,9 +170,23 @@ class GitProtocolError(Exception):
     """Git protocol exception."""
 
     def __init__(self, *args: object, **kwargs: object) -> None:
+        """Initialize a GitProtocolError.
+
+        Args:
+            *args: Error message and additional positional arguments.
+            **kwargs: Additional keyword arguments.
+        """
         Exception.__init__(self, *args, **kwargs)
 
     def __eq__(self, other: object) -> bool:
+        """Check equality between GitProtocolError instances.
+
+        Args:
+            other: The object to compare with.
+
+        Returns:
+            True if both are GitProtocolError instances with same args, False otherwise.
+        """
         return isinstance(other, GitProtocolError) and self.args == other.args
 
 
@@ -144,6 +198,11 @@ class HangupException(GitProtocolError):
     """Hangup exception."""
 
     def __init__(self, stderr_lines: Optional[list[bytes]] = None) -> None:
+        """Initialize a HangupException.
+
+        Args:
+            stderr_lines: Optional list of stderr output lines from the remote server.
+        """
         if stderr_lines:
             super().__init__(
                 "\n".join(
@@ -155,6 +214,14 @@ class HangupException(GitProtocolError):
         self.stderr_lines = stderr_lines
 
     def __eq__(self, other: object) -> bool:
+        """Check equality between HangupException instances.
+
+        Args:
+            other: The object to compare with.
+
+        Returns:
+            True if both are HangupException instances with same stderr_lines, False otherwise.
+        """
         return (
             isinstance(other, HangupException)
             and self.stderr_lines == other.stderr_lines
@@ -165,6 +232,11 @@ class UnexpectedCommandError(GitProtocolError):
     """Unexpected command received in a proto line."""
 
     def __init__(self, command: Optional[str]) -> None:
+        """Initialize an UnexpectedCommandError.
+
+        Args:
+            command: The unexpected command received, or None for flush-pkt.
+        """
         command_str = "flush-pkt" if command is None else f"command {command}"
         super().__init__(f"Protocol got unexpected {command_str}")
 

+ 33 - 0
dulwich/hooks.py

@@ -115,6 +115,12 @@ class PreCommitShellHook(ShellHook):
     """pre-commit shell hook."""
 
     def __init__(self, cwd: str, controldir: str) -> None:
+        """Initialize pre-commit hook.
+
+        Args:
+            cwd: Working directory for hook execution
+            controldir: Path to the git control directory (.git)
+        """
         filepath = os.path.join(controldir, "hooks", "pre-commit")
 
         ShellHook.__init__(self, "pre-commit", filepath, 0, cwd=cwd)
@@ -124,6 +130,11 @@ class PostCommitShellHook(ShellHook):
     """post-commit shell hook."""
 
     def __init__(self, controldir: str) -> None:
+        """Initialize post-commit hook.
+
+        Args:
+            controldir: Path to the git control directory (.git)
+        """
         filepath = os.path.join(controldir, "hooks", "post-commit")
 
         ShellHook.__init__(self, "post-commit", filepath, 0, cwd=controldir)
@@ -133,6 +144,11 @@ class CommitMsgShellHook(ShellHook):
     """commit-msg shell hook."""
 
     def __init__(self, controldir: str) -> None:
+        """Initialize commit-msg hook.
+
+        Args:
+            controldir: Path to the git control directory (.git)
+        """
         filepath = os.path.join(controldir, "hooks", "commit-msg")
 
         def prepare_msg(*args: bytes) -> tuple[str, ...]:
@@ -163,11 +179,28 @@ class PostReceiveShellHook(ShellHook):
     """post-receive shell hook."""
 
     def __init__(self, controldir: str) -> None:
+        """Initialize post-receive hook.
+
+        Args:
+            controldir: Path to the git control directory (.git)
+        """
         self.controldir = controldir
         filepath = os.path.join(controldir, "hooks", "post-receive")
         ShellHook.__init__(self, "post-receive", path=filepath, numparam=0)
 
     def execute(self, client_refs: list[tuple[bytes, bytes, bytes]]) -> Optional[bytes]:
+        """Execute the post-receive hook.
+
+        Args:
+            client_refs: List of tuples containing (old_sha, new_sha, ref_name)
+                        for each updated reference
+
+        Returns:
+            Output from the hook execution or None if hook doesn't exist
+
+        Raises:
+            HookError: If hook execution fails
+        """
         # do nothing if the script doesn't exist
         if not os.path.exists(self.filepath):
             return None

+ 67 - 0
dulwich/ignore.py

@@ -308,6 +308,12 @@ class Pattern:
     """A single ignore pattern."""
 
     def __init__(self, pattern: bytes, ignorecase: bool = False) -> None:
+        """Initialize a Pattern object.
+
+        Args:
+            pattern: The gitignore pattern as bytes.
+            ignorecase: Whether to perform case-insensitive matching.
+        """
         self.pattern = pattern
         self.ignorecase = ignorecase
 
@@ -334,12 +340,30 @@ class Pattern:
         self._re = re.compile(translate(pattern), flags)
 
     def __bytes__(self) -> bytes:
+        """Return the pattern as bytes.
+
+        Returns:
+            The original pattern as bytes.
+        """
         return self.pattern
 
     def __str__(self) -> str:
+        """Return the pattern as a string.
+
+        Returns:
+            The pattern decoded as a string.
+        """
         return os.fsdecode(self.pattern)
 
     def __eq__(self, other: object) -> bool:
+        """Check equality with another Pattern object.
+
+        Args:
+            other: The object to compare with.
+
+        Returns:
+            True if patterns and ignorecase flags are equal, False otherwise.
+        """
         return (
             isinstance(other, type(self))
             and self.pattern == other.pattern
@@ -347,6 +371,11 @@ class Pattern:
         )
 
     def __repr__(self) -> str:
+        """Return a string representation of the Pattern object.
+
+        Returns:
+            A string representation for debugging.
+        """
         return f"{type(self).__name__}({self.pattern!r}, {self.ignorecase!r})"
 
     def match(self, path: bytes) -> bool:
@@ -389,6 +418,13 @@ class IgnoreFilter:
         ignorecase: bool = False,
         path: Optional[str] = None,
     ) -> None:
+        """Initialize an IgnoreFilter with a set of patterns.
+
+        Args:
+            patterns: An iterable of gitignore patterns as bytes.
+            ignorecase: Whether to perform case-insensitive matching.
+            path: Optional path to the ignore file for debugging purposes.
+        """
         self._patterns: list[Pattern] = []
         self._ignorecase = ignorecase
         self._path = path
@@ -450,10 +486,20 @@ class IgnoreFilter:
     def from_path(
         cls, path: Union[str, os.PathLike], ignorecase: bool = False
     ) -> "IgnoreFilter":
+        """Create an IgnoreFilter from a file path.
+
+        Args:
+            path: Path to the ignore file.
+            ignorecase: Whether to perform case-insensitive matching.
+
+        Returns:
+            An IgnoreFilter instance with patterns loaded from the file.
+        """
         with open(path, "rb") as f:
             return cls(read_ignore_patterns(f), ignorecase, path=str(path))
 
     def __repr__(self) -> str:
+        """Return string representation of IgnoreFilter."""
         path = getattr(self, "_path", None)
         if path is not None:
             return f"{type(self).__name__}.from_path({path!r})"
@@ -465,6 +511,11 @@ class IgnoreFilterStack:
     """Check for ignore status in multiple filters."""
 
     def __init__(self, filters: list[IgnoreFilter]) -> None:
+        """Initialize an IgnoreFilterStack with multiple filters.
+
+        Args:
+            filters: A list of IgnoreFilter objects to check in order.
+        """
         self._filters = filters
 
     def is_ignored(self, path: str) -> Optional[bool]:
@@ -482,6 +533,14 @@ class IgnoreFilterStack:
                 return status
         return None
 
+    def __repr__(self) -> str:
+        """Return a string representation of the IgnoreFilterStack.
+
+        Returns:
+            A string representation for debugging.
+        """
+        return f"{type(self).__name__}({self._filters!r})"
+
 
 def default_user_ignore_filter_path(config: Config) -> str:
     """Return default user ignore filter path.
@@ -514,12 +573,20 @@ class IgnoreFilterManager:
         global_filters: list[IgnoreFilter],
         ignorecase: bool,
     ) -> None:
+        """Initialize an IgnoreFilterManager.
+
+        Args:
+            top_path: The top-level directory path to manage ignores for.
+            global_filters: List of global ignore filters to apply.
+            ignorecase: Whether to perform case-insensitive matching.
+        """
         self._path_filters: dict[str, Optional[IgnoreFilter]] = {}
         self._top_path = top_path
         self._global_filters = global_filters
         self._ignorecase = ignorecase
 
     def __repr__(self) -> str:
+        """Return string representation of IgnoreFilterManager."""
         return f"{type(self).__name__}({self._top_path}, {self._global_filters!r}, {self._ignorecase!r})"
 
     def _load_path(self, path: str) -> Optional[IgnoreFilter]:

+ 4 - 2
dulwich/index.py

@@ -495,6 +495,7 @@ class IndexEntry:
 
     def set_skip_worktree(self, skip: bool = True) -> None:
         """Helper method to set or clear the skip-worktree bit in extended_flags.
+
         Also sets FLAG_EXTENDED in self.flags if needed.
         """
         if skip:
@@ -815,6 +816,7 @@ def read_index_dict(
     f: BinaryIO,
 ) -> dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]:
     """Read an index file and return it as a dictionary.
+
        Dict Key is tuple of path and stage number, as
             path alone is not unique
     Args:
@@ -886,6 +888,7 @@ def write_index_dict(
     extensions: Optional[list[IndexExtension]] = None,
 ) -> None:
     """Write an index file based on the contents of a dictionary.
+
     being careful to sort by path and then by stage.
     """
     entries_list = []
@@ -1252,8 +1255,7 @@ def changes_from_tree(
         tuple[Optional[bytes], Optional[bytes]],
     ]
 ]:
-    """Find the differences between the contents of a tree and
-    a working copy.
+    """Find the differences between the contents of a tree and a working copy.
 
     Args:
       names: Iterable of names in the working copy

+ 3 - 5
dulwich/line_ending.py

@@ -323,8 +323,7 @@ def get_checkin_filter_autocrlf(
 
 
 class BlobNormalizer(FilterBlobNormalizer):
-    """An object to store computation result of which filter to apply based
-    on configuration, gitattributes, path and operation (checkin or checkout).
+    """An object to store computation result of which filter to apply based on configuration, gitattributes, path and operation (checkin or checkout).
 
     This class maintains backward compatibility while using the filter infrastructure.
     """
@@ -432,9 +431,8 @@ class BlobNormalizer(FilterBlobNormalizer):
 def normalize_blob(
     blob: Blob, conversion: Callable[[bytes], bytes], binary_detection: bool
 ) -> Blob:
-    """Takes a blob as input returns either the original blob if
-    binary_detection is True and the blob content looks like binary, else
-    return a new blob with converted data.
+    """Takes a blob as input returns either the original blob if binary_detection is True and the blob content looks like binary, else return a new blob with converted data.
+
     """
     # Read the original blob
     data = blob.data

+ 2 - 0
dulwich/object_store.py

@@ -156,6 +156,7 @@ def get_depth(
     max_depth=None,
 ):
     """Return the current available depth for the given head.
+
     For commits with multiple parents, the largest possible depth will be
     returned.
 
@@ -455,6 +456,7 @@ class BaseObjectStore:
         max_depth=None,
     ):
         """Return the current available depth for the given head.
+
         For commits with multiple parents, the largest possible depth will be
         returned.
 

+ 102 - 3
dulwich/pack.py

@@ -111,10 +111,17 @@ class UnresolvedDeltas(Exception):
     """Delta objects could not be resolved."""
 
     def __init__(self, shas: list[bytes]) -> None:
+        """Initialize UnresolvedDeltas exception.
+
+        Args:
+            shas: List of SHA hashes for unresolved delta objects
+        """
         self.shas = shas
 
 
 class ObjectContainer(Protocol):
+    """Protocol for objects that can contain git objects."""
+
     def add_object(self, obj: ShaFile) -> None:
         """Add a single object to this object store."""
 
@@ -127,6 +134,7 @@ class ObjectContainer(Protocol):
 
         Args:
           objects: Iterable over a list of (object, path) tuples
+          progress: Progress callback for object insertion
         """
 
     def __contains__(self, sha1: bytes) -> bool:
@@ -145,15 +153,34 @@ class ObjectContainer(Protocol):
 
 
 class PackedObjectContainer(ObjectContainer):
+    """Container for objects packed in a pack file."""
+
     def get_unpacked_object(
         self, sha1: bytes, *, include_comp: bool = False
     ) -> "UnpackedObject":
-        """Get a raw unresolved object."""
+        """Get a raw unresolved object.
+
+        Args:
+            sha1: SHA-1 hash of the object
+            include_comp: Whether to include compressed data
+
+        Returns:
+            UnpackedObject instance
+        """
         raise NotImplementedError(self.get_unpacked_object)
 
     def iterobjects_subset(
         self, shas: Iterable[bytes], *, allow_missing: bool = False
     ) -> Iterator[ShaFile]:
+        """Iterate over a subset of objects.
+
+        Args:
+            shas: Iterable of object SHAs to retrieve
+            allow_missing: If True, skip missing objects
+
+        Returns:
+            Iterator of ShaFile objects
+        """
         raise NotImplementedError(self.iterobjects_subset)
 
     def iter_unpacked_subset(
@@ -181,9 +208,11 @@ class UnpackedObjectStream:
     """Abstract base class for a stream of unpacked objects."""
 
     def __iter__(self) -> Iterator["UnpackedObject"]:
+        """Iterate over unpacked objects."""
         raise NotImplementedError(self.__iter__)
 
     def __len__(self) -> int:
+        """Return the number of objects in the stream."""
         raise NotImplementedError(self.__len__)
 
 
@@ -194,6 +223,10 @@ def take_msb_bytes(
 
     Args:
       read: Read function
+      crc32: Optional CRC32 checksum to update
+
+    Returns:
+      Tuple of (list of bytes read, updated CRC32 or None)
     """
     ret: list[int] = []
     while len(ret) == 0 or ret[-1] & 0x80:
@@ -208,6 +241,11 @@ class PackFileDisappeared(Exception):
     """Raised when a pack file unexpectedly disappears."""
 
     def __init__(self, obj: object) -> None:
+        """Initialize PackFileDisappeared exception.
+
+        Args:
+            obj: The object that triggered the exception
+        """
         self.obj = obj
 
 
@@ -259,6 +297,17 @@ class UnpackedObject:
         decomp_chunks: Optional[list[bytes]] = None,
         offset: Optional[int] = None,
     ) -> None:
+        """Initialize an UnpackedObject.
+
+        Args:
+            pack_type_num: Type number of this object in the pack
+            delta_base: Delta base (offset or SHA) if this is a delta object
+            decomp_len: Decompressed length of this object
+            crc32: CRC32 checksum
+            sha: SHA-1 hash of the object
+            decomp_chunks: Decompressed chunks
+            offset: Offset in the pack file
+        """
         self.offset = offset
         self._sha = sha
         self.pack_type_num = pack_type_num
@@ -301,6 +350,7 @@ class UnpackedObject:
             return self.decomp_chunks
 
     def __eq__(self, other: object) -> bool:
+        """Check equality with another UnpackedObject."""
         if not isinstance(other, UnpackedObject):
             return False
         for slot in self.__slots__:
@@ -507,6 +557,7 @@ class PackIndex:
     hash_size = 20
 
     def __eq__(self, other: object) -> bool:
+        """Check equality with another PackIndex."""
         if not isinstance(other, PackIndex):
             return False
 
@@ -546,6 +597,14 @@ class PackIndex:
 
     @replace_me(since="0.21.0", remove_in="0.23.0")
     def object_index(self, sha: bytes) -> int:
+        """Return the index for the given SHA.
+
+        Args:
+            sha: SHA-1 hash
+
+        Returns:
+            Index position
+        """
         return self.object_offset(sha)
 
     def object_offset(self, sha: bytes) -> int:
@@ -702,6 +761,7 @@ class FilePackIndex(PackIndex):
         return os.fspath(self._filename)
 
     def __eq__(self, other: object) -> bool:
+        """Check equality with another FilePackIndex."""
         # Quick optimization:
         if (
             isinstance(other, FilePackIndex)
@@ -866,6 +926,14 @@ class PackIndex1(FilePackIndex):
     def __init__(
         self, filename: Union[str, os.PathLike], file=None, contents=None, size=None
     ) -> None:
+        """Initialize a version 1 pack index.
+
+        Args:
+            filename: Path to the index file
+            file: Optional file object
+            contents: Optional mmap'd contents
+            size: Optional size of the index
+        """
         super().__init__(filename, file, contents, size)
         self.version = 1
         self._fan_out_table = self._read_fan_out_table(0)
@@ -897,6 +965,14 @@ class PackIndex2(FilePackIndex):
     def __init__(
         self, filename: Union[str, os.PathLike], file=None, contents=None, size=None
     ) -> None:
+        """Initialize a version 2 pack index.
+
+        Args:
+            filename: Path to the index file
+            file: Optional file object
+            contents: Optional mmap'd contents
+            size: Optional size of the index
+        """
         super().__init__(filename, file, contents, size)
         if self._contents[:4] != b"\377tOc":
             raise AssertionError("Not a v2 pack index file")
@@ -950,6 +1026,14 @@ class PackIndex3(FilePackIndex):
     def __init__(
         self, filename: Union[str, os.PathLike], file=None, contents=None, size=None
     ) -> None:
+        """Initialize a version 3 pack index.
+
+        Args:
+            filename: Path to the index file
+            file: Optional file object
+            contents: Optional mmap'd contents
+            size: Optional size of the index
+        """
         super().__init__(filename, file, contents, size)
         if self._contents[:4] != b"\377tOc":
             raise AssertionError("Not a v3 pack index file")
@@ -1134,6 +1218,13 @@ class PackStreamReader:
     """
 
     def __init__(self, read_all, read_some=None, zlib_bufsize=_ZLIB_BUFSIZE) -> None:
+        """Initialize a PackStreamReader.
+
+        Args:
+            read_all: Function to read all requested bytes
+            read_some: Function to read some bytes (defaults to read_all)
+            zlib_bufsize: Buffer size for zlib decompression
+        """
         self.read_all = read_all
         if read_some is None:
             self.read_some = read_all
@@ -1213,6 +1304,7 @@ class PackStreamReader:
         return self._read(self.read_some, size)
 
     def __len__(self) -> int:
+        """Return the number of objects in this pack."""
         return self._num_objects
 
     def read_objects(self, compute_crc32=False) -> Iterator[UnpackedObject]:
@@ -1483,9 +1575,11 @@ class PackData:
         self._file.close()
 
     def __enter__(self):
+        """Enter context manager."""
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
+        """Exit context manager."""
         self.close()
 
     def __eq__(self, other):
@@ -1549,6 +1643,7 @@ class PackData:
         Args:
           progress: Progress function, called with current and total
             object count.
+          resolve_ext_ref: Optional function to resolve external references
         Returns: iterator of tuples with (sha, offset, crc32)
         """
         num_objects = self._num_objects
@@ -1568,6 +1663,7 @@ class PackData:
         Args:
           progress: Progress function, called with current and total
             object count
+          resolve_ext_ref: Optional function to resolve external references
         Returns: Iterator of tuples with (sha, offset, crc32)
         """
         return sorted(
@@ -1580,6 +1676,7 @@ class PackData:
         Args:
           filename: Index filename.
           progress: Progress report function
+          resolve_ext_ref: Optional function to resolve external references
         Returns: Checksum of index file
         """
         entries = self.sorted_entries(
@@ -1594,6 +1691,7 @@ class PackData:
         Args:
           filename: Index filename.
           progress: Progress report function
+          resolve_ext_ref: Optional function to resolve external references
         Returns: Checksum of index file
         """
         entries = self.sorted_entries(
@@ -1898,6 +1996,7 @@ class DeltaChainIterator(Generic[T]):
         return self._walk_all_chains()
 
     def ext_refs(self):
+        """Return external references."""
         return self._ext_refs
 
 
@@ -3168,8 +3267,8 @@ class Pack:
 
     @classmethod
     def from_lazy_objects(cls, data_fn, idx_fn):
-        """Create a new pack object from callables to load pack data and
-        index objects.
-        """
+        """Create a new pack object from callables to load pack data and index objects."""
         ret = cls("")
         ret._data_load = data_fn

+ 1 - 2
dulwich/patch.py

@@ -147,8 +147,7 @@ def unified_diff(
     tree_encoding: str = "utf-8",
     output_encoding: str = "utf-8",
 ) -> Generator[bytes, None, None]:
-    """difflib.unified_diff that can detect "No newline at end of file" as
-    original "git diff" does.
+    """difflib.unified_diff that can detect "No newline at end of file" as original "git diff" does.
 
     Based on the same function in Python2.7 difflib.py
     """

+ 4 - 4
dulwich/porcelain.py

@@ -334,8 +334,8 @@ def parse_timezone_format(tz_str):
 
 
 def get_user_timezones():
-    """Retrieve local timezone as described in
-    https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
+    """Retrieve local timezone as described in https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
+
     Returns: A tuple containing author timezone, committer timezone.
     """
     local_timezone = time.localtime().tm_gmtoff
@@ -391,6 +391,7 @@ def open_repo_closing(
     path_or_repo: Union[str, os.PathLike, T],
 ) -> AbstractContextManager[Union[T, Repo]]:
     """Open an argument that can be a repository or a path for a repository.
+
     returns a context manager that will close the repo on exit if the argument
     is a path, else does nothing if the argument is a repo.
     """
@@ -402,8 +403,7 @@ def open_repo_closing(
 def path_to_tree_path(
     repopath: Union[str, os.PathLike], path, tree_encoding=DEFAULT_ENCODING
 ):
-    """Convert a path to a path usable in an index, e.g. bytes and relative to
-    the repository root.
+    """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root.
 
     Args:
       repopath: Repository path, absolute or relative to the cwd

+ 1 - 0
dulwich/repo.py

@@ -1491,6 +1491,7 @@ class Repo(BaseRepo):
     @replace_me(remove_in="0.26.0")
     def unstage(self, fs_paths: list[str]) -> None:
         """Unstage specific file in the index
+
         Args:
           fs_paths: a list of files to unstage,
             relative to the repository path.

+ 1 - 2
dulwich/sparse_patterns.py

@@ -39,8 +39,7 @@ class BlobNotFoundError(Exception):
 
 
 def determine_included_paths(index: Index, lines: list[str], cone: bool) -> set[str]:
-    """Determine which paths in the index should be included based on either
-    a full-pattern match or a cone-mode approach.
+    """Determine which paths in the index should be included based on either a full-pattern match or a cone-mode approach.
 
     Args:
       index: An Index object containing the repository's index.

+ 2 - 2
dulwich/tests/test_object_store.py

@@ -125,8 +125,8 @@ class ObjectStoreTests:
         self.store.add_objects([])
 
     def test_store_resilience(self) -> None:
-        """Test if updating an existing stored object doesn't erase the
-        object from the store.
-        """
+        """Test if updating an existing stored object doesn't erase the object from the store."""
         test_object = make_object(Blob, data=b"data")
 

+ 6 - 6
dulwich/web.py

@@ -578,8 +578,8 @@ class HTTPGitApplication:
 
 
 class GunzipFilter:
-    """WSGI middleware that unzips gzip-encoded requests before
-    passing on to the underlying application.
-    """
+    """WSGI middleware that unzips gzip-encoded requests before passing on to the underlying application."""
 
     def __init__(self, application) -> None:
@@ -600,8 +600,8 @@ class GunzipFilter:
 
 
 class LimitedInputFilter:
-    """WSGI middleware that limits the input length of a request to that
-    specified in Content-Length.
-    """
+    """WSGI middleware that limits the input length of a request to that specified in Content-Length."""
 
     def __init__(self, application) -> None:
@@ -621,8 +621,8 @@ class LimitedInputFilter:
 
 
 def make_wsgi_chain(*args, **kwargs):
-    """Factory function to create an instance of HTTPGitApplication,
-    correctly wrapped with needed middleware.
-    """
+    """Factory function to create an instance of HTTPGitApplication, correctly wrapped with needed middleware."""
     app = HTTPGitApplication(*args, **kwargs)
     wrapped_app = LimitedInputFilter(GunzipFilter(app))

+ 1 - 0
dulwich/worktree.py

@@ -312,6 +312,7 @@ class WorkTree:
 
     def unstage(self, fs_paths: list[str]) -> None:
         """Unstage specific file in the index
+
         Args:
           fs_paths: a list of files to unstage,
             relative to the repository path.

+ 8 - 10
pyproject.toml

@@ -98,22 +98,20 @@ ignore = [
     "ANN204",
     "ANN205",
     "ANN206",
-    "D100",
-    "D101",
-    "D102",
-    "D103",
-    "D104",
-    "D105",
-    "D107",
-    "D204",
-    "D205",
-    "D417",
     "E501",  # line too long
 ]
 
 [tool.ruff.lint.pydocstyle]
 convention = "google"
 
+[tool.ruff.lint.per-file-ignores]
+"tests/**/*.py" = ["D"]  # Don't require docstrings in tests
+"fuzzing/**/*.py" = ["D"]  # Don't require docstrings in fuzzing
+"examples/**/*.py" = ["D"]  # Don't require docstrings in examples
+"devscripts/**/*.py" = ["D"]  # Don't require docstrings in devscripts
+"docs/conf.py" = ["D"]  # Don't require docstrings in docs config
+"setup.py" = ["D"]  # Don't require docstrings in setup.py
+
 [tool.cibuildwheel]
 skip = "cp314-*"
 environment = {PATH="$HOME/.cargo/bin:$PATH"}

Alguns arquivos não foram mostrados porque muitos arquivos mudaram nesse diff