Jelmer Vernooij 1 year ago
parent
commit
574e529c1c
74 files changed with 1072 additions and 621 deletions
  1. Makefile (+3 -0)
  2. dulwich/archive.py (+2 -4)
  3. dulwich/bundle.py (+2 -3)
  4. dulwich/cli.py (+1 -1)
  5. dulwich/client.py (+82 -46)
  6. dulwich/cloud/gcs.py (+3 -4)
  7. dulwich/config.py (+22 -13)
  8. dulwich/contrib/diffstat.py (+3 -2)
  9. dulwich/contrib/paramiko_vendor.py (+2 -2)
  10. dulwich/contrib/requests_vendor.py (+7 -3)
  11. dulwich/contrib/swift.py (+45 -34)
  12. dulwich/contrib/test_paramiko_vendor.py (+2 -2)
  13. dulwich/contrib/test_release_robot.py (+4 -4)
  14. dulwich/contrib/test_swift.py (+3 -3)
  15. dulwich/contrib/test_swift_smoke.py (+3 -3)
  16. dulwich/credentials.py (+3 -3)
  17. dulwich/diff_tree.py (+12 -3)
  18. dulwich/errors.py (+10 -10)
  19. dulwich/fastexport.py (+3 -4)
  20. dulwich/file.py (+5 -4)
  21. dulwich/graph.py (+3 -6)
  22. dulwich/greenthreads.py (+6 -3)
  23. dulwich/hooks.py (+13 -15)
  24. dulwich/ignore.py (+5 -8)
  25. dulwich/index.py (+31 -12)
  26. dulwich/lfs.py (+1 -1)
  27. dulwich/line_ending.py (+13 -15)
  28. dulwich/lru_cache.py (+3 -4)
  29. dulwich/mailmap.py (+1 -1)
  30. dulwich/object_store.py (+61 -26)
  31. dulwich/objects.py (+45 -30)
  32. dulwich/objectspec.py (+1 -1)
  33. dulwich/pack.py (+46 -29)
  34. dulwich/patch.py (+2 -1)
  35. dulwich/porcelain.py (+60 -31)
  36. dulwich/protocol.py (+6 -5)
  37. dulwich/reflog.py (+1 -2)
  38. dulwich/refs.py (+14 -15)
  39. dulwich/repo.py (+101 -53)
  40. dulwich/server.py (+69 -39)
  41. dulwich/stash.py (+2 -2)
  42. dulwich/submodule.py (+2 -3)
  43. dulwich/tests/__init__.py (+2 -2)
  44. dulwich/tests/compat/server_utils.py (+1 -1)
  45. dulwich/tests/compat/test_client.py (+9 -3)
  46. dulwich/tests/compat/test_repository.py (+1 -2)
  47. dulwich/tests/compat/test_web.py (+6 -2)
  48. dulwich/tests/compat/utils.py (+2 -1)
  49. dulwich/tests/test_client.py (+27 -13)
  50. dulwich/tests/test_config.py (+12 -4)
  51. dulwich/tests/test_credentials.py (+1 -2)
  52. dulwich/tests/test_diff_tree.py (+18 -6)
  53. dulwich/tests/test_ignore.py (+9 -2)
  54. dulwich/tests/test_index.py (+18 -10)
  55. dulwich/tests/test_line_ending.py (+8 -4)
  56. dulwich/tests/test_lru_cache.py (+1 -1)
  57. dulwich/tests/test_missing_obj_finder.py (+2 -2)
  58. dulwich/tests/test_object_store.py (+21 -7)
  59. dulwich/tests/test_objects.py (+28 -12)
  60. dulwich/tests/test_objectspec.py (+10 -3)
  61. dulwich/tests/test_pack.py (+30 -13)
  62. dulwich/tests/test_patch.py (+8 -2)
  63. dulwich/tests/test_porcelain.py (+3 -8)
  64. dulwich/tests/test_protocol.py (+14 -5)
  65. dulwich/tests/test_reflog.py (+6 -2)
  66. dulwich/tests/test_refs.py (+12 -4)
  67. dulwich/tests/test_repository.py (+9 -6)
  68. dulwich/tests/test_server.py (+25 -11)
  69. dulwich/tests/test_walk.py (+3 -4)
  70. dulwich/tests/test_web.py (+27 -16)
  71. dulwich/tests/utils.py (+11 -4)
  72. dulwich/walk.py (+10 -6)
  73. dulwich/web.py (+20 -13)
  74. pyproject.toml (+15 -0)

+ 3 - 0
Makefile

@@ -75,3 +75,6 @@ coverage-html: coverage
 
 apidocs:
 	pydoctor --intersphinx http://urllib3.readthedocs.org/en/latest/objects.inv --intersphinx http://docs.python.org/3/objects.inv --docformat=google dulwich --project-url=https://www.dulwich.io/ --project-name=dulwich
+
+fix:
+	ruff check --fix .

+ 2 - 4
dulwich/archive.py

@@ -19,9 +19,7 @@
 # License, Version 2.0.
 #
 
-"""Generates tarballs for Git trees.
-
-"""
+"""Generates tarballs for Git trees."""
 
 import posixpath
 import stat
@@ -43,7 +41,7 @@ class ChunkedBytesIO:
             list_of_bytestrings)
     """
 
-    def __init__(self, contents):
+    def __init__(self, contents) -> None:
         self.contents = contents
         self.pos = (0, 0)
 

+ 2 - 3
dulwich/bundle.py

@@ -18,8 +18,7 @@
 # License, Version 2.0.
 #
 
-"""Bundle format support.
-"""
+"""Bundle format support."""
 
 from typing import Dict, List, Optional, Sequence, Tuple, Union
 
@@ -35,7 +34,7 @@ class Bundle:
     references: Dict[str, bytes] = {}
     pack_data: Union[PackData, Sequence[bytes]] = []
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return (f"<{type(self).__name__}(version={self.version}, "
                 f"capabilities={self.capabilities}, "
                 f"prerequisites={self.prerequisites}, "

+ 1 - 1
dulwich/cli.py

@@ -21,7 +21,7 @@
 # License, Version 2.0.
 #
 
-"""Simple command-line interface to Dulwich>
+"""Simple command-line interface to Dulwich>.
 
 This is a very simple command-line wrapper for Dulwich. It is by
 no means intended to be a full-blown Git command-line interface but just

+ 82 - 46
dulwich/client.py

@@ -46,8 +46,20 @@ import subprocess
 import sys
 from contextlib import closing
 from io import BufferedReader, BytesIO
-from typing import (IO, TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator,
-                    List, Optional, Set, Tuple, Union)
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+)
 from urllib.parse import quote as urlquote
 from urllib.parse import unquote as urlunquote
 from urllib.parse import urljoin, urlparse, urlunparse, urlunsplit
@@ -59,23 +71,50 @@ import dulwich
 
 from .config import Config, apply_instead_of, get_xdg_config_home_path
 from .errors import GitProtocolError, NotGitRepository, SendPackError
-from .pack import (PACK_SPOOL_FILE_MAX_SIZE, PackChunkGenerator,
-                   UnpackedObject, write_pack_from_container)
-from .protocol import (_RBUFSIZE, CAPABILITIES_REF, CAPABILITY_AGENT,
-                       CAPABILITY_DELETE_REFS, CAPABILITY_INCLUDE_TAG,
-                       CAPABILITY_MULTI_ACK, CAPABILITY_MULTI_ACK_DETAILED,
-                       CAPABILITY_OFS_DELTA, CAPABILITY_QUIET,
-                       CAPABILITY_REPORT_STATUS, CAPABILITY_SHALLOW,
-                       CAPABILITY_SIDE_BAND_64K, CAPABILITY_SYMREF,
-                       CAPABILITY_THIN_PACK, COMMAND_DEEPEN, COMMAND_DONE,
-                       COMMAND_HAVE, COMMAND_SHALLOW, COMMAND_UNSHALLOW,
-                       COMMAND_WANT, KNOWN_RECEIVE_CAPABILITIES,
-                       KNOWN_UPLOAD_CAPABILITIES, SIDE_BAND_CHANNEL_DATA,
-                       SIDE_BAND_CHANNEL_FATAL, SIDE_BAND_CHANNEL_PROGRESS,
-                       TCP_GIT_PORT, ZERO_SHA, HangupException, PktLineParser,
-                       Protocol, agent_string, capability_agent,
-                       extract_capabilities, extract_capability_names,
-                       parse_capability, pkt_line)
+from .pack import (
+    PACK_SPOOL_FILE_MAX_SIZE,
+    PackChunkGenerator,
+    UnpackedObject,
+    write_pack_from_container,
+)
+from .protocol import (
+    _RBUFSIZE,
+    CAPABILITIES_REF,
+    CAPABILITY_AGENT,
+    CAPABILITY_DELETE_REFS,
+    CAPABILITY_INCLUDE_TAG,
+    CAPABILITY_MULTI_ACK,
+    CAPABILITY_MULTI_ACK_DETAILED,
+    CAPABILITY_OFS_DELTA,
+    CAPABILITY_QUIET,
+    CAPABILITY_REPORT_STATUS,
+    CAPABILITY_SHALLOW,
+    CAPABILITY_SIDE_BAND_64K,
+    CAPABILITY_SYMREF,
+    CAPABILITY_THIN_PACK,
+    COMMAND_DEEPEN,
+    COMMAND_DONE,
+    COMMAND_HAVE,
+    COMMAND_SHALLOW,
+    COMMAND_UNSHALLOW,
+    COMMAND_WANT,
+    KNOWN_RECEIVE_CAPABILITIES,
+    KNOWN_UPLOAD_CAPABILITIES,
+    SIDE_BAND_CHANNEL_DATA,
+    SIDE_BAND_CHANNEL_FATAL,
+    SIDE_BAND_CHANNEL_PROGRESS,
+    TCP_GIT_PORT,
+    ZERO_SHA,
+    HangupException,
+    PktLineParser,
+    Protocol,
+    agent_string,
+    capability_agent,
+    extract_capabilities,
+    extract_capability_names,
+    parse_capability,
+    pkt_line,
+)
 from .refs import PEELED_TAG_SUFFIX, _import_remote_refs, read_info_refs
 from .repo import Repo
 
@@ -89,7 +128,7 @@ logger = logging.getLogger(__name__)
 class InvalidWants(Exception):
     """Invalid wants."""
 
-    def __init__(self, wants):
+    def __init__(self, wants) -> None:
         Exception.__init__(
             self, "requested wants not in server provided refs: %r" % wants
         )
@@ -98,7 +137,7 @@ class InvalidWants(Exception):
 class HTTPUnauthorized(Exception):
     """Raised when authentication fails."""
 
-    def __init__(self, www_authenticate, url):
+    def __init__(self, www_authenticate, url) -> None:
         Exception.__init__(self, "No valid credentials provided")
         self.www_authenticate = www_authenticate
         self.url = url
@@ -107,7 +146,7 @@ class HTTPUnauthorized(Exception):
 class HTTPProxyUnauthorized(Exception):
     """Raised when proxy authentication fails."""
 
-    def __init__(self, proxy_authenticate, url):
+    def __init__(self, proxy_authenticate, url) -> None:
         Exception.__init__(self, "No valid proxy credentials provided")
         self.proxy_authenticate = proxy_authenticate
         self.url = url
@@ -148,7 +187,7 @@ RECEIVE_CAPABILITIES = [
 class ReportStatusParser:
     """Handle status as reported by servers with 'report-status' capability."""
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._done = False
         self._pack_status = None
         self._ref_statuses = []
@@ -241,7 +280,7 @@ class FetchPackResult:
         "viewvalues",
     ]
 
-    def __init__(self, refs, symrefs, agent, new_shallow=None, new_unshallow=None):
+    def __init__(self, refs, symrefs, agent, new_shallow=None, new_unshallow=None) -> None:
         self.refs = refs
         self.symrefs = symrefs
         self.agent = agent
@@ -267,7 +306,7 @@ class FetchPackResult:
             and self.agent == other.agent
         )
 
-    def __contains__(self, name):
+    def __contains__(self, name) -> bool:
         self._warn_deprecated()
         return name in self.refs
 
@@ -275,7 +314,7 @@
         self._warn_deprecated()
         return self.refs[name]
 
-    def __len__(self):
+    def __len__(self) -> int:
         self._warn_deprecated()
         return len(self.refs)
 
@@ -289,7 +328,7 @@
             return getattr(self.refs, name)
         return super().__getattribute__(name)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r}, {!r}, {!r})".format(
             self.__class__.__name__,
             self.refs,
@@ -325,7 +364,7 @@ class SendPackResult:
         "viewvalues",
     ]
 
-    def __init__(self, refs, agent=None, ref_status=None):
+    def __init__(self, refs, agent=None, ref_status=None) -> None:
         self.refs = refs
         self.agent = agent
         self.ref_status = ref_status
@@ -345,7 +384,7 @@
             return self.refs == other
         return self.refs == other.refs and self.agent == other.agent
 
-    def __contains__(self, name):
+    def __contains__(self, name) -> bool:
         self._warn_deprecated()
         return name in self.refs
 
@@ -353,7 +392,7 @@
         self._warn_deprecated()
         return self.refs[name]
 
-    def __len__(self):
+    def __len__(self) -> int:
         self._warn_deprecated()
         return len(self.refs)
 
@@ -367,7 +406,7 @@
             return getattr(self.refs, name)
         return super().__getattribute__(name)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r}, {!r})".format(self.__class__.__name__, self.refs, self.agent)
 
 
@@ -387,7 +426,7 @@ def _read_shallow_updates(pkt_seq):
 
 class _v1ReceivePackHeader:
 
-    def __init__(self, capabilities, old_refs, new_refs):
+    def __init__(self, capabilities, old_refs, new_refs) -> None:
         self.want = []
         self.have = []
         self._it = self._handle_receive_pack_head(capabilities, old_refs, new_refs)
@@ -596,7 +635,7 @@ class GitClient:
         report_activity=None,
         quiet=False,
         include_tags=False,
-    ):
+    ) -> None:
         """Create a new GitClient instance.
 
         Args:
@@ -924,8 +963,7 @@ class GitClient:
         subdirs=None,
         prefix=None,
     ):
-        """Retrieve an archive of the specified tree.
-        """
+        """Retrieve an archive of the specified tree."""
        raise NotImplementedError(self.archive)
 
 
@@ -961,7 +999,7 @@ class TraditionalGitClient(GitClient):
 
     DEFAULT_ENCODING = "utf-8"
 
-    def __init__(self, path_encoding=DEFAULT_ENCODING, **kwargs):
+    def __init__(self, path_encoding=DEFAULT_ENCODING, **kwargs) -> None:
         self._remote_path_encoding = path_encoding
         super().__init__(**kwargs)
 
@@ -1203,7 +1241,7 @@ class TraditionalGitClient(GitClient):
 class TCPGitClient(TraditionalGitClient):
     """A Git Client that works over TCP directly (i.e. git://)."""
 
-    def __init__(self, host, port=None, **kwargs):
+    def __init__(self, host, port=None, **kwargs) -> None:
         if port is None:
             port = TCP_GIT_PORT
         self._host = host
@@ -1269,7 +1307,7 @@ class TCPGitClient(TraditionalGitClient):
 class SubprocessWrapper:
     """A socket-like object that talks to a subprocess via pipes."""
 
-    def __init__(self, proc):
+    def __init__(self, proc) -> None:
         self.proc = proc
         self.read = BufferedReader(proc.stdout).read
         self.write = proc.stdin.write
@@ -1350,7 +1388,7 @@ class LocalGitClient(GitClient):
     """Git Client that just uses a local Repo."""
 
     def __init__(self, thin_packs=True, report_activity=None,
-                 config: Optional[Config] = None):
+                 config: Optional[Config] = None) -> None:
         """Create a new LocalGitClient instance.
 
         Args:
@@ -1503,7 +1541,6 @@ class LocalGitClient(GitClient):
 
     def get_refs(self, path):
         """Retrieve the current refs from a git smart server."""
-
         with self._open_repo(path) as target:
             return target.get_refs()
 
@@ -1548,7 +1585,7 @@ class SSHVendor:
 class StrangeHostname(Exception):
     """Refusing to connect to strange SSH hostname."""
 
-    def __init__(self, hostname):
+    def __init__(self, hostname) -> None:
         super().__init__(hostname)
 
 
@@ -1682,7 +1719,7 @@ class SSHGitClient(TraditionalGitClient):
         key_filename=None,
         ssh_command=None,
         **kwargs
-    ):
+    ) -> None:
         self.host = host
         self.port = port
         self.username = username
@@ -1907,7 +1944,7 @@ class AbstractHttpGitClient(GitClient):
     _http_request method.
     """
 
-    def __init__(self, base_url, dumb=False, **kwargs):
+    def __init__(self, base_url, dumb=False, **kwargs) -> None:
         self._base_url = base_url.rstrip("/") + "/"
         self.dumb = dumb
         GitClient.__init__(self, **kwargs)
@@ -1927,7 +1964,6 @@ class AbstractHttpGitClient(GitClient):
           method for the response data.
 
         """
-
        raise NotImplementedError(self._http_request)
 
     def _discover_references(self, service, base_url):
@@ -2150,7 +2186,7 @@ class AbstractHttpGitClient(GitClient):
             kwargs["username"] = urlunquote(username)
         return cls(urlunparse(parsedurl), **kwargs)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r}, dumb={!r})".format(
             type(self).__name__,
             self._base_url,
@@ -2168,7 +2204,7 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
         username=None,
         password=None,
         **kwargs
-    ):
+    ) -> None:
         self._username = username
         self._password = password
 

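The FetchPackResult and SendPackResult hunks above keep dict-style access for backwards compatibility while steering callers toward the .refs attribute. A minimal sketch of the preferred access; the ref and agent values are illustrative, not taken from this commit:

    from dulwich.client import FetchPackResult

    result = FetchPackResult({b"HEAD": b"0" * 40}, {}, b"git/2.x-illustrative")
    # Prefer the attribute; the legacy dict-style access now emits a deprecation warning.
    print(len(result.refs), b"HEAD" in result.refs)
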
+ 3 - 4
dulwich/cloud/gcs.py

@@ -25,20 +25,19 @@ import posixpath
 import tempfile
 
 from ..object_store import BucketBasedObjectStore
-from ..pack import (PACK_SPOOL_FILE_MAX_SIZE, Pack, PackData,
-                    load_pack_index_file)
+from ..pack import PACK_SPOOL_FILE_MAX_SIZE, Pack, PackData, load_pack_index_file
 
 # TODO(jelmer): For performance, read ranges?
 
 
 class GcsObjectStore(BucketBasedObjectStore):
 
-    def __init__(self, bucket, subpath=''):
+    def __init__(self, bucket, subpath='') -> None:
         super().__init__()
         self.bucket = bucket
         self.subpath = subpath
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r}, subpath={!r})".format(
             type(self).__name__, self.bucket, self.subpath)
 

+ 22 - 13
dulwich/config.py

@@ -20,7 +20,7 @@
 
 """Reading and writing Git configuration files.
 
-TODO:
+Todo:
  * preserve formatting when updating configuration files
 * treat subsection names as case-insensitive for [branch.foo] style
   subsections
@@ -29,8 +29,18 @@ TODO:
 import os
 import sys
 from contextlib import suppress
-from typing import (BinaryIO, Iterable, Iterator, KeysView, List,
-                    MutableMapping, Optional, Tuple, Union, overload)
+from typing import (
+    BinaryIO,
+    Iterable,
+    Iterator,
+    KeysView,
+    List,
+    MutableMapping,
+    Optional,
+    Tuple,
+    Union,
+    overload,
+)
 
 from .file import GitFile
 
@@ -49,7 +59,7 @@ def lower_key(key):
 
 class CaseInsensitiveOrderedMultiDict(MutableMapping):
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._real = []
         self._keyed = {}
 
@@ -72,7 +82,7 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping):
 
         return out
 
-    def __len__(self):
+    def __len__(self) -> int:
         return len(self._keyed)
 
     def keys(self) -> KeysView[Tuple[bytes, ...]]:
@@ -87,11 +97,11 @@ class CaseInsensitiveOrderedMultiDict(MutableMapping):
     def values(self):
         return self._keyed.values()
 
-    def __setitem__(self, key, value):
+    def __setitem__(self, key, value) -> None:
         self._real.append((key, value))
         self._keyed[lower_key(key)] = value
 
-    def __delitem__(self, key):
+    def __delitem__(self, key) -> None:
         key = lower_key(key)
         del self._keyed[key]
         for i, (actual, unused_value) in reversed(list(enumerate(self._real))):
@@ -181,6 +191,7 @@ class Config:
           section: Tuple with section name and optional subsection name
          name: Name of the setting, including section and possible
            subsection.
+
         Returns:
           Contents of the setting
         """
@@ -670,7 +681,7 @@ class StackedConfig(Config):
 
     def __init__(
         self, backends: List[ConfigFile], writable: Optional[ConfigFile] = None
-    ):
+    ) -> None:
         self.backends = backends
         self.writable = writable
 
@@ -744,7 +755,7 @@
 
 
 def read_submodules(path: str) -> Iterator[Tuple[bytes, bytes, bytes]]:
-    """read a .gitmodules file."""
+    """Read a .gitmodules file."""
     cfg = ConfigFile.from_path(path)
     return parse_submodules(cfg)
 
@@ -767,8 +778,7 @@ def parse_submodules(config: ConfigFile) -> Iterator[Tuple[bytes, bytes, bytes]]
 
 
 def iter_instead_of(config: Config, push: bool = False) -> Iterable[Tuple[str, str]]:
-    """Iterate over insteadOf / pushInsteadOf values.
-    """
+    """Iterate over insteadOf / pushInsteadOf values."""
     for section in config.sections():
         if section[0] != b'url':
             continue
@@ -788,8 +798,7 @@ def iter_instead_of(config: Config, push: bool = False) -> Iterable[Tuple[str, s
 
 
 def apply_instead_of(config: Config, orig_url: str, push: bool = False) -> str:
-    """Apply insteadOf / pushInsteadOf to a URL.
-    """
+    """Apply insteadOf / pushInsteadOf to a URL."""
     longest_needle = ""
     updated_url = orig_url
     for needle, replacement in iter_instead_of(config, push):

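A minimal usage sketch of apply_instead_of() as declared in the config.py hunks above; the config path and URL are illustrative assumptions:

    from dulwich.config import ConfigFile, apply_instead_of

    # A gitconfig with a [url "..."] insteadOf mapping would rewrite matching prefixes.
    cfg = ConfigFile.from_path("/tmp/example-gitconfig")
    print(apply_instead_of(cfg, "https://example.com/project.git"))
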
+ 3 - 2
dulwich/contrib/diffstat.py

@@ -110,13 +110,14 @@ def _parse_patch(lines: List[bytes]) -> Tuple[List[bytes], List[bool], List[Tupl
 # may not be encodable even to utf-8
 def diffstat(lines, max_width=80):
     """Generate summary statistics from a git style diff ala
-       (git diff tag1 tag2 --stat)
+       (git diff tag1 tag2 --stat).
+
     Args:
       lines: list of byte string "lines" from the diff to be parsed
      max_width: maximum line length for generating the summary
                 statistics (default 80)
     Returns: A byte string that lists the changed files with change
-             counts and histogram
+             counts and histogram.
     """
     names, nametypes, counts = _parse_patch(lines)
     insert = []

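A short sketch of calling diffstat() with the byte-string diff lines its docstring describes; the sample diff content is made up for illustration:

    from dulwich.contrib.diffstat import diffstat

    lines = [
        b"diff --git a/foo.txt b/foo.txt",
        b"--- a/foo.txt",
        b"+++ b/foo.txt",
        b"@@ -1 +1,2 @@",
        b" old line",
        b"+new line",
    ]
    # Returns a byte string listing changed files with counts and a histogram.
    print(diffstat(lines).decode("utf-8", "replace"))
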
+ 2 - 2
dulwich/contrib/paramiko_vendor.py

@@ -35,7 +35,7 @@ import paramiko.client
 
 
 class _ParamikoWrapper:
-    def __init__(self, client, channel):
+    def __init__(self, client, channel) -> None:
         self.client = client
         self.channel = channel
 
@@ -73,7 +73,7 @@ class _ParamikoWrapper:
 class ParamikoSSHVendor:
     # http://docs.paramiko.org/en/2.4/api/client.html
 
-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs) -> None:
         self.kwargs = kwargs
 
     def run_command(

+ 7 - 3
dulwich/contrib/requests_vendor.py

@@ -33,8 +33,12 @@ from io import BytesIO
 
 from requests import Session
 
-from ..client import (AbstractHttpGitClient, HTTPProxyUnauthorized,
-                      HTTPUnauthorized, default_user_agent_string)
+from ..client import (
+    AbstractHttpGitClient,
+    HTTPProxyUnauthorized,
+    HTTPUnauthorized,
+    default_user_agent_string,
+)
 from ..errors import GitProtocolError, NotGitRepository
 
 
@@ -47,7 +51,7 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
             username=None,
             password=None,
             **kwargs
-    ):
+    ) -> None:
         self._username = username
         self._password = password
 

+ 45 - 34
dulwich/contrib/swift.py

@@ -43,10 +43,21 @@ from ..greenthreads import GreenThreadsMissingObjectFinder
 from ..lru_cache import LRUSizeCache
 from ..object_store import INFODIR, PACKDIR, PackBasedObjectStore
 from ..objects import S_ISGITLINK, Blob, Commit, Tag, Tree
-from ..pack import (Pack, PackData, PackIndexer, PackStreamCopier,
-                    _compute_object_size, compute_file_sha, iter_sha1,
-                    load_pack_index_file, read_pack_header, unpack_object,
-                    write_pack_header, write_pack_index_v2, write_pack_object)
+from ..pack import (
+    Pack,
+    PackData,
+    PackIndexer,
+    PackStreamCopier,
+    _compute_object_size,
+    compute_file_sha,
+    iter_sha1,
+    load_pack_index_file,
+    read_pack_header,
+    unpack_object,
+    write_pack_header,
+    write_pack_index_v2,
+    write_pack_object,
+)
 from ..protocol import TCP_GIT_PORT
 from ..refs import InfoRefsContainer, read_info_refs, write_info_refs
 from ..repo import OBJECTDIR, BaseRepo
@@ -104,7 +115,7 @@ class PackInfoMissingObjectFinder(GreenThreadsMissingObjectFinder):
 
 
 def load_conf(path=None, file=None):
-    """Load configuration in global var CONF
+    """Load configuration in global var CONF.
 
     Args:
       path: The path to the configuration file
@@ -134,7 +145,7 @@ def load_conf(path=None, file=None):
 
 
 def swift_load_pack_index(scon, filename):
-    """Read a pack index file from Swift
+    """Read a pack index file from Swift.
 
     Args:
       scon: a `SwiftConnector` instance
@@ -187,10 +198,10 @@ class SwiftException(Exception):
 
 
 class SwiftConnector:
-    """A Connector to swift that manage authentication and errors catching"""
+    """A Connector to swift that manage authentication and errors catching."""
 
-    def __init__(self, root, conf):
-        """Initialize a SwiftConnector
+    def __init__(self, root, conf) -> None:
+        """Initialize a SwiftConnector.
 
         Args:
           root: The swift container that will act as Git bare repository
@@ -301,7 +312,7 @@ class SwiftConnector:
         return endpoint[self.endpoint_type], token
 
     def test_root_exists(self):
-        """Check that Swift container exist
+        """Check that Swift container exist.
 
         Returns: True if exist or None it not
         """
@@ -315,7 +326,7 @@ class SwiftConnector:
         return True
 
     def create_root(self):
-        """Create the Swift container
+        """Create the Swift container.
 
         Raises:
           SwiftException: if unable to create
@@ -328,7 +339,7 @@ class SwiftConnector:
                 )
 
     def get_container_objects(self):
-        """Retrieve objects list in a container
+        """Retrieve objects list in a container.
 
         Returns: A list of dict that describe objects
                  or None if container does not exist
@@ -346,7 +357,7 @@ class SwiftConnector:
         return json.loads(content)
 
     def get_object_stat(self, name):
-        """Retrieve object stat
+        """Retrieve object stat.
 
         Args:
           name: The object name
@@ -367,7 +378,7 @@ class SwiftConnector:
         return resp_headers
 
     def put_object(self, name, content):
-        """Put an object
+        """Put an object.
 
         Args:
           name: The object name
@@ -397,7 +408,7 @@ class SwiftConnector:
             )
 
     def get_object(self, name, range=None):
-        """Retrieve an object
+        """Retrieve an object.
 
         Args:
           name: The object name
@@ -424,7 +435,7 @@ class SwiftConnector:
         return BytesIO(content)
 
     def del_object(self, name):
-        """Delete an object
+        """Delete an object.
 
         Args:
           name: The object name
@@ -439,7 +450,7 @@ class SwiftConnector:
             )
 
     def del_root(self):
-        """Delete the root container by removing container content
+        """Delete the root container by removing container content.
 
         Raises:
           SwiftException: if unable to delete
@@ -454,7 +465,7 @@ class SwiftConnector:
 
 
 class SwiftPackReader:
-    """A SwiftPackReader that mimic read and sync method
+    """A SwiftPackReader that mimic read and sync method.
 
     The reader allows to read a specified amount of bytes from
     a given offset of a Swift object. A read offset is kept internally.
@@ -463,8 +474,8 @@ class SwiftPackReader:
     to read from Swift.
     """
 
-    def __init__(self, scon, filename, pack_length):
-        """Initialize a SwiftPackReader
+    def __init__(self, scon, filename, pack_length) -> None:
+        """Initialize a SwiftPackReader.
 
         Args:
           scon: a `SwiftConnector` instance
@@ -488,7 +499,7 @@ class SwiftPackReader:
         self.buff = ret
 
     def read(self, length):
-        """Read a specified amount of Bytes form the pack object
+        """Read a specified amount of Bytes form the pack object.
 
         Args:
           length: amount of bytes to read
@@ -509,7 +520,7 @@ class SwiftPackReader:
         return data
 
     def seek(self, offset):
-        """Seek to a specified offset
+        """Seek to a specified offset.
 
         Args:
           offset: the offset to seek to
@@ -519,7 +530,7 @@ class SwiftPackReader:
         self.offset = 0
 
     def read_checksum(self):
-        """Read the checksum from the pack
+        """Read the checksum from the pack.
 
         Returns: the checksum bytestring
         """
@@ -533,8 +544,8 @@ class SwiftPackData(PackData):
     using the Range header feature of Swift.
     """
 
-    def __init__(self, scon, filename):
-        """Initialize a SwiftPackReader
+    def __init__(self, scon, filename) -> None:
+        """Initialize a SwiftPackReader.
 
         Args:
           scon: a `SwiftConnector` instance
@@ -578,7 +589,7 @@ class SwiftPack(Pack):
     PackData.
     """
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         self.scon = kwargs["scon"]
         del kwargs["scon"]
         super().__init__(*args, **kwargs)
@@ -597,13 +608,13 @@ class SwiftPack(Pack):
 
 
 class SwiftObjectStore(PackBasedObjectStore):
-    """A Swift Object Store
+    """A Swift Object Store.
 
     Allow to manage a bare Git repository from Openstack Swift.
     This object store only supports pack files and not loose objects.
     """
 
-    def __init__(self, scon):
+    def __init__(self, scon) -> None:
         """Open a Swift object store.
 
         Args:
@@ -630,7 +641,7 @@ class SwiftObjectStore(PackBasedObjectStore):
         return ret
 
     def _iter_loose_objects(self):
-        """Loose objects are not supported by this repository"""
+        """Loose objects are not supported by this repository."""
         return []
 
     def pack_info_get(self, sha):
@@ -712,7 +723,7 @@ class SwiftObjectStore(PackBasedObjectStore):
         return None
 
     def add_thin_pack(self, read_all, read_some):
-        """Read a thin pack
+        """Read a thin pack.
 
         Read it from a stream and complete it in a temporary file.
         Then the pack and the corresponding index file are uploaded to Swift.
@@ -792,7 +803,7 @@ class SwiftObjectStore(PackBasedObjectStore):
 class SwiftInfoRefsContainer(InfoRefsContainer):
     """Manage references in info/refs object."""
 
-    def __init__(self, scon, store):
+    def __init__(self, scon, store) -> None:
         self.scon = scon
         self.filename = "info/refs"
         self.store = store
@@ -850,7 +861,7 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
 
 
 class SwiftRepo(BaseRepo):
-    def __init__(self, root, conf):
+    def __init__(self, root, conf) -> None:
         """Init a Git bare Repository on top of a Swift container.
 
         References are managed in info/refs objects by
@@ -884,7 +895,7 @@ class SwiftRepo(BaseRepo):
         return False
 
     def _put_named_file(self, filename, contents):
-        """Put an object in a Swift container
+        """Put an object in a Swift container.
 
         Args:
           filename: the path to the object to put on Swift
@@ -916,7 +927,7 @@ class SwiftRepo(BaseRepo):
 
 
 class SwiftSystemBackend(Backend):
-    def __init__(self, logger, conf):
+    def __init__(self, logger, conf) -> None:
         self.conf = conf
         self.logger = logger
 

+ 2 - 2
dulwich/contrib/test_paramiko_vendor.py

@@ -35,8 +35,8 @@ else:
     from .paramiko_vendor import ParamikoSSHVendor
 
     class Server(paramiko.ServerInterface):
-        """http://docs.paramiko.org/en/2.4/api/server.html"""
-        def __init__(self, commands, *args, **kwargs):
+        """http://docs.paramiko.org/en/2.4/api/server.html."""
+        def __init__(self, commands, *args, **kwargs) -> None:
             super().__init__(*args, **kwargs)
             self.commands = commands
 

+ 4 - 4
dulwich/contrib/test_release_robot.py

@@ -40,10 +40,10 @@ def gmtime_to_datetime(gmt):
 
 
 class TagPatternTests(unittest.TestCase):
-    """test tag patterns"""
+    """test tag patterns."""
 
     def test_tag_pattern(self):
-        """test tag patterns"""
+        """Test tag patterns."""
         test_cases = {
             "0.3": "0.3",
             "v0.3": "0.3",
@@ -63,7 +63,7 @@ class TagPatternTests(unittest.TestCase):
 
 
 class GetRecentTagsTest(unittest.TestCase):
-    """test get recent tags"""
+    """test get recent tags."""
 
     # Git repo for dulwich project
     test_repo = os.path.join(BASEDIR, "dulwich_test_repo.zip")
@@ -116,7 +116,7 @@ class GetRecentTagsTest(unittest.TestCase):
         shutil.rmtree(cls.projdir)
 
     def test_get_recent_tags(self):
-        """test get recent tags"""
+        """Test get recent tags."""
         tags = release_robot.get_recent_tags(self.projdir)  # get test tags
         for tag, metadata in tags:
             tag = tag.encode("utf-8")

+ 3 - 3
dulwich/contrib/test_swift.py

@@ -88,7 +88,7 @@ def create_swift_connector(store={}):
 
 
 class Response:
-    def __init__(self, headers={}, status=200, content=None):
+    def __init__(self, headers={}, status=200, content=None) -> None:
         self.headers = headers
         self.status_code = status
         self.content = content
@@ -179,7 +179,7 @@ def create_commits(length=1, marker=b"Default"):
 
 @skipIf(missing_libs, skipmsg)
 class FakeSwiftConnector:
-    def __init__(self, root, conf, store=None):
+    def __init__(self, root, conf, store=None) -> None:
         if store:
             self.store = store
         else:
@@ -301,7 +301,7 @@ class TestSwiftInfoRefsContainer(TestCase):
         self.object_store = {}
 
     def test_init(self):
-        """info/refs does not exists"""
+        """info/refs does not exists."""
         irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
         self.assertEqual(len(irc._refs), 0)
         self.fsc.store = self.store

+ 3 - 3
dulwich/contrib/test_swift_smoke.py

@@ -20,7 +20,7 @@
 # License, Version 2.0.
 #
 
-"""Start functional tests
+"""Start functional tests.
 
 A Swift installation must be available before
 starting those tests. The account and authentication method used
@@ -48,9 +48,9 @@ from dulwich.contrib import swift  # noqa:E402
 
 
 class DulwichServer:
-    """Start the TCPGitServer with Swift backend"""
+    """Start the TCPGitServer with Swift backend."""
 
-    def __init__(self, backend, port):
+    def __init__(self, backend, port) -> None:
         self.port = port
         self.backend = backend
 

+ 3 - 3
dulwich/credentials.py

@@ -19,7 +19,7 @@
 # License, Version 2.0.
 #
 
-"""Support for git credential helpers
+"""Support for git credential helpers.
 
 https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage
 
@@ -43,7 +43,7 @@ def match_urls(url: ParseResult, url_prefix: ParseResult) -> bool:
 
 
 def match_partial_url(valid_url: ParseResult, partial_url: str) -> bool:
-    """matches a parsed url with a partial url (no scheme/netloc)"""
+    """Matches a parsed url with a partial url (no scheme/netloc)."""
     if "://" not in partial_url:
         parsed = urlparse("scheme://" + partial_url)
     else:
@@ -67,7 +67,7 @@ def match_partial_url(valid_url: ParseResult, partial_url: str) -> bool:
 def urlmatch_credential_sections(
     config: ConfigDict, url: Optional[str]
 ) -> Iterator[SectionLike]:
-    """Returns credential sections from the config which match the given URL"""
+    """Returns credential sections from the config which match the given URL."""
     encoding = config.encoding or sys.getdefaultencoding()
     parsed_url = urlparse(url or "")
     for config_section in config.sections():

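A sketch of the URL-matching helper touched above; the URLs are illustrative and the result depends on dulwich's matching rules:

    from urllib.parse import urlparse
    from dulwich.credentials import match_partial_url

    # Compare a full URL against a credential URL that omits the scheme.
    valid = urlparse("https://example.com/project.git")
    print(match_partial_url(valid, "example.com/project.git"))
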
+ 12 - 3
dulwich/diff_tree.py

@@ -74,6 +74,7 @@ def _merge_entries(path, tree1, tree2):
       path: A path to prepend to all tree entry names.
       tree1: The first Tree object to iterate, or None.
       tree2: The second Tree object to iterate, or None.
+
     Returns:
       A list of pairs of TreeEntry objects for each pair of entries in
         the trees. If an entry exists in one tree but not the other, the other
@@ -124,6 +125,7 @@ def walk_trees(store, tree1_id, tree2_id, prune_identical=False):
       tree1_id: The SHA of the first Tree object to iterate, or None.
       tree2_id: The SHA of the second Tree object to iterate, or None.
       prune_identical: If True, identical subtrees will not be walked.
+
     Returns:
       Iterator over Pairs of TreeEntry objects for each pair of entries
         in the trees and their subtrees recursively. If an entry exists in one
@@ -177,6 +179,7 @@ def tree_changes(
       rename_detector: RenameDetector object for detecting renames.
       change_type_same: Whether to report change types in the same
         entry or as delete+add.
+
     Returns:
       Iterator over TreeChange instances for each change between the
         source and target tree.
@@ -304,6 +307,7 @@ def _count_blocks(obj):
 
     Args:
       obj: The object to count blocks for.
+
     Returns:
       A dict of block hashcode -> total bytes occurring.
     """
@@ -339,6 +343,7 @@ def _common_bytes(blocks1, blocks2):
     Args:
       blocks1: The first dict of block hashcode -> total bytes.
       blocks2: The second dict of block hashcode -> total bytes.
+
     Returns:
       The number of bytes in common between blocks1 and blocks2. This is
       only approximate due to possible hash collisions.
@@ -362,6 +367,7 @@ def _similarity_score(obj1, obj2, block_cache=None):
       obj2: The second object to score.
       block_cache: An optional dict of SHA to block counts to cache
         results between calls.
+
     Returns:
       The similarity score between the two objects, defined as the
         number of bytes in common between the two objects divided by the
@@ -402,7 +408,7 @@ class RenameDetector:
         max_files=MAX_FILES,
         rewrite_threshold=REWRITE_THRESHOLD,
         find_copies_harder=False,
-    ):
+    ) -> None:
         """Initialize the rename detector.
 
         Args:
@@ -631,7 +637,10 @@ _merge_entries_py = _merge_entries
 _count_blocks_py = _count_blocks
 try:
     # Try to import C versions
-    from dulwich._diff_tree import (_count_blocks, _is_tree,  # type: ignore
-                                    _merge_entries)
+    from dulwich._diff_tree import (  # type: ignore
+        _count_blocks,
+        _is_tree,
+        _merge_entries,
+    )
 except ImportError:
     pass

+ 10 - 10
dulwich/errors.py

@@ -32,7 +32,7 @@ import binascii
 class ChecksumMismatch(Exception):
     """A checksum didn't match the expected contents."""
 
-    def __init__(self, expected, got, extra=None):
+    def __init__(self, expected, got, extra=None) -> None:
         if len(expected) == 20:
             expected = binascii.hexlify(expected)
         if len(got) == 20:
@@ -63,7 +63,7 @@ class WrongObjectException(Exception):
 
     type_name: str
 
-    def __init__(self, sha, *args, **kwargs):
+    def __init__(self, sha, *args, **kwargs) -> None:
         Exception.__init__(self, "{} is not a {}".format(sha, self.type_name))
 
 
@@ -92,9 +92,9 @@ class NotBlobError(WrongObjectException):
 
 
 class MissingCommitError(Exception):
-    """Indicates that a commit was not found in the repository"""
+    """Indicates that a commit was not found in the repository."""
 
-    def __init__(self, sha, *args, **kwargs):
+    def __init__(self, sha, *args, **kwargs) -> None:
         self.sha = sha
         Exception.__init__(self, "%s is not in the revision store" % sha)
 
@@ -102,28 +102,28 @@ class MissingCommitError(Exception):
 class ObjectMissing(Exception):
     """Indicates that a requested object is missing."""
 
-    def __init__(self, sha, *args, **kwargs):
+    def __init__(self, sha, *args, **kwargs) -> None:
         Exception.__init__(self, "%s is not in the pack" % sha)
 
 
 class ApplyDeltaError(Exception):
     """Indicates that applying a delta failed."""
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         Exception.__init__(self, *args, **kwargs)
 
 
 class NotGitRepository(Exception):
     """Indicates that no Git repository was found."""
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         Exception.__init__(self, *args, **kwargs)
 
 
 class GitProtocolError(Exception):
     """Git protocol exception."""
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         Exception.__init__(self, *args, **kwargs)
 
     def __eq__(self, other):
@@ -137,7 +137,7 @@ class SendPackError(GitProtocolError):
 class HangupException(GitProtocolError):
     """Hangup exception."""
 
-    def __init__(self, stderr_lines=None):
+    def __init__(self, stderr_lines=None) -> None:
         if stderr_lines:
             super().__init__(
                 "\n".join(
@@ -157,7 +157,7 @@ class HangupException(GitProtocolError):
 class UnexpectedCommandError(GitProtocolError):
     """Unexpected command received in a proto line."""
 
-    def __init__(self, command):
+    def __init__(self, command) -> None:
         if command is None:
             command = "flush-pkt"
         else:

+ 3 - 4
dulwich/fastexport.py

@@ -23,9 +23,8 @@
 
 
 import stat
 import stat
 
 
-from fastimport import commands
+from fastimport import commands, parser, processor
 from fastimport import errors as fastimport_errors
 from fastimport import errors as fastimport_errors
-from fastimport import parser, processor
 
 
 from .index import commit_tree
 from .index import commit_tree
 from .object_store import iter_tree_contents
 from .object_store import iter_tree_contents
@@ -40,7 +39,7 @@ def split_email(text):
 class GitFastExporter:
 class GitFastExporter:
     """Generate a fast-export output stream for Git objects."""
     """Generate a fast-export output stream for Git objects."""
 
 
-    def __init__(self, outf, store):
+    def __init__(self, outf, store) -> None:
         self.outf = outf
         self.outf = outf
         self.store = store
         self.store = store
         self.markers = {}
         self.markers = {}
@@ -122,7 +121,7 @@ class GitImportProcessor(processor.ImportProcessor):
 
 
     # FIXME: Batch creation of objects?
     # FIXME: Batch creation of objects?
 
 
-    def __init__(self, repo, params=None, verbose=False, outf=None):
+    def __init__(self, repo, params=None, verbose=False, outf=None) -> None:
         processor.ImportProcessor.__init__(self, params, verbose)
         processor.ImportProcessor.__init__(self, params, verbose)
         self.repo = repo
         self.repo = repo
         self.last_commit = ZERO_SHA
         self.last_commit = ZERO_SHA

+ 5 - 4
dulwich/file.py

@@ -34,7 +34,7 @@ def ensure_dir_exists(dirname):
 
 
 
 
 def _fancy_rename(oldname, newname):
 def _fancy_rename(oldname, newname):
-    """Rename file with temporary backup file to rollback if rename fails"""
+    """Rename file with temporary backup file to rollback if rename fails."""
     if not os.path.exists(newname):
     if not os.path.exists(newname):
         try:
         try:
             os.rename(oldname, newname)
             os.rename(oldname, newname)
@@ -97,7 +97,7 @@ def GitFile(filename, mode="rb", bufsize=-1, mask=0o644):
 class FileLocked(Exception):
 class FileLocked(Exception):
     """File is already locked."""
     """File is already locked."""
 
 
-    def __init__(self, filename, lockfilename):
+    def __init__(self, filename, lockfilename) -> None:
         self.filename = filename
         self.filename = filename
         self.lockfilename = lockfilename
         self.lockfilename = lockfilename
         super().__init__(filename, lockfilename)
         super().__init__(filename, lockfilename)
@@ -138,7 +138,7 @@ class _GitFile:
         "writelines",
         "writelines",
     )
     )
 
 
-    def __init__(self, filename, mode, bufsize, mask):
+    def __init__(self, filename, mode, bufsize, mask) -> None:
         self._filename = filename
         self._filename = filename
         if isinstance(self._filename, bytes):
         if isinstance(self._filename, bytes):
             self._lockfilename = self._filename + b".lock"
             self._lockfilename = self._filename + b".lock"
@@ -180,6 +180,7 @@ class _GitFile:
             However, it is not guaranteed to do so (e.g. if a filesystem
             However, it is not guaranteed to do so (e.g. if a filesystem
             becomes suddenly read-only), which will prevent future writes to
             becomes suddenly read-only), which will prevent future writes to
             this file until the lockfile is removed manually.
             this file until the lockfile is removed manually.
+
         Raises:
         Raises:
           OSError: if the original file could not be overwritten. The
           OSError: if the original file could not be overwritten. The
             lock file is still closed, so further attempts to write to the same
             lock file is still closed, so further attempts to write to the same
@@ -203,7 +204,7 @@ class _GitFile:
         finally:
         finally:
             self.abort()
             self.abort()
 
 
-    def __del__(self):
+    def __del__(self) -> None:
         if not getattr(self, '_closed', True):
         if not getattr(self, '_closed', True):
             warnings.warn('unclosed %r' % self, ResourceWarning, stacklevel=2)
             warnings.warn('unclosed %r' % self, ResourceWarning, stacklevel=2)
             self.abort()
             self.abort()

+ 3 - 6
dulwich/graph.py

@@ -18,9 +18,7 @@
 # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
 # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
 # License, Version 2.0.
 # License, Version 2.0.
 
 
-"""
-Implementation of merge-base following the approach of git
-"""
+"""Implementation of merge-base following the approach of git."""
 
 
 from collections import deque
 from collections import deque
 from typing import Deque
 from typing import Deque
@@ -83,7 +81,7 @@ def _find_lcas(lookup_parents, c1, c2s):
 
 
 
 
 def find_merge_base(repo, commit_ids):
 def find_merge_base(repo, commit_ids):
-    """Find lowest common ancestors of commit_ids[0] and *any* of commits_ids[1:]
+    """Find lowest common ancestors of commit_ids[0] and *any* of commits_ids[1:].
 
 
     Args:
     Args:
       repo: Repository object
       repo: Repository object
@@ -104,7 +102,7 @@ def find_merge_base(repo, commit_ids):
 
 
 
 
 def find_octopus_base(repo, commit_ids):
 def find_octopus_base(repo, commit_ids):
-    """Find lowest common ancestors of *all* provided commit_ids
+    """Find lowest common ancestors of *all* provided commit_ids.
 
 
     Args:
     Args:
       repo: Repository
       repo: Repository
@@ -112,7 +110,6 @@ def find_octopus_base(repo, commit_ids):
     Returns:
     Returns:
       list of lowest common ancestor commit_ids
       list of lowest common ancestor commit_ids
     """
     """
-
     if not commit_ids:
     if not commit_ids:
         return []
         return []
     if len(commit_ids) <= 2:
     if len(commit_ids) <= 2:
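
A hedged usage sketch of find_merge_base, whose docstring is tightened above; the repository path and branch names are illustrative assumptions.

    from dulwich.graph import find_merge_base
    from dulwich.repo import Repo

    repo = Repo("/path/to/repo")              # hypothetical local repository
    c1 = repo.refs[b"refs/heads/master"]      # assumed branch heads
    c2 = repo.refs[b"refs/heads/feature"]
    # List of lowest-common-ancestor commit ids of c1 and the other head(s).
    print(find_merge_base(repo, [c1, c2]))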

+ 6 - 3
dulwich/greenthreads.py

@@ -25,8 +25,11 @@
 import gevent
 import gevent
 from gevent import pool
 from gevent import pool
 
 
-from .object_store import (MissingObjectFinder, _collect_ancestors,
-                           _collect_filetree_revs)
+from .object_store import (
+    MissingObjectFinder,
+    _collect_ancestors,
+    _collect_filetree_revs,
+)
 from .objects import Commit, Tag
 from .objects import Commit, Tag
 
 
 
 
@@ -75,7 +78,7 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
         get_tagged=None,
         get_tagged=None,
         concurrency=1,
         concurrency=1,
         get_parents=None,
         get_parents=None,
-    ):
+    ) -> None:
         def collect_tree_sha(sha):
         def collect_tree_sha(sha):
             self.sha_done.add(sha)
             self.sha_done.add(sha)
             cmt = object_store[sha]
             cmt = object_store[sha]

+ 13 - 15
dulwich/hooks.py

@@ -30,7 +30,7 @@ class Hook:
     """Generic hook object."""
     """Generic hook object."""
 
 
     def execute(self, *args):
     def execute(self, *args):
-        """Execute the hook with the given args
+        """Execute the hook with the given args.
 
 
         Args:
         Args:
           args: argument list to hook
           args: argument list to hook
@@ -43,7 +43,7 @@ class Hook:
 
 
 
 
 class ShellHook(Hook):
 class ShellHook(Hook):
-    """Hook by executable file
+    """Hook by executable file.
 
 
     Implements standard githooks(5) [0]:
     Implements standard githooks(5) [0]:
 
 
@@ -58,8 +58,8 @@ class ShellHook(Hook):
         pre_exec_callback=None,
         pre_exec_callback=None,
         post_exec_callback=None,
         post_exec_callback=None,
         cwd=None,
         cwd=None,
-    ):
-        """Setup shell hook definition
+    ) -> None:
+        """Setup shell hook definition.
 
 
         Args:
         Args:
           name: name of hook for error messages
           name: name of hook for error messages
@@ -85,8 +85,7 @@ class ShellHook(Hook):
         self.cwd = cwd
         self.cwd = cwd
 
 
     def execute(self, *args):
     def execute(self, *args):
-        """Execute the hook with given args"""
-
+        """Execute the hook with given args."""
         if len(args) != self.numparam:
         if len(args) != self.numparam:
             raise HookError(
             raise HookError(
                 "Hook %s executed with wrong number of args. \
                 "Hook %s executed with wrong number of args. \
@@ -115,28 +114,27 @@ class ShellHook(Hook):
 
 
 
 
 class PreCommitShellHook(ShellHook):
 class PreCommitShellHook(ShellHook):
-    """pre-commit shell hook"""
+    """pre-commit shell hook."""
 
 
-    def __init__(self, cwd, controldir):
+    def __init__(self, cwd, controldir) -> None:
         filepath = os.path.join(controldir, "hooks", "pre-commit")
         filepath = os.path.join(controldir, "hooks", "pre-commit")
 
 
         ShellHook.__init__(self, "pre-commit", filepath, 0, cwd=cwd)
         ShellHook.__init__(self, "pre-commit", filepath, 0, cwd=cwd)
 
 
 
 
 class PostCommitShellHook(ShellHook):
 class PostCommitShellHook(ShellHook):
-    """post-commit shell hook"""
+    """post-commit shell hook."""
 
 
-    def __init__(self, controldir):
+    def __init__(self, controldir) -> None:
         filepath = os.path.join(controldir, "hooks", "post-commit")
         filepath = os.path.join(controldir, "hooks", "post-commit")
 
 
         ShellHook.__init__(self, "post-commit", filepath, 0, cwd=controldir)
         ShellHook.__init__(self, "post-commit", filepath, 0, cwd=controldir)
 
 
 
 
 class CommitMsgShellHook(ShellHook):
 class CommitMsgShellHook(ShellHook):
-    """commit-msg shell hook
-    """
+    """commit-msg shell hook."""
 
 
-    def __init__(self, controldir):
+    def __init__(self, controldir) -> None:
         filepath = os.path.join(controldir, "hooks", "commit-msg")
         filepath = os.path.join(controldir, "hooks", "commit-msg")
 
 
         def prepare_msg(*args):
         def prepare_msg(*args):
@@ -163,9 +161,9 @@ class CommitMsgShellHook(ShellHook):
 
 
 
 
 class PostReceiveShellHook(ShellHook):
 class PostReceiveShellHook(ShellHook):
-    """post-receive shell hook"""
+    """post-receive shell hook."""
 
 
-    def __init__(self, controldir):
+    def __init__(self, controldir) -> None:
         self.controldir = controldir
         self.controldir = controldir
         filepath = os.path.join(controldir, "hooks", "post-receive")
         filepath = os.path.join(controldir, "hooks", "post-receive")
         ShellHook.__init__(self, "post-receive", path=filepath, numparam=0)
         ShellHook.__init__(self, "post-receive", path=filepath, numparam=0)

+ 5 - 8
dulwich/ignore.py

@@ -25,8 +25,7 @@ For details for the matching rules, see https://git-scm.com/docs/gitignore
 import os.path
 import os.path
 import re
 import re
 from contextlib import suppress
 from contextlib import suppress
-from typing import (TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional,
-                    Union)
+from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional, Union
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     from .repo import Repo
     from .repo import Repo
@@ -80,7 +79,6 @@ def translate(pat: bytes) -> bytes:
     Originally copied from fnmatch in Python 2.7, but modified for Dulwich
     Originally copied from fnmatch in Python 2.7, but modified for Dulwich
     to cope with features in Git ignore patterns.
     to cope with features in Git ignore patterns.
     """
     """
-
     res = b"(?ms)"
     res = b"(?ms)"
 
 
     if b"/" not in pat[:-1]:
     if b"/" not in pat[:-1]:
@@ -115,7 +113,6 @@ def read_ignore_patterns(f: BinaryIO) -> Iterable[bytes]:
       f: File-like object to read from
       f: File-like object to read from
     Returns: List of patterns
     Returns: List of patterns
     """
     """
-
     for line in f:
     for line in f:
         line = line.rstrip(b"\r\n")
         line = line.rstrip(b"\r\n")
 
 
@@ -151,7 +148,7 @@ def match_pattern(path: bytes, pattern: bytes, ignorecase: bool = False) -> bool
 class Pattern:
 class Pattern:
     """A single ignore pattern."""
     """A single ignore pattern."""
 
 
-    def __init__(self, pattern: bytes, ignorecase: bool = False):
+    def __init__(self, pattern: bytes, ignorecase: bool = False) -> None:
         self.pattern = pattern
         self.pattern = pattern
         self.ignorecase = ignorecase
         self.ignorecase = ignorecase
         if pattern[0:1] == b"!":
         if pattern[0:1] == b"!":
@@ -197,7 +194,7 @@ class Pattern:
 
 
 
 
 class IgnoreFilter:
 class IgnoreFilter:
-    def __init__(self, patterns: Iterable[bytes], ignorecase: bool = False, path=None):
+    def __init__(self, patterns: Iterable[bytes], ignorecase: bool = False, path=None) -> None:
         self._patterns: List[Pattern] = []
         self._patterns: List[Pattern] = []
         self._ignorecase = ignorecase
         self._ignorecase = ignorecase
         self._path = path
         self._path = path
@@ -251,7 +248,7 @@ class IgnoreFilter:
 class IgnoreFilterStack:
 class IgnoreFilterStack:
     """Check for ignore status in multiple filters."""
     """Check for ignore status in multiple filters."""
 
 
-    def __init__(self, filters):
+    def __init__(self, filters) -> None:
         self._filters = filters
         self._filters = filters
 
 
     def is_ignored(self, path: str) -> Optional[bool]:
     def is_ignored(self, path: str) -> Optional[bool]:
@@ -297,7 +294,7 @@ class IgnoreFilterManager:
         top_path: str,
         top_path: str,
         global_filters: List[IgnoreFilter],
         global_filters: List[IgnoreFilter],
         ignorecase: bool,
         ignorecase: bool,
-    ):
+    ) -> None:
         self._path_filters: Dict[str, Optional[IgnoreFilter]] = {}
         self._path_filters: Dict[str, Optional[IgnoreFilter]] = {}
         self._top_path = top_path
         self._top_path = top_path
         self._global_filters = global_filters
         self._global_filters = global_filters
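
A brief sketch of IgnoreFilter, whose constructor is annotated above, fed gitignore-style byte patterns; the expected results follow the gitignore matching rules referenced in this module.

    from dulwich.ignore import IgnoreFilter

    f = IgnoreFilter([b"*.pyc", b"!keep.pyc"])
    assert f.is_ignored(b"foo.pyc") is True    # excluded by *.pyc
    assert f.is_ignored(b"keep.pyc") is False  # re-included by the negation
    assert f.is_ignored(b"foo.py") is None     # no pattern matched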

+ 31 - 12
dulwich/index.py

@@ -25,13 +25,30 @@ import os
 import stat
 import stat
 import struct
 import struct
 import sys
 import sys
-from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
-                    Optional, Tuple, Union)
+from typing import (
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Tuple,
+    Union,
+)
 
 
 from .file import GitFile
 from .file import GitFile
 from .object_store import iter_tree_contents
 from .object_store import iter_tree_contents
-from .objects import (S_IFGITLINK, S_ISGITLINK, Blob, ObjectID, Tree,
-                      hex_to_sha, sha_to_hex)
+from .objects import (
+    S_IFGITLINK,
+    S_ISGITLINK,
+    Blob,
+    ObjectID,
+    Tree,
+    hex_to_sha,
+    sha_to_hex,
+)
 from .pack import ObjectContainer, SHA1Reader, SHA1Writer
 from .pack import ObjectContainer, SHA1Reader, SHA1Writer
 
 
 # TODO(jelmer): Switch to dataclass?
 # TODO(jelmer): Switch to dataclass?
@@ -78,6 +95,7 @@ def pathsplit(path: bytes) -> Tuple[bytes, bytes]:
 
 
     Args:
     Args:
       path: The path to split.
       path: The path to split.
+
     Returns:
     Returns:
       Tuple with directory name and basename
       Tuple with directory name and basename
     """
     """
@@ -211,7 +229,7 @@ def write_cache_entry(f, name: bytes, entry: IndexEntry, version: int) -> None:
 class UnsupportedIndexFormat(Exception):
 class UnsupportedIndexFormat(Exception):
     """An unsupported index format was encountered."""
     """An unsupported index format was encountered."""
 
 
-    def __init__(self, version):
+    def __init__(self, version) -> None:
         self.index_format_version = version
         self.index_format_version = version
 
 
 
 
@@ -274,6 +292,7 @@ def cleanup_mode(mode: int) -> int:
 
 
     Args:
     Args:
       mode: Mode to clean up.
       mode: Mode to clean up.
+
     Returns:
     Returns:
       mode
       mode
     """
     """
@@ -292,7 +311,7 @@ def cleanup_mode(mode: int) -> int:
 class Index:
 class Index:
     """A Git Index file."""
     """A Git Index file."""
 
 
-    def __init__(self, filename: Union[bytes, str], read=True):
+    def __init__(self, filename: Union[bytes, str], read=True) -> None:
         """Create an index object associated with the given filename.
         """Create an index object associated with the given filename.
 
 
         Args:
         Args:
@@ -310,7 +329,7 @@ class Index:
     def path(self):
     def path(self):
         return self._filename
         return self._filename
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r})".format(self.__class__.__name__, self._filename)
         return "{}({!r})".format(self.__class__.__name__, self._filename)
 
 
     def write(self) -> None:
     def write(self) -> None:
@@ -371,13 +390,13 @@ class Index:
         """Remove all contents from this index."""
         """Remove all contents from this index."""
         self._byname = {}
         self._byname = {}
 
 
-    def __setitem__(self, name: bytes, x: IndexEntry):
+    def __setitem__(self, name: bytes, x: IndexEntry) -> None:
         assert isinstance(name, bytes)
         assert isinstance(name, bytes)
         assert len(x) == len(IndexEntry._fields)
         assert len(x) == len(IndexEntry._fields)
         # Remove the old entry if any
         # Remove the old entry if any
         self._byname[name] = IndexEntry(*x)
         self._byname[name] = IndexEntry(*x)
 
 
-    def __delitem__(self, name: bytes):
+    def __delitem__(self, name: bytes) -> None:
         assert isinstance(name, bytes)
         assert isinstance(name, bytes)
         del self._byname[name]
         del self._byname[name]
 
 
@@ -570,7 +589,7 @@ if sys.platform == 'win32':
 
 
     class WindowsSymlinkPermissionError(PermissionError):
     class WindowsSymlinkPermissionError(PermissionError):
 
 
-        def __init__(self, errno, msg, filename):
+        def __init__(self, errno, msg, filename) -> None:
             super(PermissionError, self).__init__(
             super(PermissionError, self).__init__(
                 errno, "Unable to create symlink; "
                 errno, "Unable to create symlink; "
                 "do you have developer mode enabled? %s" % msg,
                 "do you have developer mode enabled? %s" % msg,
@@ -668,7 +687,7 @@ def build_index_from_tree(
     validate_path_element=validate_path_element_default,
     validate_path_element=validate_path_element_default,
     symlink_fn=None
     symlink_fn=None
 ):
 ):
-    """Generate and materialize index from a tree
+    """Generate and materialize index from a tree.
 
 
     Args:
     Args:
       tree_id: Tree to materialize
       tree_id: Tree to materialize
@@ -995,7 +1014,7 @@ class locked_index:
 
 
     Works as a context manager.
     Works as a context manager.
     """
     """
-    def __init__(self, path: Union[bytes, str]):
+    def __init__(self, path: Union[bytes, str]) -> None:
         self._path = path
         self._path = path
 
 
     def __enter__(self):
     def __enter__(self):
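
A short sketch of pathsplit and cleanup_mode from the hunks above; the expected values follow their docstrings.

    from dulwich.index import cleanup_mode, pathsplit

    assert pathsplit(b"docs/conf.py") == (b"docs", b"conf.py")
    assert pathsplit(b"setup.py") == (b"", b"setup.py")
    # Regular-file modes are normalized to 0o100644 or 0o100755.
    assert cleanup_mode(0o100664) == 0o100644
    assert cleanup_mode(0o100775) == 0o100755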

+ 1 - 1
dulwich/lfs.py

@@ -26,7 +26,7 @@ import tempfile
 class LFSStore:
 class LFSStore:
     """Stores objects on disk, indexed by SHA256."""
     """Stores objects on disk, indexed by SHA256."""
 
 
-    def __init__(self, path):
+    def __init__(self, path) -> None:
         self.path = path
         self.path = path
 
 
     @classmethod
     @classmethod

+ 13 - 15
dulwich/line_ending.py

@@ -17,7 +17,7 @@
 # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
 # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
 # License, Version 2.0.
 # License, Version 2.0.
 #
 #
-"""All line-ending related functions, from conversions to config processing
+"""All line-ending related functions, from conversions to config processing.
 
 
 Line-ending normalization is a complex beast. Here is some notes and details
 Line-ending normalization is a complex beast. Here is some notes and details
 about how it seems to work.
 about how it seems to work.
@@ -145,7 +145,7 @@ LF = b"\n"
 
 
 
 
 def convert_crlf_to_lf(text_hunk):
 def convert_crlf_to_lf(text_hunk):
-    """Convert CRLF in text hunk into LF
+    """Convert CRLF in text hunk into LF.
 
 
     Args:
     Args:
       text_hunk: A bytes string representing a text hunk
       text_hunk: A bytes string representing a text hunk
@@ -155,7 +155,7 @@ def convert_crlf_to_lf(text_hunk):
 
 
 
 
 def convert_lf_to_crlf(text_hunk):
 def convert_lf_to_crlf(text_hunk):
-    """Convert LF in text hunk into CRLF
+    """Convert LF in text hunk into CRLF.
 
 
     Args:
     Args:
       text_hunk: A bytes string representing a text hunk
       text_hunk: A bytes string representing a text hunk
@@ -167,7 +167,7 @@ def convert_lf_to_crlf(text_hunk):
 
 
 
 
 def get_checkout_filter(core_eol, core_autocrlf, git_attributes):
 def get_checkout_filter(core_eol, core_autocrlf, git_attributes):
-    """Returns the correct checkout filter based on the passed arguments"""
+    """Returns the correct checkout filter based on the passed arguments."""
     # TODO this function should process the git_attributes for the path and if
     # TODO this function should process the git_attributes for the path and if
     # the text attribute is not defined, fallback on the
     # the text attribute is not defined, fallback on the
     # get_checkout_filter_autocrlf function with the autocrlf value
     # get_checkout_filter_autocrlf function with the autocrlf value
@@ -175,7 +175,7 @@ def get_checkout_filter(core_eol, core_autocrlf, git_attributes):
 
 
 
 
 def get_checkin_filter(core_eol, core_autocrlf, git_attributes):
 def get_checkin_filter(core_eol, core_autocrlf, git_attributes):
-    """Returns the correct checkin filter based on the passed arguments"""
+    """Returns the correct checkin filter based on the passed arguments."""
     # TODO this function should process the git_attributes for the path and if
     # TODO this function should process the git_attributes for the path and if
     # the text attribute is not defined, fallback on the
     # the text attribute is not defined, fallback on the
     # get_checkin_filter_autocrlf function with the autocrlf value
     # get_checkin_filter_autocrlf function with the autocrlf value
@@ -183,7 +183,7 @@ def get_checkin_filter(core_eol, core_autocrlf, git_attributes):
 
 
 
 
 def get_checkout_filter_autocrlf(core_autocrlf):
 def get_checkout_filter_autocrlf(core_autocrlf):
-    """Returns the correct checkout filter base on autocrlf value
+    """Returns the correct checkout filter base on autocrlf value.
 
 
     Args:
     Args:
       core_autocrlf: The bytes configuration value of core.autocrlf.
       core_autocrlf: The bytes configuration value of core.autocrlf.
@@ -191,7 +191,6 @@ def get_checkout_filter_autocrlf(core_autocrlf):
     Returns: Either None if no filter has to be applied or a function
     Returns: Either None if no filter has to be applied or a function
         accepting a single argument, a binary text hunk
         accepting a single argument, a binary text hunk
     """
     """
-
     if core_autocrlf == b"true":
     if core_autocrlf == b"true":
         return convert_lf_to_crlf
         return convert_lf_to_crlf
 
 
@@ -199,7 +198,7 @@ def get_checkout_filter_autocrlf(core_autocrlf):
 
 
 
 
 def get_checkin_filter_autocrlf(core_autocrlf):
 def get_checkin_filter_autocrlf(core_autocrlf):
-    """Returns the correct checkin filter base on autocrlf value
+    """Returns the correct checkin filter base on autocrlf value.
 
 
     Args:
     Args:
       core_autocrlf: The bytes configuration value of core.autocrlf.
       core_autocrlf: The bytes configuration value of core.autocrlf.
@@ -207,7 +206,6 @@ def get_checkin_filter_autocrlf(core_autocrlf):
     Returns: Either None if no filter has to be applied or a function
     Returns: Either None if no filter has to be applied or a function
         accepting a single argument, a binary text hunk
         accepting a single argument, a binary text hunk
     """
     """
-
     if core_autocrlf == b"true" or core_autocrlf == b"input":
     if core_autocrlf == b"true" or core_autocrlf == b"input":
         return convert_crlf_to_lf
         return convert_crlf_to_lf
 
 
@@ -217,10 +215,10 @@ def get_checkin_filter_autocrlf(core_autocrlf):
 
 
 class BlobNormalizer:
 class BlobNormalizer:
     """An object to store computation result of which filter to apply based
     """An object to store computation result of which filter to apply based
-    on configuration, gitattributes, path and operation (checkin or checkout)
+    on configuration, gitattributes, path and operation (checkin or checkout).
     """
     """
 
 
-    def __init__(self, config_stack, gitattributes):
+    def __init__(self, config_stack, gitattributes) -> None:
         self.config_stack = config_stack
         self.config_stack = config_stack
         self.gitattributes = gitattributes
         self.gitattributes = gitattributes
 
 
@@ -243,7 +241,7 @@ class BlobNormalizer:
         )
         )
 
 
     def checkin_normalize(self, blob, tree_path):
     def checkin_normalize(self, blob, tree_path):
-        """Normalize a blob during a checkin operation"""
+        """Normalize a blob during a checkin operation."""
         if self.fallback_write_filter is not None:
         if self.fallback_write_filter is not None:
             return normalize_blob(
             return normalize_blob(
                 blob, self.fallback_write_filter, binary_detection=True
                 blob, self.fallback_write_filter, binary_detection=True
@@ -252,7 +250,7 @@ class BlobNormalizer:
         return blob
         return blob
 
 
     def checkout_normalize(self, blob, tree_path):
     def checkout_normalize(self, blob, tree_path):
-        """Normalize a blob during a checkout operation"""
+        """Normalize a blob during a checkout operation."""
         if self.fallback_read_filter is not None:
         if self.fallback_read_filter is not None:
             return normalize_blob(
             return normalize_blob(
                 blob, self.fallback_read_filter, binary_detection=True
                 blob, self.fallback_read_filter, binary_detection=True
@@ -264,7 +262,7 @@ class BlobNormalizer:
 def normalize_blob(blob, conversion, binary_detection):
 def normalize_blob(blob, conversion, binary_detection):
     """Takes a blob as input returns either the original blob if
     """Takes a blob as input returns either the original blob if
     binary_detection is True and the blob content looks like binary, else
     binary_detection is True and the blob content looks like binary, else
-    return a new blob with converted data
+    return a new blob with converted data.
     """
     """
     # Read the original blob
     # Read the original blob
     data = blob.data
     data = blob.data
@@ -286,7 +284,7 @@ def normalize_blob(blob, conversion, binary_detection):
 
 
 
 
 class TreeBlobNormalizer(BlobNormalizer):
 class TreeBlobNormalizer(BlobNormalizer):
-    def __init__(self, config_stack, git_attributes, object_store, tree=None):
+    def __init__(self, config_stack, git_attributes, object_store, tree=None) -> None:
         super().__init__(config_stack, git_attributes)
         super().__init__(config_stack, git_attributes)
         if tree:
         if tree:
             self.existing_paths = {
             self.existing_paths = {
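
A minimal round-trip sketch of the conversion helpers documented above.

    from dulwich.line_ending import convert_crlf_to_lf, convert_lf_to_crlf

    assert convert_crlf_to_lf(b"hi\r\nthere\r\n") == b"hi\nthere\n"
    assert convert_lf_to_crlf(b"hi\nthere\n") == b"hi\r\nthere\r\n"
    # Checkin filters normalize toward LF; checkout filters may expand to CRLF,
    # depending on core.autocrlf (see get_checkin_filter_autocrlf above).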

+ 3 - 4
dulwich/lru_cache.py

@@ -21,8 +21,7 @@
 
 
 """A simple least-recently-used (LRU) cache."""
 """A simple least-recently-used (LRU) cache."""
 
 
-from typing import (Callable, Dict, Generic, Iterable, Iterator, Optional,
-                    TypeVar)
+from typing import Callable, Dict, Generic, Iterable, Iterator, Optional, TypeVar
 
 
 _null_key = object()
 _null_key = object()
 
 
@@ -40,7 +39,7 @@ class _LRUNode(Generic[K, V]):
     next_key: K
     next_key: K
     size: Optional[int]
     size: Optional[int]
 
 
-    def __init__(self, key: K, value: V, cleanup=None):
+    def __init__(self, key: K, value: V, cleanup=None) -> None:
         self.prev = None
         self.prev = None
         self.next_key = _null_key  # type: ignore
         self.next_key = _null_key  # type: ignore
         self.key = key
         self.key = key
@@ -51,7 +50,7 @@ class _LRUNode(Generic[K, V]):
         #       actually costs us much of anything in normal usage
         #       actually costs us much of anything in normal usage
         self.size = None
         self.size = None
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         if self.prev is None:
         if self.prev is None:
             prev_key = None
             prev_key = None
         else:
         else:

+ 1 - 1
dulwich/mailmap.py

@@ -61,7 +61,7 @@ def read_mailmap(f):
 class Mailmap:
 class Mailmap:
     """Class for accessing a mailmap file."""
     """Class for accessing a mailmap file."""
 
 
-    def __init__(self, map=None):
+    def __init__(self, map=None) -> None:
         self._table = {}
         self._table = {}
         if map:
         if map:
             for (canonical_identity, from_identity) in map:
             for (canonical_identity, from_identity) in map:

+ 61 - 26
dulwich/object_store.py

@@ -28,8 +28,18 @@ import sys
 import warnings
 import warnings
 from contextlib import suppress
 from contextlib import suppress
 from io import BytesIO
 from io import BytesIO
-from typing import (Callable, Dict, Iterable, Iterator, List, Optional,
-                    Sequence, Set, Tuple, cast)
+from typing import (
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    cast,
+)
 
 
 try:
 try:
     from typing import Protocol
     from typing import Protocol
@@ -38,15 +48,43 @@ except ImportError:  # python << 3.8
 
 
 from .errors import NotTreeError
 from .errors import NotTreeError
 from .file import GitFile
 from .file import GitFile
-from .objects import (S_ISGITLINK, ZERO_SHA, Blob, Commit, ObjectID, ShaFile,
-                      Tag, Tree, TreeEntry, hex_to_filename, hex_to_sha,
-                      object_class, sha_to_hex, valid_hexsha)
-from .pack import (PACK_SPOOL_FILE_MAX_SIZE, ObjectContainer, Pack, PackData,
-                   PackedObjectContainer, PackFileDisappeared, PackHint,
-                   PackIndexer, PackInflater, PackStreamCopier, UnpackedObject,
-                   extend_pack, full_unpacked_object,
-                   generate_unpacked_objects, iter_sha1, load_pack_index_file,
-                   pack_objects_to_data, write_pack_data, write_pack_index)
+from .objects import (
+    S_ISGITLINK,
+    ZERO_SHA,
+    Blob,
+    Commit,
+    ObjectID,
+    ShaFile,
+    Tag,
+    Tree,
+    TreeEntry,
+    hex_to_filename,
+    hex_to_sha,
+    object_class,
+    sha_to_hex,
+    valid_hexsha,
+)
+from .pack import (
+    PACK_SPOOL_FILE_MAX_SIZE,
+    ObjectContainer,
+    Pack,
+    PackData,
+    PackedObjectContainer,
+    PackFileDisappeared,
+    PackHint,
+    PackIndexer,
+    PackInflater,
+    PackStreamCopier,
+    UnpackedObject,
+    extend_pack,
+    full_unpacked_object,
+    generate_unpacked_objects,
+    iter_sha1,
+    load_pack_index_file,
+    pack_objects_to_data,
+    write_pack_data,
+    write_pack_index,
+)
 from .protocol import DEPTH_INFINITE
 from .protocol import DEPTH_INFINITE
 from .refs import PEELED_TAG_SUFFIX, Ref
 from .refs import PEELED_TAG_SUFFIX, Ref
 
 
@@ -145,7 +183,7 @@ class BaseObjectStore:
         change_type_same=False,
         change_type_same=False,
         rename_detector=None,
         rename_detector=None,
     ):
     ):
-        """Find the differences between the contents of two trees
+        """Find the differences between the contents of two trees.
 
 
         Args:
         Args:
           source: SHA1 of the source tree
           source: SHA1 of the source tree
@@ -157,7 +195,6 @@ class BaseObjectStore:
         Returns: Iterator over tuples with
         Returns: Iterator over tuples with
             (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
             (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
         """
         """
-
         from .diff_tree import tree_changes
         from .diff_tree import tree_changes
         for change in tree_changes(
         for change in tree_changes(
             self,
             self,
@@ -322,7 +359,7 @@ class BaseObjectStore:
 
 
 
 
 class PackBasedObjectStore(BaseObjectStore):
 class PackBasedObjectStore(BaseObjectStore):
-    def __init__(self, pack_compression_level=-1):
+    def __init__(self, pack_compression_level=-1) -> None:
         self._pack_cache = {}
         self._pack_cache = {}
         self.pack_compression_level = pack_compression_level
         self.pack_compression_level = pack_compression_level
 
 
@@ -374,7 +411,7 @@ class PackBasedObjectStore(BaseObjectStore):
                 pass
                 pass
         return False
         return False
 
 
-    def __contains__(self, sha):
+    def __contains__(self, sha) -> bool:
         """Check if a particular object is present by SHA1.
         """Check if a particular object is present by SHA1.
 
 
         This method makes no distinction between loose and packed objects.
         This method makes no distinction between loose and packed objects.
@@ -655,7 +692,7 @@ class PackBasedObjectStore(BaseObjectStore):
 class DiskObjectStore(PackBasedObjectStore):
 class DiskObjectStore(PackBasedObjectStore):
     """Git-style object store that exists on disk."""
     """Git-style object store that exists on disk."""
 
 
-    def __init__(self, path, loose_compression_level=-1, pack_compression_level=-1):
+    def __init__(self, path, loose_compression_level=-1, pack_compression_level=-1) -> None:
         """Open an object store.
         """Open an object store.
 
 
         Args:
         Args:
@@ -672,7 +709,7 @@ class DiskObjectStore(PackBasedObjectStore):
         self.loose_compression_level = loose_compression_level
         self.loose_compression_level = loose_compression_level
         self.pack_compression_level = pack_compression_level
         self.pack_compression_level = pack_compression_level
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<{}({!r})>".format(self.__class__.__name__, self.path)
         return "<{}({!r})>".format(self.__class__.__name__, self.path)
 
 
     @classmethod
     @classmethod
@@ -956,7 +993,7 @@ class DiskObjectStore(PackBasedObjectStore):
 class MemoryObjectStore(BaseObjectStore):
 class MemoryObjectStore(BaseObjectStore):
     """Object store that keeps all objects in memory."""
     """Object store that keeps all objects in memory."""
 
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         super().__init__()
         self._data = {}
         self._data = {}
         self.pack_compression_level = -1
         self.pack_compression_level = -1
@@ -999,7 +1036,7 @@ class MemoryObjectStore(BaseObjectStore):
     def __getitem__(self, name: ObjectID):
     def __getitem__(self, name: ObjectID):
         return self._data[self._to_hexsha(name)].copy()
         return self._data[self._to_hexsha(name)].copy()
 
 
-    def __delitem__(self, name: ObjectID):
+    def __delitem__(self, name: ObjectID) -> None:
         """Delete an object from this store, for testing only."""
         """Delete an object from this store, for testing only."""
         del self._data[self._to_hexsha(name)]
         del self._data[self._to_hexsha(name)]
 
 
@@ -1068,7 +1105,6 @@ class MemoryObjectStore(BaseObjectStore):
           read_some: Read function that returns at least one byte, but may
           read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
             not return the number of bytes requested.
         """
         """
-
         f, commit, abort = self.add_pack()
         f, commit, abort = self.add_pack()
         try:
         try:
             copier = PackStreamCopier(read_all, read_some, f)
             copier = PackStreamCopier(read_all, read_some, f)
@@ -1185,7 +1221,7 @@ class MissingObjectFinder:
         progress=None,
         progress=None,
         get_tagged=None,
         get_tagged=None,
         get_parents=lambda commit: commit.parents,
         get_parents=lambda commit: commit.parents,
-    ):
+    ) -> None:
         self.object_store = object_store
         self.object_store = object_store
         if shallow is None:
         if shallow is None:
             shallow = set()
             shallow = set()
@@ -1255,7 +1291,7 @@ class MissingObjectFinder:
         return self.remote_has
         return self.remote_has
 
 
     def add_todo(self, entries: Iterable[Tuple[ObjectID, Optional[bytes], Optional[int], bool]]):
     def add_todo(self, entries: Iterable[Tuple[ObjectID, Optional[bytes], Optional[int], bool]]):
-        self.objects_to_send.update([e for e in entries if not e[0] in self.sha_done])
+        self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done])
 
 
     def __next__(self) -> Tuple[bytes, PackHint]:
     def __next__(self) -> Tuple[bytes, PackHint]:
         while True:
         while True:
@@ -1299,7 +1335,7 @@ class ObjectStoreGraphWalker:
       get_parents: Function to retrieve parents in the local repo
       get_parents: Function to retrieve parents in the local repo
     """
     """
 
 
-    def __init__(self, local_heads, get_parents, shallow=None):
+    def __init__(self, local_heads, get_parents, shallow=None) -> None:
         """Create a new instance.
         """Create a new instance.
 
 
         Args:
         Args:
@@ -1409,7 +1445,7 @@ def commit_tree_changes(object_store, tree, changes):
 class OverlayObjectStore(BaseObjectStore):
 class OverlayObjectStore(BaseObjectStore):
     """Object store that can overlay multiple object stores."""
     """Object store that can overlay multiple object stores."""
 
 
-    def __init__(self, bases, add_store=None):
+    def __init__(self, bases, add_store=None) -> None:
         self.bases = bases
         self.bases = bases
         self.add_store = add_store
         self.add_store = add_store
 
 
@@ -1489,8 +1525,7 @@ def read_packs_file(f):
 
 
 
 
 class BucketBasedObjectStore(PackBasedObjectStore):
 class BucketBasedObjectStore(PackBasedObjectStore):
-    """Object store implementation that uses a bucket store like S3 as backend.
-    """
+    """Object store implementation that uses a bucket store like S3 as backend."""
 
 
     def _iter_loose_objects(self):
     def _iter_loose_objects(self):
         """Iterate over the SHAs of all loose objects."""
         """Iterate over the SHAs of all loose objects."""

+ 45 - 30
dulwich/objects.py

@@ -30,14 +30,29 @@ import zlib
 from collections import namedtuple
 from collections import namedtuple
 from hashlib import sha1
 from hashlib import sha1
 from io import BytesIO
 from io import BytesIO
-from typing import (BinaryIO, Dict, Iterable, Iterator, List, Optional, Tuple,
-                    Type, Union)
+from typing import (
+    BinaryIO,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
 
 
 from _hashlib import HASH
 from _hashlib import HASH
 
 
-from .errors import (ChecksumMismatch, FileFormatException, NotBlobError,
-                     NotCommitError, NotTagError, NotTreeError,
-                     ObjectFormatException)
+from .errors import (
+    ChecksumMismatch,
+    FileFormatException,
+    NotBlobError,
+    NotCommitError,
+    NotTagError,
+    NotTreeError,
+    ObjectFormatException,
+)
 from .file import GitFile
 from .file import GitFile
 
 
 ZERO_SHA = b"0" * 40
 ZERO_SHA = b"0" * 40
@@ -91,14 +106,14 @@ def _decompress(string):
 
 
 
 
 def sha_to_hex(sha):
 def sha_to_hex(sha):
-    """Takes a string and returns the hex of the sha within"""
+    """Takes a string and returns the hex of the sha within."""
     hexsha = binascii.hexlify(sha)
     hexsha = binascii.hexlify(sha)
     assert len(hexsha) == 40, "Incorrect length of sha1 string: %r" % hexsha
     assert len(hexsha) == 40, "Incorrect length of sha1 string: %r" % hexsha
     return hexsha
     return hexsha
 
 
 
 
 def hex_to_sha(hex):
 def hex_to_sha(hex):
-    """Takes a hex sha and returns a binary sha"""
+    """Takes a hex sha and returns a binary sha."""
     assert len(hex) == 40, "Incorrect length of hexsha: %s" % hex
     assert len(hex) == 40, "Incorrect length of hexsha: %s" % hex
     try:
     try:
         return binascii.unhexlify(hex)
         return binascii.unhexlify(hex)
@@ -236,7 +251,7 @@ class FixedSha:
 
 
     __slots__ = ("_hexsha", "_sha")
     __slots__ = ("_hexsha", "_sha")
 
 
-    def __init__(self, hexsha):
+    def __init__(self, hexsha) -> None:
         if getattr(hexsha, "encode", None) is not None:
         if getattr(hexsha, "encode", None) is not None:
             hexsha = hexsha.encode("ascii")
             hexsha = hexsha.encode("ascii")
         if not isinstance(hexsha, bytes):
         if not isinstance(hexsha, bytes):
@@ -407,8 +422,8 @@ class ShaFile:
             obj._parse_object(map)
             obj._parse_object(map)
         return obj
         return obj
 
 
-    def __init__(self):
-        """Don't call this directly"""
+    def __init__(self) -> None:
+        """Don't call this directly."""
         self._sha = None
         self._sha = None
         self._chunked_text = []
         self._chunked_text = []
         self._needs_serialization = True
         self._needs_serialization = True
@@ -530,7 +545,7 @@ class ShaFile:
         return self._sha
         return self._sha
 
 
     def copy(self):
     def copy(self):
-        """Create a new copy of this SHA1 object from its raw string"""
+        """Create a new copy of this SHA1 object from its raw string."""
         obj_class = object_class(self.type_num)
         obj_class = object_class(self.type_num)
         if obj_class is None:
         if obj_class is None:
             raise AssertionError('invalid type num %d' % self.type_num)
             raise AssertionError('invalid type num %d' % self.type_num)
@@ -541,7 +556,7 @@ class ShaFile:
         """The hex SHA of this object."""
         """The hex SHA of this object."""
         return self.sha().hexdigest().encode("ascii")
         return self.sha().hexdigest().encode("ascii")
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<{} {}>".format(self.__class__.__name__, self.id)
         return "<{} {}>".format(self.__class__.__name__, self.id)
 
 
     def __ne__(self, other):
     def __ne__(self, other):
@@ -575,7 +590,7 @@ class Blob(ShaFile):
 
 
     _chunked_text: List[bytes]
     _chunked_text: List[bytes]
 
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         super().__init__()
         self._chunked_text = []
         self._chunked_text = []
         self._needs_serialization = False
         self._needs_serialization = False
@@ -666,7 +681,7 @@ def _parse_message(chunks: Iterable[bytes]) -> Iterator[Tuple[Optional[bytes], O
     eof = False
     eof = False
 
 
     def _strip_last_newline(value):
     def _strip_last_newline(value):
-        """Strip the last newline from value"""
+        """Strip the last newline from value."""
         if value and value.endswith(b"\n"):
         if value and value.endswith(b"\n"):
             return value[:-1]
             return value[:-1]
         return value
         return value
@@ -736,7 +751,7 @@ class Tag(ShaFile):
 
 
     _tagger: Optional[bytes]
     _tagger: Optional[bytes]
 
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         super().__init__()
         self._tagger = None
         self._tagger = None
         self._tag_time = None
         self._tag_time = None
@@ -806,7 +821,7 @@ class Tag(ShaFile):
         return list(_format_message(headers, body))
         return list(_format_message(headers, body))
 
 
     def _deserialize(self, chunks):
     def _deserialize(self, chunks):
-        """Grab the metadata attached to the tag"""
+        """Grab the metadata attached to the tag."""
         self._tagger = None
         self._tagger = None
         self._tag_time = None
         self._tag_time = None
         self._tag_timezone = None
         self._tag_timezone = None
@@ -943,6 +958,7 @@ def parse_tree(text, strict=False):
     Args:
     Args:
       text: Serialized text to parse
       text: Serialized text to parse
     Returns: iterator of tuples of (name, mode, sha)
     Returns: iterator of tuples of (name, mode, sha)
+
     Raises:
     Raises:
       ObjectFormatException: if the object was malformed in some way
       ObjectFormatException: if the object was malformed in some way
     """
     """
@@ -1045,20 +1061,20 @@ def pretty_format_tree_entry(name, mode, hexsha, encoding="utf-8") -> str:
 class SubmoduleEncountered(Exception):
 class SubmoduleEncountered(Exception):
     """A submodule was encountered while resolving a path."""
     """A submodule was encountered while resolving a path."""
 
 
-    def __init__(self, path, sha):
+    def __init__(self, path, sha) -> None:
         self.path = path
         self.path = path
         self.sha = sha
         self.sha = sha
 
 
 
 
 class Tree(ShaFile):
 class Tree(ShaFile):
-    """A Git tree object"""
+    """A Git tree object."""
 
 
     type_name = b"tree"
     type_name = b"tree"
     type_num = 2
     type_num = 2
 
 
     __slots__ = "_entries"
     __slots__ = "_entries"
 
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         super().__init__()
         self._entries = {}
         self._entries = {}
 
 
@@ -1069,13 +1085,13 @@ class Tree(ShaFile):
             raise NotTreeError(filename)
             raise NotTreeError(filename)
         return tree
         return tree
 
 
-    def __contains__(self, name):
+    def __contains__(self, name) -> bool:
         return name in self._entries
         return name in self._entries
 
 
     def __getitem__(self, name):
     def __getitem__(self, name):
         return self._entries[name]
         return self._entries[name]
 
 
-    def __setitem__(self, name, value):
+    def __setitem__(self, name, value) -> None:
         """Set a tree entry by name.
         """Set a tree entry by name.
 
 
         Args:
         Args:
@@ -1088,11 +1104,11 @@ class Tree(ShaFile):
         self._entries[name] = (mode, hexsha)
         self._entries[name] = (mode, hexsha)
         self._needs_serialization = True
         self._needs_serialization = True
 
 
-    def __delitem__(self, name):
+    def __delitem__(self, name) -> None:
         del self._entries[name]
         del self._entries[name]
         self._needs_serialization = True
         self._needs_serialization = True
 
 
-    def __len__(self):
+    def __len__(self) -> int:
         return len(self._entries)
         return len(self._entries)
 
 
     def __iter__(self):
     def __iter__(self):
@@ -1128,7 +1144,7 @@ class Tree(ShaFile):
         return list(self.iteritems())
         return list(self.iteritems())
 
 
     def _deserialize(self, chunks):
     def _deserialize(self, chunks):
-        """Grab the entries in the tree"""
+        """Grab the entries in the tree."""
         try:
         try:
             parsed_entries = parse_tree(b"".join(chunks))
             parsed_entries = parse_tree(b"".join(chunks))
         except ValueError as exc:
         except ValueError as exc:
@@ -1218,7 +1234,7 @@ def parse_timezone(text):
     # cgit parses the first character as the sign, and the rest
     # cgit parses the first character as the sign, and the rest
     #  as an integer (using strtol), which could also be negative.
     #  as an integer (using strtol), which could also be negative.
     #  We do the same for compatibility. See #697828.
     #  We do the same for compatibility. See #697828.
-    if not text[0] in b"+-":
+    if text[0] not in b"+-":
         raise ValueError("Timezone must start with + or - (%(text)s)" % vars())
         raise ValueError("Timezone must start with + or - (%(text)s)" % vars())
     sign = text[:1]
     sign = text[:1]
     offset = int(text[1:])
     offset = int(text[1:])
@@ -1254,7 +1270,7 @@ def format_timezone(offset, unnecessary_negative_timezone=False):
 
 
 
 
 def parse_time_entry(value):
 def parse_time_entry(value):
-    """Parse event
+    """Parse event.
 
 
     Args:
     Args:
       value: Bytes representing a git commit/tag line
       value: Bytes representing a git commit/tag line
@@ -1279,8 +1295,7 @@ def parse_time_entry(value):
 
 
 
 
 def format_time_entry(person, time, timezone_info):
 def format_time_entry(person, time, timezone_info):
-    """Format an event
-    """
+    """Format an event."""
     (timezone, timezone_neg_utc) = timezone_info
     (timezone, timezone_neg_utc) = timezone_info
     return b" ".join([
     return b" ".join([
         person,
         person,
@@ -1341,7 +1356,7 @@ def parse_commit(chunks):
 
 
 
 
 class Commit(ShaFile):
 class Commit(ShaFile):
-    """A git commit object"""
+    """A git commit object."""
 
 
     type_name = b"commit"
     type_name = b"commit"
     type_num = 1
     type_num = 1
@@ -1364,7 +1379,7 @@ class Commit(ShaFile):
         "_gpgsig",
         "_gpgsig",
     )
     )
 
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         super().__init__()
         self._parents = []
         self._parents = []
         self._encoding = None
         self._encoding = None
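
A tiny sketch of the sha and timezone helpers whose docstrings are touched above.

    from dulwich.objects import hex_to_sha, parse_timezone, sha_to_hex

    hexsha = b"0123456789abcdef0123456789abcdef01234567"
    assert sha_to_hex(hex_to_sha(hexsha)) == hexsha
    # parse_timezone returns (offset in seconds, negative-UTC flag).
    assert parse_timezone(b"+0200") == (7200, False)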

+ 1 - 1
dulwich/objectspec.py

@@ -197,7 +197,7 @@ def parse_commit_range(repo: "Repo", committishs: Union[str, bytes]) -> Iterator
 class AmbiguousShortId(Exception):
 class AmbiguousShortId(Exception):
     """The short id is ambiguous."""
     """The short id is ambiguous."""
 
 
-    def __init__(self, prefix, options):
+    def __init__(self, prefix, options) -> None:
         self.prefix = prefix
         self.prefix = prefix
         self.options = options
         self.options = options
 
 

+ 46 - 29
dulwich/pack.py

@@ -46,14 +46,27 @@ import os
 import struct
 import struct
 import sys
 import sys
 from itertools import chain
 from itertools import chain
-from typing import (BinaryIO, Callable, Deque, Dict, Generic, Iterable,
-                    Iterator, List, Optional, Sequence, Set, Tuple, TypeVar,
-                    Union)
+from typing import (
+    BinaryIO,
+    Callable,
+    Deque,
+    Dict,
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    TypeVar,
+    Union,
+)
 
 
 try:
 try:
     from typing import Protocol
     from typing import Protocol
 except ImportError:  # python << 3.8
 except ImportError:  # python << 3.8
-    from typing_extensions import Protocol   # type: ignore
+    from typing_extensions import Protocol  # type: ignore
 
 
 import warnings
 import warnings
 import zlib
 import zlib
@@ -156,7 +169,7 @@ def take_msb_bytes(read: Callable[[int], bytes], crc32: Optional[int] = None) ->
 
 
 
 
 class PackFileDisappeared(Exception):
 class PackFileDisappeared(Exception):
-    def __init__(self, obj):
+    def __init__(self, obj) -> None:
         self.obj = obj
         self.obj = obj
 
 
 
 
@@ -191,7 +204,7 @@ class UnpackedObject:
 
 
     # TODO(dborowitz): read_zlib_chunks and unpack_object could very well be
     # TODO(dborowitz): read_zlib_chunks and unpack_object could very well be
     # methods of this object.
     # methods of this object.
-    def __init__(self, pack_type_num, *, delta_base=None, decomp_len=None, crc32=None, sha=None, decomp_chunks=None, offset=None):
+    def __init__(self, pack_type_num, *, delta_base=None, decomp_len=None, crc32=None, sha=None, decomp_chunks=None, offset=None) -> None:
         self.offset = offset
         self.offset = offset
         self._sha = sha
         self._sha = sha
         self.pack_type_num = pack_type_num
         self.pack_type_num = pack_type_num
@@ -244,7 +257,7 @@ class UnpackedObject:
     def __ne__(self, other):
     def __ne__(self, other):
         return not (self == other)
         return not (self == other)
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         data = ["{}={!r}".format(s, getattr(self, s)) for s in self.__slots__]
         data = ["{}={!r}".format(s, getattr(self, s)) for s in self.__slots__]
         return "{}({})".format(self.__class__.__name__, ", ".join(data))
         return "{}({})".format(self.__class__.__name__, ", ".join(data))
 
 
@@ -276,6 +289,7 @@ def read_zlib_chunks(
       include_comp: If True, include compressed data in the result.
       include_comp: If True, include compressed data in the result.
       buffer_size: Size of the read buffer.
       buffer_size: Size of the read buffer.
     Returns: Leftover unused data from the decompression.
     Returns: Leftover unused data from the decompression.
+
     Raises:
     Raises:
       zlib.error: if a decompression error occurred.
       zlib.error: if a decompression error occurred.
     """
     """
@@ -503,7 +517,7 @@ class PackIndex:
 class MemoryPackIndex(PackIndex):
 class MemoryPackIndex(PackIndex):
     """Pack index that is stored entirely in memory."""
     """Pack index that is stored entirely in memory."""
 
 
-    def __init__(self, entries, pack_checksum=None):
+    def __init__(self, entries, pack_checksum=None) -> None:
         """Create a new MemoryPackIndex.
         """Create a new MemoryPackIndex.
 
 
         Args:
         Args:
@@ -521,7 +535,7 @@ class MemoryPackIndex(PackIndex):
     def get_pack_checksum(self):
     def get_pack_checksum(self):
         return self._pack_checksum
         return self._pack_checksum
 
 
-    def __len__(self):
+    def __len__(self) -> int:
         return len(self._entries)
         return len(self._entries)
 
 
     def object_offset(self, sha):
     def object_offset(self, sha):
@@ -561,7 +575,7 @@ class FilePackIndex(PackIndex):
 
 
     _fan_out_table: List[int]
     _fan_out_table: List[int]
 
 
-    def __init__(self, filename, file=None, contents=None, size=None):
+    def __init__(self, filename, file=None, contents=None, size=None) -> None:
         """Create a pack index object.
         """Create a pack index object.
 
 
         Provide it with the name of the index file to consider, and it will map
         Provide it with the name of the index file to consider, and it will map
@@ -711,7 +725,7 @@ class FilePackIndex(PackIndex):
 class PackIndex1(FilePackIndex):
 class PackIndex1(FilePackIndex):
     """Version 1 Pack Index file."""
     """Version 1 Pack Index file."""
 
 
-    def __init__(self, filename: str, file=None, contents=None, size=None):
+    def __init__(self, filename: str, file=None, contents=None, size=None) -> None:
         super().__init__(filename, file, contents, size)
         super().__init__(filename, file, contents, size)
         self.version = 1
         self.version = 1
         self._fan_out_table = self._read_fan_out_table(0)
         self._fan_out_table = self._read_fan_out_table(0)
@@ -736,7 +750,7 @@ class PackIndex1(FilePackIndex):
 class PackIndex2(FilePackIndex):
 class PackIndex2(FilePackIndex):
     """Version 2 Pack Index file."""
     """Version 2 Pack Index file."""
 
 
-    def __init__(self, filename: str, file=None, contents=None, size=None):
+    def __init__(self, filename: str, file=None, contents=None, size=None) -> None:
         super().__init__(filename, file, contents, size)
         super().__init__(filename, file, contents, size)
         if self._contents[:4] != b"\377tOc":
         if self._contents[:4] != b"\377tOc":
             raise AssertionError("Not a v2 pack index file")
             raise AssertionError("Not a v2 pack index file")
@@ -967,7 +981,7 @@ class PackStreamReader:
             return data
             return data
         return self._read(self.read_some, size)
         return self._read(self.read_some, size)
 
 
-    def __len__(self):
+    def __len__(self) -> int:
         return self._num_objects
         return self._num_objects
 
 
     def read_objects(self, compute_crc32=False) -> Iterator[UnpackedObject]:
     def read_objects(self, compute_crc32=False) -> Iterator[UnpackedObject]:
@@ -984,6 +998,7 @@ class PackStreamReader:
             decomp_chunks
             decomp_chunks
             decomp_len
             decomp_len
             crc32 (if compute_crc32 is True)
             crc32 (if compute_crc32 is True)
+
         Raises:
         Raises:
           ChecksumMismatch: if the checksum of the pack contents does not
           ChecksumMismatch: if the checksum of the pack contents does not
             match the checksum in the pack trailer.
             match the checksum in the pack trailer.
@@ -1030,7 +1045,7 @@ class PackStreamCopier(PackStreamReader):
     appropriate and written out to the given file-like object.
     appropriate and written out to the given file-like object.
     """
     """
 
 
-    def __init__(self, read_all, read_some, outfile, delta_iter=None):
+    def __init__(self, read_all, read_some, outfile, delta_iter=None) -> None:
         """Initialize the copier.
         """Initialize the copier.
 
 
         Args:
         Args:
@@ -1135,7 +1150,7 @@ class PackData:
     position.  It will all just throw a zlib or KeyError.
     position.  It will all just throw a zlib or KeyError.
     """
     """
 
 
-    def __init__(self, filename, file=None, size=None):
+    def __init__(self, filename, file=None, size=None) -> None:
         """Create a PackData object representing the pack in the given filename.
         """Create a PackData object representing the pack in the given filename.
 
 
         The file must exist and stay readable until the object is disposed of.
         The file must exist and stay readable until the object is disposed of.
@@ -1199,7 +1214,7 @@ class PackData:
             raise AssertionError(errmsg)
             raise AssertionError(errmsg)
         return self._size
         return self._size
 
 
-    def __len__(self):
+    def __len__(self) -> int:
         """Returns the number of objects in this pack."""
         """Returns the number of objects in this pack."""
         return self._num_objects
         return self._num_objects
 
 
@@ -1306,8 +1321,7 @@ class PackData:
             raise ChecksumMismatch(stored, actual)
             raise ChecksumMismatch(stored, actual)
 
 
     def get_unpacked_object_at(self, offset: int, *, include_comp: bool = False) -> UnpackedObject:
     def get_unpacked_object_at(self, offset: int, *, include_comp: bool = False) -> UnpackedObject:
-        """Given offset in the packfile return a UnpackedObject.
-        """
+        """Given offset in the packfile return a UnpackedObject."""
         assert offset >= self._header_size
         assert offset >= self._header_size
         self._file.seek(offset)
         self._file.seek(offset)
         unpacked, _ = unpack_object(self._file.read, include_comp=include_comp)
         unpacked, _ = unpack_object(self._file.read, include_comp=include_comp)
@@ -1522,7 +1536,7 @@ class PackInflater(DeltaChainIterator[ShaFile]):
 class SHA1Reader:
 class SHA1Reader:
     """Wrapper for file-like object that remembers the SHA1 of its data."""
     """Wrapper for file-like object that remembers the SHA1 of its data."""
 
 
-    def __init__(self, f):
+    def __init__(self, f) -> None:
         self.f = f
         self.f = f
         self.sha1 = sha1(b"")
         self.sha1 = sha1(b"")
 
 
@@ -1546,7 +1560,7 @@ class SHA1Reader:
 class SHA1Writer:
 class SHA1Writer:
     """Wrapper for file-like object that remembers the SHA1 of its data."""
     """Wrapper for file-like object that remembers the SHA1 of its data."""
 
 
-    def __init__(self, f):
+    def __init__(self, f) -> None:
         self.f = f
         self.f = f
         self.length = 0
         self.length = 0
         self.sha1 = sha1(b"")
         self.sha1 = sha1(b"")
@@ -1799,7 +1813,7 @@ def pack_objects_to_data(
         delta_window_size: Optional[int] = None,
         delta_window_size: Optional[int] = None,
         ofs_delta: bool = True,
         ofs_delta: bool = True,
         progress=None) -> Tuple[int, Iterator[UnpackedObject]]:
         progress=None) -> Tuple[int, Iterator[UnpackedObject]]:
-    """Create pack data from objects
+    """Create pack data from objects.
 
 
     Args:
     Args:
       objects: Pack objects
       objects: Pack objects
@@ -1837,7 +1851,7 @@ def generate_unpacked_objects(
         ofs_delta: bool = True,
         ofs_delta: bool = True,
         other_haves: Optional[Set[bytes]] = None,
         other_haves: Optional[Set[bytes]] = None,
         progress=None) -> Iterator[UnpackedObject]:
         progress=None) -> Iterator[UnpackedObject]:
-    """Create pack data from objects
+    """Create pack data from objects.
 
 
     Args:
     Args:
       objects: Pack objects
       objects: Pack objects
@@ -1941,7 +1955,7 @@ def write_pack_objects(
 
 
 class PackChunkGenerator:
 class PackChunkGenerator:
 
 
-    def __init__(self, num_records=None, records=None, progress=None, compression_level=-1, reuse_compressed=True):
+    def __init__(self, num_records=None, records=None, progress=None, compression_level=-1, reuse_compressed=True) -> None:
         self.cs = sha1(b"")
         self.cs = sha1(b"")
         self.entries = {}
         self.entries = {}
         self._it = self._pack_data_chunks(
         self._it = self._pack_data_chunks(
@@ -2256,7 +2270,7 @@ class Pack:
     _data: Optional[PackData]
     _data: Optional[PackData]
     _idx: Optional[PackIndex]
     _idx: Optional[PackIndex]
 
 
-    def __init__(self, basename, resolve_ext_ref: Optional[ResolveExtRefFn] = None):
+    def __init__(self, basename, resolve_ext_ref: Optional[ResolveExtRefFn] = None) -> None:
         self._basename = basename
         self._basename = basename
         self._data = None
         self._data = None
         self._idx = None
         self._idx = None
@@ -2269,7 +2283,8 @@ class Pack:
     @classmethod
     @classmethod
     def from_lazy_objects(cls, data_fn, idx_fn):
     def from_lazy_objects(cls, data_fn, idx_fn):
         """Create a new pack object from callables to load pack data and
         """Create a new pack object from callables to load pack data and
-        index objects."""
+        index objects.
+        """
         ret = cls("")
         ret = cls("")
         ret._data_load = data_fn
         ret._data_load = data_fn
         ret._idx_load = idx_fn
         ret._idx_load = idx_fn
@@ -2325,11 +2340,11 @@ class Pack:
     def __eq__(self, other):
     def __eq__(self, other):
         return isinstance(self, type(other)) and self.index == other.index
         return isinstance(self, type(other)) and self.index == other.index
 
 
-    def __len__(self):
+    def __len__(self) -> int:
         """Number of entries in this pack."""
         """Number of entries in this pack."""
         return len(self.index)
         return len(self.index)
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r})".format(self.__class__.__name__, self._basename)
         return "{}({!r})".format(self.__class__.__name__, self._basename)
 
 
     def __iter__(self):
     def __iter__(self):
@@ -2592,7 +2607,9 @@ def extend_pack(f: BinaryIO, object_ids: Set[ObjectID], get_raw, *, compression_
 
 
 
 
 try:
 try:
-    from dulwich._pack import apply_delta  # type: ignore # noqa: F811
-    from dulwich._pack import bisect_find_sha  # type: ignore # noqa: F811
+    from dulwich._pack import (
+        apply_delta,  # type: ignore # noqa: F811
+        bisect_find_sha,  # type: ignore # noqa: F811
+    )
 except ImportError:
 except ImportError:
     pass
     pass
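
The SHA1Reader/SHA1Writer hunks above only add return annotations; the idea behind those classes is a write-through (or read-through) wrapper that hashes everything passing through it, so the pack checksum trailer can be produced at the end. A rough, self-contained sketch of that idea (illustrative only, not dulwich's class):

    from hashlib import sha1
    from io import BytesIO

    class HashingWriter:
        """Write-through wrapper that remembers the SHA-1 of written data."""

        def __init__(self, f) -> None:
            self.f = f
            self.sha1 = sha1(b"")

        def write(self, data: bytes) -> int:
            self.sha1.update(data)
            return self.f.write(data)

        def digest(self) -> bytes:
            return self.sha1.digest()

    buf = BytesIO()
    writer = HashingWriter(buf)
    writer.write(b"PACK")
    print(writer.digest().hex())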

+ 2 - 1
dulwich/patch.py

@@ -41,6 +41,7 @@ def write_commit_patch(f, commit, contents, progress, version=None, encoding=Non
     Args:
     Args:
       commit: Commit object
       commit: Commit object
       progress: Tuple with current patch number and total.
       progress: Tuple with current patch number and total.
+
     Returns:
     Returns:
       tuple with filename and contents
       tuple with filename and contents
     """
     """
@@ -101,7 +102,7 @@ def get_summary(commit):
 
 
 #  Unified Diff
 #  Unified Diff
 def _format_range_unified(start, stop):
 def _format_range_unified(start, stop):
-    'Convert range to the "ed" format'
+    'Convert range to the "ed" format.'
     # Per the diff spec at http://www.unix.org/single_unix_specification/
     # Per the diff spec at http://www.unix.org/single_unix_specification/
     beginning = start + 1  # lines start numbering with one
     beginning = start + 1  # lines start numbering with one
     length = stop - start
     length = stop - start
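
For context on the _format_range_unified hunk: unified-diff hunk headers use 1-based "start,length" ranges, and a length of 1 is abbreviated to just the start. A sketch of that convention, mirroring the stdlib difflib helper this function follows (shown for illustration only):

    def format_range_unified(start: int, stop: int) -> str:
        beginning = start + 1          # ranges are 1-based
        length = stop - start
        if length == 1:
            return str(beginning)      # "12" rather than "12,1"
        if not length:
            beginning -= 1             # empty ranges begin one line earlier
        return "{},{}".format(beginning, length)

    print(format_range_unified(3, 6))  # -> "4,3"
    print(format_range_unified(3, 4))  # -> "4"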

+ 60 - 31
dulwich/porcelain.py

@@ -78,28 +78,57 @@ from typing import Optional, Tuple, Union
 from .archive import tar_stream
 from .archive import tar_stream
 from .client import get_transport_and_path
 from .client import get_transport_and_path
 from .config import Config, ConfigFile, StackedConfig, read_submodules
 from .config import Config, ConfigFile, StackedConfig, read_submodules
-from .diff_tree import (CHANGE_ADD, CHANGE_COPY, CHANGE_DELETE, CHANGE_MODIFY,
-                        CHANGE_RENAME, RENAME_CHANGE_TYPES)
+from .diff_tree import (
+    CHANGE_ADD,
+    CHANGE_COPY,
+    CHANGE_DELETE,
+    CHANGE_MODIFY,
+    CHANGE_RENAME,
+    RENAME_CHANGE_TYPES,
+)
 from .errors import SendPackError
 from .errors import SendPackError
 from .file import ensure_dir_exists
 from .file import ensure_dir_exists
 from .graph import can_fast_forward
 from .graph import can_fast_forward
 from .ignore import IgnoreFilterManager
 from .ignore import IgnoreFilterManager
-from .index import (_fs_to_tree_path, blob_from_path_and_stat,
-                    build_file_from_blob, get_unstaged_changes,
-                    index_entry_from_stat)
+from .index import (
+    _fs_to_tree_path,
+    blob_from_path_and_stat,
+    build_file_from_blob,
+    get_unstaged_changes,
+    index_entry_from_stat,
+)
 from .object_store import iter_tree_contents, tree_lookup_path
 from .object_store import iter_tree_contents, tree_lookup_path
-from .objects import (Commit, Tag, format_timezone, parse_timezone,
-                      pretty_format_tree_entry)
-from .objectspec import (parse_commit, parse_object, parse_ref,
-                         parse_reftuples, parse_tree, to_bytes)
+from .objects import (
+    Commit,
+    Tag,
+    format_timezone,
+    parse_timezone,
+    pretty_format_tree_entry,
+)
+from .objectspec import (
+    parse_commit,
+    parse_object,
+    parse_ref,
+    parse_reftuples,
+    parse_tree,
+    to_bytes,
+)
 from .pack import write_pack_from_container, write_pack_index
 from .pack import write_pack_from_container, write_pack_index
 from .patch import write_tree_diff
 from .patch import write_tree_diff
 from .protocol import ZERO_SHA, Protocol
 from .protocol import ZERO_SHA, Protocol
-from .refs import (LOCAL_BRANCH_PREFIX, LOCAL_REMOTE_PREFIX, LOCAL_TAG_PREFIX,
-                   _import_remote_refs)
+from .refs import (
+    LOCAL_BRANCH_PREFIX,
+    LOCAL_REMOTE_PREFIX,
+    LOCAL_TAG_PREFIX,
+    _import_remote_refs,
+)
 from .repo import BaseRepo, Repo
 from .repo import BaseRepo, Repo
-from .server import (FileSystemBackend, ReceivePackHandler, TCPGitServer,
-                     UploadPackHandler)
+from .server import (
+    FileSystemBackend,
+    ReceivePackHandler,
+    TCPGitServer,
+    UploadPackHandler,
+)
 from .server import update_server_info as server_update_server_info
 from .server import update_server_info as server_update_server_info
 
 
 # Module level tuple definition for status output
 # Module level tuple definition for status output
@@ -130,9 +159,9 @@ DEFAULT_ENCODING = "utf-8"
 
 
 
 
 class Error(Exception):
 class Error(Exception):
-    """Porcelain-based error. """
+    """Porcelain-based error."""
 
 
-    def __init__(self, msg):
+    def __init__(self, msg) -> None:
         super().__init__(msg)
         super().__init__(msg)
 
 
 
 
@@ -162,7 +191,7 @@ def parse_timezone_format(tz_str):
     Returns: Timezone offset as integer
     Returns: Timezone offset as integer
     Raises:
     Raises:
       TimezoneFormatError: if timezone information cannot be extracted
       TimezoneFormatError: if timezone information cannot be extracted
-   """
+    """
     import re
     import re
 
 
     # Git internal format
     # Git internal format
@@ -203,7 +232,7 @@ def parse_timezone_format(tz_str):
 def get_user_timezones():
 def get_user_timezones():
     """Retrieve local timezone as described in
     """Retrieve local timezone as described in
     https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
     https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
-    Returns: A tuple containing author timezone, committer timezone
+    Returns: A tuple containing author timezone, committer timezone.
     """
     """
     local_timezone = time.localtime().tm_gmtoff
     local_timezone = time.localtime().tm_gmtoff
 
 
@@ -286,7 +315,7 @@ def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING):
 class DivergedBranches(Error):
 class DivergedBranches(Error):
     """Branches have diverged and fast-forward is not possible."""
     """Branches have diverged and fast-forward is not possible."""
 
 
-    def __init__(self, current_sha, new_sha):
+    def __init__(self, current_sha, new_sha) -> None:
         self.current_sha = current_sha
         self.current_sha = current_sha
         self.new_sha = new_sha
         self.new_sha = new_sha
 
 
@@ -321,7 +350,6 @@ def archive(
       outstream: Output stream (defaults to stdout)
       outstream: Output stream (defaults to stdout)
       errstream: Error stream (defaults to stderr)
       errstream: Error stream (defaults to stderr)
     """
     """
-
     if committish is None:
     if committish is None:
         committish = "HEAD"
         committish = "HEAD"
     with open_repo_closing(repo) as repo_obj:
     with open_repo_closing(repo) as repo_obj:
@@ -566,7 +594,7 @@ def add(repo=".", paths=None):
 
 
 
 
 def _is_subdir(subdir, parentdir):
 def _is_subdir(subdir, parentdir):
-    """Check whether subdir is parentdir or a subdir of parentdir
+    """Check whether subdir is parentdir or a subdir of parentdir.
 
 
     If parentdir or subdir is a relative path, it will be disambiguated
     If parentdir or subdir is a relative path, it will be disambiguated
     relative to the pwd.
     relative to the pwd.
@@ -578,7 +606,7 @@ def _is_subdir(subdir, parentdir):
 
 
 # TODO: option to remove ignored files also, in line with `git clean -fdx`
 # TODO: option to remove ignored files also, in line with `git clean -fdx`
 def clean(repo=".", target_dir=None):
 def clean(repo=".", target_dir=None):
-    """Remove any untracked files from the target directory recursively
+    """Remove any untracked files from the target directory recursively.
 
 
     Equivalent to running ``git clean -fd`` in target_dir.
     Equivalent to running ``git clean -fd`` in target_dir.
 
 
@@ -1016,7 +1044,6 @@ def tag_create(
         pass True to use default GPG key,
         pass True to use default GPG key,
         pass a str containing Key ID to use a specific GPG key)
         pass a str containing Key ID to use a specific GPG key)
     """
     """
-
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         object = parse_object(r, objectish)
         object = parse_object(r, objectish)
 
 
@@ -1087,7 +1114,6 @@ def reset(repo, mode, treeish="HEAD"):
       mode: Mode ("hard", "soft", "mixed")
       mode: Mode ("hard", "soft", "mixed")
       treeish: Treeish to reset to
       treeish: Treeish to reset to
     """
     """
-
     if mode != "hard":
     if mode != "hard":
         raise Error("hard is the only mode currently supported")
         raise Error("hard is the only mode currently supported")
 
 
@@ -1129,7 +1155,7 @@ def push(
     force=False,
     force=False,
     **kwargs
     **kwargs
 ):
 ):
-    """Remote push with dulwich via dulwich.client
+    """Remote push with dulwich via dulwich.client.
 
 
     Args:
     Args:
       repo: Path to repository
       repo: Path to repository
@@ -1139,7 +1165,6 @@ def push(
       errstream: A stream file to write errors
       errstream: A stream file to write errors
       force: Force overwriting refs
       force: Force overwriting refs
     """
     """
-
     # Open the repo
     # Open the repo
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         if refspecs is None:
         if refspecs is None:
@@ -1215,7 +1240,7 @@ def pull(
     force=False,
     force=False,
     **kwargs
     **kwargs
 ):
 ):
-    """Pull from remote via dulwich.client
+    """Pull from remote via dulwich.client.
 
 
     Args:
     Args:
       repo: Path to repository
       repo: Path to repository
@@ -1315,7 +1340,7 @@ def status(repo=".", ignored=False, untracked_files="all"):
 
 
 
 
 def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
 def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
-    """Get path, is_dir for files in working dir from frompath
+    """Get path, is_dir for files in working dir from frompath.
 
 
     Args:
     Args:
       frompath: Path to begin walk
       frompath: Path to begin walk
@@ -1462,8 +1487,12 @@ def web_daemon(path=".", address=None, port=None):
       address: Optional address to listen on (defaults to ::)
       address: Optional address to listen on (defaults to ::)
       port: Optional port to listen on (defaults to 80)
       port: Optional port to listen on (defaults to 80)
     """
     """
-    from .web import (WSGIRequestHandlerLogger, WSGIServerLogger, make_server,
-                      make_wsgi_chain)
+    from .web import (
+        WSGIRequestHandlerLogger,
+        WSGIServerLogger,
+        make_server,
+        make_wsgi_chain,
+    )
 
 
     backend = FileSystemBackend(path)
     backend = FileSystemBackend(path)
     app = make_wsgi_chain(backend)
     app = make_wsgi_chain(backend)
@@ -1782,7 +1811,7 @@ def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]):
 
 
 
 
 def remote_remove(repo: Repo, name: Union[bytes, str]):
 def remote_remove(repo: Repo, name: Union[bytes, str]):
-    """Remove a remote
+    """Remove a remote.
 
 
     Args:
     Args:
       repo: Path to the repository
       repo: Path to the repository
@@ -1890,7 +1919,7 @@ def _update_head_during_checkout_branch(repo, target):
 
 
 
 
 def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
 def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
-    """switch branches or restore working tree files.
+    """Switch branches or restore working tree files.
 
 
     The implementation of this function will probably not scale well
     The implementation of this function will probably not scale well
     for branches with lots of local changes.
     for branches with lots of local changes.
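
The porcelain hunks above are almost entirely docstring and import-layout cleanups. For orientation, this is roughly how that layer is driven; a hedged sketch in which the temporary directory and the identity strings are purely illustrative:

    import os
    import tempfile

    from dulwich import porcelain

    repo_dir = tempfile.mkdtemp()
    porcelain.init(repo_dir)
    with open(os.path.join(repo_dir, "README"), "w") as f:
        f.write("hello\n")
    porcelain.add(repo_dir, paths=[os.path.join(repo_dir, "README")])
    porcelain.commit(
        repo_dir,
        message="Initial commit",
        author="Example Author <author@example.com>",
        committer="Example Committer <committer@example.com>",
    )
    print(porcelain.status(repo_dir))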

+ 6 - 5
dulwich/protocol.py

@@ -178,7 +178,7 @@ class Protocol:
         Documentation/technical/protocol-common.txt
         Documentation/technical/protocol-common.txt
     """
     """
 
 
-    def __init__(self, read, write, close=None, report_activity=None):
+    def __init__(self, read, write, close=None, report_activity=None) -> None:
         self.read = read
         self.read = read
         self.write = write
         self.write = write
         self._close = close
         self._close = close
@@ -256,6 +256,7 @@ class Protocol:
 
 
         Args:
         Args:
           data: The data to unread, without the length prefix.
           data: The data to unread, without the length prefix.
+
         Raises:
         Raises:
           ValueError: If more than one pkt-line is unread.
           ValueError: If more than one pkt-line is unread.
         """
         """
@@ -315,7 +316,7 @@ class Protocol:
         self.write_pkt_line(format_cmd_pkt(cmd, *args))
         self.write_pkt_line(format_cmd_pkt(cmd, *args))
 
 
     def read_cmd(self):
     def read_cmd(self):
-        """Read a command and some arguments from the git client
+        """Read a command and some arguments from the git client.
 
 
         Only used for the TCP git protocol (git://).
         Only used for the TCP git protocol (git://).
 
 
@@ -342,7 +343,7 @@ class ReceivableProtocol(Protocol):
 
 
     def __init__(
     def __init__(
         self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE
         self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE
-    ):
+    ) -> None:
         super().__init__(
         super().__init__(
             self.read, write, close=close, report_activity=report_activity
             self.read, write, close=close, report_activity=report_activity
         )
         )
@@ -483,7 +484,7 @@ class BufferedPktLineWriter:
     (including length prefix) reach the buffer size.
     (including length prefix) reach the buffer size.
     """
     """
 
 
-    def __init__(self, write, bufsize=65515):
+    def __init__(self, write, bufsize=65515) -> None:
         """Initialize the BufferedPktLineWriter.
         """Initialize the BufferedPktLineWriter.
 
 
         Args:
         Args:
@@ -522,7 +523,7 @@ class BufferedPktLineWriter:
 class PktLineParser:
 class PktLineParser:
     """Packet line parser that hands completed packets off to a callback."""
     """Packet line parser that hands completed packets off to a callback."""
 
 
-    def __init__(self, handle_pkt):
+    def __init__(self, handle_pkt) -> None:
         self.handle_pkt = handle_pkt
         self.handle_pkt = handle_pkt
         self._readahead = BytesIO()
         self._readahead = BytesIO()
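
All of the classes touched in this file speak git's pkt-line framing: a four-digit hex length that counts itself plus the payload, with "0000" serving as the flush packet. A tiny sketch of the framing itself (not dulwich's API, just the wire format):

    def encode_pkt_line(data: bytes) -> bytes:
        # The length prefix counts its own four bytes plus the payload.
        return ("%04x" % (len(data) + 4)).encode("ascii") + data

    FLUSH_PKT = b"0000"

    print(encode_pkt_line(b"want deadbeef\n"))  # b'0012want deadbeef\n'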
 
 

+ 1 - 2
dulwich/reflog.py

@@ -18,8 +18,7 @@
 # License, Version 2.0.
 # License, Version 2.0.
 #
 #
 
 
-"""Utilities for reading and generating reflogs.
-"""
+"""Utilities for reading and generating reflogs."""
 
 
 import collections
 import collections
 
 

+ 14 - 15
dulwich/refs.py

@@ -19,9 +19,7 @@
 #
 #
 
 
 
 
-"""Ref handling.
-
-"""
+"""Ref handling."""
 import os
 import os
 import warnings
 import warnings
 from contextlib import suppress
 from contextlib import suppress
@@ -49,7 +47,7 @@ ANNOTATED_TAG_SUFFIX = PEELED_TAG_SUFFIX
 class SymrefLoop(Exception):
 class SymrefLoop(Exception):
     """There is a loop between one or more symrefs."""
     """There is a loop between one or more symrefs."""
 
 
-    def __init__(self, ref, depth):
+    def __init__(self, ref, depth) -> None:
         self.ref = ref
         self.ref = ref
         self.depth = depth
         self.depth = depth
 
 
@@ -103,7 +101,7 @@ def check_ref_format(refname: Ref):
 class RefsContainer:
 class RefsContainer:
     """A container for refs."""
     """A container for refs."""
 
 
-    def __init__(self, logger=None):
+    def __init__(self, logger=None) -> None:
         self._logger = logger
         self._logger = logger
 
 
     def _log(
     def _log(
@@ -260,6 +258,7 @@ class RefsContainer:
 
 
         Args:
         Args:
           name: The name of the reference.
           name: The name of the reference.
+
         Raises:
         Raises:
           KeyError: if a refname is not HEAD or is otherwise not valid.
           KeyError: if a refname is not HEAD or is otherwise not valid.
         """
         """
@@ -311,7 +310,7 @@ class RefsContainer:
                 raise SymrefLoop(name, depth)
                 raise SymrefLoop(name, depth)
         return refnames, contents
         return refnames, contents
 
 
-    def __contains__(self, refname):
+    def __contains__(self, refname) -> bool:
         if self.read_ref(refname):
         if self.read_ref(refname):
             return True
             return True
         return False
         return False
@@ -362,7 +361,7 @@ class RefsContainer:
         """
         """
         raise NotImplementedError(self.add_if_new)
         raise NotImplementedError(self.add_if_new)
 
 
-    def __setitem__(self, name, ref):
+    def __setitem__(self, name, ref) -> None:
         """Set a reference name to point to the given SHA1.
         """Set a reference name to point to the given SHA1.
 
 
         This method follows all symbolic references if applicable for the
         This method follows all symbolic references if applicable for the
@@ -402,7 +401,7 @@ class RefsContainer:
         """
         """
         raise NotImplementedError(self.remove_if_equals)
         raise NotImplementedError(self.remove_if_equals)
 
 
-    def __delitem__(self, name):
+    def __delitem__(self, name) -> None:
         """Remove a refname.
         """Remove a refname.
 
 
         This method does not follow symbolic references, even if applicable for
         This method does not follow symbolic references, even if applicable for
@@ -440,7 +439,7 @@ class DictRefsContainer(RefsContainer):
     threadsafe.
     threadsafe.
     """
     """
 
 
-    def __init__(self, refs, logger=None):
+    def __init__(self, refs, logger=None) -> None:
         super().__init__(logger=logger)
         super().__init__(logger=logger)
         self._refs = refs
         self._refs = refs
         self._peeled = {}
         self._peeled = {}
@@ -581,7 +580,7 @@ class DictRefsContainer(RefsContainer):
 class InfoRefsContainer(RefsContainer):
 class InfoRefsContainer(RefsContainer):
     """Refs container that reads refs from a info/refs file."""
     """Refs container that reads refs from a info/refs file."""
 
 
-    def __init__(self, f):
+    def __init__(self, f) -> None:
         self._refs = {}
         self._refs = {}
         self._peeled = {}
         self._peeled = {}
         for line in f.readlines():
         for line in f.readlines():
@@ -615,7 +614,7 @@ class InfoRefsContainer(RefsContainer):
 class DiskRefsContainer(RefsContainer):
 class DiskRefsContainer(RefsContainer):
     """Refs container that reads refs from disk."""
     """Refs container that reads refs from disk."""
 
 
-    def __init__(self, path, worktree_path=None, logger=None):
+    def __init__(self, path, worktree_path=None, logger=None) -> None:
         super().__init__(logger=logger)
         super().__init__(logger=logger)
         if getattr(path, "encode", None) is not None:
         if getattr(path, "encode", None) is not None:
             path = os.fsencode(path)
             path = os.fsencode(path)
@@ -628,7 +627,7 @@ class DiskRefsContainer(RefsContainer):
         self._packed_refs = None
         self._packed_refs = None
         self._peeled_refs = None
         self._peeled_refs = None
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "{}({!r})".format(self.__class__.__name__, self.path)
         return "{}({!r})".format(self.__class__.__name__, self.path)
 
 
     def subkeys(self, base):
     def subkeys(self, base):
@@ -775,6 +774,7 @@ class DiskRefsContainer(RefsContainer):
           name: the refname to read, relative to refpath
           name: the refname to read, relative to refpath
         Returns: The contents of the ref file, or None if the file does not
         Returns: The contents of the ref file, or None if the file does not
             exist.
             exist.
+
         Raises:
         Raises:
           IOError: if any other error occurs
           IOError: if any other error occurs
         """
         """
@@ -1166,7 +1166,7 @@ def is_local_branch(x):
 
 
 
 
 def strip_peeled_refs(refs):
 def strip_peeled_refs(refs):
-    """Remove all peeled refs"""
+    """Remove all peeled refs."""
     return {
     return {
         ref: sha
         ref: sha
         for (ref, sha) in refs.items()
         for (ref, sha) in refs.items()
@@ -1187,8 +1187,7 @@ def _set_origin_head(refs, origin, origin_head):
 def _set_default_branch(
 def _set_default_branch(
         refs: RefsContainer, origin: bytes, origin_head: bytes, branch: bytes,
         refs: RefsContainer, origin: bytes, origin_head: bytes, branch: bytes,
         ref_message: Optional[bytes]) -> bytes:
         ref_message: Optional[bytes]) -> bytes:
-    """Set the default branch.
-    """
+    """Set the default branch."""
     origin_base = b"refs/remotes/" + origin + b"/"
     origin_base = b"refs/remotes/" + origin + b"/"
     if branch:
     if branch:
         origin_ref = origin_base + branch
         origin_ref = origin_base + branch
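
Most of the refs.py hunks are docstring-only; the behaviour worth a quick illustration is the ref-name validation referenced above. A short example against the public helper (the specific outputs follow the git-check-ref-format rules; treat the exact cases as illustrative):

    from dulwich.refs import check_ref_format

    print(check_ref_format(b"refs/heads/main"))        # True
    print(check_ref_format(b"refs/heads/..oops"))      # False, ".." is forbidden
    print(check_ref_format(b"refs/heads/with space"))  # False, spaces are forbidden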

+ 101 - 53
dulwich/repo.py

@@ -34,36 +34,85 @@ import sys
 import time
 import time
 import warnings
 import warnings
 from io import BytesIO
 from io import BytesIO
-from typing import (TYPE_CHECKING, BinaryIO, Callable, Dict, FrozenSet,
-                    Iterable, List, Optional, Set, Tuple, Union)
+from typing import (
+    TYPE_CHECKING,
+    BinaryIO,
+    Callable,
+    Dict,
+    FrozenSet,
+    Iterable,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+)
 
 
 if TYPE_CHECKING:
 if TYPE_CHECKING:
     # There are no circular imports here, but we try to defer imports as long
     # There are no circular imports here, but we try to defer imports as long
     # as possible to reduce start-up time for anything that doesn't need
     # as possible to reduce start-up time for anything that doesn't need
     # these imports.
     # these imports.
-    from .config import StackedConfig, ConfigFile
+    from .config import ConfigFile, StackedConfig
     from .index import Index
     from .index import Index
 
 
-from .errors import (CommitError, HookError, NoIndexPresent, NotBlobError,
-                     NotCommitError, NotGitRepository, NotTagError,
-                     NotTreeError, RefFormatError)
+from .errors import (
+    CommitError,
+    HookError,
+    NoIndexPresent,
+    NotBlobError,
+    NotCommitError,
+    NotGitRepository,
+    NotTagError,
+    NotTreeError,
+    RefFormatError,
+)
 from .file import GitFile
 from .file import GitFile
-from .hooks import (CommitMsgShellHook, Hook, PostCommitShellHook,
-                    PostReceiveShellHook, PreCommitShellHook)
+from .hooks import (
+    CommitMsgShellHook,
+    Hook,
+    PostCommitShellHook,
+    PostReceiveShellHook,
+    PreCommitShellHook,
+)
 from .line_ending import BlobNormalizer, TreeBlobNormalizer
 from .line_ending import BlobNormalizer, TreeBlobNormalizer
-from .object_store import (DiskObjectStore, MemoryObjectStore,
-                           MissingObjectFinder, ObjectStoreGraphWalker,
-                           PackBasedObjectStore, peel_sha)
-from .objects import (Blob, Commit, ObjectID, ShaFile, Tag, Tree, check_hexsha,
-                      valid_hexsha)
+from .object_store import (
+    DiskObjectStore,
+    MemoryObjectStore,
+    MissingObjectFinder,
+    ObjectStoreGraphWalker,
+    PackBasedObjectStore,
+    peel_sha,
+)
+from .objects import (
+    Blob,
+    Commit,
+    ObjectID,
+    ShaFile,
+    Tag,
+    Tree,
+    check_hexsha,
+    valid_hexsha,
+)
 from .pack import generate_unpacked_objects
 from .pack import generate_unpacked_objects
-from .refs import (ANNOTATED_TAG_SUFFIX, LOCAL_BRANCH_PREFIX,  # noqa: F401
-                   LOCAL_TAG_PREFIX, SYMREF,
-                   DictRefsContainer, DiskRefsContainer, InfoRefsContainer,
-                   Ref, RefsContainer, _set_default_branch, _set_head,
-                   _set_origin_head, check_ref_format, read_packed_refs,
-                   read_packed_refs_with_peeled, serialize_refs,
-                   write_packed_refs)
+from .refs import (  # noqa: F401
+    ANNOTATED_TAG_SUFFIX,
+    LOCAL_BRANCH_PREFIX,
+    LOCAL_TAG_PREFIX,
+    SYMREF,
+    DictRefsContainer,
+    DiskRefsContainer,
+    InfoRefsContainer,
+    Ref,
+    RefsContainer,
+    _set_default_branch,
+    _set_head,
+    _set_origin_head,
+    check_ref_format,
+    read_packed_refs,
+    read_packed_refs_with_peeled,
+    serialize_refs,
+    write_packed_refs,
+)
 
 
 CONTROLDIR = ".git"
 CONTROLDIR = ".git"
 OBJECTDIR = "objects"
 OBJECTDIR = "objects"
@@ -88,9 +137,9 @@ DEFAULT_BRANCH = b"master"
 
 
 
 
 class InvalidUserIdentity(Exception):
 class InvalidUserIdentity(Exception):
-    """User identity is not of the format 'user <email>'"""
+    """User identity is not of the format 'user <email>'."""
 
 
-    def __init__(self, identity):
+    def __init__(self, identity) -> None:
         self.identity = identity
         self.identity = identity
 
 
 
 
@@ -200,7 +249,7 @@ def check_user_identity(identity):
 def parse_graftpoints(
 def parse_graftpoints(
     graftpoints: Iterable[bytes],
     graftpoints: Iterable[bytes],
 ) -> Dict[bytes, List[bytes]]:
 ) -> Dict[bytes, List[bytes]]:
-    """Convert a list of graftpoints into a dict
+    """Convert a list of graftpoints into a dict.
 
 
     Args:
     Args:
       graftpoints: Iterator of graftpoint lines
       graftpoints: Iterator of graftpoint lines
@@ -231,7 +280,7 @@ def parse_graftpoints(
 
 
 
 
 def serialize_graftpoints(graftpoints: Dict[bytes, List[bytes]]) -> bytes:
 def serialize_graftpoints(graftpoints: Dict[bytes, List[bytes]]) -> bytes:
-    """Convert a dictionary of grafts into string
+    """Convert a dictionary of grafts into string.
 
 
     The graft dictionary is:
     The graft dictionary is:
         <commit sha1>: [<parent sha1>*]
         <commit sha1>: [<parent sha1>*]
@@ -275,7 +324,7 @@ def _set_filesystem_hidden(path):
 
 
 
 
 class ParentsProvider:
 class ParentsProvider:
-    def __init__(self, store, grafts={}, shallows=[]):
+    def __init__(self, store, grafts={}, shallows=[]) -> None:
         self.store = store
         self.store = store
         self.grafts = grafts
         self.grafts = grafts
         self.shallows = set(shallows)
         self.shallows = set(shallows)
@@ -305,7 +354,7 @@ class BaseRepo:
         repository
         repository
     """
     """
 
 
-    def __init__(self, object_store: PackBasedObjectStore, refs: RefsContainer):
+    def __init__(self, object_store: PackBasedObjectStore, refs: RefsContainer) -> None:
         """Open a repository.
         """Open a repository.
 
 
         This shouldn't be called directly, but rather through one of the
         This shouldn't be called directly, but rather through one of the
@@ -497,7 +546,7 @@ class BaseRepo:
                 def get_remote_has(self):
                 def get_remote_has(self):
                     return None
                     return None
 
 
-                def __len__(self):
+                def __len__(self) -> int:
                     return 0
                     return 0
 
 
                 def __iter__(self):
                 def __iter__(self):
@@ -642,8 +691,7 @@ class BaseRepo:
         raise NotImplementedError(self.get_config)
         raise NotImplementedError(self.get_config)
 
 
     def get_worktree_config(self) -> "ConfigFile":
     def get_worktree_config(self) -> "ConfigFile":
-        """Retrieve the worktree config object.
-        """
+        """Retrieve the worktree config object."""
         raise NotImplementedError(self.get_worktree_config)
         raise NotImplementedError(self.get_worktree_config)
 
 
     def get_description(self):
     def get_description(self):
@@ -795,7 +843,7 @@ class BaseRepo:
         else:
         else:
             return name in self.refs
             return name in self.refs
 
 
-    def __setitem__(self, name: bytes, value: Union[ShaFile, bytes]):
+    def __setitem__(self, name: bytes, value: Union[ShaFile, bytes]) -> None:
         """Set a ref.
         """Set a ref.
 
 
         Args:
         Args:
@@ -812,7 +860,7 @@ class BaseRepo:
         else:
         else:
             raise ValueError(name)
             raise ValueError(name)
 
 
-    def __delitem__(self, name: bytes):
+    def __delitem__(self, name: bytes) -> None:
         """Remove a ref.
         """Remove a ref.
 
 
         Args:
         Args:
@@ -830,12 +878,11 @@ class BaseRepo:
         return get_user_identity(config)
         return get_user_identity(config)
 
 
     def _add_graftpoints(self, updated_graftpoints: Dict[bytes, List[bytes]]):
     def _add_graftpoints(self, updated_graftpoints: Dict[bytes, List[bytes]]):
-        """Add or modify graftpoints
+        """Add or modify graftpoints.
 
 
         Args:
         Args:
           updated_graftpoints: Dict of commit shas to list of parent shas
           updated_graftpoints: Dict of commit shas to list of parent shas
         """
         """
-
         # Simple validation
         # Simple validation
         for commit, parents in updated_graftpoints.items():
         for commit, parents in updated_graftpoints.items():
             for sha in [commit] + parents:
             for sha in [commit] + parents:
@@ -844,7 +891,7 @@ class BaseRepo:
         self._graftpoints.update(updated_graftpoints)
         self._graftpoints.update(updated_graftpoints)
 
 
     def _remove_graftpoints(self, to_remove: List[bytes] = []) -> None:
     def _remove_graftpoints(self, to_remove: List[bytes] = []) -> None:
-        """Remove graftpoints
+        """Remove graftpoints.
 
 
         Args:
         Args:
           to_remove: List of commit shas
           to_remove: List of commit shas
@@ -904,7 +951,6 @@ class BaseRepo:
         Returns:
         Returns:
           New commit SHA1
           New commit SHA1
         """
         """
-
         try:
         try:
             if not no_verify:
             if not no_verify:
                 self.hooks["pre-commit"].execute()
                 self.hooks["pre-commit"].execute()
@@ -1043,14 +1089,14 @@ def read_gitfile(f):
 class UnsupportedVersion(Exception):
 class UnsupportedVersion(Exception):
     """Unsupported repository version."""
     """Unsupported repository version."""
 
 
-    def __init__(self, version):
+    def __init__(self, version) -> None:
         self.version = version
         self.version = version
 
 
 
 
 class UnsupportedExtension(Exception):
 class UnsupportedExtension(Exception):
     """Unsupported repository extension."""
     """Unsupported repository extension."""
 
 
-    def __init__(self, extension):
+    def __init__(self, extension) -> None:
         self.extension = extension
         self.extension = extension
 
 
 
 
@@ -1067,7 +1113,6 @@ class Repo(BaseRepo):
     up those resources.
     up those resources.
 
 
     Attributes:
     Attributes:
-
       path: Path to the working copy (if it exists) or repository control
       path: Path to the working copy (if it exists) or repository control
         directory (if the repository is bare)
         directory (if the repository is bare)
       bare: Whether this is a bare repository
       bare: Whether this is a bare repository
@@ -1190,7 +1235,7 @@ class Repo(BaseRepo):
 
 
     @classmethod
     @classmethod
     def discover(cls, start="."):
     def discover(cls, start="."):
-        """Iterate parent directories to discover a repository
+        """Iterate parent directories to discover a repository.
 
 
         Return a Repo object for the first parent directory that looks like a
         Return a Repo object for the first parent directory that looks like a
         Git repository.
         Git repository.
@@ -1323,15 +1368,18 @@ class Repo(BaseRepo):
         Args:
         Args:
           fs_paths: List of paths, relative to the repository path
           fs_paths: List of paths, relative to the repository path
         """
         """
-
         root_path_bytes = os.fsencode(self.path)
         root_path_bytes = os.fsencode(self.path)
 
 
         if isinstance(fs_paths, (str, bytes, os.PathLike)):
         if isinstance(fs_paths, (str, bytes, os.PathLike)):
             fs_paths = [fs_paths]
             fs_paths = [fs_paths]
         fs_paths = list(fs_paths)
         fs_paths = list(fs_paths)
 
 
-        from .index import (_fs_to_tree_path, blob_from_path_and_stat,
-                            index_entry_from_directory, index_entry_from_stat)
+        from .index import (
+            _fs_to_tree_path,
+            blob_from_path_and_stat,
+            index_entry_from_directory,
+            index_entry_from_stat,
+        )
 
 
         index = self.open_index()
         index = self.open_index()
         blob_normalizer = self.get_blob_normalizer()
         blob_normalizer = self.get_blob_normalizer()
@@ -1376,10 +1424,10 @@ class Repo(BaseRepo):
         index.write()
         index.write()
 
 
     def unstage(self, fs_paths: List[str]):
     def unstage(self, fs_paths: List[str]):
-        """unstage specific file in the index
+        """Unstage specific file in the index
         Args:
         Args:
           fs_paths: a list of files to unstage,
           fs_paths: a list of files to unstage,
-            relative to the repository path
+            relative to the repository path.
         """
         """
         from .index import IndexEntry, _fs_to_tree_path
         from .index import IndexEntry, _fs_to_tree_path
 
 
@@ -1464,7 +1512,6 @@ class Repo(BaseRepo):
           symlinks: Symlinks setting (default to autodetect)
           symlinks: Symlinks setting (default to autodetect)
         Returns: Created repository as `Repo`
         Returns: Created repository as `Repo`
         """
         """
-
         encoded_path = os.fsencode(self.path)
         encoded_path = os.fsencode(self.path)
 
 
         if mkdir:
         if mkdir:
@@ -1536,10 +1583,12 @@ class Repo(BaseRepo):
         Args:
         Args:
           tree: Tree SHA to reset to, None for current HEAD tree.
           tree: Tree SHA to reset to, None for current HEAD tree.
         """
         """
-        from .index import (build_index_from_tree,
-                            symlink,
-                            validate_path_element_default,
-                            validate_path_element_ntfs)
+        from .index import (
+            build_index_from_tree,
+            symlink,
+            validate_path_element_default,
+            validate_path_element_ntfs,
+        )
 
 
         if tree is None:
         if tree is None:
             head = self[b"HEAD"]
             head = self[b"HEAD"]
@@ -1606,7 +1655,7 @@ class Repo(BaseRepo):
         except FileNotFoundError:
         except FileNotFoundError:
             return None
             return None
 
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<Repo at %r>" % self.path
         return "<Repo at %r>" % self.path
 
 
     def set_description(self, description):
     def set_description(self, description):
@@ -1615,7 +1664,6 @@ class Repo(BaseRepo):
         Args:
         Args:
           description: Text to set as description for this repository.
           description: Text to set as description for this repository.
         """
         """
-
         self._put_named_file("description", description)
         self._put_named_file("description", description)
 
 
     @classmethod
     @classmethod
@@ -1723,7 +1771,7 @@ class Repo(BaseRepo):
         self.close()
         self.close()
 
 
     def get_blob_normalizer(self):
     def get_blob_normalizer(self):
-        """Return a BlobNormalizer object"""
+        """Return a BlobNormalizer object."""
         # TODO Parse the git attributes files
         # TODO Parse the git attributes files
         git_attributes = {}
         git_attributes = {}
         config_stack = self.get_config_stack()
         config_stack = self.get_config_stack()
@@ -1746,7 +1794,7 @@ class MemoryRepo(BaseRepo):
     those have a stronger dependency on the filesystem.
     those have a stronger dependency on the filesystem.
     """
     """
 
 
-    def __init__(self):
+    def __init__(self) -> None:
         from .config import ConfigFile
         from .config import ConfigFile
 
 
         self._reflog = []
         self._reflog = []
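
To place the Repo hunks above (stage, unstage, do_commit, identity checking) in context, this is the plumbing-level flow they serve, sketched against a throwaway repository; the path and the identities are illustrative assumptions:

    import os
    import tempfile

    from dulwich.repo import Repo

    path = tempfile.mkdtemp()
    repo = Repo.init(path)
    with open(os.path.join(path, "hello.txt"), "w") as f:
        f.write("hi\n")
    repo.stage(["hello.txt"])
    # A malformed committer/author string would raise InvalidUserIdentity here.
    commit_id = repo.do_commit(
        b"Add hello.txt",
        committer=b"Example Committer <committer@example.com>",
        author=b"Example Author <author@example.com>",
    )
    print(commit_id)  # hex SHA of the new commit
    repo.close()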

+ 69 - 39
dulwich/server.py

@@ -61,30 +61,61 @@ import zlib
 from dulwich import log_utils
 from dulwich import log_utils
 
 
 from .archive import tar_stream
 from .archive import tar_stream
-from .errors import (ApplyDeltaError, ChecksumMismatch, GitProtocolError,
-                     HookError, NotGitRepository, ObjectFormatException,
-                     UnexpectedCommandError)
+from .errors import (
+    ApplyDeltaError,
+    ChecksumMismatch,
+    GitProtocolError,
+    HookError,
+    NotGitRepository,
+    ObjectFormatException,
+    UnexpectedCommandError,
+)
 from .object_store import peel_sha
 from .object_store import peel_sha
 from .objects import Commit, ObjectID, valid_hexsha
 from .objects import Commit, ObjectID, valid_hexsha
-from .pack import (ObjectContainer, PackedObjectContainer,
-                   write_pack_from_container)
-from .protocol import (CAPABILITIES_REF, CAPABILITY_AGENT,
-                       CAPABILITY_DELETE_REFS, CAPABILITY_INCLUDE_TAG,
-                       CAPABILITY_MULTI_ACK, CAPABILITY_MULTI_ACK_DETAILED,
-                       CAPABILITY_NO_DONE, CAPABILITY_NO_PROGRESS,
-                       CAPABILITY_OFS_DELTA, CAPABILITY_QUIET,
-                       CAPABILITY_REPORT_STATUS, CAPABILITY_SHALLOW,
-                       CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK,
-                       COMMAND_DEEPEN, COMMAND_DONE, COMMAND_HAVE,
-                       COMMAND_SHALLOW, COMMAND_UNSHALLOW, COMMAND_WANT,
-                       MULTI_ACK, MULTI_ACK_DETAILED, NAK_LINE,
-                       SIDE_BAND_CHANNEL_DATA, SIDE_BAND_CHANNEL_FATAL,
-                       SIDE_BAND_CHANNEL_PROGRESS, SINGLE_ACK, TCP_GIT_PORT,
-                       ZERO_SHA, BufferedPktLineWriter, Protocol,
-                       ReceivableProtocol, ack_type, capability_agent,
-                       extract_capabilities, extract_want_line_capabilities,
-                       format_ack_line, format_ref_line, format_shallow_line,
-                       format_unshallow_line, symref_capabilities)
+from .pack import ObjectContainer, PackedObjectContainer, write_pack_from_container
+from .protocol import (
+    CAPABILITIES_REF,
+    CAPABILITY_AGENT,
+    CAPABILITY_DELETE_REFS,
+    CAPABILITY_INCLUDE_TAG,
+    CAPABILITY_MULTI_ACK,
+    CAPABILITY_MULTI_ACK_DETAILED,
+    CAPABILITY_NO_DONE,
+    CAPABILITY_NO_PROGRESS,
+    CAPABILITY_OFS_DELTA,
+    CAPABILITY_QUIET,
+    CAPABILITY_REPORT_STATUS,
+    CAPABILITY_SHALLOW,
+    CAPABILITY_SIDE_BAND_64K,
+    CAPABILITY_THIN_PACK,
+    COMMAND_DEEPEN,
+    COMMAND_DONE,
+    COMMAND_HAVE,
+    COMMAND_SHALLOW,
+    COMMAND_UNSHALLOW,
+    COMMAND_WANT,
+    MULTI_ACK,
+    MULTI_ACK_DETAILED,
+    NAK_LINE,
+    SIDE_BAND_CHANNEL_DATA,
+    SIDE_BAND_CHANNEL_FATAL,
+    SIDE_BAND_CHANNEL_PROGRESS,
+    SINGLE_ACK,
+    TCP_GIT_PORT,
+    ZERO_SHA,
+    BufferedPktLineWriter,
+    Protocol,
+    ReceivableProtocol,
+    ack_type,
+    capability_agent,
+    extract_capabilities,
+    extract_want_line_capabilities,
+    format_ack_line,
+    format_ref_line,
+    format_shallow_line,
+    format_unshallow_line,
+    symref_capabilities,
+)
 from .refs import PEELED_TAG_SUFFIX, RefsContainer, write_info_refs
 from .refs import PEELED_TAG_SUFFIX, RefsContainer, write_info_refs
 from .repo import BaseRepo, Repo
 from .repo import BaseRepo, Repo
 
 
@@ -117,8 +148,7 @@ class BackendRepo(TypingProtocol):
     refs: RefsContainer
     refs: RefsContainer
 
 
     def get_refs(self) -> Dict[bytes, bytes]:
     def get_refs(self) -> Dict[bytes, bytes]:
-        """
-        Get all the refs in the repository
+        """Get all the refs in the repository.
 
 
         Returns: dict of name -> sha
         Returns: dict of name -> sha
         """
         """
@@ -137,8 +167,7 @@ class BackendRepo(TypingProtocol):
         return None
         return None
 
 
     def find_missing_objects(self, determine_wants, graph_walker, progress, get_tagged=None):
     def find_missing_objects(self, determine_wants, graph_walker, progress, get_tagged=None):
-        """
-        Yield the objects required for a list of commits.
+        """Yield the objects required for a list of commits.
 
 
         Args:
         Args:
           progress: is a callback to send progress messages to the client
           progress: is a callback to send progress messages to the client
@@ -151,7 +180,7 @@ class BackendRepo(TypingProtocol):
 class DictBackend(Backend):
 class DictBackend(Backend):
     """Trivial backend that looks up Git repositories in a dictionary."""
     """Trivial backend that looks up Git repositories in a dictionary."""
 
 
-    def __init__(self, repos):
+    def __init__(self, repos) -> None:
         self.repos = repos
         self.repos = repos
 
 
     def open_repository(self, path: str) -> BaseRepo:
     def open_repository(self, path: str) -> BaseRepo:
@@ -167,7 +196,7 @@ class DictBackend(Backend):
 class FileSystemBackend(Backend):
 class FileSystemBackend(Backend):
     """Simple backend looking up Git repositories in the local file system."""
     """Simple backend looking up Git repositories in the local file system."""
 
 
-    def __init__(self, root=os.sep):
+    def __init__(self, root=os.sep) -> None:
         super().__init__()
         super().__init__()
         self.root = (os.path.abspath(root) + os.sep).replace(os.sep * 2, os.sep)
         self.root = (os.path.abspath(root) + os.sep).replace(os.sep * 2, os.sep)
 
 
@@ -184,7 +213,7 @@ class FileSystemBackend(Backend):
 class Handler:
 class Handler:
     """Smart protocol command handler base class."""
     """Smart protocol command handler base class."""
 
 
-    def __init__(self, backend, proto, stateless_rpc=False):
+    def __init__(self, backend, proto, stateless_rpc=False) -> None:
         self.backend = backend
         self.backend = backend
         self.proto = proto
         self.proto = proto
         self.stateless_rpc = stateless_rpc
         self.stateless_rpc = stateless_rpc
@@ -196,7 +225,7 @@ class Handler:
 class PackHandler(Handler):
 class PackHandler(Handler):
     """Protocol handler for packs."""
     """Protocol handler for packs."""
 
 
-    def __init__(self, backend, proto, stateless_rpc=False):
+    def __init__(self, backend, proto, stateless_rpc=False) -> None:
         super().__init__(backend, proto, stateless_rpc)
         super().__init__(backend, proto, stateless_rpc)
         self._client_capabilities = None
         self._client_capabilities = None
         # Flags needed for the no-done capability
         # Flags needed for the no-done capability
@@ -253,7 +282,7 @@ class PackHandler(Handler):
 class UploadPackHandler(PackHandler):
 class UploadPackHandler(PackHandler):
     """Protocol handler for uploading a pack to the client."""
     """Protocol handler for uploading a pack to the client."""
 
 
-    def __init__(self, backend, args, proto, stateless_rpc=False, advertise_refs=False):
+    def __init__(self, backend, args, proto, stateless_rpc=False, advertise_refs=False) -> None:
         super().__init__(
         super().__init__(
             backend, proto, stateless_rpc=stateless_rpc
             backend, proto, stateless_rpc=stateless_rpc
         )
         )
@@ -528,7 +557,7 @@ class _ProtocolGraphWalker:
     any calls to next() or ack() are made.
     any calls to next() or ack() are made.
     """
     """
 
 
-    def __init__(self, handler, object_store: ObjectContainer, get_peeled, get_symrefs):
+    def __init__(self, handler, object_store: ObjectContainer, get_peeled, get_symrefs) -> None:
         self.handler = handler
         self.handler = handler
         self.store: ObjectContainer = object_store
         self.store: ObjectContainer = object_store
         self.get_peeled = get_peeled
         self.get_peeled = get_peeled
@@ -660,6 +689,7 @@ class _ProtocolGraphWalker:
         Args:
         Args:
           allowed: An iterable of command names that should be allowed.
           allowed: An iterable of command names that should be allowed.
         Returns: A tuple of (command, value); see _split_proto_line.
         Returns: A tuple of (command, value); see _split_proto_line.
+
         Raises:
         Raises:
           UnexpectedCommandError: If an error occurred reading the line.
           UnexpectedCommandError: If an error occurred reading the line.
         """
         """
@@ -731,7 +761,7 @@ _GRAPH_WALKER_COMMANDS = (COMMAND_HAVE, COMMAND_DONE, None)
 class SingleAckGraphWalkerImpl:
 class SingleAckGraphWalkerImpl:
     """Graph walker implementation that speaks the single-ack protocol."""
     """Graph walker implementation that speaks the single-ack protocol."""
 
 
-    def __init__(self, walker):
+    def __init__(self, walker) -> None:
         self.walker = walker
         self.walker = walker
         self._common = []
         self._common = []
 
 
@@ -775,7 +805,7 @@ class SingleAckGraphWalkerImpl:
 class MultiAckGraphWalkerImpl:
 class MultiAckGraphWalkerImpl:
     """Graph walker implementation that speaks the multi-ack protocol."""
     """Graph walker implementation that speaks the multi-ack protocol."""
 
 
-    def __init__(self, walker):
+    def __init__(self, walker) -> None:
         self.walker = walker
         self.walker = walker
         self._found_base = False
         self._found_base = False
         self._common = []
         self._common = []
@@ -834,7 +864,7 @@ class MultiAckGraphWalkerImpl:
 class MultiAckDetailedGraphWalkerImpl:
 class MultiAckDetailedGraphWalkerImpl:
     """Graph walker implementation speaking the multi-ack-detailed protocol."""
     """Graph walker implementation speaking the multi-ack-detailed protocol."""
 
 
-    def __init__(self, walker):
+    def __init__(self, walker) -> None:
         self.walker = walker
         self.walker = walker
         self._common = []
         self._common = []
 
 
@@ -899,7 +929,7 @@ class MultiAckDetailedGraphWalkerImpl:
 class ReceivePackHandler(PackHandler):
 class ReceivePackHandler(PackHandler):
     """Protocol handler for downloading a pack from the client."""
     """Protocol handler for downloading a pack from the client."""
 
 
-    def __init__(self, backend, args, proto, stateless_rpc=False, advertise_refs=False):
+    def __init__(self, backend, args, proto, stateless_rpc=False, advertise_refs=False) -> None:
         super().__init__(
         super().__init__(
             backend, proto, stateless_rpc=stateless_rpc
             backend, proto, stateless_rpc=stateless_rpc
         )
         )
@@ -1063,7 +1093,7 @@ class ReceivePackHandler(PackHandler):
 
 
 
 
 class UploadArchiveHandler(Handler):
 class UploadArchiveHandler(Handler):
-    def __init__(self, backend, args, proto, stateless_rpc=False):
+    def __init__(self, backend, args, proto, stateless_rpc=False) -> None:
         super().__init__(backend, proto, stateless_rpc)
         super().__init__(backend, proto, stateless_rpc)
         self.repo = backend.open_repository(args[0])
         self.repo = backend.open_repository(args[0])
 
 
@@ -1111,7 +1141,7 @@ DEFAULT_HANDLERS = {
 
 
 
 
 class TCPGitRequestHandler(socketserver.StreamRequestHandler):
 class TCPGitRequestHandler(socketserver.StreamRequestHandler):
-    def __init__(self, handlers, *args, **kwargs):
+    def __init__(self, handlers, *args, **kwargs) -> None:
         self.handlers = handlers
         self.handlers = handlers
         socketserver.StreamRequestHandler.__init__(self, *args, **kwargs)
         socketserver.StreamRequestHandler.__init__(self, *args, **kwargs)
 
 
@@ -1135,7 +1165,7 @@ class TCPGitServer(socketserver.TCPServer):
     def _make_handler(self, *args, **kwargs):
     def _make_handler(self, *args, **kwargs):
         return TCPGitRequestHandler(self.handlers, *args, **kwargs)
         return TCPGitRequestHandler(self.handlers, *args, **kwargs)
 
 
-    def __init__(self, backend, listen_addr, port=TCP_GIT_PORT, handlers=None):
+    def __init__(self, backend, listen_addr, port=TCP_GIT_PORT, handlers=None) -> None:
         self.handlers = dict(DEFAULT_HANDLERS)
         self.handlers = dict(DEFAULT_HANDLERS)
         if handlers is not None:
         if handlers is not None:
             self.handlers.update(handlers)
             self.handlers.update(handlers)

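For context on the TCPGitServer constructor touched above, a minimal sketch of serving a local repository over git:// (an illustration only, assuming a repository exists in the current directory; the port falls back to TCP_GIT_PORT):

    from dulwich.repo import Repo
    from dulwich.server import DictBackend, TCPGitServer

    # Map the export path b"/" to a repository; TCPGitServer starts from a copy
    # of DEFAULT_HANDLERS and merges any extra handlers passed in.
    backend = DictBackend({b"/": Repo(".")})
    server = TCPGitServer(backend, "localhost")
    server.serve_forever()
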
+ 2 - 2
dulwich/stash.py

@@ -36,7 +36,7 @@ class Stash:
     Note that this doesn't currently update the working tree.
     """
 
-    def __init__(self, repo, ref=DEFAULT_STASH_REF):
+    def __init__(self, repo, ref=DEFAULT_STASH_REF) -> None:
         self._ref = ref
         self._repo = repo
 
@@ -128,5 +128,5 @@ class Stash:
     def __getitem__(self, index):
         return list(self.stashes())[index]
 
-    def __len__(self):
+    def __len__(self) -> int:
         return len(list(self.stashes()))

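A minimal sketch of the Stash API whose __init__/__len__ annotations change above (assuming an existing repository; the stash ref defaults to DEFAULT_STASH_REF unless one is passed):

    from dulwich.repo import Repo
    from dulwich.stash import Stash

    stash = Stash(Repo("."))    # ref defaults to DEFAULT_STASH_REF
    if len(stash):              # __len__ counts the recorded stash entries
        entry = stash[0]        # __getitem__ indexes the list returned by stashes()
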
+ 2 - 3
dulwich/submodule.py

@@ -18,8 +18,7 @@
 # License, Version 2.0.
 #
 
-"""Working with Git submodules.
-"""
+"""Working with Git submodules."""
 
 from typing import Iterator, Tuple
 
@@ -28,7 +27,7 @@ from .objects import S_ISGITLINK
 
 
 def iter_cached_submodules(store, root_tree_id: bytes) -> Iterator[Tuple[str, bytes]]:
-    """iterate over cached submodules.
+    """Iterate over cached submodules.
 
     Args:
       store: Object store to iterate

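A short, hedged usage sketch for iter_cached_submodules() as declared above (assuming HEAD points at a commit whose tree may contain gitlink entries):

    from dulwich.repo import Repo
    from dulwich.submodule import iter_cached_submodules

    repo = Repo(".")
    root_tree_id = repo[repo.head()].tree
    for path, sha in iter_cached_submodules(repo.object_store, root_tree_id):
        print(path, sha)    # submodule path and the commit SHA it pins
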
+ 2 - 2
dulwich/tests/__init__.py

@@ -34,11 +34,11 @@ import shutil
 import subprocess
 import sys
 import tempfile
+
 # If Python itself provides an exception, use that
 import unittest
-from unittest import SkipTest
+from unittest import SkipTest, expectedFailure, skipIf
 from unittest import TestCase as _TestCase  # noqa: F401
-from unittest import expectedFailure, skipIf
 
 
 class TestCase(_TestCase):

+ 1 - 1
dulwich/tests/compat/server_utils.py

@@ -37,7 +37,7 @@ from .utils import require_git_version, run_git_or_fail
 class _StubRepo:
     """A stub repo that just contains a path to tear down."""
 
-    def __init__(self, name):
+    def __init__(self, name) -> None:
         temp_dir = tempfile.mkdtemp()
         self.path = os.path.join(temp_dir, name)
         os.mkdir(self.path)

+ 9 - 3
dulwich/tests/compat/test_client.py

@@ -38,8 +38,14 @@ from urllib.parse import unquote
 from dulwich import client, file, index, objects, protocol, repo
 from dulwich.tests import SkipTest, expectedFailure
 
-from .utils import (_DEFAULT_GIT, CompatTestCase, check_for_daemon,
-                    import_repo_to_dir, rmtree_ro, run_git_or_fail)
+from .utils import (
+    _DEFAULT_GIT,
+    CompatTestCase,
+    check_for_daemon,
+    import_repo_to_dir,
+    rmtree_ro,
+    run_git_or_fail,
+)
 
 if sys.platform == "win32":
     import ctypes
@@ -621,7 +627,7 @@ class HTTPGitServer(http.server.HTTPServer):
 
     allow_reuse_address = True
 
-    def __init__(self, server_address, root_path):
+    def __init__(self, server_address, root_path) -> None:
         http.server.HTTPServer.__init__(self, server_address, GitHTTPRequestHandler)
         self.root_path = root_path
         self.server_name = "localhost"

+ 1 - 2
dulwich/tests/compat/test_repository.py

@@ -28,8 +28,7 @@ from itertools import chain
 
 from ...objects import hex_to_sha
 from ...repo import Repo, check_ref_format
-from .utils import (CompatTestCase, require_git_version, rmtree_ro,
-                    run_git_or_fail)
+from .utils import CompatTestCase, require_git_version, rmtree_ro, run_git_or_fail
 
 
 class ObjectStoreTestCase(CompatTestCase):

+ 6 - 2
dulwich/tests/compat/test_web.py

@@ -33,8 +33,12 @@ from wsgiref import simple_server
 from dulwich.tests import SkipTest, skipIf
 
 from ...server import DictBackend, ReceivePackHandler, UploadPackHandler
-from ...web import (HTTPGitApplication, WSGIRequestHandlerLogger,
-                    WSGIServerLogger, make_wsgi_chain)
+from ...web import (
+    HTTPGitApplication,
+    WSGIRequestHandlerLogger,
+    WSGIServerLogger,
+    make_wsgi_chain,
+)
 from .server_utils import NoSideBand64kReceivePackHandler, ServerTests
 from .utils import CompatTestCase
 

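For orientation, a minimal sketch of the make_wsgi_chain() entry point imported above, serving a repository over the smart HTTP protocol (an illustration only; the repository path, host and port are assumptions):

    from wsgiref.simple_server import make_server

    from dulwich.repo import Repo
    from dulwich.server import DictBackend
    from dulwich.web import make_wsgi_chain

    backend = DictBackend({"/": Repo(".")})
    app = make_wsgi_chain(backend)    # wraps HTTPGitApplication in the request filters
    make_server("localhost", 8000, app).serve_forever()
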
+ 2 - 1
dulwich/tests/compat/utils.py

@@ -84,6 +84,7 @@ def require_git_version(required_version, git_path=_DEFAULT_GIT):
         sub-point); omitted components default to 0.
       git_path: Path to the git executable; defaults to the version in
         the system path.
+
     Raises:
       ValueError: if the required version tuple has too many parts.
       SkipTest: if no suitable git version was found at the given path.
@@ -132,10 +133,10 @@ def run_git(
     Returns: A tuple of (returncode, stdout contents, stderr contents).
         If capture_stdout is False, None will be returned as stdout contents.
         If capture_stderr is False, None will be returned as stderr contents.
+
     Raises:
       OSError: if the git executable was not found.
     """
-
     env = popen_kwargs.pop("env", {})
     env["LC_ALL"] = env["LANG"] = "C"
     env["PATH"] = os.getenv("PATH")

+ 27 - 13
dulwich/tests/test_client.py

@@ -33,15 +33,29 @@ import dulwich
 from dulwich import client
 from dulwich.tests import TestCase, skipIf
 
-from ..client import (FetchPackResult, GitProtocolError, HangupException,
-                      HttpGitClient, InvalidWants, LocalGitClient,
-                      PLinkSSHVendor, ReportStatusParser, SendPackError,
-                      SSHGitClient, StrangeHostname, SubprocessSSHVendor,
-                      TCPGitClient, TraditionalGitClient,
-                      _remote_error_from_stderr, check_wants,
-                      default_urllib3_manager, get_credentials_from_store,
-                      get_transport_and_path, get_transport_and_path_from_url,
-                      parse_rsync_url)
+from ..client import (
+    FetchPackResult,
+    GitProtocolError,
+    HangupException,
+    HttpGitClient,
+    InvalidWants,
+    LocalGitClient,
+    PLinkSSHVendor,
+    ReportStatusParser,
+    SendPackError,
+    SSHGitClient,
+    StrangeHostname,
+    SubprocessSSHVendor,
+    TCPGitClient,
+    TraditionalGitClient,
+    _remote_error_from_stderr,
+    check_wants,
+    default_urllib3_manager,
+    get_credentials_from_store,
+    get_transport_and_path,
+    get_transport_and_path_from_url,
+    parse_rsync_url,
+)
 from ..config import ConfigDict
 from ..objects import Commit, Tree
 from ..pack import pack_objects_to_data, write_pack_data, write_pack_objects
@@ -51,7 +65,7 @@ from .utils import open_repo, setup_warning_catcher, tear_down_repo
 
 
 class DummyClient(TraditionalGitClient):
-    def __init__(self, can_read, read, write):
+    def __init__(self, can_read, read, write) -> None:
         self.can_read = can_read
         self.read = read
         self.write = write
@@ -62,7 +76,7 @@ class DummyClient(TraditionalGitClient):
 
 
 class DummyPopen:
-    def __init__(self, *args, **kwards):
+    def __init__(self, *args, **kwards) -> None:
         self.stdin = BytesIO(b"stdin")
         self.stdout = BytesIO(b"stdout")
         self.stderr = BytesIO(b"stderr")
@@ -680,7 +694,7 @@ class TestGetTransportAndPathFromUrl(TestCase):
 
 
 class TestSSHVendor:
-    def __init__(self):
+    def __init__(self) -> None:
         self.host = None
         self.command = ""
         self.username = None
@@ -1075,7 +1089,7 @@ class HttpGitClientTests(TestCase):
         # we need to mock urllib3.PoolManager as this test will fail
         # otherwise without an active internet connection
         class PoolManagerMock:
-            def __init__(self):
+            def __init__(self) -> None:
                 self.headers = {}
 
             def request(self, method, url, fields=None, headers=None, redirect=True, preload_content=True):

+ 12 - 4
dulwich/tests/test_config.py

@@ -28,10 +28,18 @@ from unittest.mock import patch
 
 from dulwich.tests import TestCase
 
-from ..config import (ConfigDict, ConfigFile, StackedConfig,
-                      _check_section_name, _check_variable_name, _escape_value,
-                      _format_string, _parse_string, apply_instead_of,
-                      parse_submodules)
+from ..config import (
+    ConfigDict,
+    ConfigFile,
+    StackedConfig,
+    _check_section_name,
+    _check_variable_name,
+    _escape_value,
+    _format_string,
+    _parse_string,
+    apply_instead_of,
+    parse_submodules,
+)
 
 
 class ConfigFileTests(TestCase):

+ 1 - 2
dulwich/tests/test_credentials.py

@@ -24,8 +24,7 @@ from urllib.parse import urlparse
 from dulwich.tests import TestCase
 
 from ..config import ConfigDict
-from ..credentials import (match_partial_url, match_urls,
-                           urlmatch_credential_sections)
+from ..credentials import match_partial_url, match_urls, urlmatch_credential_sections
 
 
 class TestCredentialHelpersUtils(TestCase):

+ 18 - 6
dulwich/tests/test_diff_tree.py

@@ -24,12 +24,24 @@ from itertools import permutations
 
 from dulwich.tests import TestCase
 
-from ..diff_tree import (CHANGE_COPY, CHANGE_MODIFY, CHANGE_RENAME,
-                         CHANGE_UNCHANGED, RenameDetector, TreeChange,
-                         _count_blocks, _count_blocks_py, _is_tree,
-                         _is_tree_py, _merge_entries, _merge_entries_py,
-                         _similarity_score, _tree_change_key, tree_changes,
-                         tree_changes_for_merge)
+from ..diff_tree import (
+    CHANGE_COPY,
+    CHANGE_MODIFY,
+    CHANGE_RENAME,
+    CHANGE_UNCHANGED,
+    RenameDetector,
+    TreeChange,
+    _count_blocks,
+    _count_blocks_py,
+    _is_tree,
+    _is_tree_py,
+    _merge_entries,
+    _merge_entries_py,
+    _similarity_score,
+    _tree_change_key,
+    tree_changes,
+    tree_changes_for_merge,
+)
 from ..index import commit_tree
 from ..object_store import MemoryObjectStore
 from ..objects import Blob, ShaFile, Tree, TreeEntry

+ 9 - 2
dulwich/tests/test_ignore.py

@@ -28,8 +28,15 @@ from io import BytesIO
 
 from dulwich.tests import TestCase
 
-from ..ignore import (IgnoreFilter, IgnoreFilterManager, IgnoreFilterStack,
-                      Pattern, match_pattern, read_ignore_patterns, translate)
+from ..ignore import (
+    IgnoreFilter,
+    IgnoreFilterManager,
+    IgnoreFilterStack,
+    Pattern,
+    match_pattern,
+    read_ignore_patterns,
+    translate,
+)
 from ..repo import Repo
 
 POSITIVE_MATCH_TESTS = [

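A brief sketch of the IgnoreFilter API imported above (patterns are bytes, as in a .gitignore file; the negation behaviour shown is the usual gitignore semantics, stated here as an assumption rather than taken from the diff):

    from dulwich.ignore import IgnoreFilter

    f = IgnoreFilter([b"*.o", b"!keep.o"])
    print(bool(f.is_ignored(b"main.o")))   # True: matches *.o
    print(bool(f.is_ignored(b"keep.o")))   # False: re-included by the ! pattern
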
+ 18 - 10
dulwich/tests/test_index.py

@@ -32,12 +32,24 @@ from io import BytesIO
 
 from dulwich.tests import TestCase, skipIf
 
-from ..index import (Index, IndexEntry, _fs_to_tree_path, _tree_to_fs_path,
-                     build_index_from_tree, cleanup_mode, commit_tree,
-                     get_unstaged_changes, index_entry_from_stat, read_index,
-                     read_index_dict, validate_path_element_default,
-                     validate_path_element_ntfs, write_cache_time, write_index,
-                     write_index_dict)
+from ..index import (
+    Index,
+    IndexEntry,
+    _fs_to_tree_path,
+    _tree_to_fs_path,
+    build_index_from_tree,
+    cleanup_mode,
+    commit_tree,
+    get_unstaged_changes,
+    index_entry_from_stat,
+    read_index,
+    read_index_dict,
+    validate_path_element_default,
+    validate_path_element_ntfs,
+    write_cache_time,
+    write_index,
+    write_index_dict,
+)
 from ..object_store import MemoryObjectStore
 from ..objects import S_IFGITLINK, Blob, Commit, Tree
 from ..repo import Repo
@@ -643,7 +655,6 @@ class BuildIndexTests(TestCase):
 class GetUnstagedChangesTests(TestCase):
     def test_get_unstaged_changes(self):
         """Unit test for get_unstaged_changes."""
-
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -676,7 +687,6 @@ class GetUnstagedChangesTests(TestCase):
 
     def test_get_unstaged_deleted_changes(self):
         """Unit test for get_unstaged_changes."""
-
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -701,7 +711,6 @@ class GetUnstagedChangesTests(TestCase):
 
     def test_get_unstaged_changes_removed_replaced_by_directory(self):
         """Unit test for get_unstaged_changes."""
-
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -728,7 +737,6 @@ class GetUnstagedChangesTests(TestCase):
     @skipIf(not can_symlink(), "Requires symlink support")
     def test_get_unstaged_changes_removed_replaced_by_link(self):
         """Unit test for get_unstaged_changes."""
-
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:

+ 8 - 4
dulwich/tests/test_line_ending.py

@@ -23,14 +23,18 @@
 
 from dulwich.tests import TestCase
 
-from ..line_ending import (convert_crlf_to_lf, convert_lf_to_crlf,
-                           get_checkin_filter_autocrlf,
-                           get_checkout_filter_autocrlf, normalize_blob)
+from ..line_ending import (
+    convert_crlf_to_lf,
+    convert_lf_to_crlf,
+    get_checkin_filter_autocrlf,
+    get_checkout_filter_autocrlf,
+    normalize_blob,
+)
 from ..objects import Blob
 
 
 class LineEndingConversion(TestCase):
-    """Test the line ending conversion functions in various cases"""
+    """Test the line ending conversion functions in various cases."""
 
     def test_convert_crlf_to_lf_no_op(self):
         self.assertEqual(convert_crlf_to_lf(b"foobar"), b"foobar")

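The conversion helpers imported above operate on raw bytes; a minimal illustrative sketch:

    from dulwich.line_ending import convert_crlf_to_lf, convert_lf_to_crlf

    assert convert_crlf_to_lf(b"spam\r\neggs\r\n") == b"spam\neggs\n"
    assert convert_lf_to_crlf(b"spam\neggs\n") == b"spam\r\neggs\r\n"
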
+ 1 - 1
dulwich/tests/test_lru_cache.py

@@ -354,7 +354,7 @@ class TestLRUSizeCache(TestCase):
         self.assertEqual([("test", "key that is too big")], cleanup_calls)
 
     def test_adding_clears_cache_based_on_size(self):
-        """The cache is cleared in LRU order until small enough"""
+        """The cache is cleared in LRU order until small enough."""
         cache = lru_cache.LRUSizeCache(max_size=20)
         cache.add("key1", "value")  # 5 chars
         cache.add("key2", "value2")  # 6 chars

+ 2 - 2
dulwich/tests/test_missing_obj_finder.py

@@ -96,14 +96,14 @@ class MOFLinearRepoTest(MissingObjectFinderTest):
         self.assertMissingMatch([self.cmt(1).id], [self.cmt(3).id], self.missing_1_3)
 
     def test_bogus_haves(self):
-        """Ensure non-existent SHA in haves are tolerated"""
+        """Ensure non-existent SHA in haves are tolerated."""
         bogus_sha = self.cmt(2).id[::-1]
         haves = [self.cmt(1).id, bogus_sha]
         wants = [self.cmt(3).id]
         self.assertMissingMatch(haves, wants, self.missing_1_3)
 
     def test_bogus_wants_failure(self):
-        """Ensure non-existent SHA in wants are not tolerated"""
+        """Ensure non-existent SHA in wants are not tolerated."""
         bogus_sha = self.cmt(2).id[::-1]
         haves = [self.cmt(1).id]
         wants = [self.cmt(3).id, bogus_sha]

+ 21 - 7
dulwich/tests/test_object_store.py

@@ -34,12 +34,26 @@ from dulwich.tests import TestCase
 
 from ..errors import NotTreeError
 from ..index import commit_tree
-from ..object_store import (DiskObjectStore, MemoryObjectStore,
-                            ObjectStoreGraphWalker, OverlayObjectStore,
-                            commit_tree_changes, iter_tree_contents, peel_sha,
-                            read_packs_file, tree_lookup_path)
-from ..objects import (S_IFGITLINK, Blob, EmptyFileException,
-                       SubmoduleEncountered, Tree, TreeEntry, sha_to_hex)
+from ..object_store import (
+    DiskObjectStore,
+    MemoryObjectStore,
+    ObjectStoreGraphWalker,
+    OverlayObjectStore,
+    commit_tree_changes,
+    iter_tree_contents,
+    peel_sha,
+    read_packs_file,
+    tree_lookup_path,
+)
+from ..objects import (
+    S_IFGITLINK,
+    Blob,
+    EmptyFileException,
+    SubmoduleEncountered,
+    Tree,
+    TreeEntry,
+    sha_to_hex,
+)
 from ..pack import REF_DELTA, write_pack_objects
 from ..protocol import DEPTH_INFINITE
 from .utils import build_pack, make_object, make_tag
@@ -427,7 +441,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         self.assertEqual(oct(mode), packmode)
 
     def test_corrupted_object_raise_exception(self):
-        """Corrupted sha1 disk file should raise specific exception"""
+        """Corrupted sha1 disk file should raise specific exception."""
         self.store.add_object(testobject)
         self.assertEqual(
             (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)

+ 28 - 12
dulwich/tests/test_objects.py

@@ -33,13 +33,29 @@ from itertools import permutations
 from dulwich.tests import TestCase
 
 from ..errors import ObjectFormatException
-from ..objects import (MAX_TIME, Blob, Commit, ShaFile, Tag, Tree, TreeEntry,
-                       _parse_tree_py, _sorted_tree_items_py, check_hexsha,
-                       check_identity, format_timezone, hex_to_filename,
-                       hex_to_sha, object_class, parse_timezone, parse_tree,
-                       pretty_format_tree_entry, sha_to_hex, sorted_tree_items)
-from .utils import (ext_functest_builder, functest_builder, make_commit,
-                    make_object)
+from ..objects import (
+    MAX_TIME,
+    Blob,
+    Commit,
+    ShaFile,
+    Tag,
+    Tree,
+    TreeEntry,
+    _parse_tree_py,
+    _sorted_tree_items_py,
+    check_hexsha,
+    check_identity,
+    format_timezone,
+    hex_to_filename,
+    hex_to_sha,
+    object_class,
+    parse_timezone,
+    parse_tree,
+    pretty_format_tree_entry,
+    sha_to_hex,
+    sorted_tree_items,
+)
+from .utils import ext_functest_builder, functest_builder, make_commit, make_object
 
 a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
 b_sha = b"2969be3e8ee1c0222396a5611407e4769f14e54b"
@@ -57,14 +73,14 @@ class TestHexToSha(TestCase):
 
 
 class BlobReadTests(TestCase):
-    """Test decompression of blobs"""
+    """Test decompression of blobs."""
 
     def get_sha_file(self, cls, base, sha):
         dir = os.path.join(os.path.dirname(__file__), "..", "..", "testdata", base)
         return cls.from_path(hex_to_filename(dir, sha))
 
     def get_blob(self, sha):
-        """Return the blob named sha from the test data dir"""
+        """Return the blob named sha from the test data dir."""
         return self.get_sha_file(Blob, "blobs", sha)
 
     def get_tree(self, sha):
@@ -665,7 +681,7 @@ class CommitParseTests(ShaFileCheckTests):
         )
 
     def test_check_commit_with_overflow_date(self):
-        """Date with overflow should raise an ObjectFormatException when checked"""
+        """Date with overflow should raise an ObjectFormatException when checked."""
         identity_with_wrong_time = (
             b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614 +42707004"
         )
@@ -686,7 +702,7 @@ class CommitParseTests(ShaFileCheckTests):
                 commit.check()
 
     def test_mangled_author_line(self):
-        """Mangled author line should successfully parse"""
+        """Mangled author line should successfully parse."""
         author_line = (
             b'Karl MacMillan <kmacmill@redhat.com> <"Karl MacMillan '
             b'<kmacmill@redhat.com>"> 1197475547 -0500'
@@ -1116,7 +1132,7 @@ class TagParseTests(ShaFileCheckTests):
         )
 
     def test_check_tag_with_overflow_time(self):
-        """Date with overflow should raise an ObjectFormatException when checked"""
+        """Date with overflow should raise an ObjectFormatException when checked."""
         author = "Some Dude <some@dude.org> {} +0000".format(MAX_TIME + 1)
         tag = Tag.from_string(self.make_tag_text(tagger=(author.encode())))
         with self.assertRaises(ObjectFormatException):

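A small sketch of the object model exercised by these tests, offered as illustration only:

    from dulwich.objects import Blob

    blob = Blob.from_string(b"my file content\n")
    print(blob.id)          # hex SHA-1 of the encoded object
    print(blob.type_name)   # b"blob"
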
+ 10 - 3
dulwich/tests/test_objectspec.py

@@ -26,9 +26,16 @@
 from dulwich.tests import TestCase
 
 from ..objects import Blob
-from ..objectspec import (parse_commit, parse_commit_range, parse_object,
-                          parse_ref, parse_refs, parse_reftuple,
-                          parse_reftuples, parse_tree)
+from ..objectspec import (
+    parse_commit,
+    parse_commit_range,
+    parse_object,
+    parse_ref,
+    parse_refs,
+    parse_reftuple,
+    parse_reftuples,
+    parse_tree,
+)
 from ..repo import MemoryRepo
 from .utils import build_commit_graph
 

+ 30 - 13
dulwich/tests/test_pack.py

@@ -36,13 +36,30 @@ from ..errors import ApplyDeltaError, ChecksumMismatch
 from ..file import GitFile
 from ..object_store import MemoryObjectStore
 from ..objects import Blob, Commit, Tree, hex_to_sha, sha_to_hex
-from ..pack import (OFS_DELTA, REF_DELTA, DeltaChainIterator, MemoryPackIndex,
-                    Pack, PackData, PackStreamReader, UnpackedObject,
-                    _delta_encode_size, _encode_copy_operation, apply_delta,
-                    compute_file_sha, create_delta, deltify_pack_objects,
-                    load_pack_index, read_zlib_chunks, unpack_object,
-                    write_pack, write_pack_header, write_pack_index_v1,
-                    write_pack_index_v2, write_pack_object)
+from ..pack import (
+    OFS_DELTA,
+    REF_DELTA,
+    DeltaChainIterator,
+    MemoryPackIndex,
+    Pack,
+    PackData,
+    PackStreamReader,
+    UnpackedObject,
+    _delta_encode_size,
+    _encode_copy_operation,
+    apply_delta,
+    compute_file_sha,
+    create_delta,
+    deltify_pack_objects,
+    load_pack_index,
+    read_zlib_chunks,
+    unpack_object,
+    write_pack,
+    write_pack_header,
+    write_pack_index_v1,
+    write_pack_index_v2,
+    write_pack_object,
+)
 from .utils import build_pack, make_object
 
 pack1_sha = b"bc63ddad95e7321ee734ea11a7a62d314e0d7481"
@@ -54,7 +71,7 @@ indexmode = "0o100644" if sys.platform != "win32" else "0o100666"
 
 
 class PackTests(TestCase):
-    """Base class for testing packs"""
+    """Base class for testing packs."""
 
     def setUp(self):
         super().setUp()
@@ -64,13 +81,13 @@ class PackTests(TestCase):
     datadir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../testdata/packs"))
 
     def get_pack_index(self, sha):
-        """Returns a PackIndex from the datadir with the given sha"""
+        """Returns a PackIndex from the datadir with the given sha."""
         return load_pack_index(
             os.path.join(self.datadir, "pack-%s.idx" % sha.decode("ascii"))
         )
 
     def get_pack_data(self, sha):
-        """Returns a PackData object from the datadir with the given sha"""
+        """Returns a PackData object from the datadir with the given sha."""
         return PackData(
             os.path.join(self.datadir, "pack-%s.pack" % sha.decode("ascii"))
         )
@@ -86,7 +103,7 @@ class PackTests(TestCase):
 
 
 class PackIndexTests(PackTests):
-    """Class that tests the index of packfiles"""
+    """Class that tests the index of packfiles."""
 
     def test_object_offset(self):
         """Tests that the correct object offset is returned from the index."""
@@ -377,7 +394,7 @@ class TestPack(PackTests):
             self.assertEqual(3, len(tuples))
 
     def test_get_object_at(self):
-        """Tests random access for non-delta objects"""
+        """Tests random access for non-delta objects."""
         with self.get_pack(pack1_sha) as p:
             obj = p[a_sha]
             self.assertEqual(obj.type_name, b"blob")
@@ -924,7 +941,7 @@ class TestPackIterator(DeltaChainIterator):
 
     _compute_crc32 = True
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self._unpacked_offsets = set()
 

+ 8 - 2
dulwich/tests/test_patch.py

@@ -26,8 +26,14 @@ from dulwich.tests import SkipTest, TestCase
 
 from ..object_store import MemoryObjectStore
 from ..objects import S_IFGITLINK, Blob, Commit, Tree
-from ..patch import (get_summary, git_am_patch_split, write_blob_diff,
-                     write_commit_patch, write_object_diff, write_tree_diff)
+from ..patch import (
+    get_summary,
+    git_am_patch_split,
+    write_blob_diff,
+    write_commit_patch,
+    write_object_diff,
+    write_tree_diff,
+)
 
 
 class WriteCommitPatchTests(TestCase):

+ 3 - 8
dulwich/tests/test_porcelain.py

@@ -592,7 +592,7 @@ class TimezoneTests(PorcelainTestCase):
 
 class CleanTests(PorcelainTestCase):
     def put_files(self, tracked, ignored, untracked, empty_dirs):
-        """Put the described files in the wd"""
+        """Put the described files in the wd."""
         all_files = tracked | ignored | untracked
         for file_path in all_files:
             abs_path = os.path.join(self.repo.path, file_path)
@@ -617,7 +617,7 @@ class CleanTests(PorcelainTestCase):
         porcelain.commit(repo=self.repo.path, message="init commit")
 
     def assert_wd(self, expected_paths):
-        """Assert paths of files and dirs in wd are same as expected_paths"""
+        """Assert paths of files and dirs in wd are same as expected_paths."""
         control_dir_rel = os.path.relpath(self.repo._controldir, self.repo.path)
 
         # normalize paths to simplify comparison across platforms
@@ -1981,8 +1981,7 @@ class SubmoduleTests(PorcelainTestCase):
 
 class PushTests(PorcelainTestCase):
     def test_simple(self):
-        """
-        Basic test of porcelain push where self.repo is the remote.  First
+        """Basic test of porcelain push where self.repo is the remote.  First
         clone the remote, commit a file to the clone, then push the changes
         back to the remote.
         """
@@ -2392,7 +2391,6 @@ class StatusTests(PorcelainTestCase):
 
     def test_status_base(self):
         """Integration test for `status` functionality."""
-
         # Commit a dummy file then modify it
         fullpath = os.path.join(self.repo.path, "foo")
         with open(fullpath, "w") as f:
@@ -2561,7 +2559,6 @@ class StatusTests(PorcelainTestCase):
 
     def test_get_tree_changes_add(self):
         """Unit test for get_tree_changes add."""
-
         # Make a dummy file, stage
         filename = "bar"
         fullpath = os.path.join(self.repo.path, filename)
@@ -2589,7 +2586,6 @@ class StatusTests(PorcelainTestCase):
 
     def test_get_tree_changes_modify(self):
         """Unit test for get_tree_changes modify."""
-
         # Make a dummy file, stage, commit, modify
         filename = "foo"
         fullpath = os.path.join(self.repo.path, filename)
@@ -2614,7 +2610,6 @@ class StatusTests(PorcelainTestCase):
 
     def test_get_tree_changes_delete(self):
         """Unit test for get_tree_changes delete."""
-
         # Make a dummy file, stage, commit, remove
         filename = "foo"
         fullpath = os.path.join(self.repo.path, filename)

+ 14 - 5
dulwich/tests/test_protocol.py

@@ -26,10 +26,19 @@ from io import BytesIO
 from dulwich.tests import TestCase
 
 from ..errors import HangupException
-from ..protocol import (MULTI_ACK, MULTI_ACK_DETAILED, SINGLE_ACK,
-                        BufferedPktLineWriter, GitProtocolError, PktLineParser,
-                        Protocol, ReceivableProtocol, ack_type,
-                        extract_capabilities, extract_want_line_capabilities)
+from ..protocol import (
+    MULTI_ACK,
+    MULTI_ACK_DETAILED,
+    SINGLE_ACK,
+    BufferedPktLineWriter,
+    GitProtocolError,
+    PktLineParser,
+    Protocol,
+    ReceivableProtocol,
+    ack_type,
+    extract_capabilities,
+    extract_want_line_capabilities,
+)
 
 
 class BaseProtocolTests:
@@ -109,7 +118,7 @@ class ProtocolTests(BaseProtocolTests, TestCase):
 class ReceivableBytesIO(BytesIO):
     """BytesIO with socket-like recv semantics for testing."""
 
-    def __init__(self):
+    def __init__(self) -> None:
         BytesIO.__init__(self)
         self.allow_read_past_eof = False
 

+ 6 - 2
dulwich/tests/test_reflog.py

@@ -25,8 +25,12 @@ from io import BytesIO
 from dulwich.tests import TestCase
 
 from ..objects import ZERO_SHA
-from ..reflog import (drop_reflog_entry, format_reflog_line, parse_reflog_line,
-                      read_reflog)
+from ..reflog import (
+    drop_reflog_entry,
+    format_reflog_line,
+    parse_reflog_line,
+    read_reflog,
+)
 
 
 class ReflogLineTests(TestCase):

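A hedged sketch of the reflog helpers imported above; the committer string, timestamp and message are made-up values used purely for illustration:

    from dulwich.reflog import format_reflog_line, parse_reflog_line

    line = format_reflog_line(
        b"0" * 40,                          # old SHA (all zeros for a newly created ref)
        b"a" * 40,                          # new SHA
        b"Jane Doe <jane@example.com>",     # committer identity
        1446552482, 0,                      # timestamp and timezone offset
        b"commit: initial",
    )
    entry = parse_reflog_line(line)         # parses the line back into its fields
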
+ 12 - 4
dulwich/tests/test_refs.py

@@ -30,10 +30,18 @@ from dulwich.tests import SkipTest, TestCase
 
 from ..file import GitFile
 from ..objects import ZERO_SHA
-from ..refs import (DictRefsContainer, InfoRefsContainer, SymrefLoop,
-                    _split_ref_line, check_ref_format, parse_symref_value,
-                    read_packed_refs, read_packed_refs_with_peeled,
-                    strip_peeled_refs, write_packed_refs)
+from ..refs import (
+    DictRefsContainer,
+    InfoRefsContainer,
+    SymrefLoop,
+    _split_ref_line,
+    check_ref_format,
+    parse_symref_value,
+    read_packed_refs,
+    read_packed_refs_with_peeled,
+    strip_peeled_refs,
+    write_packed_refs,
+)
 from ..repo import Repo
 from .utils import open_repo, tear_down_repo
 

+ 9 - 6
dulwich/tests/test_repository.py

@@ -35,9 +35,14 @@ from dulwich.tests import TestCase, skipIf
 from ..config import Config
 from ..errors import NotGitRepository
 from ..object_store import tree_lookup_path
-from ..repo import (InvalidUserIdentity, MemoryRepo, Repo,
-                    UnsupportedExtension, UnsupportedVersion,
-                    check_user_identity)
+from ..repo import (
+    InvalidUserIdentity,
+    MemoryRepo,
+    Repo,
+    UnsupportedExtension,
+    UnsupportedVersion,
+    check_user_identity,
+)
 from .utils import open_repo, setup_warning_catcher, tear_down_repo
 
 missing_sha = b"b91fa4d900e17e99b433218e988c4eb4a3e9a097"
@@ -565,13 +570,11 @@ class RepositoryRootTests(TestCase):
         self.assertIsInstance(r.get_config_stack(), Config)
 
     def test_common_revisions(self):
-        """
-        This test demonstrates that ``find_common_revisions()`` actually
+        """This test demonstrates that ``find_common_revisions()`` actually
         returns common heads, not revisions; dulwich already uses
         ``find_common_revisions()`` in such a manner (see
         ``Repo.find_objects()``).
         """
-
         expected_shas = {b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"}
 
         # Source for objects.

+ 25 - 11
dulwich/tests/test_server.py

@@ -28,18 +28,32 @@ from io import BytesIO
 
 from dulwich.tests import TestCase
 
-from ..errors import (GitProtocolError, HangupException, NotGitRepository,
-                      UnexpectedCommandError)
+from ..errors import (
+    GitProtocolError,
+    HangupException,
+    NotGitRepository,
+    UnexpectedCommandError,
+)
 from ..object_store import MemoryObjectStore
 from ..objects import Tree
 from ..protocol import ZERO_SHA, format_capability_line
 from ..repo import MemoryRepo, Repo
-from ..server import (Backend, DictBackend, FileSystemBackend,
-                      MultiAckDetailedGraphWalkerImpl, MultiAckGraphWalkerImpl,
-                      PackHandler, ReceivePackHandler,
-                      SingleAckGraphWalkerImpl, UploadPackHandler,
-                      _find_shallow, _ProtocolGraphWalker, _split_proto_line,
-                      serve_command, update_server_info)
+from ..server import (
+    Backend,
+    DictBackend,
+    FileSystemBackend,
+    MultiAckDetailedGraphWalkerImpl,
+    MultiAckGraphWalkerImpl,
+    PackHandler,
+    ReceivePackHandler,
+    SingleAckGraphWalkerImpl,
+    UploadPackHandler,
+    _find_shallow,
+    _ProtocolGraphWalker,
+    _split_proto_line,
+    serve_command,
+    update_server_info,
+)
 from .utils import make_commit, make_tag
 
 ONE = b"1" * 40
@@ -51,7 +65,7 @@ SIX = b"6" * 40
 
 
 class TestProto:
-    def __init__(self):
+    def __init__(self) -> None:
         self._output = []
         self._received = {0: [], 1: [], 2: [], 3: []}
 
@@ -81,7 +95,7 @@ class TestProto:
 
 
 class TestGenericPackHandler(PackHandler):
-    def __init__(self):
+    def __init__(self) -> None:
         PackHandler.__init__(self, Backend(), None)
 
     @classmethod
@@ -573,7 +587,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
 
 
 class TestProtocolGraphWalker:
-    def __init__(self):
+    def __init__(self) -> None:
         self.acks = []
         self.lines = []
         self.wants_satisified = False

+ 3 - 4
dulwich/tests/test_walk.py

@@ -25,8 +25,7 @@ from unittest import expectedFailure
 
 from dulwich.tests import TestCase
 
-from ..diff_tree import (CHANGE_MODIFY, CHANGE_RENAME, RenameDetector,
-                         TreeChange)
+from ..diff_tree import CHANGE_MODIFY, CHANGE_RENAME, RenameDetector, TreeChange
 from ..errors import MissingCommitError
 from ..object_store import MemoryObjectStore
 from ..objects import Blob, Commit
@@ -35,11 +34,11 @@ from .utils import F, build_commit_graph, make_object, make_tag
 
 
 class TestWalkEntry:
-    def __init__(self, commit, changes):
+    def __init__(self, commit, changes) -> None:
         self.commit = commit
         self.changes = changes
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<TestWalkEntry commit={}, changes={!r}>".format(
             self.commit.id,
             self.changes,

+ 27 - 16
dulwich/tests/test_web.py

@@ -32,18 +32,31 @@ from ..object_store import MemoryObjectStore
 from ..objects import Blob
 from ..repo import BaseRepo, MemoryRepo
 from ..server import DictBackend
-from ..web import (HTTP_ERROR, HTTP_FORBIDDEN, HTTP_NOT_FOUND, HTTP_OK,
-                   GunzipFilter, HTTPGitApplication, HTTPGitRequest,
-                   _LengthLimitedFile, get_idx_file, get_info_packs,
-                   get_info_refs, get_loose_object, get_pack_file,
-                   get_text_file, handle_service_request, send_file)
+from ..web import (
+    HTTP_ERROR,
+    HTTP_FORBIDDEN,
+    HTTP_NOT_FOUND,
+    HTTP_OK,
+    GunzipFilter,
+    HTTPGitApplication,
+    HTTPGitRequest,
+    _LengthLimitedFile,
+    get_idx_file,
+    get_info_packs,
+    get_info_refs,
+    get_loose_object,
+    get_pack_file,
+    get_text_file,
+    handle_service_request,
+    send_file,
+)
 from .utils import make_object, make_tag
 
 
 class MinimalistWSGIInputStream:
     """WSGI input stream with no 'seek()' and 'tell()' methods."""
 
-    def __init__(self, data):
+    def __init__(self, data) -> None:
         self.data = data
         self.pos = 0
 
@@ -69,7 +82,7 @@ class MinimalistWSGIInputStream2(MinimalistWSGIInputStream):
 class TestHTTPGitRequest(HTTPGitRequest):
     """HTTPGitRequest with overridden methods to help test caching."""
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         HTTPGitRequest.__init__(self, *args, **kwargs)
         self.cached = None
 
@@ -142,7 +155,7 @@ class DumbHandlersTestCase(WebTestCase):
 
     def test_send_file_error(self):
         class TestFile:
-            def __init__(self, exc_class):
+            def __init__(self, exc_class) -> None:
                 self.closed = False
                 self._exc_class = exc_class
 
@@ -270,11 +283,11 @@ class DumbHandlersTestCase(WebTestCase):
 
     def test_get_info_packs(self):
         class TestPackData:
-            def __init__(self, sha):
+            def __init__(self, sha) -> None:
                 self.filename = "pack-%s.pack" % sha
 
         class TestPack:
-            def __init__(self, sha):
+            def __init__(self, sha) -> None:
                 self.data = TestPackData(sha)
 
         packs = [TestPack(str(i) * 40) for i in range(1, 4)]
@@ -308,7 +321,7 @@ class SmartHandlersTestCase(WebTestCase):
             proto,
             stateless_rpc=None,
             advertise_refs=False,
-        ):
+        ) -> None:
             self.args = args
             self.proto = proto
             self.stateless_rpc = stateless_rpc
@@ -536,10 +549,9 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
         self._test_call(self.example_text, *self._get_zstream(self.example_text))
 
     def test_call_no_seek(self):
-        """
-        This ensures that the gunzipping code doesn't require any methods on
+        """This ensures that the gunzipping code doesn't require any methods on
         'wsgi.input' except for '.read()'.  (In particular, it shouldn't
-        require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.)
+        require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.).
         """
         zstream, zlength = self._get_zstream(self.example_text)
         self._test_call(
@@ -549,8 +561,7 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
         )
 
     def test_call_no_working_seek(self):
-        """
-        Similar to 'test_call_no_seek', but this time the methods are available
+        """Similar to 'test_call_no_seek', but this time the methods are available
         (but defunct).  See https://github.com/jonashaag/klaus/issues/154.
         """
         zstream, zlength = self._get_zstream(self.example_text)

+ 11 - 4
dulwich/tests/utils.py

@@ -33,9 +33,16 @@ from dulwich.tests import SkipTest, skipIf  # noqa: F401
 
 from ..index import commit_tree
 from ..objects import Commit, FixedSha, Tag, object_class
-from ..pack import (DELTA_TYPES, OFS_DELTA, REF_DELTA, SHA1Writer,
-                    create_delta, obj_sha, write_pack_header,
-                    write_pack_object)
+from ..pack import (
+    DELTA_TYPES,
+    OFS_DELTA,
+    REF_DELTA,
+    SHA1Writer,
+    create_delta,
+    obj_sha,
+    write_pack_header,
+    write_pack_object,
+)
 from ..repo import Repo
 
 # Plain files are very frequently used in tests, so let the mode be very short.
@@ -295,6 +302,7 @@ def build_commit_graph(object_store, commit_spec, trees=None, attrs=None):
       attrs: A dict of commit number -> (dict of attribute -> value) for
         assigning additional values to the commits.
     Returns: The list of commit objects created.
+
    Raises:
      ValueError: If an undefined commit identifier is listed as a parent.
    """
@@ -345,7 +353,6 @@ def build_commit_graph(object_store, commit_spec, trees=None, attrs=None):
 
 def setup_warning_catcher():
     """Wrap warnings.showwarning with code that records warnings."""
-
     caught_warnings = []
     original_showwarning = warnings.showwarning
 

+ 10 - 6
dulwich/walk.py

@@ -26,8 +26,12 @@ import heapq
 from itertools import chain
 from typing import Deque, List, Optional, Set, Tuple
 
-from .diff_tree import (RENAME_CHANGE_TYPES, RenameDetector, tree_changes,
-                        tree_changes_for_merge)
+from .diff_tree import (
+    RENAME_CHANGE_TYPES,
+    RenameDetector,
+    tree_changes,
+    tree_changes_for_merge,
+)
 from .errors import MissingCommitError
 from .objects import Commit, ObjectID, Tag
 
@@ -43,7 +47,7 @@ _MAX_EXTRA_COMMITS = 5
 class WalkEntry:
     """Object encapsulating a single result from a walk."""
 
-    def __init__(self, walker, commit):
+    def __init__(self, walker, commit) -> None:
         self.commit = commit
         self._store = walker.store
         self._get_parents = walker.get_parents
@@ -111,7 +115,7 @@ class WalkEntry:
             self._changes[path_prefix] = cached
         return self._changes[path_prefix]
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<WalkEntry commit={}, changes={!r}>".format(
             self.commit.id,
             self.changes(),
@@ -121,7 +125,7 @@ class WalkEntry:
 class _CommitTimeQueue:
     """Priority queue of WalkEntry objects by commit time."""
 
-    def __init__(self, walker: "Walker"):
+    def __init__(self, walker: "Walker") -> None:
         self._walker = walker
         self._store = walker.store
         self._get_parents = walker.get_parents
@@ -244,7 +248,7 @@ class Walker:
         until: Optional[int] = None,
         get_parents=lambda commit: commit.parents,
         queue_cls=_CommitTimeQueue,
-    ):
+    ) -> None:
         """Constructor.
 
         Args:

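To place the annotations above in context, a minimal sketch of driving the Walker (assuming a repository with at least one commit on HEAD):

    from dulwich.repo import Repo
    from dulwich.walk import Walker

    repo = Repo(".")
    for entry in Walker(repo.object_store, include=[repo.head()]):
        # Each result is a WalkEntry; changes() lazily diffs against the parents.
        print(entry.commit.id, len(entry.changes()))
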
+ 20 - 13
dulwich/web.py

@@ -28,15 +28,23 @@ import time
 from io import BytesIO
 from io import BytesIO
 from typing import List, Optional, Tuple
 from typing import List, Optional, Tuple
 from urllib.parse import parse_qs
 from urllib.parse import parse_qs
-from wsgiref.simple_server import (ServerHandler, WSGIRequestHandler,
-                                   WSGIServer, make_server)
+from wsgiref.simple_server import (
+    ServerHandler,
+    WSGIRequestHandler,
+    WSGIServer,
+    make_server,
+)
 
 
 from dulwich import log_utils
 from dulwich import log_utils
 
 
 from .protocol import ReceivableProtocol
 from .protocol import ReceivableProtocol
 from .repo import BaseRepo, NotGitRepository, Repo
 from .repo import BaseRepo, NotGitRepository, Repo
-from .server import (DEFAULT_HANDLERS, DictBackend, generate_info_refs,
-                     generate_objects_info_packs)
+from .server import (
+    DEFAULT_HANDLERS,
+    DictBackend,
+    generate_info_refs,
+    generate_objects_info_packs,
+)

 logger = log_utils.getLogger(__name__)

@@ -248,7 +256,7 @@ def _chunk_iter(f):

 class ChunkReader:

-    def __init__(self, f):
+    def __init__(self, f) -> None:
         self._iter = _chunk_iter(f)
         self._buffer = []

@@ -272,7 +280,7 @@ class _LengthLimitedFile:
     but not implemented in wsgiref as of 2.5.
     """

-    def __init__(self, input, max_bytes):
+    def __init__(self, input, max_bytes) -> None:
         self._input = input
         self._bytes_avail = max_bytes

@@ -319,7 +327,7 @@ class HTTPGitRequest:
       environ: the WSGI environment for the request.
     """

-    def __init__(self, environ, start_response, dumb: bool = False, handlers=None):
+    def __init__(self, environ, start_response, dumb: bool = False, handlers=None) -> None:
         self.environ = environ
         self.dumb = dumb
         self.handlers = handlers
@@ -405,7 +413,7 @@ class HTTPGitApplication:
         ("POST", re.compile("/git-receive-pack$")): handle_service_request,
     }

-    def __init__(self, backend, dumb: bool = False, handlers=None, fallback_app=None):
+    def __init__(self, backend, dumb: bool = False, handlers=None, fallback_app=None) -> None:
         self.backend = backend
         self.dumb = dumb
         self.handlers = dict(DEFAULT_HANDLERS)
@@ -443,7 +451,7 @@ class GunzipFilter:
     passing on to the underlying application.
     """

-    def __init__(self, application):
+    def __init__(self, application) -> None:
         self.app = application

     def __call__(self, environ, start_response):
@@ -464,7 +472,7 @@ class LimitedInputFilter:
     specified in Content-Length.
     """

-    def __init__(self, application):
+    def __init__(self, application) -> None:
         self.app = application

     def __call__(self, environ, start_response):
@@ -521,8 +529,7 @@ class WSGIRequestHandlerLogger(WSGIRequestHandler):
         logger.error(*args)

     def handle(self):
-        """Handle a single HTTP request"""
-
+        """Handle a single HTTP request."""
         self.raw_requestline = self.rfile.readline()
         if not self.parse_request():  # An error code has been sent, just exit
             return
@@ -536,7 +543,7 @@ class WSGIRequestHandlerLogger(WSGIRequestHandler):

 class WSGIServerLogger(WSGIServer):
     def handle_error(self, request, client_address):
-        """Handle an error. """
+        """Handle an error."""
         logger.exception(
             "Exception happened during processing of request from %s"
             % str(client_address)
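
The web.py changes are likewise annotation and docstring fixes only. For context, a minimal sketch of how the classes touched here fit together as a WSGI stack (host, port and repository path are assumptions; dulwich.web also ships a make_wsgi_chain() helper that performs similar filter wrapping):

    # Serve a repository over smart HTTP using the classes modified above.
    from wsgiref.simple_server import make_server

    from dulwich.repo import Repo
    from dulwich.server import DictBackend
    from dulwich.web import (
        GunzipFilter,
        HTTPGitApplication,
        LimitedInputFilter,
        WSGIRequestHandlerLogger,
        WSGIServerLogger,
    )

    backend = DictBackend({"/": Repo(".")})
    # Cap request bodies at Content-Length and transparently gunzip them.
    app = LimitedInputFilter(GunzipFilter(HTTPGitApplication(backend)))
    server = make_server(
        "localhost", 8000, app,
        server_class=WSGIServerLogger,
        handler_class=WSGIRequestHandlerLogger,
    )
    server.serve_forever()
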

+ 15 - 0
pyproject.toml

@@ -66,3 +66,18 @@ license-files = ["COPYING"]

 [tool.setuptools.dynamic]
 version = {attr = "dulwich.__version__"}
+
+[tool.ruff]
+select = [
+    "ANN",
+    "D",
+    "E",
+    "F",
+    "I"
+]
+ignore = [
+    "ANN101",  # missing-type-self
+]
+
+[tool.ruff.pydocstyle]
+convention = "google"
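
The new [tool.ruff] table is what drives the mechanical changes elsewhere in this commit: ANN asks for parameter and return annotations (hence the many "-> None" additions), D with the google convention enforces docstring punctuation and sections, E/F are the usual pycodestyle/pyflakes checks, and I sorts the import blocks; locally these can be verified with ruff's lint command (e.g. "ruff check dulwich/"). A small, hypothetical function illustrating what the selected rule families expect:

    # Hypothetical helper (not part of dulwich) satisfying ANN, D (google) and I:
    # annotated parameters and return value, period-terminated summary line,
    # Args/Returns sections, and sorted imports.
    from typing import Optional


    def first_line(message: str, limit: Optional[int] = None) -> str:
        """Return the first line of a commit message.

        Args:
          message: Full commit message text.
          limit: Optional maximum number of characters to keep.

        Returns:
          The (possibly truncated) first line.
        """
        first = message.splitlines()[0] if message else ""
        return first if limit is None else first[:limit]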