Add more types, courtesy of ruff

Jelmer Vernooij 4 months ago
commit 8bb9c9e84d
88 changed files with 1987 additions and 1918 deletions
  1. Makefile (+1 -1)
  2. dulwich/bundle.py (+1 -1)
  3. dulwich/cli.py (+82 -74)
  4. dulwich/client.py (+24 -24)
  5. dulwich/cloud/gcs.py (+2 -2)
  6. dulwich/contrib/diffstat.py (+1 -1)
  7. dulwich/contrib/paramiko_vendor.py (+1 -1)
  8. dulwich/contrib/swift.py (+21 -20)
  9. dulwich/diff_tree.py (+15 -10)
  10. dulwich/fastexport.py (+8 -8)
  11. dulwich/file.py (+4 -4)
  12. dulwich/graph.py (+9 -7)
  13. dulwich/greenthreads.py (+2 -2)
  14. dulwich/hooks.py (+2 -1)
  15. dulwich/index.py (+9 -9)
  16. dulwich/log_utils.py (+3 -3)
  17. dulwich/lru_cache.py (+2 -2)
  18. dulwich/mailmap.py (+2 -2)
  19. dulwich/object_store.py (+35 -35)
  20. dulwich/objects.py (+31 -29)
  21. dulwich/pack.py (+14 -14)
  22. dulwich/patch.py (+5 -5)
  23. dulwich/porcelain.py (+51 -51)
  24. dulwich/protocol.py (+11 -10)
  25. dulwich/reflog.py (+1 -1)
  26. dulwich/refs.py (+30 -30)
  27. dulwich/repo.py (+22 -22)
  28. dulwich/server.py (+50 -42)
  29. dulwich/stash.py (+1 -1)
  30. dulwich/tests/test_object_store.py (+53 -25)
  31. dulwich/tests/utils.py (+5 -5)
  32. dulwich/walk.py (+6 -5)
  33. dulwich/web.py (+12 -12)
  34. fuzzing/fuzz-targets/fuzz_bundle.py (+3 -2)
  35. fuzzing/fuzz-targets/fuzz_configfile.py (+3 -2)
  36. fuzzing/fuzz-targets/fuzz_object_store.py (+3 -2)
  37. fuzzing/fuzz-targets/fuzz_repo.py (+3 -2)
  38. fuzzing/fuzz-targets/test_utils.py (+2 -2)
  39. tests/__init__.py (+6 -6)
  40. tests/compat/server_utils.py (+17 -17)
  41. tests/compat/test_client.py (+43 -42)
  42. tests/compat/test_pack.py (+6 -5)
  43. tests/compat/test_patch.py (+2 -2)
  44. tests/compat/test_porcelain.py (+2 -2)
  45. tests/compat/test_repository.py (+17 -17)
  46. tests/compat/test_server.py (+4 -4)
  47. tests/compat/test_utils.py (+9 -9)
  48. tests/compat/test_web.py (+15 -14)
  49. tests/compat/utils.py (+8 -8)
  50. tests/contrib/test_paramiko_vendor.py (+9 -8)
  51. tests/contrib/test_release_robot.py (+4 -4)
  52. tests/contrib/test_swift.py (+29 -29)
  53. tests/contrib/test_swift_smoke.py (+15 -15)
  54. tests/test_archive.py (+6 -6)
  55. tests/test_blackbox.py (+5 -5)
  56. tests/test_bundle.py (+1 -1)
  57. tests/test_client.py (+121 -120)
  58. tests/test_config.py (+63 -63)
  59. tests/test_credentials.py (+3 -3)
  60. tests/test_diff_tree.py (+68 -68)
  61. tests/test_fastexport.py (+15 -15)
  62. tests/test_file.py (+16 -16)
  63. tests/test_grafts.py (+20 -20)
  64. tests/test_graph.py (+12 -12)
  65. tests/test_greenthreads.py (+2 -2)
  66. tests/test_hooks.py (+4 -4)
  67. tests/test_ignore.py (+15 -15)
  68. tests/test_index.py (+47 -47)
  69. tests/test_lfs.py (+3 -3)
  70. tests/test_line_ending.py (+18 -18)
  71. tests/test_lru_cache.py (+34 -34)
  72. tests/test_mailmap.py (+2 -2)
  73. tests/test_missing_obj_finder.py (+21 -21)
  74. tests/test_object_store.py (+39 -39)
  75. tests/test_objects.py (+95 -95)
  76. tests/test_objectspec.py (+32 -32)
  77. tests/test_pack.py (+109 -108)
  78. tests/test_patch.py (+22 -21)
  79. tests/test_porcelain.py (+117 -117)
  80. tests/test_protocol.py (+39 -39)
  81. tests/test_reflog.py (+6 -6)
  82. tests/test_refs.py (+58 -58)
  83. tests/test_repository.py (+100 -100)
  84. tests/test_server.py (+79 -79)
  85. tests/test_stash.py (+1 -1)
  86. tests/test_utils.py (+7 -7)
  87. tests/test_walk.py (+38 -38)
  88. tests/test_web.py (+53 -52)

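The pattern across the hunks below is largely mechanical: ruff's annotation checks (presumably the flake8-annotations / ANN rules) flag def statements without a return type, so procedures gain "-> None" and value-returning helpers gain concrete types such as "-> bool" or "-> int". A minimal, self-contained sketch of the idea, with hypothetical names rather than anything from dulwich:

    handlers: list[str] = []

    def remove_handler(handler: str) -> None:
        # Mutates module state and returns nothing, hence -> None.
        handlers.remove(handler)

    def has_handler(handler: str) -> bool:
        # Predicate returning a truth value, hence -> bool.
        return handler in handlers

With the return types declared, mypy (run via the typing target in the Makefile below) can check the call sites that consume these results.
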
+ 1 - 1
Makefile

@@ -45,7 +45,7 @@ check-contrib:: clean
 check-all: check check-pypy check-noextensions
 
 typing:
-	mypy dulwich
+	$(PYTHON) -m mypy dulwich
 
 clean::
 	$(SETUP) clean --all

+ 1 - 1
dulwich/bundle.py

@@ -103,7 +103,7 @@ def read_bundle(f):
     raise AssertionError(f"unsupported bundle format header: {firstline!r}")
 
 
-def write_bundle(f, bundle):
+def write_bundle(f, bundle) -> None:
     version = bundle.version
     if version is None:
         if bundle.capabilities:

+ 82 - 74
dulwich/cli.py

@@ -33,7 +33,7 @@ import os
 import signal
 import sys
 from getopt import getopt
-from typing import ClassVar, Optional
+from typing import ClassVar, Optional, TYPE_CHECKING
 
 from dulwich import porcelain
 
@@ -44,12 +44,16 @@ from .objectspec import parse_commit
 from .pack import Pack, sha_to_hex
 from .repo import Repo
 
+if TYPE_CHECKING:
+    from .objects import ObjectID
+    from .refs import Ref
 
-def signal_int(signal, frame):
+
+def signal_int(signal, frame) -> None:
     sys.exit(1)
 
 
-def signal_quit(signal, frame):
+def signal_quit(signal, frame) -> None:
     import pdb
 
     pdb.set_trace()
@@ -58,13 +62,13 @@ def signal_quit(signal, frame):
 class Command:
     """A Dulwich subcommand."""
 
-    def run(self, args):
+    def run(self, args) -> Optional[int]:
         """Run the command."""
         raise NotImplementedError(self.run)
 
 
 class cmd_archive(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument(
             "--remote",
@@ -88,7 +92,7 @@ class cmd_archive(Command):
 
 
 class cmd_add(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         args = parser.parse_args(argv)
 
@@ -96,7 +100,7 @@ class cmd_add(Command):
 
 
 class cmd_rm(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         args = parser.parse_args(argv)
 
@@ -104,10 +108,11 @@ class cmd_rm(Command):
 
 
 class cmd_fetch_pack(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("--all", action="store_true")
         parser.add_argument("location", nargs="?", type=str)
+        parser.add_argument("refs", nargs="*", type=str)
         args = parser.parse_args(argv)
         client, path = get_transport_and_path(args.location)
         r = Repo(".")
@@ -115,26 +120,28 @@ class cmd_fetch_pack(Command):
             determine_wants = r.object_store.determine_wants_all
         else:
 
-            def determine_wants(x, **kwargs):
-                return [y for y in args if y not in r.object_store]
+            def determine_wants(refs: dict[Ref, ObjectID], depth: Optional[int] = None) -> list[ObjectID]:
+                return [y.encode('utf-8') for y in args.refs if y not in r.object_store]
 
         client.fetch(path, r, determine_wants)
 
 
 class cmd_fetch(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
-        opts = dict(opts)
+        kwopts = dict(opts)
         client, path = get_transport_and_path(args.pop(0))
         r = Repo(".")
-        refs = client.fetch(path, r, progress=sys.stdout.write)
+        def progress(msg: bytes) -> None:
+            sys.stdout.buffer.write(msg)
+        refs = client.fetch(path, r, progress=progress)
         print("Remote refs:")
         for item in refs.items():
             print("{} -> {}".format(*item))
 
 
 class cmd_for_each_ref(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("pattern", type=str, nargs="?")
         args = parser.parse_args(args)
@@ -143,15 +150,15 @@ class cmd_for_each_ref(Command):
 
 
 class cmd_fsck(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
-        opts = dict(opts)
+        kwopts = dict(opts)
         for obj, msg in porcelain.fsck("."):
             print(f"{obj}: {msg}")
 
 
 class cmd_log(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         parser.add_option(
             "--reverse",
@@ -177,7 +184,7 @@ class cmd_log(Command):
 
 
 class cmd_diff(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
 
         r = Repo(".")
@@ -193,7 +200,7 @@ class cmd_diff(Command):
 
 
 class cmd_dump_pack(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
 
         if args == []:
@@ -204,8 +211,7 @@ class cmd_dump_pack(Command):
         x = Pack(basename)
         print(f"Object names checksum: {x.name()}")
         print(f"Checksum: {sha_to_hex(x.get_stored_checksum())}")
-        if not x.check():
-            print("CHECKSUM DOES NOT MATCH")
+        x.check()
         print("Length: %d" % len(x))
         for name in x:
             try:
@@ -217,7 +223,7 @@ class cmd_dump_pack(Command):
 
 
 class cmd_dump_index(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
 
         if args == []:
@@ -232,20 +238,20 @@ class cmd_dump_index(Command):
 
 
 class cmd_init(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", ["bare"])
-        opts = dict(opts)
+        kwopts = dict(opts)
 
         if args == []:
             path = os.getcwd()
         else:
             path = args[0]
 
-        porcelain.init(path, bare=("--bare" in opts))
+        porcelain.init(path, bare=("--bare" in kwopts))
 
 
 class cmd_clone(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         parser.add_option(
             "--bare",
@@ -307,29 +313,29 @@ class cmd_clone(Command):
 
 
 class cmd_commit(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", ["message"])
-        opts = dict(opts)
-        porcelain.commit(".", message=opts["--message"])
+        kwopts = dict(opts)
+        porcelain.commit(".", message=kwopts["--message"])
 
 
 class cmd_commit_tree(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", ["message"])
         if args == []:
             print("usage: dulwich commit-tree tree")
             sys.exit(1)
-        opts = dict(opts)
-        porcelain.commit_tree(".", tree=args[0], message=opts["--message"])
+        kwopts = dict(opts)
+        porcelain.commit_tree(".", tree=args[0], message=kwopts["--message"])
 
 
 class cmd_update_server_info(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         porcelain.update_server_info(".")
 
 
 class cmd_symbolic_ref(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", ["ref-name", "force"])
         if not args:
             print("Usage: dulwich symbolic-ref REF_NAME [--force]")
@@ -340,7 +346,7 @@ class cmd_symbolic_ref(Command):
 
 
 class cmd_pack_refs(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("--all", action="store_true")
         # ignored, we never prune
@@ -352,7 +358,7 @@ class cmd_pack_refs(Command):
 
 
 class cmd_show(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("objectish", type=str, nargs="*")
         args = parser.parse_args(argv)
@@ -360,7 +366,7 @@ class cmd_show(Command):
 
 
 class cmd_diff_tree(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
         if len(args) < 2:
             print("Usage: dulwich diff-tree OLD-TREE NEW-TREE")
@@ -369,7 +375,7 @@ class cmd_diff_tree(Command):
 
 
 class cmd_rev_list(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
         if len(args) < 1:
             print("Usage: dulwich rev-list COMMITID...")
@@ -378,7 +384,7 @@ class cmd_rev_list(Command):
 
 
 class cmd_tag(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         parser.add_option(
             "-a",
@@ -396,28 +402,28 @@ class cmd_tag(Command):
 
 
 class cmd_repack(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
-        opts = dict(opts)
+        kwopts = dict(opts)
         porcelain.repack(".")
 
 
 class cmd_reset(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", ["hard", "soft", "mixed"])
-        opts = dict(opts)
+        kwopts = dict(opts)
         mode = ""
-        if "--hard" in opts:
+        if "--hard" in kwopts:
             mode = "hard"
-        elif "--soft" in opts:
+        elif "--soft" in kwopts:
             mode = "soft"
-        elif "--mixed" in opts:
+        elif "--mixed" in kwopts:
             mode = "mixed"
-        porcelain.reset(".", mode=mode, *args)
+        porcelain.reset(".", mode=mode)
 
 
 class cmd_daemon(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         from dulwich import log_utils
 
         from .protocol import TCP_GIT_PORT
@@ -450,7 +456,7 @@ class cmd_daemon(Command):
 
 
 class cmd_web_daemon(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         from dulwich import log_utils
 
         parser = optparse.OptionParser()
@@ -481,14 +487,14 @@ class cmd_web_daemon(Command):
 
 
 class cmd_write_tree(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         sys.stdout.write("{}\n".format(porcelain.write_tree(".")))
 
 
 class cmd_receive_pack(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         if len(args) >= 1:
@@ -499,7 +505,7 @@ class cmd_receive_pack(Command):
 
 
 class cmd_upload_pack(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         if len(args) >= 1:
@@ -510,7 +516,7 @@ class cmd_upload_pack(Command):
 
 
 class cmd_status(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         if len(args) >= 1:
@@ -539,7 +545,7 @@ class cmd_status(Command):
 
 
 class cmd_ls_remote(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         opts, args = getopt(args, "", [])
         if len(args) < 1:
             print("Usage: dulwich ls-remote URL")
@@ -550,7 +556,7 @@ class cmd_ls_remote(Command):
 
 
 class cmd_ls_tree(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         parser.add_option(
             "-r",
@@ -574,20 +580,20 @@ class cmd_ls_tree(Command):
 
 
 class cmd_pack_objects(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         deltify = False
         reuse_deltas = True
         opts, args = getopt(args, "", ["stdout", "deltify", "no-reuse-deltas"])
-        opts = dict(opts)
-        if len(args) < 1 and "--stdout" not in opts.keys():
+        kwopts = dict(opts)
+        if len(args) < 1 and "--stdout" not in kwopts.keys():
             print("Usage: dulwich pack-objects basename")
             sys.exit(1)
         object_ids = [line.strip() for line in sys.stdin.readlines()]
-        if "--deltify" in opts.keys():
+        if "--deltify" in kwopts.keys():
             deltify = True
-        if "--no-reuse-deltas" in opts.keys():
+        if "--no-reuse-deltas" in kwopts.keys():
             reuse_deltas = False
-        if "--stdout" in opts.keys():
+        if "--stdout" in kwopts.keys():
             packf = getattr(sys.stdout, "buffer", sys.stdout)
             idxf = None
             close = []
@@ -604,7 +610,7 @@ class cmd_pack_objects(Command):
 
 
 class cmd_pull(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = argparse.ArgumentParser()
         parser.add_argument("from_location", type=str)
         parser.add_argument("refspec", type=str, nargs="*")
@@ -621,7 +627,7 @@ class cmd_pull(Command):
 
 
 class cmd_push(Command):
-    def run(self, argv):
+    def run(self, argv) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument("-f", "--force", action="store_true", help="Force")
         parser.add_argument("to_location", type=str)
@@ -635,9 +641,11 @@ class cmd_push(Command):
             sys.stderr.write("Diverged branches; specify --force to override")
             return 1
 
+        return None
+
 
 class cmd_remote_add(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         porcelain.remote_add(".", args[0], args[1])
@@ -669,7 +677,7 @@ class cmd_remote(SuperCommand):
 
 
 class cmd_submodule_list(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(argv)
         for path, sha in porcelain.submodule_list("."):
@@ -677,7 +685,7 @@ class cmd_submodule_list(Command):
 
 
 class cmd_submodule_init(Command):
-    def run(self, argv):
+    def run(self, argv) -> None:
         parser = argparse.ArgumentParser()
         parser.parse_args(argv)
         porcelain.submodule_init(".")
@@ -703,7 +711,7 @@ class cmd_check_ignore(Command):
 
 
 class cmd_check_mailmap(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         for arg in args:
@@ -712,7 +720,7 @@ class cmd_check_mailmap(Command):
 
 
 class cmd_stash_list(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         for i, entry in porcelain.stash_list("."):
@@ -720,7 +728,7 @@ class cmd_stash_list(Command):
 
 
 class cmd_stash_push(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         porcelain.stash_push(".")
@@ -728,7 +736,7 @@ class cmd_stash_push(Command):
 
 
 class cmd_stash_pop(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         porcelain.stash_pop(".")
@@ -744,7 +752,7 @@ class cmd_stash(SuperCommand):
 
 
 class cmd_ls_files(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         for name in porcelain.ls_files("."):
@@ -752,14 +760,14 @@ class cmd_ls_files(Command):
 
 
 class cmd_describe(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         options, args = parser.parse_args(args)
         print(porcelain.describe("."))
 
 
 class cmd_help(Command):
-    def run(self, args):
+    def run(self, args) -> None:
         parser = optparse.OptionParser()
         parser.add_option(
             "-a",
@@ -850,7 +858,7 @@ def main(argv=None):
     return cmd_kls().run(argv[1:])
 
 
-def _main():
+def _main() -> None:
     if "DULWICH_PDB" in os.environ and getattr(signal, "SIGQUIT", None):
         signal.signal(signal.SIGQUIT, signal_quit)  # type: ignore
     signal.signal(signal.SIGINT, signal_int)

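In the cmd_fetch_pack hunk above, the untyped determine_wants closure (which iterated the argparse Namespace by mistake) is replaced by one with an explicit signature: it receives the remote refs as a dict of bytes ref names to bytes object IDs and returns the object IDs to request. A standalone sketch of that callback shape, using a hypothetical helper name and simplified filtering rather than the command's exact logic:

    from typing import Optional

    from dulwich.client import get_transport_and_path
    from dulwich.repo import Repo

    def fetch_named_refs(location: str, wanted: list[str]) -> None:
        # Hypothetical helper: fetch only the named refs from a remote.
        client, path = get_transport_and_path(location)
        local = Repo(".")

        def determine_wants(
            refs: dict[bytes, bytes], depth: Optional[int] = None
        ) -> list[bytes]:
            # refs maps ref names (bytes) to object IDs (bytes); return the
            # IDs we want and do not already have in the local object store.
            return [
                sha
                for name, sha in refs.items()
                if name.decode("utf-8") in wanted and sha not in local.object_store
            ]

        client.fetch(path, local, determine_wants=determine_wants)
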
+ 24 - 24
dulwich/client.py

@@ -239,7 +239,7 @@ class ReportStatusParser:
             else:
                 raise GitProtocolError(f"invalid ref status {status!r}")
 
-    def handle_packet(self, pkt):
+    def handle_packet(self, pkt) -> None:
         """Handle a packet.
 
         Raises:
@@ -258,7 +258,7 @@ class ReportStatusParser:
             self._ref_statuses.append(ref_status)
 
 
-def negotiate_protocol_version(proto):
+def negotiate_protocol_version(proto) -> int:
     pkt = proto.read_pkt_line()
     if pkt is not None and pkt.strip() == b"version 2":
         return 2
@@ -353,7 +353,7 @@ class FetchPackResult:
         self.new_shallow = new_shallow
         self.new_unshallow = new_unshallow
 
-    def _warn_deprecated(self):
+    def _warn_deprecated(self) -> None:
         import warnings
 
         warnings.warn(
@@ -430,7 +430,7 @@ class SendPackResult:
         self.agent = agent
         self.ref_status = ref_status
 
-    def _warn_deprecated(self):
+    def _warn_deprecated(self) -> None:
         import warnings
 
         warnings.warn(
@@ -663,7 +663,7 @@ def _handle_upload_pack_tail(
     progress: Optional[Callable[[bytes], None]] = None,
     rbufsize=_RBUFSIZE,
     protocol_version=0,
-):
+) -> None:
     """Handle the tail of a 'git-upload-pack' request.
 
     Args:
@@ -696,7 +696,7 @@ def _handle_upload_pack_tail(
         if progress is None:
             # Just ignore progress data
 
-            def progress(x):
+            def progress(x) -> None:
                 pass
 
         for chan, data in _read_side_band64k_data(proto.read_pkt_seq()):
@@ -771,7 +771,7 @@ class GitClient:
             self._fetch_capabilities.add(CAPABILITY_INCLUDE_TAG)
         self.protocol_version = 0  # will be overridden later
 
-    def get_url(self, path):
+    def get_url(self, path) -> str:
         """Retrieves full url to given path.
 
         Args:
@@ -784,7 +784,7 @@ class GitClient:
         raise NotImplementedError(self.get_url)
 
     @classmethod
-    def from_parsedurl(cls, parsedurl, **kwargs):
+    def from_parsedurl(cls, parsedurl, **kwargs) -> "GitClient":
         """Create an instance of this client from a urlparse.parsed object.
 
         Args:
@@ -803,7 +803,7 @@ class GitClient:
             [set[bytes], set[bytes], bool], tuple[int, Iterator[UnpackedObject]]
         ],
         progress=None,
-    ):
+        ) -> SendPackResult:
         """Upload a pack to a remote repository.
 
         Args:
@@ -969,13 +969,13 @@ class GitClient:
                 dir=getattr(target.object_store, "path", None),
             )
 
-            def commit():
+            def commit() -> None:
                 if f.tell():
                     f.seek(0)
-                    target.object_store.add_thin_pack(f.read, None, progress=progress)
+                    target.object_store.add_thin_pack(f.read, None, progress=progress)  # type: ignore
                 f.close()
 
-            def abort():
+            def abort() -> None:
                 f.close()
 
         else:
@@ -1012,7 +1012,7 @@ class GitClient:
         ref_prefix: Optional[list[Ref]] = None,
         filter_spec=None,
         protocol_version: Optional[int] = None,
-    ):
+        ) -> FetchPackResult:
         """Retrieve a pack from a git smart server.
 
         Args:
@@ -1044,7 +1044,7 @@ class GitClient:
         path,
         protocol_version: Optional[int] = None,
         ref_prefix: Optional[list[Ref]] = None,
-    ):
+    ) -> dict[Ref, ObjectID]:
         """Retrieve the current refs from a git smart server.
 
         Args:
@@ -1085,7 +1085,7 @@ class GitClient:
         if CAPABILITY_SIDE_BAND_64K in capabilities or self.protocol_version == 2:
             if progress is None:
 
-                def progress(x):
+                def progress(x) -> None:
                     pass
 
             if CAPABILITY_REPORT_STATUS in capabilities:
@@ -1147,12 +1147,12 @@ class GitClient:
         format=None,
         subdirs=None,
         prefix=None,
-    ):
+        ) -> None:
         """Retrieve an archive of the specified tree."""
         raise NotImplementedError(self.archive)
 
     @staticmethod
-    def _warn_filter_objects():
+    def _warn_filter_objects() -> None:
         import warnings
 
         warnings.warn(
@@ -1161,7 +1161,7 @@ class GitClient:
         )
 
 
-def check_wants(wants, refs):
+def check_wants(wants, refs) -> None:
     """Check that a set of wants is valid.
 
     Args:
@@ -1516,7 +1516,7 @@ class TraditionalGitClient(GitClient):
         format=None,
         subdirs=None,
         prefix=None,
-    ):
+    ) -> None:
         proto, can_read, stderr = self._connect(b"upload-archive", path)
         with proto:
             if format is not None:
@@ -1607,7 +1607,7 @@ class TCPGitClient(TraditionalGitClient):
         # 0 means unbuffered
         wfile = s.makefile("wb", 0)
 
-        def close():
+        def close() -> None:
             rfile.close()
             wfile.close()
             s.close()
@@ -1664,7 +1664,7 @@ class SubprocessWrapper:
         else:
             return _fileno_can_read(self.proc.stdout.fileno())
 
-    def close(self):
+    def close(self) -> None:
         self.proc.stdin.close()
         self.proc.stdout.close()
         if self.proc.stderr:
@@ -1784,7 +1784,7 @@ class LocalGitClient(GitClient):
         """
         if not progress:
 
-            def progress(x):
+            def progress(x) -> None:
                 pass
 
         with self._open_repo(path) as target:
@@ -1948,7 +1948,7 @@ class SSHVendor:
         key_filename=None,
         ssh_command=None,
         protocol_version: Optional[int] = None,
-    ):
+        ):
         """Connect to an SSH server.
 
         Run a command remotely and return a file-like object for interaction
@@ -2313,7 +2313,7 @@ def default_urllib3_manager(
     return manager
 
 
-def check_for_proxy_bypass(base_url):
+def check_for_proxy_bypass(base_url) -> bool:
     # Check if a proxy bypass is defined with the no_proxy environment variable
     if base_url:  # only check if base_url is provided
         no_proxy_str = os.environ.get("no_proxy")

+ 2 - 2
dulwich/cloud/gcs.py

@@ -39,7 +39,7 @@ class GcsObjectStore(BucketBasedObjectStore):
     def __repr__(self) -> str:
         return f"{type(self).__name__}({self.bucket!r}, subpath={self.subpath!r})"
 
-    def _remove_pack(self, name):
+    def _remove_pack(self, name) -> None:
         self.bucket.delete_blobs(
             [posixpath.join(self.subpath, name) + "." + ext for ext in ["pack", "idx"]]
         )
@@ -72,7 +72,7 @@ class GcsObjectStore(BucketBasedObjectStore):
             lambda: self._load_pack_data(name), lambda: self._load_pack_index(name)
         )
 
-    def _upload_pack(self, basename, pack_file, index_file):
+    def _upload_pack(self, basename, pack_file, index_file) -> None:
         idxblob = self.bucket.blob(posixpath.join(self.subpath, basename + ".idx"))
         datablob = self.bucket.blob(posixpath.join(self.subpath, basename + ".pack"))
         idxblob.upload_from_file(index_file)

+ 1 - 1
dulwich/contrib/diffstat.py

@@ -188,7 +188,7 @@ def diffstat(lines, max_width=80):
     return output
 
 
-def main():
+def main() -> int:
     argv = sys.argv
     # allow diffstat.py to also be used from the command line
     if len(sys.argv) > 1:

+ 1 - 1
dulwich/contrib/paramiko_vendor.py

@@ -66,7 +66,7 @@ class _ParamikoWrapper:
             return data + self.read(diff_len)
         return data
 
-    def close(self):
+    def close(self) -> None:
         self.channel.close()
 
 

+ 21 - 20
dulwich/contrib/swift.py

@@ -36,6 +36,7 @@ import urllib.parse as urlparse
 import zlib
 from configparser import ConfigParser
 from io import BytesIO
+from typing import Optional
 
 from geventhttpclient import HTTPClient
 
@@ -302,7 +303,7 @@ class SwiftConnector:
         )
         return endpoint[self.endpoint_type], token
 
-    def test_root_exists(self):
+    def test_root_exists(self) -> Optional[bool]:
         """Check that Swift container exist.
 
         Returns: True if exist or None it not
@@ -316,7 +317,7 @@ class SwiftConnector:
             )
         return True
 
-    def create_root(self):
+    def create_root(self) -> None:
         """Create the Swift container.
 
         Raises:
@@ -368,7 +369,7 @@ class SwiftConnector:
             resp_headers[header.lower()] = value
         return resp_headers
 
-    def put_object(self, name, content):
+    def put_object(self, name, content) -> None:
         """Put an object.
 
         Args:
@@ -425,7 +426,7 @@ class SwiftConnector:
             return content
         return BytesIO(content)
 
-    def del_object(self, name):
+    def del_object(self, name) -> None:
         """Delete an object.
 
         Args:
@@ -440,7 +441,7 @@ class SwiftConnector:
                 f"DELETE request failed with error code {ret.status_code}"
             )
 
-    def del_root(self):
+    def del_root(self) -> None:
         """Delete the root container by removing container content.
 
         Raises:
@@ -481,7 +482,7 @@ class SwiftPackReader:
         self.buff = b""
         self.buff_length = self.scon.chunk_length
 
-    def _read(self, more=False):
+    def _read(self, more=False) -> None:
         if more:
             self.buff_length = self.buff_length * 2
         offset = self.base_offset
@@ -510,7 +511,7 @@ class SwiftPackReader:
         self.offset = end
         return data
 
-    def seek(self, offset):
+    def seek(self, offset) -> None:
         """Seek to a specified offset.
 
         Args:
@@ -568,7 +569,7 @@ class SwiftPackData(PackData):
         pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length)
         return pack_reader.read_checksum()
 
-    def close(self):
+    def close(self) -> None:
         pass
 
 
@@ -695,22 +696,22 @@ class SwiftObjectStore(PackBasedObjectStore):
             else:
                 return None
 
-        def abort():
+        def abort() -> None:
             pass
 
         return f, commit, abort
 
-    def add_object(self, obj):
+    def add_object(self, obj) -> None:
         self.add_objects(
             [
                 (obj, None),
             ]
         )
 
-    def _pack_cache_stale(self):
+    def _pack_cache_stale(self) -> bool:
         return False
 
-    def _get_loose_object(self, sha):
+    def _get_loose_object(self, sha) -> None:
         return None
 
     def add_thin_pack(self, read_all, read_some):
@@ -815,12 +816,12 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
                 return False
         return refs
 
-    def _write_refs(self, refs):
+    def _write_refs(self, refs) -> None:
         f = BytesIO()
         f.writelines(write_info_refs(refs, self.store))
         self.scon.put_object(self.filename, f)
 
-    def set_if_equals(self, name, old_ref, new_ref):
+    def set_if_equals(self, name, old_ref, new_ref, committer=None, timestamp=None, timezone=None, message=None) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref."""
         if name == "HEAD":
             return True
@@ -832,7 +833,7 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
         self._refs[name] = new_ref
         return True
 
-    def remove_if_equals(self, name, old_ref):
+    def remove_if_equals(self, name, old_ref, committer=None, timestamp=None, timezone=None, message=None) -> bool:
         """Remove a refname only if it currently equals old_ref."""
         if name == "HEAD":
             return True
@@ -879,14 +880,14 @@ class SwiftRepo(BaseRepo):
         refs = SwiftInfoRefsContainer(self.scon, object_store)
         BaseRepo.__init__(self, object_store, refs)
 
-    def _determine_file_mode(self):
+    def _determine_file_mode(self) -> bool:
         """Probe the file-system to determine whether permissions can be trusted.
 
         Returns: True if permissions can be trusted, False otherwise.
         """
         return False
 
-    def _put_named_file(self, filename, contents):
+    def _put_named_file(self, filename, contents) -> None:
         """Put an object in a Swift container.
 
         Args:
@@ -928,7 +929,7 @@ class SwiftSystemBackend(Backend):
         return SwiftRepo(path, self.conf)
 
 
-def cmd_daemon(args):
+def cmd_daemon(args) -> None:
     """Entry point for starting a TCP git server."""
     import optparse
 
@@ -980,7 +981,7 @@ def cmd_daemon(args):
     server.serve_forever()
 
 
-def cmd_init(args):
+def cmd_init(args) -> None:
     import optparse
 
     parser = optparse.OptionParser()
@@ -1001,7 +1002,7 @@ def cmd_init(args):
     SwiftRepo.init_bare(scon, conf)
 
 
-def main(argv=sys.argv):
+def main(argv=sys.argv) -> None:
     commands = {
         "init": cmd_init,
         "daemon": cmd_daemon,

+ 15 - 10
dulwich/diff_tree.py

@@ -230,7 +230,7 @@ def tree_changes(
         yield TreeChange(change_type, entry1, entry2)
 
 
-def _all_eq(seq, key, value):
+def _all_eq(seq, key, value) -> bool:
     for e in seq:
         if key(e) != value:
             return False
@@ -411,6 +411,11 @@ def _tree_change_key(entry):
 class RenameDetector:
     """Object for handling rename detection between two trees."""
 
+    _adds: list[TreeChange]
+    _deletes: list[TreeChange]
+    _changes: list[TreeChange]
+    _candidates: list[tuple[int, TreeChange]]
+
     def __init__(
         self,
         store,
@@ -443,7 +448,7 @@ class RenameDetector:
         self._find_copies_harder = find_copies_harder
         self._want_unchanged = False
 
-    def _reset(self):
+    def _reset(self) -> None:
         self._adds = []
         self._deletes = []
         self._changes = []
@@ -459,7 +464,7 @@ class RenameDetector:
         new_obj = self._store[change.new.sha]
         return _similarity_score(old_obj, new_obj) < self._rewrite_threshold
 
-    def _add_change(self, change):
+    def _add_change(self, change) -> None:
         if change.type == CHANGE_ADD:
             self._adds.append(change)
         elif change.type == CHANGE_DELETE:
@@ -478,7 +483,7 @@ class RenameDetector:
         else:
             self._changes.append(change)
 
-    def _collect_changes(self, tree1_id, tree2_id):
+    def _collect_changes(self, tree1_id, tree2_id) -> None:
         want_unchanged = self._find_copies_harder or self._want_unchanged
         for change in tree_changes(
             self._store,
@@ -489,11 +494,11 @@ class RenameDetector:
         ):
             self._add_change(change)
 
-    def _prune(self, add_paths, delete_paths):
+    def _prune(self, add_paths, delete_paths) -> None:
         self._adds = [a for a in self._adds if a.new.path not in add_paths]
         self._deletes = [d for d in self._deletes if d.old.path not in delete_paths]
 
-    def _find_exact_renames(self):
+    def _find_exact_renames(self) -> None:
         add_map = defaultdict(list)
         for add in self._adds:
             add_map[add.new.sha].append(add.new)
@@ -541,7 +546,7 @@ class RenameDetector:
             return CHANGE_COPY
         return CHANGE_RENAME
 
-    def _find_content_rename_candidates(self):
+    def _find_content_rename_candidates(self) -> None:
         candidates = self._candidates = []
         # TODO: Optimizations:
         #  - Compare object sizes before counting blocks.
@@ -570,7 +575,7 @@ class RenameDetector:
                     rename = TreeChange(new_type, delete.old, add.new)
                     candidates.append((-score, rename))
 
-    def _choose_content_renames(self):
+    def _choose_content_renames(self) -> None:
         # Sort scores from highest to lowest, but keep names in ascending
         # order.
         self._candidates.sort()
@@ -594,7 +599,7 @@ class RenameDetector:
             self._changes.append(change)
         self._prune(add_paths, delete_paths)
 
-    def _join_modifies(self):
+    def _join_modifies(self) -> None:
         if self._rewrite_threshold is None:
             return
 
@@ -620,7 +625,7 @@ class RenameDetector:
         result.sort(key=_tree_change_key)
         return result
 
-    def _prune_unchanged(self):
+    def _prune_unchanged(self) -> None:
         if self._want_unchanged:
             return
         self._deletes = [d for d in self._deletes if d.type != CHANGE_UNCHANGED]

+ 8 - 8
dulwich/fastexport.py

@@ -45,7 +45,7 @@ class GitFastExporter:
         self.markers: dict[bytes, bytes] = {}
         self._marker_idx = 0
 
-    def print_cmd(self, cmd):
+    def print_cmd(self, cmd) -> None:
         self.outf.write(getattr(cmd, "__bytes__", cmd.__repr__)() + b"\n")
 
     def _allocate_marker(self):
@@ -138,17 +138,17 @@ class GitImportProcessor(processor.ImportProcessor):
         self.process(p.iter_commands)
         return self.markers
 
-    def blob_handler(self, cmd):
+    def blob_handler(self, cmd) -> None:
         """Process a BlobCommand."""
         blob = Blob.from_string(cmd.data)
         self.repo.object_store.add_object(blob)
         if cmd.mark:
             self.markers[cmd.mark] = blob.id
 
-    def checkpoint_handler(self, cmd):
+    def checkpoint_handler(self, cmd) -> None:
         """Process a CheckpointCommand."""
 
-    def commit_handler(self, cmd):
+    def commit_handler(self, cmd) -> None:
         """Process a CommitCommand."""
         commit = Commit()
         if cmd.author is not None:
@@ -207,10 +207,10 @@ class GitImportProcessor(processor.ImportProcessor):
         if cmd.mark:
             self.markers[cmd.mark] = commit.id
 
-    def progress_handler(self, cmd):
+    def progress_handler(self, cmd) -> None:
         """Process a ProgressCommand."""
 
-    def _reset_base(self, commit_id):
+    def _reset_base(self, commit_id) -> None:
         if self.last_commit == commit_id:
             return
         self._contents = {}
@@ -224,7 +224,7 @@ class GitImportProcessor(processor.ImportProcessor):
             ) in iter_tree_contents(self.repo.object_store, tree_id):
                 self._contents[path] = (mode, hexsha)
 
-    def reset_handler(self, cmd):
+    def reset_handler(self, cmd) -> None:
         """Process a ResetCommand."""
         if cmd.from_ is None:
             from_ = ZERO_SHA
@@ -233,7 +233,7 @@ class GitImportProcessor(processor.ImportProcessor):
         self._reset_base(from_)
         self.repo.refs[cmd.ref] = from_
 
-    def tag_handler(self, cmd):
+    def tag_handler(self, cmd) -> None:
         """Process a TagCommand."""
         tag = Tag()
         tag.tagger = cmd.tagger

+ 4 - 4
dulwich/file.py

@@ -26,7 +26,7 @@ import warnings
 from typing import ClassVar
 
 
-def ensure_dir_exists(dirname):
+def ensure_dir_exists(dirname) -> None:
     """Ensure a directory exists, creating if necessary."""
     try:
         os.makedirs(dirname)
@@ -34,7 +34,7 @@ def ensure_dir_exists(dirname):
         pass
 
 
-def _fancy_rename(oldname, newname):
+def _fancy_rename(oldname, newname) -> None:
     """Rename file with temporary backup file to rollback if rename fails."""
     if not os.path.exists(newname):
         try:
@@ -159,7 +159,7 @@ class _GitFile:
         for method in self.PROXY_METHODS:
             setattr(self, method, getattr(self._file, method))
 
-    def abort(self):
+    def abort(self) -> None:
         """Close and discard the lockfile without overwriting the target.
 
         If the file is already closed, this is a no-op.
@@ -174,7 +174,7 @@ class _GitFile:
             # The file may have been removed already, which is ok.
             self._closed = True
 
-    def close(self):
+    def close(self) -> None:
         """Close this file, saving the lockfile over the original.
 
         Note: If this method fails, it will attempt to delete the lockfile.

+ 9 - 7
dulwich/graph.py

@@ -20,29 +20,31 @@
 """Implementation of merge-base following the approach of git."""
 
 from heapq import heappop, heappush
+from typing import Generic, Optional, Iterator, TypeVar
 
 from .lru_cache import LRUCache
 
+T = TypeVar("T")
 
 # priority queue using builtin python minheap tools
 # why they do not have a builtin maxheap is simply ridiculous but
 # liveable with integer time stamps using negation
-class WorkList:
-    def __init__(self):
-        self.pq = []
+class WorkList(Generic[T]):
+    def __init__(self) -> None:
+        self.pq: list[tuple[int, T]] = []
 
-    def add(self, item):
+    def add(self, item: tuple[int, T]) -> None:
         dt, cmt = item
         heappush(self.pq, (-dt, cmt))
 
-    def get(self):
+    def get(self) -> Optional[tuple[int, T]]:
         item = heappop(self.pq)
         if item:
             pr, cmt = item
             return -pr, cmt
         return None
 
-    def iter(self):
+    def iter(self) -> Iterator[tuple[int, T]]:
         for pr, cmt in self.pq:
             yield (-pr, cmt)
 
@@ -57,7 +59,7 @@ def _find_lcas(lookup_parents, c1, c2s, lookup_stamp, min_stamp=0):
     _DNC = 4  # Do Not Consider
     _LCA = 8  # potential LCA (Lowest Common Ancestor)
 
-    def _has_candidates(wlst, cstates):
+    def _has_candidates(wlst, cstates) -> bool:
         for dt, cmt in wlst.iter():
             if cmt in cstates:
                 if not ((cstates[cmt] & _DNC) == _DNC):

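The WorkList rewritten above is a max-heap built on heapq's min-heap by storing negated integer timestamps, now made generic over the payload type. A self-contained sketch of that negation trick, with hypothetical string payloads standing in for dulwich commit objects:

    from heapq import heappop, heappush

    pq: list[tuple[int, str]] = []

    def add(timestamp: int, item: str) -> None:
        # Push the negated timestamp so the newest entry surfaces first.
        heappush(pq, (-timestamp, item))

    def get_newest() -> tuple[int, str]:
        neg_ts, item = heappop(pq)
        return -neg_ts, item

    add(1700000000, "commit-a")
    add(1800000000, "commit-b")
    assert get_newest() == (1800000000, "commit-b")
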
+ 2 - 2
dulwich/greenthreads.py

@@ -44,7 +44,7 @@ def _split_commits_and_tags(obj_store, lst, *, ignore_unknown=False, pool=None):
     commits = set()
     tags = set()
 
-    def find_commit_type(sha):
+    def find_commit_type(sha) -> None:
         try:
             o = obj_store[sha]
         except KeyError:
@@ -81,7 +81,7 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
         concurrency=1,
         get_parents=None,
     ) -> None:
-        def collect_tree_sha(sha):
+        def collect_tree_sha(sha) -> None:
             self.sha_done.add(sha)
             cmt = object_store[sha]
             _collect_filetree_revs(object_store, cmt.tree, self.sha_done)

+ 2 - 1
dulwich/hooks.py

@@ -22,6 +22,7 @@
 
 import os
 import subprocess
+from typing import Any
 
 from .errors import HookError
 
@@ -29,7 +30,7 @@ from .errors import HookError
 class Hook:
     """Generic hook object."""
 
-    def execute(self, *args):
+    def execute(self, *args) -> Any:
         """Execute the hook with the given args.
 
         Args:

+ 9 - 9
dulwich/index.py

@@ -192,7 +192,7 @@ def read_cache_time(f):
     return struct.unpack(">LL", f.read(8))
 
 
-def write_cache_time(f, t):
+def write_cache_time(f, t) -> None:
     """Write a cache time.
 
     Args:
@@ -337,7 +337,7 @@ def read_index_dict(f) -> dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]:
 
 def write_index(
     f: BinaryIO, entries: list[SerializedIndexEntry], version: Optional[int] = None
-):
+) -> None:
     """Write an index file.
 
     Args:
@@ -440,7 +440,7 @@ class Index:
         finally:
             f.close()
 
-    def read(self):
+    def read(self) -> None:
         """Read current contents of index from disk."""
         if not os.path.exists(self._filename):
             return
@@ -470,7 +470,7 @@ class Index:
         """Iterate over the paths and stages in this index."""
         return iter(self._byname)
 
-    def __contains__(self, key):
+    def __contains__(self, key) -> bool:
         return key in self._byname
 
     def get_sha1(self, path: bytes) -> bytes:
@@ -501,7 +501,7 @@ class Index:
                 return True
         return False
 
-    def clear(self):
+    def clear(self) -> None:
         """Remove all contents from this index."""
         self._byname = {}
 
@@ -522,7 +522,7 @@ class Index:
     def items(self) -> Iterator[tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]:
         return iter(self._byname.items())
 
-    def update(self, entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]):
+    def update(self, entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]) -> None:
         for key, value in entries.items():
             self[key] = value
 
@@ -808,7 +808,7 @@ def build_index_from_tree(
     honor_filemode: bool = True,
     validate_path_element=validate_path_element_default,
     symlink_fn=None,
-):
+) -> None:
     """Generate and materialize index from a tree.
 
     Args:
@@ -935,7 +935,7 @@ def read_submodule_head(path: Union[str, bytes]) -> Optional[bytes]:
         return None
 
 
-def _has_directory_changed(tree_path: bytes, entry):
+def _has_directory_changed(tree_path: bytes, entry) -> bool:
     """Check if a directory has changed after getting an error.
 
     When handling an error trying to create a blob from a path, call this
@@ -1123,7 +1123,7 @@ def iter_fresh_objects(
             yield path, entry.sha, cleanup_mode(entry.mode)
 
 
-def refresh_index(index: Index, root_path: bytes):
+def refresh_index(index: Index, root_path: bytes) -> None:
     """Refresh the contents of an index.
 
     This is the equivalent to running 'git commit -a'.

+ 3 - 3
dulwich/log_utils.py

@@ -44,7 +44,7 @@ getLogger = logging.getLogger
 class _NullHandler(logging.Handler):
     """No-op logging handler to avoid unexpected logging warnings."""
 
-    def emit(self, record):
+    def emit(self, record) -> None:
         pass
 
 
@@ -53,7 +53,7 @@ _DULWICH_LOGGER = getLogger("dulwich")
 _DULWICH_LOGGER.addHandler(_NULL_HANDLER)
 
 
-def default_logging_config():
+def default_logging_config() -> None:
     """Set up the default Dulwich loggers."""
     remove_null_handler()
     logging.basicConfig(
@@ -63,7 +63,7 @@ def default_logging_config():
     )
 
 
-def remove_null_handler():
+def remove_null_handler() -> None:
     """Remove the null handler from the Dulwich loggers.
 
     If a caller wants to set up logging using something other than

+ 2 - 2
dulwich/lru_cache.py

@@ -214,7 +214,7 @@ class LRUCache(Generic[K, V]):
         """Get the key:value pairs as a dict."""
         return {k: n.value for k, n in self._cache.items()}
 
-    def cleanup(self):
+    def cleanup(self) -> None:
         """Clear the cache until it shrinks to the requested size.
 
         This does not completely wipe the cache, just makes sure it is under
@@ -291,7 +291,7 @@ class LRUCache(Generic[K, V]):
         """Change the number of entries that will be cached."""
         self._update_max_cache(max_cache, after_cleanup_count=after_cleanup_count)
 
-    def _update_max_cache(self, max_cache, after_cleanup_count=None):
+    def _update_max_cache(self, max_cache, after_cleanup_count=None) -> None:
         self._max_cache = max_cache
         if after_cleanup_count is None:
             self._after_cleanup_count = self._max_cache * 8 / 10

+ 2 - 2
dulwich/mailmap.py

@@ -64,12 +64,12 @@ class Mailmap:
     """Class for accessing a mailmap file."""
 
     def __init__(self, map=None) -> None:
-        self._table: dict[tuple[Optional[str], str], tuple[str, str]] = {}
+        self._table: dict[tuple[Optional[str], Optional[str]], tuple[str, str]] = {}
         if map:
             for canonical_identity, from_identity in map:
                 self.add_entry(canonical_identity, from_identity)
 
-    def add_entry(self, canonical_identity, from_identity=None):
+    def add_entry(self, canonical_identity, from_identity=None) -> None:
         """Add an entry to the mail mail.
 
         Any of the fields can be None, but at least one of them needs to be

+ 35 - 35
dulwich/object_store.py

@@ -114,7 +114,7 @@ class BaseObjectStore:
             and not sha == ZERO_SHA
         ]
 
-    def contains_loose(self, sha):
+    def contains_loose(self, sha) -> bool:
         """Check if a particular object is present by SHA1 and is loose."""
         raise NotImplementedError(self.contains_loose)
 
@@ -130,7 +130,7 @@ class BaseObjectStore:
         """Iterable of pack objects."""
         raise NotImplementedError
 
-    def get_raw(self, name):
+    def get_raw(self, name) -> tuple[int, bytes]:
         """Obtain the raw text for an object.
 
         Args:
@@ -148,11 +148,11 @@ class BaseObjectStore:
         """Iterate over the SHAs that are present in this store."""
         raise NotImplementedError(self.__iter__)
 
-    def add_object(self, obj):
+    def add_object(self, obj) -> None:
         """Add a single object to this object store."""
         raise NotImplementedError(self.add_object)
 
-    def add_objects(self, objects, progress=None):
+    def add_objects(self, objects, progress=None) -> None:
         """Add a set of objects to this object store.
 
         Args:
@@ -348,7 +348,7 @@ class BaseObjectStore:
             )
         return current_depth
 
-    def close(self):
+    def close(self) -> None:
         """Close any files opened by this object store."""
         # Default implementation is a NO-OP
 
@@ -404,7 +404,7 @@ class PackBasedObjectStore(BaseObjectStore):
     def alternates(self):
         return []
 
-    def contains_packed(self, sha):
+    def contains_packed(self, sha) -> bool:
         """Check if a particular object is present by SHA1 and is packed.
 
         This does not check alternates.
@@ -429,7 +429,7 @@ class PackBasedObjectStore(BaseObjectStore):
                 return True
         return False
 
-    def _add_cached_pack(self, base_name, pack):
+    def _add_cached_pack(self, base_name, pack) -> None:
         """Add a newly appeared pack to the cache by path."""
         prev_pack = self._pack_cache.get(base_name)
         if prev_pack is not pack:
@@ -462,7 +462,7 @@ class PackBasedObjectStore(BaseObjectStore):
             other_haves=remote_has,
         )
 
-    def _clear_cached_packs(self):
+    def _clear_cached_packs(self) -> None:
         pack_cache = self._pack_cache
         self._pack_cache = {}
         while pack_cache:
@@ -472,10 +472,10 @@ class PackBasedObjectStore(BaseObjectStore):
     def _iter_cached_packs(self):
         return self._pack_cache.values()
 
-    def _update_pack_cache(self):
+    def _update_pack_cache(self) -> list[Pack]:
         raise NotImplementedError(self._update_pack_cache)
 
-    def close(self):
+    def close(self) -> None:
         self._clear_cached_packs()
 
     @property
@@ -492,13 +492,13 @@ class PackBasedObjectStore(BaseObjectStore):
         """Iterate over the SHAs of all loose objects."""
         raise NotImplementedError(self._iter_loose_objects)
 
-    def _get_loose_object(self, sha):
+    def _get_loose_object(self, sha) -> Optional[ShaFile]:
         raise NotImplementedError(self._get_loose_object)
 
-    def _remove_loose_object(self, sha):
+    def _remove_loose_object(self, sha) -> None:
         raise NotImplementedError(self._remove_loose_object)
 
-    def _remove_pack(self, name):
+    def _remove_pack(self, name) -> None:
         raise NotImplementedError(self._remove_pack)
 
     def pack_loose_objects(self):
@@ -793,7 +793,7 @@ class DiskObjectStore(PackBasedObjectStore):
                 else:
                     yield os.fsdecode(os.path.join(os.fsencode(self.path), line))
 
-    def add_alternate_path(self, path):
+    def add_alternate_path(self, path) -> None:
         """Add an alternate path to this object store."""
         try:
             os.mkdir(os.path.join(self.path, INFODIR))
@@ -864,10 +864,10 @@ class DiskObjectStore(PackBasedObjectStore):
         except FileNotFoundError:
             return None
 
-    def _remove_loose_object(self, sha):
+    def _remove_loose_object(self, sha) -> None:
         os.remove(self._get_shafile_path(sha))
 
-    def _remove_pack(self, pack):
+    def _remove_pack(self, pack) -> None:
         try:
             del self._pack_cache[os.path.basename(pack._basename)]
         except KeyError:
@@ -997,13 +997,13 @@ class DiskObjectStore(PackBasedObjectStore):
                 os.remove(path)
                 return None
 
-        def abort():
+        def abort() -> None:
             f.close()
             os.remove(path)
 
         return f, commit, abort
 
-    def add_object(self, obj):
+    def add_object(self, obj) -> None:
         """Add a single object to this object store.
 
         Args:
@@ -1087,7 +1087,7 @@ class MemoryObjectStore(BaseObjectStore):
         """Check if a particular object is present by SHA1 and is loose."""
         return self._to_hexsha(sha) in self._data
 
-    def contains_packed(self, sha):
+    def contains_packed(self, sha) -> bool:
         """Check if a particular object is present by SHA1 and is packed."""
         return False
 
@@ -1117,11 +1117,11 @@ class MemoryObjectStore(BaseObjectStore):
         """Delete an object from this store, for testing only."""
         del self._data[self._to_hexsha(name)]
 
-    def add_object(self, obj):
+    def add_object(self, obj) -> None:
         """Add a single object to this object store."""
         self._data[obj.id] = obj.copy()
 
-    def add_objects(self, objects, progress=None):
+    def add_objects(self, objects, progress=None) -> None:
         """Add a set of objects to this object store.
 
         Args:
@@ -1143,7 +1143,7 @@ class MemoryObjectStore(BaseObjectStore):
 
         f = SpooledTemporaryFile(max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-")
 
-        def commit():
+        def commit() -> None:
             size = f.tell()
             if size > 0:
                 f.seek(0)
@@ -1154,7 +1154,7 @@ class MemoryObjectStore(BaseObjectStore):
             else:
                 f.close()
 
-        def abort():
+        def abort() -> None:
             f.close()
 
         return f, commit, abort
@@ -1171,7 +1171,7 @@ class MemoryObjectStore(BaseObjectStore):
         for unpacked_object in unpacked_objects:
             self.add_object(unpacked_object.sha_file())
 
-    def add_thin_pack(self, read_all, read_some, progress=None):
+    def add_thin_pack(self, read_all, read_some, progress=None) -> None:
         """Add a new thin pack to this object store.
 
         Thin packs are packs that contain deltas with parents that exist
@@ -1372,7 +1372,7 @@ class MissingObjectFinder:
 
     def add_todo(
         self, entries: Iterable[tuple[ObjectID, Optional[bytes], Optional[int], bool]]
-    ):
+    ) -> None:
         self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done])
 
     def __next__(self) -> tuple[bytes, Optional[PackHint]]:
@@ -1445,10 +1445,10 @@ class ObjectStoreGraphWalker:
             shallow = set()
         self.shallow = shallow
 
-    def nak(self):
+    def nak(self) -> None:
         """Nothing in common was found."""
 
-    def ack(self, sha):
+    def ack(self, sha) -> None:
         """Ack that a revision and its ancestors are present in the source."""
         if len(sha) != 40:
             raise ValueError(f"unexpected sha {sha!r} received")
@@ -1610,13 +1610,13 @@ class OverlayObjectStore(BaseObjectStore):
                 pass
         raise KeyError(sha_id)
 
-    def contains_packed(self, sha):
+    def contains_packed(self, sha) -> bool:
         for b in self.bases:
             if b.contains_packed(sha):
                 return True
         return False
 
-    def contains_loose(self, sha):
+    def contains_loose(self, sha) -> bool:
         for b in self.bases:
             if b.contains_loose(sha):
                 return True
@@ -1641,20 +1641,20 @@ class BucketBasedObjectStore(PackBasedObjectStore):
         """Iterate over the SHAs of all loose objects."""
         return iter([])
 
-    def _get_loose_object(self, sha):
+    def _get_loose_object(self, sha) -> None:
         return None
 
-    def _remove_loose_object(self, sha):
+    def _remove_loose_object(self, sha) -> None:
         # Doesn't exist..
         pass
 
-    def _remove_pack(self, name):
+    def _remove_pack(self, name) -> None:
         raise NotImplementedError(self._remove_pack)
 
-    def _iter_pack_names(self):
+    def _iter_pack_names(self) -> Iterator[str]:
         raise NotImplementedError(self._iter_pack_names)
 
-    def _get_pack(self, name):
+    def _get_pack(self, name) -> Pack:
         raise NotImplementedError(self._get_pack)
 
     def _update_pack_cache(self):
@@ -1672,7 +1672,7 @@ class BucketBasedObjectStore(PackBasedObjectStore):
             self._pack_cache.pop(f).close()
         return new_packs
 
-    def _upload_pack(self, basename, pack_file, index_file):
+    def _upload_pack(self, basename, pack_file, index_file) -> None:
         raise NotImplementedError
 
     def add_pack(self):

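Several of the object_store.py annotations above land on abstract methods whose body only raises NotImplementedError (for example _get_pack -> Pack and _iter_pack_names -> Iterator[str]): the declared return type records the contract concrete stores must honour and lets mypy check callers even though the base method never returns. A small sketch of the pattern with hypothetical classes:

    from typing import Iterator

    class BaseStore:
        def _iter_pack_names(self) -> Iterator[str]:
            # Abstract: concrete stores yield the basenames of their packs.
            raise NotImplementedError(self._iter_pack_names)

    class ListStore(BaseStore):
        def __init__(self, names: list[str]) -> None:
            self._names = names

        def _iter_pack_names(self) -> Iterator[str]:
            return iter(self._names)
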
+ 31 - 29
dulwich/objects.py

@@ -120,7 +120,7 @@ def hex_to_sha(hex):
         raise ValueError(exc.args[0]) from exc
 
 
-def valid_hexsha(hex):
+def valid_hexsha(hex) -> bool:
     if len(hex) != 40:
         return False
     try:
@@ -168,7 +168,7 @@ def object_header(num_type: int, length: int) -> bytes:
 def serializable_property(name: str, docstring: Optional[str] = None):
     """A property that helps tracking whether serialization is necessary."""
 
-    def set(obj, value):
+    def set(obj, value) -> None:
         setattr(obj, "_" + name, value)
         obj._needs_serialization = True
 
@@ -189,7 +189,7 @@ def object_class(type: Union[bytes, int]) -> Optional[type["ShaFile"]]:
     return _TYPE_MAP.get(type, None)
 
 
-def check_hexsha(hex, error_msg):
+def check_hexsha(hex, error_msg) -> None:
     """Check if a string is a valid hex sha string.
 
     Args:
@@ -202,7 +202,7 @@ def check_hexsha(hex, error_msg):
         raise ObjectFormatException(f"{error_msg} {hex}")
 
 
-def check_identity(identity: bytes, error_msg: str) -> None:
+def check_identity(identity: Optional[bytes], error_msg: str) -> None:
     """Check if the specified identity is valid.
 
     This will raise an exception if the identity is not valid.
@@ -211,6 +211,8 @@ def check_identity(identity: bytes, error_msg: str) -> None:
       identity: Identity string
       error_msg: Error message to use in exception
     """
+    if identity is None:
+        raise ObjectFormatException(error_msg)
     email_start = identity.find(b"<")
     email_end = identity.find(b">")
     if not all(
@@ -226,7 +228,7 @@ def check_identity(identity: bytes, error_msg: str) -> None:
         raise ObjectFormatException(error_msg)
 
 
-def check_time(time_seconds):
+def check_time(time_seconds) -> None:
     """Check if the specified time is not prone to overflow error.
 
     This will raise an exception if the time is not valid.
@@ -491,7 +493,7 @@ class ShaFile:
         obj.set_raw_string(string)
         return obj
 
-    def _check_has_member(self, member, error_msg):
+    def _check_has_member(self, member, error_msg) -> None:
         """Check that the object has a given member variable.
 
         Args:
@@ -598,7 +600,7 @@ class Blob(ShaFile):
     def _get_data(self):
         return self.as_raw_string()
 
-    def _set_data(self, data):
+    def _set_data(self, data) -> None:
         self.set_raw_string(data)
 
     data = property(
@@ -608,13 +610,13 @@ class Blob(ShaFile):
     def _get_chunked(self):
         return self._chunked_text
 
-    def _set_chunked(self, chunks: list[bytes]):
+    def _set_chunked(self, chunks: list[bytes]) -> None:
         self._chunked_text = chunks
 
     def _serialize(self):
         return self._chunked_text
 
-    def _deserialize(self, chunks):
+    def _deserialize(self, chunks) -> None:
         self._chunked_text = chunks
 
     chunked = property(
@@ -630,7 +632,7 @@ class Blob(ShaFile):
             raise NotBlobError(path)
         return blob
 
-    def check(self):
+    def check(self) -> None:
         """Check this object for internal consistency.
 
         Raises:
@@ -759,7 +761,7 @@ class Tag(ShaFile):
         self._tag_time = None
         self._tag_timezone = None
         self._tag_timezone_neg_utc = False
-        self._signature = None
+        self._signature: Optional[bytes] = None
 
     @classmethod
     def from_path(cls, filename):
@@ -768,7 +770,7 @@ class Tag(ShaFile):
             raise NotTagError(filename)
         return tag
 
-    def check(self):
+    def check(self) -> None:
         """Check this object for internal consistency.
 
         Raises:
@@ -829,7 +831,7 @@ class Tag(ShaFile):
             body = (self.message or b"") + (self._signature or b"")
         return list(_format_message(headers, body))
 
-    def _deserialize(self, chunks):
+    def _deserialize(self, chunks) -> None:
         """Grab the metadata attached to the tag."""
         self._tagger = None
         self._tag_time = None
@@ -866,7 +868,7 @@ class Tag(ShaFile):
                         self._message = value[:sig_idx]
                         self._signature = value[sig_idx:]
             else:
-                raise ObjectFormatException(f"Unknown field {field}")
+                raise ObjectFormatException(f"Unknown field {field.decode('ascii', 'replace')}")
 
     def _get_object(self):
         """Get the object pointed to by this tag.
@@ -875,7 +877,7 @@ class Tag(ShaFile):
         """
         return (self._object_class, self._object_sha)
 
-    def _set_object(self, value):
+    def _set_object(self, value) -> None:
         (self._object_class, self._object_sha) = value
         self._needs_serialization = True
 
@@ -897,7 +899,7 @@ class Tag(ShaFile):
 
     signature = serializable_property("signature", "Optional detached GPG signature")
 
-    def sign(self, keyid: Optional[str] = None):
+    def sign(self, keyid: Optional[str] = None) -> None:
         import gpg
 
         with gpg.Context(armor=True) as c:
@@ -1118,7 +1120,7 @@ class Tree(ShaFile):
     def __iter__(self):
         return iter(self._entries)
 
-    def add(self, name, mode, hexsha):
+    def add(self, name, mode, hexsha) -> None:
         """Add an entry to the tree.
 
         Args:
@@ -1147,7 +1149,7 @@ class Tree(ShaFile):
         """
         return list(self.iteritems())
 
-    def _deserialize(self, chunks):
+    def _deserialize(self, chunks) -> None:
         """Grab the entries in the tree."""
         try:
             parsed_entries = parse_tree(b"".join(chunks))
@@ -1158,7 +1160,7 @@ class Tree(ShaFile):
         # genexp.
         self._entries = {n: (m, s) for n, m, s in parsed_entries}
 
-    def check(self):
+    def check(self) -> None:
         """Check this object for internal consistency.
 
         Raises:
@@ -1385,12 +1387,12 @@ class Commit(ShaFile):
     def __init__(self) -> None:
         super().__init__()
         self._parents: list[bytes] = []
-        self._encoding = None
+        self._encoding: Optional[bytes] = None
         self._mergetag: list[Tag] = []
-        self._gpgsig = None
-        self._extra: list[tuple[bytes, bytes]] = []
-        self._author_timezone_neg_utc = False
-        self._commit_timezone_neg_utc = False
+        self._gpgsig: Optional[bytes] = None
+        self._extra: list[tuple[bytes, Optional[bytes]]] = []
+        self._author_timezone_neg_utc: Optional[bool] = False
+        self._commit_timezone_neg_utc: Optional[bool] = False
 
     @classmethod
     def from_path(cls, path):
@@ -1399,7 +1401,7 @@ class Commit(ShaFile):
             raise NotCommitError(path)
         return commit
 
-    def _deserialize(self, chunks):
+    def _deserialize(self, chunks) -> None:
         self._parents = []
         self._extra = []
         self._tree = None
@@ -1444,7 +1446,7 @@ class Commit(ShaFile):
             (self._commit_timezone, self._commit_timezone_neg_utc),
         ) = commit_info
 
-    def check(self):
+    def check(self) -> None:
         """Check this object for internal consistency.
 
         Raises:
@@ -1490,7 +1492,7 @@ class Commit(ShaFile):
 
         # TODO: optionally check for duplicate parents
 
-    def sign(self, keyid: Optional[str] = None):
+    def sign(self, keyid: Optional[str] = None) -> None:
         import gpg
 
         with gpg.Context(armor=True) as c:
@@ -1506,7 +1508,7 @@ class Commit(ShaFile):
                     self.as_raw_string(), mode=gpg.constants.sig.mode.DETACH
                 )
 
-    def verify(self, keyids: Optional[Iterable[str]] = None):
+    def verify(self, keyids: Optional[Iterable[str]] = None) -> None:
         """Verify GPG signature for this commit (if it is signed).
 
         Args:
@@ -1583,7 +1585,7 @@ class Commit(ShaFile):
         """Return a list of parents of this commit."""
         return self._parents
 
-    def _set_parents(self, value):
+    def _set_parents(self, value) -> None:
         """Set a list of parents of this commit."""
         self._needs_serialization = True
         self._parents = value
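
The check_identity change above shows why the annotation work is not purely cosmetic: once the parameter is declared Optional[bytes], a type checker requires the None case to be handled before any bytes methods are used, which is exactly the guard the hunk adds. A reduced, self-contained version of that pattern (FormatError stands in for ObjectFormatException):

from typing import Optional


class FormatError(Exception):
    """Stand-in for ObjectFormatException in this sketch."""


def check_identity(identity: Optional[bytes], error_msg: str) -> None:
    # The Optional annotation forces an explicit None check before calling
    # bytes methods such as find().
    if identity is None:
        raise FormatError(error_msg)
    if identity.find(b"<") == -1 or identity.find(b">") == -1:
        raise FormatError(error_msg)


check_identity(b"Jane Doe <jane@example.com>", "invalid identity")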

+ 14 - 14
dulwich/pack.py

@@ -99,7 +99,7 @@ PackHint = tuple[int, Optional[bytes]]
 class UnresolvedDeltas(Exception):
     """Delta objects could not be resolved."""
 
-    def __init__(self, shas):
+    def __init__(self, shas) -> None:
         self.shas = shas
 
 
@@ -525,7 +525,7 @@ class PackIndex:
         """Yield all the SHA1's of the objects in the index, sorted."""
         raise NotImplementedError(self._itersha)
 
-    def close(self):
+    def close(self) -> None:
         pass
 
     def check(self) -> None:
@@ -642,15 +642,15 @@ class FilePackIndex(PackIndex):
         """
         raise NotImplementedError(self._unpack_entry)
 
-    def _unpack_name(self, i):
+    def _unpack_name(self, i) -> bytes:
         """Unpack the i-th name from the index file."""
         raise NotImplementedError(self._unpack_name)
 
-    def _unpack_offset(self, i):
+    def _unpack_offset(self, i) -> int:
         """Unpack the i-th object offset from the index file."""
         raise NotImplementedError(self._unpack_offset)
 
-    def _unpack_crc32_checksum(self, i):
+    def _unpack_crc32_checksum(self, i) -> Optional[int]:
         """Unpack the crc32 checksum for the ith object from the index file."""
         raise NotImplementedError(self._unpack_crc32_checksum)
 
@@ -754,7 +754,7 @@ class FilePackIndex(PackIndex):
         assert start <= end
         started = False
         for i in range(start, end):
-            name = self._unpack_name(i)
+            name: bytes = self._unpack_name(i)
             if name.startswith(prefix):
                 yield name
                 started = True
@@ -782,7 +782,7 @@ class PackIndex1(FilePackIndex):
         offset = (0x100 * 4) + (i * 24)
         return unpack_from(">L", self._contents, offset)[0]
 
-    def _unpack_crc32_checksum(self, i):
+    def _unpack_crc32_checksum(self, i) -> None:
         # Not stored in v1 index files
         return None
 
@@ -1109,7 +1109,7 @@ class PackStreamCopier(PackStreamReader):
         self.outfile.write(data)
         return data
 
-    def verify(self, progress=None):
+    def verify(self, progress=None) -> None:
         """Verify a pack stream and write it to the output file.
 
         See PackStreamReader.iterobjects for a list of exceptions this may
@@ -1231,7 +1231,7 @@ class PackData:
     def from_path(cls, path):
         return cls(filename=path)
 
-    def close(self):
+    def close(self) -> None:
         self._file.close()
 
     def __enter__(self):
@@ -1370,7 +1370,7 @@ class PackData:
         self._file.seek(-20, SEEK_END)
         return self._file.read(20)
 
-    def check(self):
+    def check(self) -> None:
         """Check the consistency of this pack."""
         actual = self.calculate_checksum()
         stored = self.get_stored_checksum()
@@ -1611,7 +1611,7 @@ class SHA1Reader:
         self.sha1.update(data)
         return data
 
-    def check_sha(self):
+    def check_sha(self) -> None:
         stored = self.f.read(20)
         if stored != self.sha1.digest():
             raise ChecksumMismatch(self.sha1.hexdigest(), sha_to_hex(stored))
@@ -1631,7 +1631,7 @@ class SHA1Writer:
         self.length = 0
         self.sha1 = sha1(b"")
 
-    def write(self, data):
+    def write(self, data) -> None:
         self.sha1.update(data)
         self.f.write(data)
         self.length += len(data)
@@ -1766,7 +1766,7 @@ def pack_header_chunks(num_objects):
     yield struct.pack(b">L", num_objects)  # Number of objects in pack
 
 
-def write_pack_header(write, num_objects):
+def write_pack_header(write, num_objects) -> None:
     """Write a pack header for the given number of objects."""
     if hasattr(write, "write"):
         write = write.write
@@ -2470,7 +2470,7 @@ class Pack:
             self._idx = self._idx_load()
         return self._idx
 
-    def close(self):
+    def close(self) -> None:
         if self._data is not None:
             self._data.close()
         if self._idx is not None:

+ 5 - 5
dulwich/patch.py

@@ -35,7 +35,7 @@ from .pack import ObjectContainer
 FIRST_FEW_BYTES = 8000
 
 
-def write_commit_patch(f, commit, contents, progress, version=None, encoding=None):
+def write_commit_patch(f, commit, contents, progress, version=None, encoding=None) -> None:
     """Write a individual file patch.
 
     Args:
@@ -101,7 +101,7 @@ def get_summary(commit):
 
 
 #  Unified Diff
-def _format_range_unified(start, stop):
+def _format_range_unified(start, stop) -> str:
     """Convert range to the "ed" format."""
     # Per the diff spec at http://www.unix.org/single_unix_specification/
     beginning = start + 1  # lines start numbering with one
@@ -188,7 +188,7 @@ def patch_filename(p, root):
         return root + b"/" + p
 
 
-def write_object_diff(f, store: ObjectContainer, old_file, new_file, diff_binary=False):
+def write_object_diff(f, store: ObjectContainer, old_file, new_file, diff_binary=False) -> None:
     """Write the diff for an object.
 
     Args:
@@ -279,7 +279,7 @@ def gen_diff_header(paths, modes, shas):
 
 
 # TODO(jelmer): Support writing unicode, rather than bytes.
-def write_blob_diff(f, old_file, new_file):
+def write_blob_diff(f, old_file, new_file) -> None:
     """Write blob diff.
 
     Args:
@@ -314,7 +314,7 @@ def write_blob_diff(f, old_file, new_file):
     )
 
 
-def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False):
+def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False) -> None:
     """Write tree diff.
 
     Args:

+ 51 - 51
dulwich/porcelain.py

@@ -140,16 +140,16 @@ GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
 class NoneStream(RawIOBase):
     """Fallback if stdout or stderr are unavailable, does nothing."""
 
-    def read(self, size=-1):
+    def read(self, size=-1) -> None:
         return None
 
-    def readall(self):
-        return None
+    def readall(self) -> bytes:
+        return b""
 
-    def readinto(self, b):
+    def readinto(self, b) -> None:
         return None
 
-    def write(self, b):
+    def write(self, b) -> None:
         return None
 
 
@@ -325,7 +325,7 @@ class DivergedBranches(Error):
         self.new_sha = new_sha
 
 
-def check_diverged(repo, current_sha, new_sha):
+def check_diverged(repo, current_sha, new_sha) -> None:
     """Check if updating to a sha can be done with fast forwarding.
 
     Args:
@@ -346,7 +346,7 @@ def archive(
     committish=None,
     outstream=default_bytes_out_stream,
     errstream=default_bytes_err_stream,
-):
+) -> None:
     """Create an archive.
 
     Args:
@@ -365,7 +365,7 @@ def archive(
             outstream.write(chunk)
 
 
-def update_server_info(repo="."):
+def update_server_info(repo=".") -> None:
     """Update server info files for a repository.
 
     Args:
@@ -375,7 +375,7 @@ def update_server_info(repo="."):
         server_update_server_info(r)
 
 
-def symbolic_ref(repo, ref_name, force=False):
+def symbolic_ref(repo, ref_name, force=False) -> None:
     """Set git symbolic ref into HEAD.
 
     Args:
@@ -390,7 +390,7 @@ def symbolic_ref(repo, ref_name, force=False):
         repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
 
 
-def pack_refs(repo, all=False):
+def pack_refs(repo, all=False) -> None:
     with open_repo_closing(repo) as repo_obj:
         refs = repo_obj.refs
         packed_refs = {
@@ -623,7 +623,7 @@ def _is_subdir(subdir, parentdir):
 
 
 # TODO: option to remove ignored files also, in line with `git clean -fdx`
-def clean(repo=".", target_dir=None):
+def clean(repo=".", target_dir=None) -> None:
     """Remove any untracked files from the target directory recursively.
 
     Equivalent to running ``git clean -fd`` in target_dir.
@@ -669,7 +669,7 @@ def clean(repo=".", target_dir=None):
                     os.remove(ap)
 
 
-def remove(repo=".", paths=None, cached=False):
+def remove(repo=".", paths=None, cached=False) -> None:
     """Remove files from the staging area.
 
     Args:
@@ -736,7 +736,7 @@ def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
     return contents.encode(encoding)
 
 
-def print_commit(commit, decode, outstream=sys.stdout):
+def print_commit(commit, decode, outstream=sys.stdout) -> None:
     """Write a human-readable commit log entry.
 
     Args:
@@ -764,7 +764,7 @@ def print_commit(commit, decode, outstream=sys.stdout):
     outstream.write("\n")
 
 
-def print_tag(tag, decode, outstream=sys.stdout):
+def print_tag(tag, decode, outstream=sys.stdout) -> None:
     """Write a human-readable tag.
 
     Args:
@@ -782,7 +782,7 @@ def print_tag(tag, decode, outstream=sys.stdout):
     outstream.write("\n")
 
 
-def show_blob(repo, blob, decode, outstream=sys.stdout):
+def show_blob(repo, blob, decode, outstream=sys.stdout) -> None:
     """Write a blob to a stream.
 
     Args:
@@ -794,7 +794,7 @@ def show_blob(repo, blob, decode, outstream=sys.stdout):
     outstream.write(decode(blob.data))
 
 
-def show_commit(repo, commit, decode, outstream=sys.stdout):
+def show_commit(repo, commit, decode, outstream=sys.stdout) -> None:
     """Show a commit to a stream.
 
     Args:
@@ -815,7 +815,7 @@ def show_commit(repo, commit, decode, outstream=sys.stdout):
     outstream.write(commit_decode(commit, diffstream.getvalue()))
 
 
-def show_tree(repo, tree, decode, outstream=sys.stdout):
+def show_tree(repo, tree, decode, outstream=sys.stdout) -> None:
     """Print a tree to a stream.
 
     Args:
@@ -828,7 +828,7 @@ def show_tree(repo, tree, decode, outstream=sys.stdout):
         outstream.write(decode(n) + "\n")
 
 
-def show_tag(repo, tag, decode, outstream=sys.stdout):
+def show_tag(repo, tag, decode, outstream=sys.stdout) -> None:
     """Print a tag to a stream.
 
     Args:
@@ -886,7 +886,7 @@ def log(
     max_entries=None,
     reverse=False,
     name_status=False,
-):
+) -> None:
     """Write commit logs.
 
     Args:
@@ -917,7 +917,7 @@ def show(
     objects=None,
     outstream=sys.stdout,
     default_encoding=DEFAULT_ENCODING,
-):
+) -> None:
     """Print the changes in a commit.
 
     Args:
@@ -947,7 +947,7 @@ def show(
             show_object(r, o, decode, outstream)
 
 
-def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream):
+def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream) -> None:
     """Compares the content and mode of blobs found via two tree objects.
 
     Args:
@@ -960,7 +960,7 @@ def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream):
         write_tree_diff(outstream, r.object_store, old_tree, new_tree)
 
 
-def rev_list(repo, commits, outstream=sys.stdout):
+def rev_list(repo, commits, outstream=sys.stdout) -> None:
     """Lists commit objects in reverse chronological order.
 
     Args:
@@ -980,7 +980,7 @@ def _canonical_part(url: str) -> str:
     return name
 
 
-def submodule_add(repo, url, path=None, name=None):
+def submodule_add(repo, url, path=None, name=None) -> None:
     """Add a new submodule.
 
     Args:
@@ -1006,7 +1006,7 @@ def submodule_add(repo, url, path=None, name=None):
         config.write_to_path()
 
 
-def submodule_init(repo):
+def submodule_init(repo) -> None:
     """Initialize submodules.
 
     Args:
@@ -1045,7 +1045,7 @@ def tag_create(
     tag_timezone=None,
     sign=False,
     encoding=DEFAULT_ENCODING,
-):
+) -> None:
     """Creates a tag in git via dulwich calls.
 
     Args:
@@ -1104,7 +1104,7 @@ def tag_list(repo, outstream=sys.stdout):
         return tags
 
 
-def tag_delete(repo, name):
+def tag_delete(repo, name) -> None:
     """Remove a tag.
 
     Args:
@@ -1122,7 +1122,7 @@ def tag_delete(repo, name):
             del r.refs[_make_tag_ref(name)]
 
 
-def reset(repo, mode, treeish="HEAD"):
+def reset(repo, mode, treeish="HEAD") -> None:
     """Reset current HEAD to the specified state.
 
     Args:
@@ -1170,7 +1170,7 @@ def push(
     errstream=default_bytes_err_stream,
     force=False,
     **kwargs,
-):
+) -> None:
     """Remote push with dulwich via dulwich.client.
 
     Args:
@@ -1255,7 +1255,7 @@ def pull(
     filter_spec=None,
     protocol_version=None,
     **kwargs,
-):
+) -> None:
     """Pull from remote via dulwich.client.
 
     Args:
@@ -1280,7 +1280,7 @@ def pull(
         if refspecs is None:
             refspecs = [b"HEAD"]
 
-        def determine_wants(remote_refs, **kwargs):
+        def determine_wants(remote_refs, *args, **kwargs):
             selected_refs.extend(
                 parse_reftuples(remote_refs, r.refs, refspecs, force=force)
             )
@@ -1494,7 +1494,7 @@ def get_tree_changes(repo):
         return tracked_changes
 
 
-def daemon(path=".", address=None, port=None):
+def daemon(path=".", address=None, port=None) -> None:
     """Run a daemon serving Git requests over TCP/IP.
 
     Args:
@@ -1508,7 +1508,7 @@ def daemon(path=".", address=None, port=None):
     server.serve_forever()
 
 
-def web_daemon(path=".", address=None, port=None):
+def web_daemon(path=".", address=None, port=None) -> None:
     """Run a daemon serving Git requests over HTTP.
 
     Args:
@@ -1535,7 +1535,7 @@ def web_daemon(path=".", address=None, port=None):
     server.serve_forever()
 
 
-def upload_pack(path=".", inf=None, outf=None):
+def upload_pack(path=".", inf=None, outf=None) -> int:
     """Upload a pack file after negotiating its contents using smart protocol.
 
     Args:
@@ -1550,7 +1550,7 @@ def upload_pack(path=".", inf=None, outf=None):
     path = os.path.expanduser(path)
     backend = FileSystemBackend(path)
 
-    def send_fn(data):
+    def send_fn(data) -> None:
         outf.write(data)
         outf.flush()
 
@@ -1561,7 +1561,7 @@ def upload_pack(path=".", inf=None, outf=None):
     return 0
 
 
-def receive_pack(path=".", inf=None, outf=None):
+def receive_pack(path=".", inf=None, outf=None) -> int:
     """Receive a pack file after negotiating its contents using smart protocol.
 
     Args:
@@ -1576,7 +1576,7 @@ def receive_pack(path=".", inf=None, outf=None):
     path = os.path.expanduser(path)
     backend = FileSystemBackend(path)
 
-    def send_fn(data):
+    def send_fn(data) -> None:
         outf.write(data)
         outf.flush()
 
@@ -1599,7 +1599,7 @@ def _make_tag_ref(name):
     return LOCAL_TAG_PREFIX + name
 
 
-def branch_delete(repo, name):
+def branch_delete(repo, name) -> None:
     """Delete a branch.
 
     Args:
@@ -1615,7 +1615,7 @@ def branch_delete(repo, name):
             del r.refs[_make_branch_ref(name)]
 
 
-def branch_create(repo, name, objectish=None, force=False):
+def branch_create(repo, name, objectish=None, force=False) -> None:
     """Create a branch.
 
     Args:
@@ -1800,7 +1800,7 @@ def ls_remote(remote, config: Optional[Config] = None, **kwargs):
     return client.get_refs(host_path)
 
 
-def repack(repo):
+def repack(repo) -> None:
     """Repack loose files in a repository.
 
     Currently this only packs loose objects.
@@ -1820,7 +1820,7 @@ def pack_objects(
     delta_window_size=None,
     deltify=None,
     reuse_deltas=True,
-):
+) -> None:
     """Pack objects into a file.
 
     Args:
@@ -1853,7 +1853,7 @@ def ls_tree(
     outstream=sys.stdout,
     recursive=False,
     name_only=False,
-):
+) -> None:
     """List contents of a tree.
 
     Args:
@@ -1864,7 +1864,7 @@ def ls_tree(
       name_only: Only print item name
     """
 
-    def list_tree(store, treeid, base):
+    def list_tree(store, treeid, base) -> None:
         for name, mode, sha in store[treeid].iteritems():
             if base:
                 name = posixpath.join(base, name)
@@ -1880,7 +1880,7 @@ def ls_tree(
         list_tree(r.object_store, tree.id, "")
 
 
-def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]):
+def remote_add(repo, name: Union[bytes, str], url: Union[bytes, str]) -> None:
     """Add a remote.
 
     Args:
@@ -1901,7 +1901,7 @@ def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]):
         c.write_to_path()
 
 
-def remote_remove(repo: Repo, name: Union[bytes, str]):
+def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
     """Remove a remote.
 
     Args:
@@ -1938,7 +1938,7 @@ def check_ignore(repo, paths, no_index=False):
                 yield path
 
 
-def update_head(repo, target, detached=False, new_branch=None):
+def update_head(repo, target, detached=False, new_branch=None) -> None:
     """Update HEAD to point at a new branch/commit.
 
     Note that this does not actually update the working tree.
@@ -1966,7 +1966,7 @@ def update_head(repo, target, detached=False, new_branch=None):
             r.refs.set_symbolic_ref(b"HEAD", to_set)
 
 
-def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None):
+def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None) -> None:
     """Reset the file to specific commit or branch.
 
     Args:
@@ -2010,7 +2010,7 @@ def _update_head_during_checkout_branch(repo, target):
     return checkout_target
 
 
-def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
+def checkout_branch(repo, target: Union[bytes, str], force: bool = False) -> None:
     """Switch branches or restore working tree files.
 
     The implementation of this function will probably not scale well
@@ -2143,7 +2143,7 @@ def stash_list(repo):
         return enumerate(list(stash.stashes()))
 
 
-def stash_push(repo):
+def stash_push(repo) -> None:
     """Push a new stash onto the stack."""
     with open_repo_closing(repo) as r:
         from .stash import Stash
@@ -2152,16 +2152,16 @@ def stash_push(repo):
         stash.push()
 
 
-def stash_pop(repo, index):
+def stash_pop(repo) -> None:
     """Pop a stash from the stack."""
     with open_repo_closing(repo) as r:
         from .stash import Stash
 
         stash = Stash.from_repo(r)
-        stash.pop(index)
+        stash.pop()
 
 
-def stash_drop(repo, index):
+def stash_drop(repo, index) -> None:
     """Drop a stash from the stack."""
     with open_repo_closing(repo) as r:
         from .stash import Stash
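
The NoneStream hunk near the top of this file is a small but instructive fix: typeshed declares RawIOBase.readall() as returning bytes, so the type-correct "nothing" is an empty bytes object rather than None, while read/readinto/write may narrow their declared returns to plain None. A self-contained sketch of such a do-nothing stream (NullStream is a made-up name):

from io import RawIOBase


class NullStream(RawIOBase):
    """Illustrative do-nothing stream."""

    def read(self, size: int = -1) -> None:
        return None

    def readall(self) -> bytes:
        # readall() is declared to return bytes, so return b"" instead of None.
        return b""

    def readinto(self, b) -> None:
        # Narrowing the declared int-or-None return to None is a valid override.
        return None

    def write(self, b) -> None:
        return None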

+ 11 - 10
dulwich/protocol.py

@@ -23,6 +23,7 @@
 
 from io import BytesIO
 from os import SEEK_END
+from typing import Optional
 
 import dulwich
 
@@ -221,9 +222,9 @@ class Protocol:
         self.write = write
         self._close = close
         self.report_activity = report_activity
-        self._readahead = None
+        self._readahead: Optional[BytesIO] = None
 
-    def close(self):
+    def close(self) -> None:
         if self._close:
             self._close()
 
@@ -270,7 +271,7 @@ class Protocol:
                 )
             return pkt_contents
 
-    def eof(self):
+    def eof(self) -> bool:
         """Test whether the protocol stream has reached EOF.
 
         Note that this refers to the actual stream EOF and not just a
@@ -285,7 +286,7 @@ class Protocol:
         self.unread_pkt_line(next_line)
         return False
 
-    def unread_pkt_line(self, data):
+    def unread_pkt_line(self, data) -> None:
         """Unread a single line of data into the readahead buffer.
 
         This method can be used to unread a single pkt-line into a fixed
@@ -312,7 +313,7 @@ class Protocol:
             yield pkt
             pkt = self.read_pkt_line()
 
-    def write_pkt_line(self, line):
+    def write_pkt_line(self, line) -> None:
         """Sends a pkt-line to the remote git process.
 
         Args:
@@ -327,7 +328,7 @@ class Protocol:
         except OSError as exc:
             raise GitProtocolError(str(exc)) from exc
 
-    def write_sideband(self, channel, blob):
+    def write_sideband(self, channel, blob) -> None:
         """Write multiplexed data to the sideband.
 
         Args:
@@ -341,7 +342,7 @@ class Protocol:
             self.write_pkt_line(bytes(bytearray([channel])) + blob[:65515])
             blob = blob[65515:]
 
-    def send_cmd(self, cmd, *args):
+    def send_cmd(self, cmd, *args) -> None:
         """Send a command and some arguments to a git server.
 
         Only used for the TCP git protocol (git://).
@@ -531,7 +532,7 @@ class BufferedPktLineWriter:
         self._wbuf = BytesIO()
         self._buflen = 0
 
-    def write(self, data):
+    def write(self, data) -> None:
         """Write data, wrapping it in a pkt-line."""
         line = pkt_line(data)
         line_len = len(line)
@@ -546,7 +547,7 @@ class BufferedPktLineWriter:
         self._wbuf.write(saved)
         self._buflen += len(saved)
 
-    def flush(self):
+    def flush(self) -> None:
         """Flush all data from the buffer."""
         data = self._wbuf.getvalue()
         if data:
@@ -562,7 +563,7 @@ class PktLineParser:
         self.handle_pkt = handle_pkt
         self._readahead = BytesIO()
 
-    def parse(self, data):
+    def parse(self, data) -> None:
         """Parse a fragment of data and call back for any completed packets."""
         self._readahead.write(data)
         buf = self._readahead.getvalue()
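
For context on what write_pkt_line and BufferedPktLineWriter.write are framing: the git wire protocol's pkt-line format prefixes each payload with a four-digit hex length that includes the four header bytes themselves. A simplified, standalone version of that framing (dulwich's own pkt_line additionally handles the None flush-packet case):

def pkt_line(data: bytes) -> bytes:
    # 4 hex digits of total length (header included), then the payload.
    return ("%04x" % (len(data) + 4)).encode("ascii") + data


assert pkt_line(b"want 0123\n") == b"000ewant 0123\n"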

+ 1 - 1
dulwich/reflog.py

@@ -90,7 +90,7 @@ def read_reflog(f):
         yield parse_reflog_line(line)
 
 
-def drop_reflog_entry(f, index, rewrite=False):
+def drop_reflog_entry(f, index, rewrite=False) -> None:
     """Drop the specified reflog entry.
 
     Args:

+ 30 - 30
dulwich/refs.py

@@ -24,7 +24,7 @@
 import os
 import warnings
 from contextlib import suppress
-from typing import Any, Optional
+from typing import Any, Optional, Iterator
 
 from .errors import PackedRefsException, RefFormatError
 from .file import GitFile, ensure_dir_exists
@@ -65,7 +65,7 @@ def parse_symref_value(contents: bytes) -> bytes:
     raise ValueError(contents)
 
 
-def check_ref_format(refname: Ref):
+def check_ref_format(refname: Ref) -> bool:
     """Check if a refname is correctly formatted.
 
     Implements all the same rules as git-check-ref-format[1].
@@ -114,7 +114,7 @@ class RefsContainer:
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> None:
         if self._logger is None:
             return
         if message is None:
@@ -129,7 +129,7 @@ class RefsContainer:
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> None:
         """Make a ref point at another ref.
 
         Args:
@@ -139,7 +139,7 @@ class RefsContainer:
         """
         raise NotImplementedError(self.set_symbolic_ref)
 
-    def get_packed_refs(self):
+    def get_packed_refs(self) -> dict[Ref, ObjectID]:
         """Get contents of the packed-refs file.
 
         Returns: Dictionary mapping ref names to SHA1s
@@ -149,7 +149,7 @@ class RefsContainer:
         """
         raise NotImplementedError(self.get_packed_refs)
 
-    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]):
+    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None:
         """Add the given refs as packed refs.
 
         Args:
@@ -158,7 +158,7 @@ class RefsContainer:
         """
         raise NotImplementedError(self.add_packed_refs)
 
-    def get_peeled(self, name):
+    def get_peeled(self, name) -> Optional[ObjectID]:
         """Return the cached peeled value of a ref, if available.
 
         Args:
@@ -178,7 +178,7 @@ class RefsContainer:
         timezone: Optional[bytes] = None,
         message: Optional[bytes] = None,
         prune: bool = False,
-    ):
+    ) -> None:
         if prune:
             to_delete = set(self.subkeys(base))
         else:
@@ -198,7 +198,7 @@ class RefsContainer:
         for ref in to_delete:
             self.remove_if_equals(b"/".join((base, ref)), None, message=message)
 
-    def allkeys(self):
+    def allkeys(self) -> Iterator[Ref]:
         """All refs present in this container."""
         raise NotImplementedError(self.allkeys)
 
@@ -249,7 +249,7 @@ class RefsContainer:
 
         return ret
 
-    def _check_refname(self, name):
+    def _check_refname(self, name) -> None:
         """Ensure a refname is valid and lives in refs or is HEAD.
 
         HEAD is not a valid refname according to git-check-ref-format, but this
@@ -281,7 +281,7 @@ class RefsContainer:
             contents = self.get_packed_refs().get(refname, None)
         return contents
 
-    def read_loose_ref(self, name):
+    def read_loose_ref(self, name) -> bytes:
         """Read a loose reference and return its contents.
 
         Args:
@@ -335,7 +335,7 @@ class RefsContainer:
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref.
 
         This method follows all symbolic references if applicable for the
@@ -354,7 +354,7 @@ class RefsContainer:
 
     def add_if_new(
         self, name, ref, committer=None, timestamp=None, timezone=None, message=None
-    ):
+    ) -> bool:
         """Add a new reference only if it does not already exist.
 
         Args:
@@ -389,7 +389,7 @@ class RefsContainer:
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> bool:
         """Remove a refname only if it currently equals old_ref.
 
         This method does not follow symbolic references, even if applicable for
@@ -458,7 +458,7 @@ class DictRefsContainer(RefsContainer):
     def get_packed_refs(self):
         return {}
 
-    def _notify(self, ref, newsha):
+    def _notify(self, ref, newsha) -> None:
         for watcher in self._watchers:
             watcher._notify((ref, newsha))
 
@@ -470,7 +470,7 @@ class DictRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> None:
         old = self.follow(name)[-1]
         new = SYMREF + other
         self._refs[name] = new
@@ -494,7 +494,7 @@ class DictRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> bool:
         if old_ref is not None and self._refs.get(name, ZERO_SHA) != old_ref:
             return False
         realnames, _ = self.follow(name)
@@ -522,7 +522,7 @@ class DictRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message: Optional[bytes] = None,
-    ):
+    ) -> bool:
         if name in self._refs:
             return False
         self._refs[name] = ref
@@ -546,7 +546,7 @@ class DictRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> bool:
         if old_ref is not None and self._refs.get(name, ZERO_SHA) != old_ref:
             return False
         try:
@@ -569,14 +569,14 @@ class DictRefsContainer(RefsContainer):
     def get_peeled(self, name):
         return self._peeled.get(name)
 
-    def _update(self, refs):
+    def _update(self, refs) -> None:
         """Update multiple refs; intended only for testing."""
         # TODO(dborowitz): replace this with a public function that uses
         # set_if_equal.
         for ref, sha in refs.items():
             self.set_if_equals(ref, None, sha)
 
-    def _update_peeled(self, peeled):
+    def _update_peeled(self, peeled) -> None:
         """Update cached peeled refs; intended only for testing."""
         self._peeled.update(peeled)
 
@@ -704,7 +704,7 @@ class DiskRefsContainer(RefsContainer):
                         self._packed_refs[name] = sha
         return self._packed_refs
 
-    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]):
+    def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None:
         """Add the given refs as packed refs.
 
         Args:
@@ -789,7 +789,7 @@ class DiskRefsContainer(RefsContainer):
             # errors depending on the specific operating system
             return None
 
-    def _remove_packed_ref(self, name):
+    def _remove_packed_ref(self, name) -> None:
         if self._packed_refs is None:
             return
         filename = os.path.join(self.path, b"packed-refs")
@@ -818,7 +818,7 @@ class DiskRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> None:
         """Make a ref point at another ref.
 
         Args:
@@ -857,7 +857,7 @@ class DiskRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> bool:
         """Set a refname to new_ref only if it currently equals old_ref.
 
         This method follows all symbolic references, and can be used to perform
@@ -925,7 +925,7 @@ class DiskRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message: Optional[bytes] = None,
-    ):
+    ) -> bool:
         """Add a new reference only if it does not already exist.
 
         This method follows symrefs, and only ensures that the last ref in the
@@ -976,7 +976,7 @@ class DiskRefsContainer(RefsContainer):
         timestamp=None,
         timezone=None,
         message=None,
-    ):
+    ) -> bool:
         """Remove a refname only if it currently equals old_ref.
 
         This method does not follow symbolic references. It can be used to
@@ -1111,7 +1111,7 @@ def read_packed_refs_with_peeled(f):
         yield (sha, name, None)
 
 
-def write_packed_refs(f, packed_refs, peeled_refs=None):
+def write_packed_refs(f, packed_refs, peeled_refs=None) -> None:
     """Write a packed refs file.
 
     Args:
@@ -1180,7 +1180,7 @@ def split_peeled_refs(refs):
     return regular, peeled
 
 
-def _set_origin_head(refs, origin, origin_head):
+def _set_origin_head(refs, origin, origin_head) -> None:
     # set refs/remotes/origin/HEAD
     origin_base = b"refs/remotes/" + origin + b"/"
     if origin_head and origin_head.startswith(LOCAL_BRANCH_PREFIX):
@@ -1251,7 +1251,7 @@ def _import_remote_refs(
     message: Optional[bytes] = None,
     prune: bool = False,
     prune_tags: bool = False,
-):
+) -> None:
     stripped_refs = strip_peeled_refs(refs)
     branches = {
         n[len(LOCAL_BRANCH_PREFIX) :]: v

+ 22 - 22
dulwich/repo.py

@@ -232,7 +232,7 @@ def get_user_identity(config: "StackedConfig", kind: Optional[str] = None) -> by
     return user + b" <" + email + b">"
 
 
-def check_user_identity(identity):
+def check_user_identity(identity) -> None:
     """Verify that a user identity is formatted correctly.
 
     Args:
@@ -304,7 +304,7 @@ def serialize_graftpoints(graftpoints: dict[bytes, list[bytes]]) -> bytes:
     return b"\n".join(graft_lines)
 
 
-def _set_filesystem_hidden(path):
+def _set_filesystem_hidden(path) -> None:
     """Mark path as to be hidden if supported by platform and filesystem.
 
     On win32 uses SetFileAttributesW api:
@@ -427,7 +427,7 @@ class BaseRepo:
         """
         raise NotImplementedError(self.get_named_file)
 
-    def _put_named_file(self, path: str, contents: bytes):
+    def _put_named_file(self, path: str, contents: bytes) -> None:
         """Write a file to the control dir with the given name and contents.
 
         Args:
@@ -436,7 +436,7 @@ class BaseRepo:
         """
         raise NotImplementedError(self._put_named_file)
 
-    def _del_named_file(self, path: str):
+    def _del_named_file(self, path: str) -> None:
         """Delete a file in the control directory with the given name."""
         raise NotImplementedError(self._del_named_file)
 
@@ -549,7 +549,7 @@ class BaseRepo:
                 return None
 
             class DummyMissingObjectFinder:
-                def get_remote_has(self):
+                def get_remote_has(self) -> None:
                     return None
 
                 def __len__(self) -> int:
@@ -702,7 +702,7 @@ class BaseRepo:
         """Retrieve the worktree config object."""
         raise NotImplementedError(self.get_worktree_config)
 
-    def get_description(self):
+    def get_description(self) -> Optional[str]:
         """Retrieve the description for this repository.
 
         Returns: String with the description of the repository
@@ -710,7 +710,7 @@ class BaseRepo:
         """
         raise NotImplementedError(self.get_description)
 
-    def set_description(self, description):
+    def set_description(self, description) -> None:
         """Set the description for this repository.
 
         Args:
@@ -748,7 +748,7 @@ class BaseRepo:
         with f:
             return {line.strip() for line in f}
 
-    def update_shallow(self, new_shallow, new_unshallow):
+    def update_shallow(self, new_shallow, new_unshallow) -> None:
         """Update the list of shallow objects.
 
         Args:
@@ -884,7 +884,7 @@ class BaseRepo:
         )
         return get_user_identity(config)
 
-    def _add_graftpoints(self, updated_graftpoints: dict[bytes, list[bytes]]):
+    def _add_graftpoints(self, updated_graftpoints: dict[bytes, list[bytes]]) -> None:
         """Add or modify graftpoints.
 
         Args:
@@ -1215,7 +1215,7 @@ class Repo(BaseRepo):
 
     def _write_reflog(
         self, ref, old_sha, new_sha, committer, timestamp, timezone, message
-    ):
+    ) -> None:
         from .reflog import format_reflog_line
 
         path = os.path.join(self.controldir(), "logs", os.fsdecode(ref))
@@ -1305,7 +1305,7 @@ class Repo(BaseRepo):
         # TODO(jelmer): Actually probe disk / look at filesystem
         return sys.platform != "win32"
 
-    def _put_named_file(self, path, contents):
+    def _put_named_file(self, path, contents) -> None:
         """Write a file to the control dir with the given name and contents.
 
         Args:
@@ -1316,7 +1316,7 @@ class Repo(BaseRepo):
         with GitFile(os.path.join(self.controldir(), path), "wb") as f:
             f.write(contents)
 
-    def _del_named_file(self, path):
+    def _del_named_file(self, path) -> None:
         try:
             os.unlink(os.path.join(self.controldir(), path))
         except FileNotFoundError:
@@ -1362,7 +1362,7 @@ class Repo(BaseRepo):
             raise NoIndexPresent
         return Index(self.index_path())
 
-    def has_index(self):
+    def has_index(self) -> bool:
         """Check if an index is present."""
         # Bare repos must never have index files; non-bare repos may have a
         # missing index file, which is treated as empty.
@@ -1434,7 +1434,7 @@ class Repo(BaseRepo):
                     index[tree_path] = index_entry_from_stat(st, blob.id)
         index.write()
 
-    def unstage(self, fs_paths: list[str]):
+    def unstage(self, fs_paths: list[str]) -> None:
         """Unstage specific file in the index
         Args:
           fs_paths: a list of files to unstage,
@@ -1613,7 +1613,7 @@ class Repo(BaseRepo):
             symlink_fn = symlink
         else:
 
-            def symlink_fn(source, target):  # type: ignore
+            def symlink_fn(source, target) -> None:  # type: ignore
                 with open(
                     target, "w" + ("b" if isinstance(source, bytes) else "")
                 ) as f:
@@ -1670,7 +1670,7 @@ class Repo(BaseRepo):
     def __repr__(self) -> str:
         return f"<Repo at {self.path!r}>"
 
-    def set_description(self, description):
+    def set_description(self, description) -> None:
         """Set the description for this repository.
 
         Args:
@@ -1801,7 +1801,7 @@ class Repo(BaseRepo):
 
     create = init_bare
 
-    def close(self):
+    def close(self) -> None:
         """Close any files opened by this repository."""
         self.object_store.close()
 
@@ -1846,10 +1846,10 @@ class MemoryRepo(BaseRepo):
         self._config = ConfigFile()
         self._description = None
 
-    def _append_reflog(self, *args):
+    def _append_reflog(self, *args) -> None:
         self._reflog.append(args)
 
-    def set_description(self, description):
+    def set_description(self, description) -> None:
         self._description = description
 
     def get_description(self):
@@ -1869,7 +1869,7 @@ class MemoryRepo(BaseRepo):
         """
         return sys.platform != "win32"
 
-    def _put_named_file(self, path, contents):
+    def _put_named_file(self, path, contents) -> None:
         """Write a file to the control dir with the given name and contents.
 
         Args:
@@ -1878,7 +1878,7 @@ class MemoryRepo(BaseRepo):
         """
         self._named_files[path] = contents
 
-    def _del_named_file(self, path):
+    def _del_named_file(self, path) -> None:
         try:
             del self._named_files[path]
         except KeyError:
@@ -1900,7 +1900,7 @@ class MemoryRepo(BaseRepo):
             return None
         return BytesIO(contents)
 
-    def open_index(self):
+    def open_index(self) -> "Index":
         """Fail to open index for this repo, since it is bare.
 
         Raises:

+ 50 - 42
dulwich/server.py

@@ -51,8 +51,7 @@ import time
 import zlib
 from collections.abc import Iterable
 from functools import partial
-from typing import Optional, cast
-from typing import Protocol as TypingProtocol
+from typing import Optional, cast, Iterator, Protocol as TypingProtocol, Type
 
 from dulwich import log_utils
 
@@ -121,7 +120,7 @@ logger = log_utils.getLogger(__name__)
 class Backend:
     """A backend for the Git smart server implementation."""
 
-    def open_repository(self, path):
+    def open_repository(self, path) -> BackendRepo:
         """Open the repository at a path.
 
         Args:
@@ -164,7 +163,7 @@ class BackendRepo(TypingProtocol):
 
     def find_missing_objects(
         self, determine_wants, graph_walker, progress, get_tagged=None
-    ):
+    ) -> Iterator[ObjectID]:
         """Yield the objects required for a list of commits.
 
         Args:
@@ -181,7 +180,7 @@ class DictBackend(Backend):
     def __init__(self, repos) -> None:
         self.repos = repos
 
-    def open_repository(self, path: str) -> BaseRepo:
+    def open_repository(self, path: str) -> BackendRepo:
         logger.debug("Opening repository at %s", path)
         try:
             return self.repos[path]
@@ -314,15 +313,15 @@ class UploadPackHandler(PackHandler):
             CAPABILITY_OFS_DELTA,
         )
 
-    def progress(self, message: bytes):
+    def progress(self, message: bytes) -> None:
         pass
 
-    def _start_pack_send_phase(self):
+    def _start_pack_send_phase(self) -> None:
         if self.has_capability(CAPABILITY_SIDE_BAND_64K):
             # The provided haves are processed, and it is safe to send side-
             # band data now.
             if not self.has_capability(CAPABILITY_NO_PROGRESS):
-                self.progress = partial(
+                self.progress = partial(  # type: ignore
                     self.proto.write_sideband, SIDE_BAND_CHANNEL_PROGRESS
                 )
 
@@ -364,7 +363,7 @@ class UploadPackHandler(PackHandler):
                 tagged[peeled_sha] = sha
         return tagged
 
-    def handle(self):
+    def handle(self) -> None:
         # Note the fact that client is only processing responses related
         # to the have lines it sent, and any other data (including side-
         # band) will be be considered a fatal error.
@@ -501,7 +500,7 @@ def _find_shallow(store: ObjectContainer, heads, depth):
     return shallow, not_shallow
 
 
-def _want_satisfied(store: ObjectContainer, haves, want, earliest):
+def _want_satisfied(store: ObjectContainer, haves, want, earliest) -> bool:
     o = store[want]
     pending = collections.deque([o])
     known = {want}
@@ -524,7 +523,7 @@ def _want_satisfied(store: ObjectContainer, haves, want, earliest):
     return False
 
 
-def _all_wants_satisfied(store: ObjectContainer, haves, wants):
+def _all_wants_satisfied(store: ObjectContainer, haves, wants) -> bool:
     """Check whether all the current wants are satisfied by a set of haves.
 
     Args:
@@ -547,6 +546,15 @@ def _all_wants_satisfied(store: ObjectContainer, haves, wants):
     return True
 
 
+class AckGraphWalkerImpl:
+
+    def __init__(self, graph_walker):
+        raise NotImplementedError
+
+    def ack(self, have_ref: ObjectID) -> None:
+        raise NotImplementedError
+
+
 class _ProtocolGraphWalker:
     """A graph walker that knows the git protocol.
 
@@ -578,7 +586,7 @@ class _ProtocolGraphWalker:
         self._cached = False
         self._cache: list[bytes] = []
         self._cache_index = 0
-        self._impl = None
+        self._impl: Optional[AckGraphWalkerImpl] = None
 
     def determine_wants(self, heads, depth=None):
         """Determine the wants for a set of heads.
@@ -662,12 +670,12 @@ class _ProtocolGraphWalker:
 
         return want_revs
 
-    def unread_proto_line(self, command, value):
+    def unread_proto_line(self, command, value) -> None:
         if isinstance(value, int):
             value = str(value).encode("ascii")
         self.proto.unread_pkt_line(command + b" " + value)
 
-    def nak(self):
+    def nak(self) -> None:
         pass
 
     def ack(self, have_ref):
@@ -675,7 +683,7 @@ class _ProtocolGraphWalker:
             raise ValueError(f"invalid sha {have_ref!r}")
         return self._impl.ack(have_ref)
 
-    def reset(self):
+    def reset(self) -> None:
         self._cached = True
         self._cache_index = 0
 
@@ -703,7 +711,7 @@ class _ProtocolGraphWalker:
         """
         return _split_proto_line(self.proto.read_pkt_line(), allowed)
 
-    def _handle_shallow_request(self, wants):
+    def _handle_shallow_request(self, wants) -> None:
         while True:
             command, val = self.read_proto_line((COMMAND_DEEPEN, COMMAND_SHALLOW))
             if command == COMMAND_DEEPEN:
@@ -727,21 +735,21 @@ class _ProtocolGraphWalker:
 
         self.proto.write_pkt_line(None)
 
-    def notify_done(self):
+    def notify_done(self) -> None:
         # relay the message down to the handler.
         self.handler.notify_done()
 
-    def send_ack(self, sha, ack_type=b""):
+    def send_ack(self, sha, ack_type=b"") -> None:
         self.proto.write_pkt_line(format_ack_line(sha, ack_type))
 
-    def send_nak(self):
+    def send_nak(self) -> None:
         self.proto.write_pkt_line(NAK_LINE)
 
     def handle_done(self, done_required, done_received):
         # Delegate this to the implementation.
         return self._impl.handle_done(done_required, done_received)
 
-    def set_wants(self, wants):
+    def set_wants(self, wants) -> None:
         self._wants = wants
 
     def all_wants_satisfied(self, haves):
@@ -754,8 +762,8 @@ class _ProtocolGraphWalker:
         """
         return _all_wants_satisfied(self.store, haves, self._wants)
 
-    def set_ack_type(self, ack_type):
-        impl_classes = {
+    def set_ack_type(self, ack_type) -> None:
+        impl_classes: dict[int, Type[AckGraphWalkerImpl]] = {
             MULTI_ACK: MultiAckGraphWalkerImpl,
             MULTI_ACK_DETAILED: MultiAckDetailedGraphWalkerImpl,
             SINGLE_ACK: SingleAckGraphWalkerImpl,
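
The new AckGraphWalkerImpl base class introduced earlier in this file gives _impl and this dispatch table a precise type: with a shared base, dict[int, Type[AckGraphWalkerImpl]] tells the checker that whatever class is looked up and instantiated provides ack(). A minimal sketch of the pattern with hypothetical names:

from typing import Type

SINGLE_ACK = 0


class AckImplBase:
    """Shared interface so the dispatch table below can be typed precisely."""

    def ack(self, have_ref: bytes) -> None:
        raise NotImplementedError


class SingleAckImpl(AckImplBase):
    def __init__(self) -> None:
        self.acked: list[bytes] = []

    def ack(self, have_ref: bytes) -> None:
        self.acked.append(have_ref)


impl_classes: dict[int, Type[AckImplBase]] = {SINGLE_ACK: SingleAckImpl}
impl = impl_classes[SINGLE_ACK]()  # the checker knows impl provides .ack()
impl.ack(b"a" * 40)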
@@ -766,14 +774,14 @@ class _ProtocolGraphWalker:
 _GRAPH_WALKER_COMMANDS = (COMMAND_HAVE, COMMAND_DONE, None)
 
 
-class SingleAckGraphWalkerImpl:
+class SingleAckGraphWalkerImpl(AckGraphWalkerImpl):
     """Graph walker implementation that speaks the single-ack protocol."""
 
     def __init__(self, walker) -> None:
         self.walker = walker
         self._common: list[bytes] = []
 
-    def ack(self, have_ref):
+    def ack(self, have_ref) -> None:
         if not self._common:
             self.walker.send_ack(have_ref)
             self._common.append(have_ref)
@@ -789,7 +797,7 @@ class SingleAckGraphWalkerImpl:
 
     __next__ = next
 
-    def handle_done(self, done_required, done_received):
+    def handle_done(self, done_required, done_received) -> bool:
         if not self._common:
             self.walker.send_nak()
 
@@ -810,7 +818,7 @@ class SingleAckGraphWalkerImpl:
         return True
 
 
-class MultiAckGraphWalkerImpl:
+class MultiAckGraphWalkerImpl(AckGraphWalkerImpl):
     """Graph walker implementation that speaks the multi-ack protocol."""
 
     def __init__(self, walker) -> None:
@@ -818,7 +826,7 @@ class MultiAckGraphWalkerImpl:
         self._found_base = False
         self._common: list[bytes] = []
 
-    def ack(self, have_ref):
+    def ack(self, have_ref) -> None:
         self._common.append(have_ref)
         if not self._found_base:
             self.walker.send_ack(have_ref, b"continue")
@@ -845,7 +853,7 @@ class MultiAckGraphWalkerImpl:
 
     __next__ = next
 
-    def handle_done(self, done_required, done_received):
+    def handle_done(self, done_required, done_received) -> bool:
         if done_required and not done_received:
             # we are not done, especially when done is required; skip
             # the pack for this request and especially do not handle
@@ -869,14 +877,14 @@ class MultiAckGraphWalkerImpl:
         return True
 
 
-class MultiAckDetailedGraphWalkerImpl:
+class MultiAckDetailedGraphWalkerImpl(AckGraphWalkerImpl):
     """Graph walker implementation speaking the multi-ack-detailed protocol."""
 
     def __init__(self, walker) -> None:
         self.walker = walker
         self._common: list[bytes] = []
 
-    def ack(self, have_ref):
+    def ack(self, have_ref) -> None:
         # Should only be called iff have_ref is common
         self._common.append(have_ref)
         self.walker.send_ack(have_ref, b"common")
@@ -910,7 +918,7 @@ class MultiAckDetailedGraphWalkerImpl:
 
     __next__ = next
 
-    def handle_done(self, done_required, done_received):
+    def handle_done(self, done_required, done_received) -> bool:
         if done_required and not done_received:
             # we are not done, especially when done is required; skip
             # the pack for this request and especially do not handle
@@ -1022,14 +1030,14 @@ class ReceivePackHandler(PackHandler):
             )
             write = writer.write
 
-            def flush():
+            def flush() -> None:
                 writer.flush()
                 self.proto.write_pkt_line(None)
 
         else:
             write = self.proto.write_pkt_line
 
-            def flush():
+            def flush() -> None:
                 pass
 
         for name, msg in status:
@@ -1042,7 +1050,7 @@ class ReceivePackHandler(PackHandler):
         write(None)
         flush()
 
-    def _on_post_receive(self, client_refs):
+    def _on_post_receive(self, client_refs) -> None:
         hook = self.repo.hooks.get("post-receive", None)
         if not hook:
             return
@@ -1155,7 +1163,7 @@ class TCPGitRequestHandler(socketserver.StreamRequestHandler):
         self.handlers = handlers
         socketserver.StreamRequestHandler.__init__(self, *args, **kwargs)
 
-    def handle(self):
+    def handle(self) -> None:
         proto = ReceivableProtocol(self.connection.recv, self.wfile.write)
         command, args = proto.read_cmd()
         logger.info("Handling %s request, args=%s", command, args)
@@ -1163,7 +1171,7 @@ class TCPGitRequestHandler(socketserver.StreamRequestHandler):
         cls = self.handlers.get(command, None)
         if not callable(cls):
             raise GitProtocolError(f"Invalid service {command}")
-        h = cls(self.server.backend, args, proto)
+        h = cls(self.server.backend, args, proto)  # type: ignore
         h.handle()
 
 
@@ -1182,18 +1190,18 @@ class TCPGitServer(socketserver.TCPServer):
         logger.info("Listening for TCP connections on %s:%d", listen_addr, port)
         socketserver.TCPServer.__init__(self, (listen_addr, port), self._make_handler)
 
-    def verify_request(self, request, client_address):
+    def verify_request(self, request, client_address) -> bool:
         logger.info("Handling request from %s", client_address)
         return True
 
-    def handle_error(self, request, client_address):
+    def handle_error(self, request, client_address) -> None:
         logger.exception(
             "Exception happened during processing of request " "from %s",
             client_address,
         )
 
 
-def main(argv=sys.argv):
+def main(argv=sys.argv) -> None:
     """Entry point for starting a TCP git server."""
     import optparse
 
@@ -1228,7 +1236,7 @@ def main(argv=sys.argv):
 
 def serve_command(
     handler_cls, argv=sys.argv, backend=None, inf=sys.stdin, outf=sys.stdout
-):
+) -> int:
     """Serve a single command.
 
     This is mostly useful for the implementation of commands used by e.g.
@@ -1245,7 +1253,7 @@ def serve_command(
     if backend is None:
         backend = FileSystemBackend()
 
-    def send_fn(data):
+    def send_fn(data) -> None:
         outf.write(data)
         outf.flush()
 
@@ -1268,7 +1276,7 @@ def generate_objects_info_packs(repo):
         yield (b"P " + os.fsencode(pack.data.filename) + b"\n")
 
 
-def update_server_info(repo):
+def update_server_info(repo) -> None:
     """Generate server info for dumb file access.
 
     This generates info/refs and objects/info/packs,

+ 1 - 1
dulwich/stash.py

@@ -55,7 +55,7 @@ class Stash:
         """Create a new stash from a Repo object."""
         return cls(repo)
 
-    def drop(self, index):
+    def drop(self, index) -> None:
         """Drop entry with specified index."""
         with open(self._reflog_path, "rb+") as f:
             drop_reflog_entry(f, index, rewrite=True)

+ 53 - 25
dulwich/tests/test_object_store.py

@@ -26,15 +26,22 @@ from dulwich.index import commit_tree
 from dulwich.object_store import (
     iter_tree_contents,
     peel_sha,
+    PackBasedObjectStore,
 )
 from dulwich.objects import (
     Blob,
     TreeEntry,
+    Tree,
 )
 from dulwich.protocol import DEPTH_INFINITE
 
 from .utils import make_object, make_tag
 
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from dulwich.object_store import BaseObjectStore
+
 try:
     from unittest.mock import patch
 except ImportError:
@@ -45,19 +52,36 @@ testobject = make_object(Blob, data=b"yummy data")
 
 
 class ObjectStoreTests:
-    def test_determine_wants_all(self):
+    store: "BaseObjectStore"
+
+    def assertEqual(self, a, b) -> None:
+        raise NotImplementedError
+
+    def assertRaises(self, exc, func) -> None:
+        raise NotImplementedError
+
+    def assertNotIn(self, a, b) -> None:
+        raise NotImplementedError
+
+    def assertNotEqual(self, a, b) -> None:
+        raise NotImplementedError
+
+    def assertIn(self, a, b) -> None:
+        raise NotImplementedError
+
+    def test_determine_wants_all(self) -> None:
         self.assertEqual(
             [b"1" * 40],
             self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}),
         )
 
-    def test_determine_wants_all_zero(self):
+    def test_determine_wants_all_zero(self) -> None:
         self.assertEqual(
             [], self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40})
         )
 
     @skipUnless(patch, "Required mock.patch")
-    def test_determine_wants_all_depth(self):
+    def test_determine_wants_all_depth(self) -> None:
         self.store.add_object(testobject)
         refs = {b"refs/heads/foo": testobject.id}
         with patch.object(self.store, "_get_depth", return_value=1) as m:
@@ -74,7 +98,7 @@ class ObjectStoreTests:
                 [testobject.id], self.store.determine_wants_all(refs, depth=2)
             )
 
-    def test_get_depth(self):
+    def test_get_depth(self) -> None:
         self.assertEqual(0, self.store._get_depth(testobject.id))
 
         self.store.add_object(testobject)
@@ -92,24 +116,24 @@ class ObjectStoreTests:
             ),
         )
 
-    def test_iter(self):
+    def test_iter(self) -> None:
         self.assertEqual([], list(self.store))
 
-    def test_get_nonexistant(self):
+    def test_get_nonexistant(self) -> None:
         self.assertRaises(KeyError, lambda: self.store[b"a" * 40])
 
-    def test_contains_nonexistant(self):
+    def test_contains_nonexistant(self) -> None:
         self.assertNotIn(b"a" * 40, self.store)
 
-    def test_add_objects_empty(self):
+    def test_add_objects_empty(self) -> None:
         self.store.add_objects([])
 
-    def test_add_commit(self):
+    def test_add_commit(self) -> None:
         # TODO: Argh, no way to construct Git commit objects without
         # access to a serialized form.
         self.store.add_objects([])
 
-    def test_store_resilience(self):
+    def test_store_resilience(self) -> None:
         """Test if updating an existing stored object doesn't erase the
         object from the store.
         """
@@ -123,14 +147,14 @@ class ObjectStoreTests:
         self.assertNotEqual(test_object.id, stored_test_object.id)
         self.assertEqual(stored_test_object.id, test_object_id)
 
-    def test_add_object(self):
+    def test_add_object(self) -> None:
         self.store.add_object(testobject)
         self.assertEqual({testobject.id}, set(self.store))
         self.assertIn(testobject.id, self.store)
         r = self.store[testobject.id]
         self.assertEqual(r, testobject)
 
-    def test_add_objects(self):
+    def test_add_objects(self) -> None:
         data = [(testobject, "mypath")]
         self.store.add_objects(data)
         self.assertEqual({testobject.id}, set(self.store))
@@ -138,7 +162,7 @@ class ObjectStoreTests:
         r = self.store[testobject.id]
         self.assertEqual(r, testobject)
 
-    def test_tree_changes(self):
+    def test_tree_changes(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_b = make_object(Blob, data=b"b")
@@ -163,7 +187,7 @@ class ObjectStoreTests:
             list(self.store.tree_changes(tree1_id, tree2_id, want_unchanged=True)),
         )
 
-    def test_iter_tree_contents(self):
+    def test_iter_tree_contents(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
         blob_c = make_object(Blob, data=b"c")
@@ -184,7 +208,7 @@ class ObjectStoreTests:
         )
         self.assertEqual([], list(iter_tree_contents(self.store, None)))
 
-    def test_iter_tree_contents_include_trees(self):
+    def test_iter_tree_contents_include_trees(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
         blob_c = make_object(Blob, data=b"c")
@@ -198,7 +222,9 @@ class ObjectStoreTests:
         ]
         tree_id = commit_tree(self.store, blobs)
         tree = self.store[tree_id]
+        assert isinstance(tree, Tree)
         tree_ad = self.store[tree[b"ad"][1]]
+        assert isinstance(tree_ad, Tree)
         tree_bd = self.store[tree_ad[b"bd"][1]]
 
         expected = [
@@ -217,7 +243,7 @@ class ObjectStoreTests:
         self.store.add_object(tag)
         return tag
 
-    def test_peel_sha(self):
+    def test_peel_sha(self) -> None:
         self.store.add_object(testobject)
         tag1 = self.make_tag(b"1", testobject)
         tag2 = self.make_tag(b"2", testobject)
@@ -225,18 +251,18 @@ class ObjectStoreTests:
         for obj in [testobject, tag1, tag2, tag3]:
             self.assertEqual((obj, testobject), peel_sha(self.store, obj.id))
 
-    def test_get_raw(self):
+    def test_get_raw(self) -> None:
         self.store.add_object(testobject)
         self.assertEqual(
             (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
         )
 
-    def test_close(self):
+    def test_close(self) -> None:
         # For now, just check that close doesn't barf.
         self.store.add_object(testobject)
         self.store.close()
 
-    def test_iter_prefix(self):
+    def test_iter_prefix(self) -> None:
         self.store.add_object(testobject)
         self.assertEqual([testobject.id], list(self.store.iter_prefix(testobject.id)))
         self.assertEqual(
@@ -247,19 +273,21 @@ class ObjectStoreTests:
         )
         self.assertEqual([testobject.id], list(self.store.iter_prefix(b"")))
 
-    def test_iter_prefix_not_found(self):
+    def test_iter_prefix_not_found(self) -> None:
         self.assertEqual([], list(self.store.iter_prefix(b"1" * 40)))
 
 
 class PackBasedObjectStoreTests(ObjectStoreTests):
-    def tearDown(self):
+    store: PackBasedObjectStore
+
+    def tearDown(self) -> None:
         for pack in self.store.packs:
             pack.close()
 
-    def test_empty_packs(self):
+    def test_empty_packs(self) -> None:
         self.assertEqual([], list(self.store.packs))
 
-    def test_pack_loose_objects(self):
+    def test_pack_loose_objects(self) -> None:
         b1 = make_object(Blob, data=b"yummy data")
         self.store.add_object(b1)
         b2 = make_object(Blob, data=b"more yummy data")
@@ -273,7 +301,7 @@ class PackBasedObjectStoreTests(ObjectStoreTests):
         self.assertNotEqual([], list(self.store.packs))
         self.assertEqual(0, self.store.pack_loose_objects())
 
-    def test_repack(self):
+    def test_repack(self) -> None:
         b1 = make_object(Blob, data=b"yummy data")
         self.store.add_object(b1)
         b2 = make_object(Blob, data=b"more yummy data")
@@ -290,7 +318,7 @@ class PackBasedObjectStoreTests(ObjectStoreTests):
         self.assertEqual(1, len(self.store.packs))
         self.assertEqual(0, self.store.pack_loose_objects())
 
-    def test_repack_existing(self):
+    def test_repack_existing(self) -> None:
         b1 = make_object(Blob, data=b"yummy data")
         self.store.add_object(b1)
         b2 = make_object(Blob, data=b"more yummy data")
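Two of the test_object_store.py changes go beyond adding `-> None`. The `BaseObjectStore` import is added under `TYPE_CHECKING` because it is only needed for the new `store:` class-level annotation (written as a string for the same reason), and the stub `assertEqual`/`assertRaises` methods let the plain mixin type-check even though the real implementations come from `unittest.TestCase` in the concrete subclasses. The `assert isinstance(..., Tree)` lines narrow what `store[tree_id]` returns before `Tree`-specific indexing; a small self-contained illustration, assembled here only as an example using the same dulwich APIs that appear in the hunks:

```python
# Sketch: an object store lookup returns a generic ShaFile, so an isinstance
# assertion narrows it to Tree before Tree-only indexing is used.
from dulwich.index import commit_tree
from dulwich.object_store import MemoryObjectStore
from dulwich.objects import Blob, Tree

store = MemoryObjectStore()
blob = Blob.from_string(b"contents")
store.add_object(blob)
tree_id = commit_tree(store, [(b"path", blob.id, 0o100644)])

obj = store[tree_id]          # typed generically by the store
assert isinstance(obj, Tree)  # runtime check doubles as type narrowing
mode, sha = obj[b"path"]      # Tree.__getitem__ now type-checks
print(mode, sha)
```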

+ 5 - 5
dulwich/tests/utils.py

@@ -71,7 +71,7 @@ def open_repo(name, temp_dir=None):
     return Repo(temp_repo_dir)
 
 
-def tear_down_repo(repo):
+def tear_down_repo(repo) -> None:
     """Tear down a test repository."""
     repo.close()
     temp_dir = os.path.dirname(repo.path.rstrip(os.sep))
@@ -160,7 +160,7 @@ def make_tag(target, **attrs):
 def functest_builder(method, func):
     """Generate a test method that tests the given function."""
 
-    def do_test(self):
+    def do_test(self) -> None:
         method(self, func)
 
     return do_test
@@ -188,7 +188,7 @@ def ext_functest_builder(method, func):
       func: The function implementation to pass to method.
     """
 
-    def do_test(self):
+    def do_test(self) -> None:
         if not isinstance(func, types.BuiltinFunctionType):
             raise SkipTest(f"{func} extension not found")
         method(self, func)
@@ -354,12 +354,12 @@ def setup_warning_catcher():
     caught_warnings = []
     original_showwarning = warnings.showwarning
 
-    def custom_showwarning(*args, **kwargs):
+    def custom_showwarning(*args, **kwargs) -> None:
         caught_warnings.append(args[0])
 
     warnings.showwarning = custom_showwarning
 
-    def restore_showwarning():
+    def restore_showwarning() -> None:
         warnings.showwarning = original_showwarning
 
     return caught_warnings, restore_showwarning

+ 6 - 5
dulwich/walk.py

@@ -139,7 +139,7 @@ class _CommitTimeQueue:
         for commit_id in chain(walker.include, walker.excluded):
             self._push(commit_id)
 
-    def _push(self, object_id: bytes):
+    def _push(self, object_id: bytes) -> None:
         try:
             obj = self._store[object_id]
         except KeyError as exc:
@@ -154,7 +154,7 @@ class _CommitTimeQueue:
             self._pq_set.add(commit.id)
             self._seen.add(commit.id)
 
-    def _exclude_parents(self, commit):
+    def _exclude_parents(self, commit) -> None:
         excluded = self._excluded
         seen = self._seen
         todo = [commit]
@@ -299,7 +299,7 @@ class Walker:
         self._queue = queue_cls(self)
         self._out_queue: collections.deque[WalkEntry] = collections.deque()
 
-    def _path_matches(self, changed_path):
+    def _path_matches(self, changed_path) -> bool:
         if changed_path is None:
             return False
         if self.paths is None:
@@ -314,7 +314,8 @@ class Walker:
                 return True
         return False
 
-    def _change_matches(self, change):
+    def _change_matches(self, change) -> bool:
+        assert self.paths
         if not change:
             return False
 
@@ -329,7 +330,7 @@ class Walker:
             return True
         return False
 
-    def _should_return(self, entry):
+    def _should_return(self, entry) -> Optional[bool]:
         """Determine if a walk entry should be returned..
 
         Args:
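Two of the walk.py changes are more than mechanical: `_should_return` is annotated `Optional[bool]` because it apparently can also fall through and return `None` rather than a definite yes/no, and `assert self.paths` narrows the optional paths attribute before iterating it, presumably because `_change_matches` is only reached when path filtering is active. A small sketch of that narrowing, with invented names:

```python
# Sketch: an assert narrows an Optional attribute, so iterating it below no
# longer triggers "Optional[...] is not iterable"-style errors.
from typing import Optional


class PathFilter:
    def __init__(self, paths: Optional[list[bytes]] = None) -> None:
        self.paths = paths

    def matches(self, changed_path: bytes) -> bool:
        assert self.paths  # caller guarantees paths were configured
        return any(changed_path.startswith(p) for p in self.paths)


f = PathFilter([b"docs/", b"dulwich/"])
print(f.matches(b"dulwich/walk.py"))  # True
```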

+ 12 - 12
dulwich/web.py

@@ -338,7 +338,7 @@ class HTTPGitRequest:
         self._cache_headers: list[tuple[str, str]] = []
         self._headers: list[tuple[str, str]] = []
 
-    def add_header(self, name, value):
+    def add_header(self, name, value) -> None:
         """Add a header to the response."""
         self._headers.append((name, value))
 
@@ -511,35 +511,35 @@ def make_wsgi_chain(*args, **kwargs):
 class ServerHandlerLogger(ServerHandler):
     """ServerHandler that uses dulwich's logger for logging exceptions."""
 
-    def log_exception(self, exc_info):
+    def log_exception(self, exc_info) -> None:
         logger.exception(
             "Exception happened during processing of request",
             exc_info=exc_info,
         )
 
-    def log_message(self, format, *args):
+    def log_message(self, format, *args) -> None:
         logger.info(format, *args)
 
-    def log_error(self, *args):
+    def log_error(self, *args) -> None:
         logger.error(*args)
 
 
 class WSGIRequestHandlerLogger(WSGIRequestHandler):
     """WSGIRequestHandler that uses dulwich's logger for logging exceptions."""
 
-    def log_exception(self, exc_info):
+    def log_exception(self, exc_info) -> None:
         logger.exception(
             "Exception happened during processing of request",
             exc_info=exc_info,
         )
 
-    def log_message(self, format, *args):
+    def log_message(self, format, *args) -> None:
         logger.info(format, *args)
 
-    def log_error(self, *args):
+    def log_error(self, *args) -> None:
         logger.error(*args)
 
-    def handle(self):
+    def handle(self) -> None:
         """Handle a single HTTP request."""
         self.raw_requestline = self.rfile.readline()
         if not self.parse_request():  # An error code has been sent, just exit
@@ -548,19 +548,19 @@ class WSGIRequestHandlerLogger(WSGIRequestHandler):
         handler = ServerHandlerLogger(
             self.rfile, self.wfile, self.get_stderr(), self.get_environ()
         )
-        handler.request_handler = self  # backpointer for logging
-        handler.run(self.server.get_app())
+        handler.request_handler = self   # type: ignore  # backpointer for logging
+        handler.run(self.server.get_app())  # type: ignore
 
 
 class WSGIServerLogger(WSGIServer):
-    def handle_error(self, request, client_address):
+    def handle_error(self, request, client_address) -> None:
         """Handle an error."""
         logger.exception(
             f"Exception happened during processing of request from {client_address!s}"
         )
 
 
-def main(argv=sys.argv):
+def main(argv=sys.argv) -> None:
     """Entry point for starting an HTTP git server."""
     import optparse
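The two `# type: ignore` comments in `WSGIRequestHandlerLogger.handle` follow the same recurring pattern as in server.py: `self.server` is presumably typed as a generic `BaseServer`, so `get_app()` is invisible to the checker, and the `request_handler` backpointer is likely not declared on the `ServerHandler` stubs. A hedged alternative, shown only as a sketch with a hypothetical subclass, is to narrow with `typing.cast` instead of suppressing the error:

```python
# Sketch only (assumed reasoning for the ignores above): casting the server to
# WSGIServer makes get_app() visible to the type checker.
from typing import cast
from wsgiref.simple_server import WSGIRequestHandler, WSGIServer


class QuietHandler(WSGIRequestHandler):
    def get_application(self) -> object:
        # self.server is typed as BaseServer; the cast exposes get_app().
        return cast(WSGIServer, self.server).get_app()
```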
 

+ 3 - 2
fuzzing/fuzz-targets/fuzz_bundle.py

@@ -1,5 +1,6 @@
 import sys
 from io import BytesIO
+from typing import Optional
 
 import atheris
 
@@ -11,7 +12,7 @@ with atheris.instrument_imports():
     from dulwich.pack import PackData, write_pack_objects
 
 
-def TestOneInput(data):
+def TestOneInput(data) -> Optional[int]:
     fdp = EnhancedFuzzedDataProvider(data)
     bundle = Bundle()
     bundle.version = fdp.PickValueInList([2, 3, None])
@@ -45,7 +46,7 @@ def TestOneInput(data):
             raise e
 
 
-def main():
+def main() -> None:
     atheris.Setup(sys.argv, TestOneInput)
     atheris.Fuzz()
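`TestOneInput` gets an `Optional[int]` return type in all four fuzz targets: an atheris harness normally returns `None`, but, mirroring libFuzzer's convention, it may return `-1` to ask the engine not to add an uninteresting input to the corpus. A minimal standalone harness shape (hypothetical target, not the bundle fuzzer above):

```python
# Sketch of the Optional[int] harness contract: return -1 to keep an input out
# of the corpus, fall through (None) for everything else.
import sys
from typing import Optional

import atheris


def TestOneInput(data: bytes) -> Optional[int]:
    if len(data) < 4:
        return -1  # too short to be interesting
    data.decode("utf-8", errors="ignore")  # stand-in for the code under test
    return None


def main() -> None:
    atheris.Setup(sys.argv, TestOneInput)
    atheris.Fuzz()


if __name__ == "__main__":
    main()
```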
 

+ 3 - 2
fuzzing/fuzz-targets/fuzz_configfile.py

@@ -1,5 +1,6 @@
 import sys
 from io import BytesIO
+from typing import Optional
 
 import atheris
 from test_utils import is_expected_exception
@@ -8,7 +9,7 @@ with atheris.instrument_imports():
     from dulwich.config import ConfigFile
 
 
-def TestOneInput(data):
+def TestOneInput(data) -> Optional[int]:
     try:
         ConfigFile.from_file(BytesIO(data))
     except ValueError as e:
@@ -27,7 +28,7 @@ def TestOneInput(data):
             raise e
 
 
-def main():
+def main() -> None:
     atheris.Setup(sys.argv, TestOneInput)
     atheris.Fuzz()
 

+ 3 - 2
fuzzing/fuzz-targets/fuzz_object_store.py

@@ -1,6 +1,7 @@
 import stat
 import sys
 from io import BytesIO
+from typing import Optional
 
 import atheris
 
@@ -17,7 +18,7 @@ with atheris.instrument_imports():
     )
 
 
-def TestOneInput(data):
+def TestOneInput(data) -> Optional[int]:
     fdp = EnhancedFuzzedDataProvider(data)
     repo = MemoryRepo()
     blob = Blob.from_string(fdp.ConsumeRandomBytes())
@@ -85,7 +86,7 @@ def TestOneInput(data):
             raise e
 
 
-def main():
+def main() -> None:
     atheris.Setup(sys.argv, TestOneInput)
     atheris.Fuzz()
 

+ 3 - 2
fuzzing/fuzz-targets/fuzz_repo.py

@@ -1,6 +1,7 @@
 import os
 import sys
 import tempfile
+from typing import Optional
 
 import atheris
 
@@ -14,7 +15,7 @@ with atheris.instrument_imports():
     )
 
 
-def TestOneInput(data):
+def TestOneInput(data) -> Optional[int]:
     fdp = EnhancedFuzzedDataProvider(data)
     with tempfile.TemporaryDirectory() as temp_dir:
         repo = Repo.init(temp_dir)
@@ -54,7 +55,7 @@ def TestOneInput(data):
                 raise e
 
 
-def main():
+def main() -> None:
     atheris.Setup(sys.argv, TestOneInput)
     atheris.Fuzz()
 

+ 2 - 2
fuzzing/fuzz-targets/test_utils.py

@@ -4,7 +4,7 @@ import atheris  # pragma: no cover
 @atheris.instrument_func
 def is_expected_exception(
     error_message_list: list[str], exception: Exception
-):  # pragma: no cover
+) -> bool:  # pragma: no cover
     """Checks if the message of a given exception matches any of the expected error messages.
 
     Args:
@@ -23,7 +23,7 @@ def is_expected_exception(
 class EnhancedFuzzedDataProvider(atheris.FuzzedDataProvider):  # pragma: no cover
     """Extends atheris.FuzzedDataProvider to offer additional methods to make fuzz testing slightly more DRY."""
 
-    def __init__(self, data):
+    def __init__(self, data) -> None:
         """Initializes the EnhancedFuzzedDataProvider with fuzzing data from the argument provided to TestOneInput.
 
         Args:

+ 6 - 6
tests/__init__.py

@@ -43,13 +43,13 @@ from unittest import TestCase as _TestCase
 
 
 class TestCase(_TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.overrideEnv("HOME", "/nonexistent")
         self.overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
 
-    def overrideEnv(self, name, value):
-        def restore():
+    def overrideEnv(self, name, value) -> None:
+        def restore() -> None:
             if oldval is not None:
                 os.environ[name] = oldval
             else:
@@ -168,7 +168,7 @@ def tutorial_test_suite():
 
     to_restore = []
 
-    def overrideEnv(name, value):
+    def overrideEnv(name, value) -> None:
         oldval = os.environ.get(name)
         if value is not None:
             os.environ[name] = value
@@ -176,7 +176,7 @@ def tutorial_test_suite():
             del os.environ[name]
         to_restore.append((name, oldval))
 
-    def setup(test):
+    def setup(test) -> None:
         test.__old_cwd = os.getcwd()
         test.tempdir = tempfile.mkdtemp()
         test.globs.update({"tempdir": test.tempdir})
@@ -184,7 +184,7 @@ def tutorial_test_suite():
         overrideEnv("HOME", "/nonexistent")
         overrideEnv("GIT_CONFIG_NOSYSTEM", "1")
 
-    def teardown(test):
+    def teardown(test) -> None:
         os.chdir(test.__old_cwd)
         shutil.rmtree(test.tempdir)
         for name, oldval in to_restore:

+ 17 - 17
tests/compat/server_utils.py

@@ -43,7 +43,7 @@ class _StubRepo:
         self.path = os.path.join(temp_dir, name)
         os.mkdir(self.path)
 
-    def close(self):
+    def close(self) -> None:
         pass
 
 
@@ -74,11 +74,11 @@ class ServerTests:
         10,
     )
 
-    def import_repos(self):
+    def import_repos(self) -> None:
         self._old_repo = self.import_repo("server_old.export")
         self._new_repo = self.import_repo("server_new.export")
 
-    def url(self, port):
+    def url(self, port) -> str:
         return f"{self.protocol}://localhost:{port}/"
 
     def branch_args(self, branches=None):
@@ -86,7 +86,7 @@ class ServerTests:
             branches = ["master", "branch"]
         return [f"{b}:{b}" for b in branches]
 
-    def test_push_to_dulwich(self):
+    def test_push_to_dulwich(self) -> None:
         self.import_repos()
         self.assertReposNotEqual(self._old_repo, self._new_repo)
         port = self._start_server(self._old_repo)
@@ -97,7 +97,7 @@ class ServerTests:
         )
         self.assertReposEqual(self._old_repo, self._new_repo)
 
-    def test_push_to_dulwich_no_op(self):
+    def test_push_to_dulwich_no_op(self) -> None:
         self._old_repo = self.import_repo("server_old.export")
         self._new_repo = self.import_repo("server_old.export")
         self.assertReposEqual(self._old_repo, self._new_repo)
@@ -109,7 +109,7 @@ class ServerTests:
         )
         self.assertReposEqual(self._old_repo, self._new_repo)
 
-    def test_push_to_dulwich_remove_branch(self):
+    def test_push_to_dulwich_remove_branch(self) -> None:
         self._old_repo = self.import_repo("server_old.export")
         self._new_repo = self.import_repo("server_old.export")
         self.assertReposEqual(self._old_repo, self._new_repo)
@@ -119,7 +119,7 @@ class ServerTests:
 
         self.assertEqual(list(self._old_repo.get_refs().keys()), [b"refs/heads/branch"])
 
-    def test_fetch_from_dulwich(self):
+    def test_fetch_from_dulwich(self) -> None:
         self.import_repos()
         self.assertReposNotEqual(self._old_repo, self._new_repo)
         port = self._start_server(self._new_repo)
@@ -132,7 +132,7 @@ class ServerTests:
         self._old_repo.object_store._pack_cache_time = 0
         self.assertReposEqual(self._old_repo, self._new_repo)
 
-    def test_fetch_from_dulwich_no_op(self):
+    def test_fetch_from_dulwich_no_op(self) -> None:
         self._old_repo = self.import_repo("server_old.export")
         self._new_repo = self.import_repo("server_old.export")
         self.assertReposEqual(self._old_repo, self._new_repo)
@@ -146,7 +146,7 @@ class ServerTests:
         self._old_repo.object_store._pack_cache_time = 0
         self.assertReposEqual(self._old_repo, self._new_repo)
 
-    def test_clone_from_dulwich_empty(self):
+    def test_clone_from_dulwich_empty(self) -> None:
         old_repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, old_repo_dir)
         self._old_repo = Repo.init_bare(old_repo_dir)
@@ -159,13 +159,13 @@ class ServerTests:
         new_repo = Repo(new_repo_dir)
         self.assertReposEqual(self._old_repo, new_repo)
 
-    def test_lsremote_from_dulwich(self):
+    def test_lsremote_from_dulwich(self) -> None:
         self._repo = self.import_repo("server_old.export")
         port = self._start_server(self._repo)
         o = run_git_or_fail(["ls-remote", self.url(port)])
         self.assertEqual(len(o.split(b"\n")), 4)
 
-    def test_new_shallow_clone_from_dulwich(self):
+    def test_new_shallow_clone_from_dulwich(self) -> None:
         require_git_version(self.min_single_branch_version)
         self._source_repo = self.import_repo("server_new.export")
         self._stub_repo = _StubRepo("shallow")
@@ -191,7 +191,7 @@ class ServerTests:
         self.assertEqual(expected_shallow, _get_shallow(clone))
         self.assertReposNotEqual(clone, self._source_repo)
 
-    def test_shallow_clone_from_git_is_identical(self):
+    def test_shallow_clone_from_git_is_identical(self) -> None:
         require_git_version(self.min_single_branch_version)
         self._source_repo = self.import_repo("server_new.export")
         self._stub_repo_git = _StubRepo("shallow-git")
@@ -228,7 +228,7 @@ class ServerTests:
             Repo(self._stub_repo_git.path), Repo(self._stub_repo_dw.path)
         )
 
-    def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
+    def test_fetch_same_depth_into_shallow_clone_from_dulwich(self) -> None:
         require_git_version(self.min_single_branch_version)
         self._source_repo = self.import_repo("server_new.export")
         self._stub_repo = _StubRepo("shallow")
@@ -260,7 +260,7 @@ class ServerTests:
         self.assertEqual(expected_shallow, _get_shallow(clone))
         self.assertReposNotEqual(clone, self._source_repo)
 
-    def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
+    def test_fetch_full_depth_into_shallow_clone_from_dulwich(self) -> None:
         require_git_version(self.min_single_branch_version)
         self._source_repo = self.import_repo("server_new.export")
         self._stub_repo = _StubRepo("shallow")
@@ -294,7 +294,7 @@ class ServerTests:
         self.assertEqual([], _get_shallow(clone))
         self.assertReposEqual(clone, self._source_repo)
 
-    def test_fetch_from_dulwich_issue_88_standard(self):
+    def test_fetch_from_dulwich_issue_88_standard(self) -> None:
         # Basically an integration test to see that the ACK/NAK
         # generation works on repos with common head.
         self._source_repo = self.import_repo("issue88_expect_ack_nak_server.export")
@@ -306,7 +306,7 @@ class ServerTests:
             self._source_repo.object_store, self._client_repo.object_store
         )
 
-    def test_fetch_from_dulwich_issue_88_alternative(self):
+    def test_fetch_from_dulwich_issue_88_alternative(self) -> None:
         # likewise, but the case where the two repos have no common parent
         self._source_repo = self.import_repo("issue88_expect_ack_nak_other.export")
         self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export")
@@ -325,7 +325,7 @@ class ServerTests:
             ).type_name,
         )
 
-    def test_push_to_dulwich_issue_88_standard(self):
+    def test_push_to_dulwich_issue_88_standard(self) -> None:
         # Same thing, but we reverse the role of the server/client
         # and do a push instead.
         self._source_repo = self.import_repo("issue88_expect_ack_nak_client.export")

+ 43 - 42
tests/compat/test_client.py

@@ -33,6 +33,7 @@ import tempfile
 import threading
 from contextlib import suppress
 from io import BytesIO
+from typing import NoReturn
 from unittest.mock import patch
 from urllib.parse import unquote
 
@@ -56,7 +57,7 @@ if sys.platform == "win32":
 class DulwichClientTestBase:
     """Tests for client/server compatibility."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         self.gitroot = os.path.dirname(
             import_repo_to_dir("server_new.export").rstrip(os.sep)
         )
@@ -64,23 +65,23 @@ class DulwichClientTestBase:
         file.ensure_dir_exists(self.dest)
         run_git_or_fail(["init", "--quiet", "--bare"], cwd=self.dest)
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         rmtree_ro(self.gitroot)
 
-    def assertDestEqualsSrc(self):
+    def assertDestEqualsSrc(self) -> None:
         repo_dir = os.path.join(self.gitroot, "server_new.export")
         dest_repo_dir = os.path.join(self.gitroot, "dest")
         with repo.Repo(repo_dir) as src:
             with repo.Repo(dest_repo_dir) as dest:
                 self.assertReposEqual(src, dest)
 
-    def _client(self):
+    def _client(self) -> NoReturn:
         raise NotImplementedError
 
-    def _build_path(self):
+    def _build_path(self) -> NoReturn:
         raise NotImplementedError
 
-    def _do_send_pack(self):
+    def _do_send_pack(self) -> None:
         c = self._client()
         srcpath = os.path.join(self.gitroot, "server_new.export")
         with repo.Repo(srcpath) as src:
@@ -92,11 +93,11 @@ class DulwichClientTestBase:
                 src.generate_pack_data,
             )
 
-    def test_send_pack(self):
+    def test_send_pack(self) -> None:
         self._do_send_pack()
         self.assertDestEqualsSrc()
 
-    def test_send_pack_nothing_to_send(self):
+    def test_send_pack_nothing_to_send(self) -> None:
         self._do_send_pack()
         self.assertDestEqualsSrc()
         # nothing to send, but shouldn't raise either.
@@ -112,7 +113,7 @@ class DulwichClientTestBase:
         repo.object_store.add_object(tree)
         return tree.id
 
-    def test_send_pack_from_shallow_clone(self):
+    def test_send_pack_from_shallow_clone(self) -> None:
         c = self._client()
         server_new_path = os.path.join(self.gitroot, "server_new.export")
         run_git_or_fail(["config", "http.uploadpack", "true"], cwd=server_new_path)
@@ -139,7 +140,7 @@ class DulwichClientTestBase:
         with repo.Repo(server_new_path) as remote:
             self.assertEqual(remote.head(), commit_id)
 
-    def test_send_without_report_status(self):
+    def test_send_without_report_status(self) -> None:
         c = self._client()
         c._send_capabilities.remove(b"report-status")
         srcpath = os.path.join(self.gitroot, "server_new.export")
@@ -180,7 +181,7 @@ class DulwichClientTestBase:
         del sendrefs[b"HEAD"]
         return sendrefs, src.generate_pack_data
 
-    def test_send_pack_one_error(self):
+    def test_send_pack_one_error(self) -> None:
         dest, dummy_commit = self.disable_ff_and_make_dummy_commit()
         dest.refs[b"refs/heads/master"] = dummy_commit
         repo_dir = os.path.join(self.gitroot, "server_new.export")
@@ -198,7 +199,7 @@ class DulwichClientTestBase:
                 result.ref_status,
             )
 
-    def test_send_pack_multiple_errors(self):
+    def test_send_pack_multiple_errors(self) -> None:
         dest, dummy = self.disable_ff_and_make_dummy_commit()
         # set up for two non-ff errors
         branch, master = b"refs/heads/branch", b"refs/heads/master"
@@ -215,7 +216,7 @@ class DulwichClientTestBase:
                 result.ref_status,
             )
 
-    def test_archive(self):
+    def test_archive(self) -> None:
         c = self._client()
         f = BytesIO()
         c.archive(self._build_path("/server_new.export"), b"HEAD", f.write)
@@ -223,7 +224,7 @@ class DulwichClientTestBase:
         tf = tarfile.open(fileobj=f)
         self.assertEqual(["baz", "foo"], tf.getnames())
 
-    def test_fetch_pack(self):
+    def test_fetch_pack(self) -> None:
         c = self._client()
         with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
             result = c.fetch(self._build_path("/server_new.export"), dest)
@@ -235,7 +236,7 @@ class DulwichClientTestBase:
                 dest.refs.set_if_equals(r[0], None, r[1])
             self.assertDestEqualsSrc()
 
-    def test_fetch_pack_with_nondefault_symref(self):
+    def test_fetch_pack_with_nondefault_symref(self) -> None:
         c = self._client()
         src = repo.Repo(os.path.join(self.gitroot, "server_new.export"))
         src.refs.add_if_new(b"refs/heads/main", src.refs[b"refs/heads/master"])
@@ -250,7 +251,7 @@ class DulwichClientTestBase:
                 dest.refs.set_if_equals(r[0], None, r[1])
             self.assertDestEqualsSrc()
 
-    def test_get_refs_with_peeled_tag(self):
+    def test_get_refs_with_peeled_tag(self) -> None:
         tag_create(
             os.path.join(self.gitroot, "server_new.export"),
             b"v1.0",
@@ -270,7 +271,7 @@ class DulwichClientTestBase:
             sorted(refs.keys()),
         )
 
-    def test_get_refs_with_ref_prefix(self):
+    def test_get_refs_with_ref_prefix(self) -> None:
         c = self._client()
         refs = c.get_refs(
             self._build_path("/server_new.export"), ref_prefix=[b"refs/heads"]
@@ -283,7 +284,7 @@ class DulwichClientTestBase:
             sorted(refs.keys()),
         )
 
-    def test_fetch_pack_depth(self):
+    def test_fetch_pack_depth(self) -> None:
         c = self._client()
         with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
             result = c.fetch(self._build_path("/server_new.export"), dest, depth=1)
@@ -297,7 +298,7 @@ class DulwichClientTestBase:
                 },
             )
 
-    def test_repeat(self):
+    def test_repeat(self) -> None:
         c = self._client()
         with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
             result = c.fetch(self._build_path("/server_new.export"), dest)
@@ -309,7 +310,7 @@ class DulwichClientTestBase:
                 dest.refs.set_if_equals(r[0], None, r[1])
             self.assertDestEqualsSrc()
 
-    def test_fetch_empty_pack(self):
+    def test_fetch_empty_pack(self) -> None:
         c = self._client()
         with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
             result = c.fetch(self._build_path("/server_new.export"), dest)
@@ -329,7 +330,7 @@ class DulwichClientTestBase:
                 dest.refs.set_if_equals(r[0], None, r[1])
             self.assertDestEqualsSrc()
 
-    def test_incremental_fetch_pack(self):
+    def test_incremental_fetch_pack(self) -> None:
         self.test_fetch_pack()
         dest, dummy = self.disable_ff_and_make_dummy_commit()
         dest.refs[b"refs/heads/master"] = dummy
@@ -341,7 +342,7 @@ class DulwichClientTestBase:
                 dest.refs.set_if_equals(r[0], None, r[1])
             self.assertDestEqualsSrc()
 
-    def test_fetch_pack_no_side_band_64k(self):
+    def test_fetch_pack_no_side_band_64k(self) -> None:
         if protocol.DEFAULT_GIT_PROTOCOL_VERSION_FETCH >= 2:
             raise SkipTest("side-band-64k cannot be disabled with git protocol v2")
         c = self._client()
@@ -352,7 +353,7 @@ class DulwichClientTestBase:
                 dest.refs.set_if_equals(r[0], None, r[1])
             self.assertDestEqualsSrc()
 
-    def test_fetch_pack_zero_sha(self):
+    def test_fetch_pack_zero_sha(self) -> None:
         # zero sha1s are already present on the client, and should
         # be ignored
         c = self._client()
@@ -365,7 +366,7 @@ class DulwichClientTestBase:
             for r in result.refs.items():
                 dest.refs.set_if_equals(r[0], None, r[1])
 
-    def test_send_remove_branch(self):
+    def test_send_remove_branch(self) -> None:
         with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
             dummy_commit = self.make_dummy_commit(dest)
             dest.refs[b"refs/heads/master"] = dummy_commit
@@ -382,7 +383,7 @@ class DulwichClientTestBase:
             c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack)
             self.assertNotIn(b"refs/heads/abranch", dest.refs)
 
-    def test_send_new_branch_empty_pack(self):
+    def test_send_new_branch_empty_pack(self) -> None:
         with repo.Repo(os.path.join(self.gitroot, "dest")) as dest:
             dummy_commit = self.make_dummy_commit(dest)
             dest.refs[b"refs/heads/master"] = dummy_commit
@@ -397,7 +398,7 @@ class DulwichClientTestBase:
             c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack)
             self.assertEqual(dummy_commit, dest.refs[b"refs/heads/abranch"])
 
-    def test_get_refs(self):
+    def test_get_refs(self) -> None:
         c = self._client()
         refs = c.get_refs(self._build_path("/server_new.export"))
 
@@ -407,7 +408,7 @@ class DulwichClientTestBase:
 
 
 class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
-    def setUp(self):
+    def setUp(self) -> None:
         CompatTestCase.setUp(self)
         DulwichClientTestBase.setUp(self)
         if check_for_daemon(limit=1):
@@ -440,7 +441,7 @@ class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
         if not check_for_daemon():
             raise SkipTest("git-daemon failed to start")
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         with open(self.pidfile) as f:
             pid = int(f.read().strip())
         if sys.platform == "win32":
@@ -467,10 +468,10 @@ class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
     if sys.platform == "win32" and protocol.DEFAULT_GIT_PROTOCOL_VERSION_FETCH < 2:
 
         @expectedFailure
-        def test_fetch_pack_no_side_band_64k(self):
+        def test_fetch_pack_no_side_band_64k(self) -> None:
             DulwichClientTestBase.test_fetch_pack_no_side_band_64k(self)
 
-    def test_send_remove_branch(self):
+    def test_send_remove_branch(self) -> None:
         # This test fails intermittently on my machine, probably due to some sort
         # of race condition. Probably also related to #1015
         self.skipTest("skip flaky test; see #1015")
@@ -513,13 +514,13 @@ class TestSSHVendor:
 
 
 class DulwichMockSSHClientTest(CompatTestCase, DulwichClientTestBase):
-    def setUp(self):
+    def setUp(self) -> None:
         CompatTestCase.setUp(self)
         DulwichClientTestBase.setUp(self)
         self.real_vendor = client.get_ssh_vendor
         client.get_ssh_vendor = TestSSHVendor
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         DulwichClientTestBase.tearDown(self)
         CompatTestCase.tearDown(self)
         client.get_ssh_vendor = self.real_vendor
@@ -537,11 +538,11 @@ class DulwichMockSSHClientTestGitProtov0(DulwichMockSSHClientTest):
 
 
 class DulwichSubprocessClientTest(CompatTestCase, DulwichClientTestBase):
-    def setUp(self):
+    def setUp(self) -> None:
         CompatTestCase.setUp(self)
         DulwichClientTestBase.setUp(self)
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         DulwichClientTestBase.tearDown(self)
         CompatTestCase.tearDown(self)
 
@@ -564,20 +565,20 @@ class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
     # the rest to a subprocess, so we can't use buffered input.
     rbufsize = 0
 
-    def do_POST(self):
+    def do_POST(self) -> None:
         self.run_backend()
 
-    def do_GET(self):
+    def do_GET(self) -> None:
         self.run_backend()
 
     def send_head(self):
         return self.run_backend()
 
-    def log_request(self, code="-", size="-"):
+    def log_request(self, code="-", size="-") -> None:
         # Let's be quiet, the test suite is noisy enough already
         pass
 
-    def run_backend(self):
+    def run_backend(self) -> None:
         """Call out to git http-backend."""
         # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi:
         # Copyright (c) 2001-2010 Python Software Foundation;
@@ -713,14 +714,14 @@ class HTTPGitServer(http.server.HTTPServer):
         self.root_path = root_path
         self.server_name = "localhost"
 
-    def get_url(self):
+    def get_url(self) -> str:
         return f"http://{self.server_name}:{self.server_port}/"
 
 
 class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase):
     min_git_version = (1, 7, 0, 2)
 
-    def setUp(self):
+    def setUp(self) -> None:
         CompatTestCase.setUp(self)
         DulwichClientTestBase.setUp(self)
         self._httpd = HTTPGitServer(("localhost", 0), self.gitroot)
@@ -729,7 +730,7 @@ class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase):
         run_git_or_fail(["config", "http.uploadpack", "true"], cwd=self.dest)
         run_git_or_fail(["config", "http.receivepack", "true"], cwd=self.dest)
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         DulwichClientTestBase.tearDown(self)
         CompatTestCase.tearDown(self)
         self._httpd.shutdown()
@@ -741,7 +742,7 @@ class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase):
     def _build_path(self, path):
         return path
 
-    def test_archive(self):
+    def test_archive(self) -> NoReturn:
         raise SkipTest("exporting archives not supported over http")
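Where a method only ever raises, such as `_client`/`_build_path` in the abstract base or tests that immediately `raise SkipTest`, the commit annotates it `NoReturn` rather than `-> None`. A compact sketch of the same idea with hypothetical names:

```python
# Sketch: NoReturn tells the checker these methods never return normally.
from typing import NoReturn
from unittest import SkipTest


class ClientTestBase:
    def _client(self) -> NoReturn:
        raise NotImplementedError  # concrete transports override this

    def test_archive(self) -> NoReturn:
        raise SkipTest("archive not supported over this transport")
```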
 
 

+ 6 - 5
tests/compat/test_pack.py

@@ -25,6 +25,7 @@ import os
 import re
 import shutil
 import tempfile
+from typing import NoReturn
 
 from dulwich.objects import Blob
 from dulwich.pack import write_pack
@@ -51,13 +52,13 @@ def _git_verify_pack_object_list(output):
 class TestPack(PackTests):
     """Compatibility tests for reading and writing pack files."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         require_git_version((1, 5, 0))
         super().setUp()
         self._tempdir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self._tempdir)
 
-    def test_copy(self):
+    def test_copy(self) -> None:
         with self.get_pack(pack1_sha) as origpack:
             self.assertSucceeds(origpack.index.check)
             pack_path = os.path.join(self._tempdir, "Elch")
@@ -66,7 +67,7 @@ class TestPack(PackTests):
             orig_shas = {o.id for o in origpack.iterobjects()}
             self.assertEqual(orig_shas, _git_verify_pack_object_list(output))
 
-    def test_deltas_work(self):
+    def test_deltas_work(self) -> None:
         with self.get_pack(pack1_sha) as orig_pack:
             orig_blob = orig_pack[a_sha]
             new_blob = Blob()
@@ -91,7 +92,7 @@ class TestPack(PackTests):
             "Expected 3 non-delta objects, got %d" % got_non_delta,
         )
 
-    def test_delta_medium_object(self):
+    def test_delta_medium_object(self) -> None:
         # This tests an object set that will have a copy operation
         # 2**20 in size.
         with self.get_pack(pack1_sha) as orig_pack:
@@ -130,7 +131,7 @@ class TestPack(PackTests):
     # on the input size. It's impractical to produce deltas for
     # objects this large, but it's still worth doing the right thing
     # when it happens.
-    def test_delta_large_object(self):
+    def test_delta_large_object(self) -> NoReturn:
         # This tests an object set that will have a copy operation
         # 2**25 in size. This is a copy large enough that it requires
         # two copy operations in git's binary delta format.

+ 2 - 2
tests/compat/test_patch.py

@@ -32,7 +32,7 @@ from .utils import CompatTestCase, run_git_or_fail
 
 
 class CompatPatchTestCase(CompatTestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.test_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.test_dir)
@@ -40,7 +40,7 @@ class CompatPatchTestCase(CompatTestCase):
         self.repo = Repo.init(self.repo_path, mkdir=True)
         self.addCleanup(self.repo.close)
 
-    def test_patch_apply(self):
+    def test_patch_apply(self) -> None:
         # Prepare the repository
 
         # Create some files and commit them

+ 2 - 2
tests/compat/test_porcelain.py

@@ -40,7 +40,7 @@ except ImportError:
     "gpgme not available, skipping tests that require GPG signing",
 )
 class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
-    def test_sign(self):
+    def test_sign(self) -> None:
         # Test that dulwich signatures can be verified by CGit
         c1, c2, c3 = build_commit_graph(
             self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
@@ -64,7 +64,7 @@ class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
             env={"GNUPGHOME": os.environ["GNUPGHOME"]},
         )
 
-    def test_verify(self):
+    def test_verify(self) -> None:
         # Test that CGit signatures can be verified by dulwich
         c1, c2, c3 = build_commit_graph(
             self.repo.object_store, [[1], [2, 1], [3, 1, 2]]

+ 17 - 17
tests/compat/test_repository.py

@@ -34,7 +34,7 @@ from .utils import CompatTestCase, require_git_version, rmtree_ro, run_git_or_fa
 class ObjectStoreTestCase(CompatTestCase):
     """Tests for git repository compatibility."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._repo = self.import_repo("server_new.export")
 
@@ -55,17 +55,17 @@ class ObjectStoreTestCase(CompatTestCase):
     def _parse_objects(self, output):
         return {s.rstrip(b"\n").split(b" ")[0] for s in BytesIO(output)}
 
-    def test_bare(self):
+    def test_bare(self) -> None:
         self.assertTrue(self._repo.bare)
         self.assertFalse(os.path.exists(os.path.join(self._repo.path, ".git")))
 
-    def test_head(self):
+    def test_head(self) -> None:
         output = self._run_git(["rev-parse", "HEAD"])
         head_sha = output.rstrip(b"\n")
         hex_to_sha(head_sha)
         self.assertEqual(head_sha, self._repo.refs[b"HEAD"])
 
-    def test_refs(self):
+    def test_refs(self) -> None:
         output = self._run_git(
             ["for-each-ref", "--format=%(refname) %(objecttype) %(objectname)"]
         )
@@ -90,7 +90,7 @@ class ObjectStoreTestCase(CompatTestCase):
         output = self._run_git(["rev-list", "--all", "--objects"])
         return self._parse_objects(output)
 
-    def assertShasMatch(self, expected_shas, actual_shas_iter):
+    def assertShasMatch(self, expected_shas, actual_shas_iter) -> None:
         actual_shas = set()
         for sha in actual_shas_iter:
             obj = self._repo[sha]
@@ -98,7 +98,7 @@ class ObjectStoreTestCase(CompatTestCase):
             actual_shas.add(sha)
         self.assertEqual(expected_shas, actual_shas)
 
-    def test_loose_objects(self):
+    def test_loose_objects(self) -> None:
         # TODO(dborowitz): This is currently not very useful since
         # fast-imported repos only contained packed objects.
         expected_shas = self._get_loose_shas()
@@ -106,13 +106,13 @@ class ObjectStoreTestCase(CompatTestCase):
             expected_shas, self._repo.object_store._iter_loose_objects()
         )
 
-    def test_packed_objects(self):
+    def test_packed_objects(self) -> None:
         expected_shas = self._get_all_shas() - self._get_loose_shas()
         self.assertShasMatch(
             expected_shas, chain.from_iterable(self._repo.object_store.packs)
         )
 
-    def test_all_objects(self):
+    def test_all_objects(self) -> None:
         expected_shas = self._get_all_shas()
         self.assertShasMatch(expected_shas, iter(self._repo.object_store))
 
@@ -136,7 +136,7 @@ class WorkingTreeTestCase(ObjectStoreTestCase):
         self.addCleanup(rmtree_ro, temp_dir)
         return temp_dir
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._worktree_path = self.create_new_worktree(self._repo.path, "branch")
         self._worktree_repo = Repo(self._worktree_path)
@@ -145,18 +145,18 @@ class WorkingTreeTestCase(ObjectStoreTestCase):
         self._number_of_working_tree = 2
         self._repo = self._worktree_repo
 
-    def test_refs(self):
+    def test_refs(self) -> None:
         super().test_refs()
         self.assertEqual(
             self._mainworktree_repo.refs.allkeys(), self._repo.refs.allkeys()
         )
 
-    def test_head_equality(self):
+    def test_head_equality(self) -> None:
         self.assertNotEqual(
             self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"]
         )
 
-    def test_bare(self):
+    def test_bare(self) -> None:
         self.assertFalse(self._repo.bare)
         self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git")))
 
@@ -167,7 +167,7 @@ class WorkingTreeTestCase(ObjectStoreTestCase):
             worktrees.append(tuple(f.decode() for f in fields))
         return worktrees
 
-    def test_git_worktree_list(self):
+    def test_git_worktree_list(self) -> None:
         # 'git worktree list' was introduced in 2.7.0
         require_git_version((2, 7, 0))
         output = run_git_or_fail(["worktree", "list"], cwd=self._repo.path)
@@ -182,7 +182,7 @@ class WorkingTreeTestCase(ObjectStoreTestCase):
         self.assertEqual(worktrees[0][1], "(bare)")
         self.assertTrue(os.path.samefile(worktrees[0][0], self._mainworktree_repo.path))
 
-    def test_git_worktree_config(self):
+    def test_git_worktree_config(self) -> None:
         """Test that git worktree config parsing matches the git CLI's behavior."""
         # Set some config value in the main repo using the git CLI
         require_git_version((2, 7, 0))
@@ -222,7 +222,7 @@ class InitNewWorkingDirectoryTestCase(WorkingTreeTestCase):
 
     min_git_version = (2, 5, 0)
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._other_worktree = self._repo
         worktree_repo_path = tempfile.mkdtemp()
@@ -233,11 +233,11 @@ class InitNewWorkingDirectoryTestCase(WorkingTreeTestCase):
         self.addCleanup(self._repo.close)
         self._number_of_working_tree = 3
 
-    def test_head_equality(self):
+    def test_head_equality(self) -> None:
         self.assertEqual(
             self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"]
         )
 
-    def test_bare(self):
+    def test_bare(self) -> None:
         self.assertFalse(self._repo.bare)
         self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git")))

+ 4 - 4
tests/compat/test_server.py

@@ -48,7 +48,7 @@ class GitServerTestCase(ServerTests, CompatTestCase):
     def _handlers(self):
         return {b"git-receive-pack": NoSideBand64kReceivePackHandler}
 
-    def _check_server(self, dul_server):
+    def _check_server(self, dul_server) -> None:
         receive_pack_handler_cls = dul_server.handlers[b"git-receive-pack"]
         caps = receive_pack_handler_cls.capabilities()
         self.assertNotIn(b"side-band-64k", caps)
@@ -72,7 +72,7 @@ class GitServerSideBand64kTestCase(GitServerTestCase):
     # side-band-64k in git-receive-pack was introduced in git 1.7.0.2
     min_git_version = (1, 7, 0, 2)
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         # side-band-64k is broken in the windows client.
         # https://github.com/msysgit/git/issues/101
@@ -80,10 +80,10 @@ class GitServerSideBand64kTestCase(GitServerTestCase):
         if os.name == "nt":
             require_git_version((1, 9, 3))
 
-    def _handlers(self):
+    def _handlers(self) -> None:
         return None  # default handlers include side-band-64k
 
-    def _check_server(self, server):
+    def _check_server(self, server) -> None:
         receive_pack_handler_cls = server.handlers[b"git-receive-pack"]
         caps = receive_pack_handler_cls.capabilities()
         self.assertIn(b"side-band-64k", caps)

+ 9 - 9
tests/compat/test_utils.py

@@ -25,7 +25,7 @@ from . import utils
 
 
 class GitVersionTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._orig_run_git = utils.run_git
         self._version_str = None  # tests can override to set stub version
@@ -36,36 +36,36 @@ class GitVersionTests(TestCase):
 
         utils.run_git = run_git
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         super().tearDown()
         utils.run_git = self._orig_run_git
 
-    def test_git_version_none(self):
+    def test_git_version_none(self) -> None:
         self._version_str = b"not a git version"
         self.assertEqual(None, utils.git_version())
 
-    def test_git_version_3(self):
+    def test_git_version_3(self) -> None:
         self._version_str = b"git version 1.6.6"
         self.assertEqual((1, 6, 6, 0), utils.git_version())
 
-    def test_git_version_4(self):
+    def test_git_version_4(self) -> None:
         self._version_str = b"git version 1.7.0.2"
         self.assertEqual((1, 7, 0, 2), utils.git_version())
 
-    def test_git_version_extra(self):
+    def test_git_version_extra(self) -> None:
         self._version_str = b"git version 1.7.0.3.295.gd8fa2"
         self.assertEqual((1, 7, 0, 3), utils.git_version())
 
-    def assertRequireSucceeds(self, required_version):
+    def assertRequireSucceeds(self, required_version) -> None:
         try:
             utils.require_git_version(required_version)
         except SkipTest:
             self.fail()
 
-    def assertRequireFails(self, required_version):
+    def assertRequireFails(self, required_version) -> None:
         self.assertRaises(SkipTest, utils.require_git_version, required_version)
 
-    def test_require_git_version(self):
+    def test_require_git_version(self) -> None:
         try:
             self._version_str = b"git version 1.6.6"
             self.assertRequireSucceeds((1, 6, 6))

+ 15 - 14
tests/compat/test_web.py

@@ -27,6 +27,7 @@ warning: these tests should be fairly stable, but when writing/debugging new
 
 import sys
 import threading
+from typing import NoReturn
 from wsgiref import simple_server
 
 from dulwich.server import DictBackend, ReceivePackHandler, UploadPackHandler
@@ -82,7 +83,7 @@ class SmartWebTestCase(WebTests, CompatTestCase):
     def _handlers(self):
         return {b"git-receive-pack": NoSideBand64kReceivePackHandler}
 
-    def _check_app(self, app):
+    def _check_app(self, app) -> None:
         receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
         caps = receive_pack_handler_cls.capabilities()
         self.assertNotIn(b"side-band-64k", caps)
@@ -119,20 +120,20 @@ class SmartWebSideBand64kTestCase(SmartWebTestCase):
     # side-band-64k in git-receive-pack was introduced in git 1.7.0.2
     min_git_version = (1, 7, 0, 2)
 
-    def setUp(self):
+    def setUp(self) -> None:
         self.o_uph_cap = patch_capabilities(UploadPackHandler, (b"no-done",))
         self.o_rph_cap = patch_capabilities(ReceivePackHandler, (b"no-done",))
         super().setUp()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         super().tearDown()
         UploadPackHandler.capabilities = self.o_uph_cap
         ReceivePackHandler.capabilities = self.o_rph_cap
 
-    def _handlers(self):
+    def _handlers(self) -> None:
         return None  # default handlers include side-band-64k
 
-    def _check_app(self, app):
+    def _check_app(self, app) -> None:
         receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
         caps = receive_pack_handler_cls.capabilities()
         self.assertIn(b"side-band-64k", caps)
@@ -147,10 +148,10 @@ class SmartWebSideBand64kNoDoneTestCase(SmartWebTestCase):
     # no-done was introduced in git 1.7.4
     min_git_version = (1, 7, 4)
 
-    def _handlers(self):
+    def _handlers(self) -> None:
         return None  # default handlers include side-band-64k
 
-    def _check_app(self, app):
+    def _check_app(self, app) -> None:
         receive_pack_handler_cls = app.handlers[b"git-receive-pack"]
         caps = receive_pack_handler_cls.capabilities()
         self.assertIn(b"side-band-64k", caps)
@@ -164,33 +165,33 @@ class DumbWebTestCase(WebTests, CompatTestCase):
     def _make_app(self, backend):
         return make_wsgi_chain(backend, dumb=True)
 
-    def test_push_to_dulwich(self):
+    def test_push_to_dulwich(self) -> NoReturn:
         # Note: remove this if dulwich implements dumb web pushing.
         raise SkipTest("Dumb web pushing not supported.")
 
-    def test_push_to_dulwich_remove_branch(self):
+    def test_push_to_dulwich_remove_branch(self) -> NoReturn:
         # Note: remove this if dumb pushing is supported
         raise SkipTest("Dumb web pushing not supported.")
 
-    def test_new_shallow_clone_from_dulwich(self):
+    def test_new_shallow_clone_from_dulwich(self) -> NoReturn:
         # Note: remove this if C git and dulwich implement dumb web shallow
         # clones.
         raise SkipTest("Dumb web shallow cloning not supported.")
 
-    def test_shallow_clone_from_git_is_identical(self):
+    def test_shallow_clone_from_git_is_identical(self) -> NoReturn:
         # Note: remove this if C git and dulwich implement dumb web shallow
         # clones.
         raise SkipTest("Dumb web shallow cloning not supported.")
 
-    def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
+    def test_fetch_same_depth_into_shallow_clone_from_dulwich(self) -> NoReturn:
         # Note: remove this if C git and dulwich implement dumb web shallow
         # clones.
         raise SkipTest("Dumb web shallow cloning not supported.")
 
-    def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
+    def test_fetch_full_depth_into_shallow_clone_from_dulwich(self) -> NoReturn:
         # Note: remove this if C git and dulwich implement dumb web shallow
         # clones.
         raise SkipTest("Dumb web shallow cloning not supported.")
 
-    def test_push_to_dulwich_issue_88_standard(self):
+    def test_push_to_dulwich_issue_88_standard(self) -> NoReturn:
         raise SkipTest("Dumb web pushing not supported.")

+ 8 - 8
tests/compat/utils.py

@@ -73,7 +73,7 @@ def git_version(git_path=_DEFAULT_GIT):
     return tuple(nums[:_VERSION_LEN])
 
 
-def require_git_version(required_version, git_path=_DEFAULT_GIT):
+def require_git_version(required_version, git_path=_DEFAULT_GIT) -> None:
     """Require git version >= version, or skip the calling test.
 
     Args:
@@ -196,7 +196,7 @@ def import_repo_to_dir(name):
     return temp_repo_dir
 
 
-def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT):
+def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT) -> bool:
     """Check for a running TCP daemon.
 
     Defaults to checking 10 times with a delay of 0.1 sec between tries.
@@ -237,18 +237,18 @@ class CompatTestCase(TestCase):
 
     min_git_version: tuple[int, ...] = (1, 5, 0)
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         require_git_version(self.min_git_version)
 
-    def assertObjectStoreEqual(self, store1, store2):
+    def assertObjectStoreEqual(self, store1, store2) -> None:
         self.assertEqual(sorted(set(store1)), sorted(set(store2)))
 
-    def assertReposEqual(self, repo1, repo2):
+    def assertReposEqual(self, repo1, repo2) -> None:
         self.assertEqual(repo1.get_refs(), repo2.get_refs())
         self.assertObjectStoreEqual(repo1.object_store, repo2.object_store)
 
-    def assertReposNotEqual(self, repo1, repo2):
+    def assertReposNotEqual(self, repo1, repo2) -> None:
         refs1 = repo1.get_refs()
         objs1 = set(repo1.object_store)
         refs2 = repo2.get_refs()
@@ -267,7 +267,7 @@ class CompatTestCase(TestCase):
         path = import_repo_to_dir(name)
         repo = Repo(path)
 
-        def cleanup():
+        def cleanup() -> None:
             repo.close()
             rmtree_ro(os.path.dirname(path.rstrip(os.sep)))
 
@@ -277,7 +277,7 @@ class CompatTestCase(TestCase):
 
 if sys.platform == "win32":
 
-    def remove_ro(action, name, exc):
+    def remove_ro(action, name, exc) -> None:
         os.chmod(name, stat.S_IWRITE)
         os.remove(name)
 

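The annotations in tests/compat/utils.py separate side-effect helpers (require_git_version either returns None or skips the calling test) from predicates such as check_for_daemon, which reports its result as a bool, and give the min_git_version class attribute an explicit tuple[int, ...] type. A rough, self-contained sketch of the same three patterns; the names below are illustrative, not dulwich APIs:

    from unittest import SkipTest

    def require_feature(available: bool) -> None:
        # Side-effect helper: either returns None or skips the calling test.
        if not available:
            raise SkipTest("feature not available")

    def daemon_running(attempts: int = 10) -> bool:
        # Predicate helper: callers branch on the returned bool.
        return attempts > 0

    class ExampleCompatCase:
        # Variable-length tuple of ints (built-in generic syntax, Python 3.9+),
        # e.g. (1, 5, 0) or (2, 40, 1).
        min_version: tuple[int, ...] = (1, 5, 0)
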
+ 9 - 8
tests/contrib/test_paramiko_vendor.py

@@ -22,6 +22,7 @@
 import socket
 import threading
 from io import StringIO
+from typing import Optional
 from unittest import skipIf
 
 from .. import TestCase
@@ -41,7 +42,7 @@ else:
             super().__init__(*args, **kwargs)
             self.commands = commands
 
-        def check_channel_exec_request(self, channel, command):
+        def check_channel_exec_request(self, channel, command) -> bool:
             self.commands.append(command)
             return True
 
@@ -61,7 +62,7 @@ else:
                 return paramiko.OPEN_SUCCEEDED
             return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
 
-        def get_allowed_auths(self, username):
+        def get_allowed_auths(self, username) -> str:
             return "password,publickey"
 
 
@@ -127,7 +128,7 @@ WxtWBWHwxfSmqgTXilEA3ALJp0kNolLnEttnhENwJpZHlqtes0ZA4w==
 
 @skipIf(not has_paramiko, "paramiko is not installed")
 class ParamikoSSHVendorTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         import paramiko.transport
 
         # re-enable server functionality for tests
@@ -145,10 +146,10 @@ class ParamikoSSHVendorTests(TestCase):
         self.thread = threading.Thread(target=self._run)
         self.thread.start()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         self.thread.join()
 
-    def _run(self):
+    def _run(self) -> Optional[bool]:
         try:
             conn, addr = self.socket.accept()
         except OSError:
@@ -160,7 +161,7 @@ class ParamikoSSHVendorTests(TestCase):
         server = Server(self.commands)
         self.transport.start_server(server=server)
 
-    def test_run_command_password(self):
+    def test_run_command_password(self) -> None:
         vendor = ParamikoSSHVendor(
             allow_agent=False,
             look_for_keys=False,
@@ -175,7 +176,7 @@ class ParamikoSSHVendorTests(TestCase):
 
         self.assertIn(b"test_run_command_password", self.commands)
 
-    def test_run_command_with_privkey(self):
+    def test_run_command_with_privkey(self) -> None:
         key = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
 
         vendor = ParamikoSSHVendor(
@@ -192,7 +193,7 @@ class ParamikoSSHVendorTests(TestCase):
 
         self.assertIn(b"test_run_command_with_privkey", self.commands)
 
-    def test_run_command_data_transfer(self):
+    def test_run_command_data_transfer(self) -> None:
         vendor = ParamikoSSHVendor(
             allow_agent=False,
             look_for_keys=False,

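The new typing import supports _run, whose Optional[bool] annotation says the method returns a bool on some paths and None on others (for example by falling off the end of the function). An illustrative sketch of that shape, unrelated to the paramiko test itself:

    from typing import Optional

    def parse_flag(value: str) -> Optional[bool]:
        # Returns an explicit bool for recognised values and falls off the
        # end (implicitly returning None) for anything else.
        if value in ("yes", "true", "1"):
            return True
        if value in ("no", "false", "0"):
            return False

On Python 3.10+ the same annotation could be spelled bool | None, but Optional[bool] matches the import added here.
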
+ 4 - 4
tests/contrib/test_release_robot.py

@@ -42,7 +42,7 @@ def gmtime_to_datetime(gmt):
 class TagPatternTests(unittest.TestCase):
     """test tag patterns."""
 
-    def test_tag_pattern(self):
+    def test_tag_pattern(self) -> None:
         """Test tag patterns."""
         test_cases = {
             "0.3": "0.3",
@@ -77,7 +77,7 @@ class GetRecentTagsTest(unittest.TestCase):
     }
 
     @classmethod
-    def setUpClass(cls):
+    def setUpClass(cls) -> None:
         cls.projdir = tempfile.mkdtemp()  # temporary project directory
         cls.repo = Repo.init(cls.projdir)  # test repo
         obj_store = cls.repo.object_store  # test repo object store
@@ -113,11 +113,11 @@ class GetRecentTagsTest(unittest.TestCase):
         cls.repo[b"refs/tags/" + cls.t2.name] = cls.t2.id  # add annotated tag
 
     @classmethod
-    def tearDownClass(cls):
+    def tearDownClass(cls) -> None:
         cls.repo.close()
         shutil.rmtree(cls.projdir)
 
-    def test_get_recent_tags(self):
+    def test_get_recent_tags(self) -> None:
         """Test get recent tags."""
         tags = release_robot.get_recent_tags(self.projdir)  # get test tags
         for tag, metadata in tags:

+ 29 - 29
tests/contrib/test_swift.py

@@ -190,7 +190,7 @@ class FakeSwiftConnector:
         self.chunk_length = 12228
         self.cache_length = 1
 
-    def put_object(self, name, content):
+    def put_object(self, name, content) -> None:
         name = posixpath.join(self.root, name)
         if hasattr(content, "seek"):
             content.seek(0)
@@ -218,7 +218,7 @@ class FakeSwiftConnector:
     def get_container_objects(self):
         return [{"name": k.replace(self.root + "/", "")} for k in self.store]
 
-    def create_root(self):
+    def create_root(self) -> None:
         if self.root in self.store.keys():
             pass
         else:
@@ -233,11 +233,11 @@ class FakeSwiftConnector:
 
 @skipIf(missing_libs, skipmsg)
 class TestSwiftRepo(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
 
-    def test_init(self):
+    def test_init(self) -> None:
         store = {"fakerepo/objects/pack": ""}
         with patch(
             "dulwich.contrib.swift.SwiftConnector",
@@ -246,14 +246,14 @@ class TestSwiftRepo(TestCase):
         ):
             swift.SwiftRepo("fakerepo", conf=self.conf)
 
-    def test_init_no_data(self):
+    def test_init_no_data(self) -> None:
         with patch(
             "dulwich.contrib.swift.SwiftConnector",
             new_callable=create_swift_connector,
         ):
             self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf)
 
-    def test_init_bad_data(self):
+    def test_init_bad_data(self) -> None:
         store = {"fakerepo/.git/objects/pack": ""}
         with patch(
             "dulwich.contrib.swift.SwiftConnector",
@@ -262,7 +262,7 @@ class TestSwiftRepo(TestCase):
         ):
             self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf)
 
-    def test_put_named_file(self):
+    def test_put_named_file(self) -> None:
         store = {"fakerepo/objects/pack": ""}
         with patch(
             "dulwich.contrib.swift.SwiftConnector",
@@ -274,7 +274,7 @@ class TestSwiftRepo(TestCase):
             repo._put_named_file("description", desc)
         self.assertEqual(repo.scon.store["fakerepo/description"], desc)
 
-    def test_init_bare(self):
+    def test_init_bare(self) -> None:
         fsc = FakeSwiftConnector("fakeroot", conf=self.conf)
         with patch(
             "dulwich.contrib.swift.SwiftConnector",
@@ -289,7 +289,7 @@ class TestSwiftRepo(TestCase):
 
 @skipIf(missing_libs, skipmsg)
 class TestSwiftInfoRefsContainer(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         content = (
             b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n"
@@ -300,7 +300,7 @@ class TestSwiftInfoRefsContainer(TestCase):
         self.fsc = FakeSwiftConnector("fakerepo", conf=self.conf)
         self.object_store = {}
 
-    def test_init(self):
+    def test_init(self) -> None:
         """info/refs does not exists."""
         irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
         self.assertEqual(len(irc._refs), 0)
@@ -309,7 +309,7 @@ class TestSwiftInfoRefsContainer(TestCase):
         self.assertIn(b"refs/heads/dev", irc.allkeys())
         self.assertIn(b"refs/heads/master", irc.allkeys())
 
-    def test_set_if_equals(self):
+    def test_set_if_equals(self) -> None:
         self.fsc.store = self.store
         irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
         irc.set_if_equals(
@@ -319,7 +319,7 @@ class TestSwiftInfoRefsContainer(TestCase):
         )
         self.assertEqual(irc[b"refs/heads/dev"], b"1" * 40)
 
-    def test_remove_if_equals(self):
+    def test_remove_if_equals(self) -> None:
         self.fsc.store = self.store
         irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
         irc.remove_if_equals(
@@ -330,13 +330,13 @@ class TestSwiftInfoRefsContainer(TestCase):
 
 @skipIf(missing_libs, skipmsg)
 class TestSwiftConnector(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.conf = swift.load_conf(file=StringIO(config_file % def_config_file))
         with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1):
             self.conn = swift.SwiftConnector("fakerepo", conf=self.conf)
 
-    def test_init_connector(self):
+    def test_init_connector(self) -> None:
         self.assertEqual(self.conn.auth_ver, "1")
         self.assertEqual(self.conn.auth_url, "http://127.0.0.1:8080/auth/v1.0")
         self.assertEqual(self.conn.user, "test:tester")
@@ -363,18 +363,18 @@ class TestSwiftConnector(TestCase):
                 lambda: swift.SwiftConnector("fakerepo", conf=self.conf),
             )
 
-    def test_root_exists(self):
+    def test_root_exists(self) -> None:
         with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
             self.assertEqual(self.conn.test_root_exists(), True)
 
-    def test_root_not_exists(self):
+    def test_root_not_exists(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args: Response(status=404),
         ):
             self.assertEqual(self.conn.test_root_exists(), None)
 
-    def test_create_root(self):
+    def test_create_root(self) -> None:
         with patch(
             "dulwich.contrib.swift.SwiftConnector.test_root_exists",
             lambda *args: None,
@@ -382,7 +382,7 @@ class TestSwiftConnector(TestCase):
             with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
                 self.assertEqual(self.conn.create_root(), None)
 
-    def test_create_root_fails(self):
+    def test_create_root_fails(self) -> None:
         with patch(
             "dulwich.contrib.swift.SwiftConnector.test_root_exists",
             lambda *args: None,
@@ -393,42 +393,42 @@ class TestSwiftConnector(TestCase):
             ):
                 self.assertRaises(swift.SwiftException, self.conn.create_root)
 
-    def test_get_container_objects(self):
+    def test_get_container_objects(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args: Response(content=json.dumps(({"name": "a"}, {"name": "b"}))),
         ):
             self.assertEqual(len(self.conn.get_container_objects()), 2)
 
-    def test_get_container_objects_fails(self):
+    def test_get_container_objects_fails(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args: Response(status=404),
         ):
             self.assertEqual(self.conn.get_container_objects(), None)
 
-    def test_get_object_stat(self):
+    def test_get_object_stat(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args: Response(headers={"content-length": "10"}),
         ):
             self.assertEqual(self.conn.get_object_stat("a")["content-length"], "10")
 
-    def test_get_object_stat_fails(self):
+    def test_get_object_stat_fails(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args: Response(status=404),
         ):
             self.assertEqual(self.conn.get_object_stat("a"), None)
 
-    def test_put_object(self):
+    def test_put_object(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args, **kwargs: Response(),
         ):
             self.assertEqual(self.conn.put_object("a", BytesIO(b"content")), None)
 
-    def test_put_object_fails(self):
+    def test_put_object_fails(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args, **kwargs: Response(status=400),
@@ -438,7 +438,7 @@ class TestSwiftConnector(TestCase):
                 lambda: self.conn.put_object("a", BytesIO(b"content")),
             )
 
-    def test_get_object(self):
+    def test_get_object(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args, **kwargs: Response(content=b"content"),
@@ -450,18 +450,18 @@ class TestSwiftConnector(TestCase):
         ):
             self.assertEqual(self.conn.get_object("a", range="0-6"), b"content")
 
-    def test_get_object_fails(self):
+    def test_get_object_fails(self) -> None:
         with patch(
             "geventhttpclient.HTTPClient.request",
             lambda *args, **kwargs: Response(status=404),
         ):
             self.assertEqual(self.conn.get_object("a"), None)
 
-    def test_del_object(self):
+    def test_del_object(self) -> None:
         with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()):
             self.assertEqual(self.conn.del_object("a"), None)
 
-    def test_del_root(self):
+    def test_del_root(self) -> None:
         with patch(
             "dulwich.contrib.swift.SwiftConnector.del_object",
             lambda *args: None,
@@ -479,7 +479,7 @@ class TestSwiftConnector(TestCase):
 
 @skipIf(missing_libs, skipmsg)
 class SwiftObjectStoreTests(ObjectStoreTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         conf = swift.load_conf(file=StringIO(config_file % def_config_file))
         fsc = FakeSwiftConnector("fakerepo", conf=conf)

+ 15 - 15
tests/contrib/test_swift_smoke.py

@@ -54,11 +54,11 @@ class DulwichServer:
         self.port = port
         self.backend = backend
 
-    def run(self):
+    def run(self) -> None:
         self.server = server.TCPGitServer(self.backend, "localhost", port=self.port)
         self.job = gevent.spawn(self.server.serve_forever)
 
-    def stop(self):
+    def stop(self) -> None:
         self.server.shutdown()
         gevent.joinall((self.job,))
 
@@ -70,7 +70,7 @@ class SwiftSystemBackend(server.Backend):
 
 class SwiftRepoSmokeTest(unittest.TestCase):
     @classmethod
-    def setUpClass(cls):
+    def setUpClass(cls) -> None:
         cls.backend = SwiftSystemBackend()
         cls.port = 9148
         cls.server_address = "localhost"
@@ -80,10 +80,10 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         cls.conf = swift.load_conf()
 
     @classmethod
-    def tearDownClass(cls):
+    def tearDownClass(cls) -> None:
         cls.th_server.stop()
 
-    def setUp(self):
+    def setUp(self) -> None:
         self.scon = swift.SwiftConnector(self.fakerepo, self.conf)
         if self.scon.test_root_exists():
             try:
@@ -94,7 +94,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         if os.path.isdir(self.temp_d):
             shutil.rmtree(self.temp_d)
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         if self.scon.test_root_exists():
             try:
                 self.scon.del_root()
@@ -103,7 +103,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         if os.path.isdir(self.temp_d):
             shutil.rmtree(self.temp_d)
 
-    def test_init_bare(self):
+    def test_init_bare(self) -> None:
         swift.SwiftRepo.init_bare(self.scon, self.conf)
         self.assertTrue(self.scon.test_root_exists())
         obj = self.scon.get_container_objects()
@@ -112,7 +112,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         ]
         self.assertEqual(len(filtered), 2)
 
-    def test_clone_bare(self):
+    def test_clone_bare(self) -> None:
         local_repo = repo.Repo.init(self.temp_d, mkdir=True)
         swift.SwiftRepo.init_bare(self.scon, self.conf)
         tcp_client = client.TCPGitClient(self.server_address, port=self.port)
@@ -120,7 +120,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         # The remote repo is empty (no refs retrieved)
         self.assertEqual(remote_refs, None)
 
-    def test_push_commit(self):
+    def test_push_commit(self) -> None:
         def determine_wants(*args, **kwargs):
             return {"refs/heads/master": local_repo.refs["HEAD"]}
 
@@ -137,7 +137,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         remote_sha = swift_repo.refs.read_loose_ref("refs/heads/master")
         self.assertEqual(sha, remote_sha)
 
-    def test_push_branch(self):
+    def test_push_branch(self) -> None:
         def determine_wants(*args, **kwargs):
             return {"refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"]}
 
@@ -154,7 +154,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         remote_sha = swift_repo.refs.read_loose_ref("refs/heads/mybranch")
         self.assertEqual(sha, remote_sha)
 
-    def test_push_multiple_branch(self):
+    def test_push_multiple_branch(self) -> None:
         def determine_wants(*args, **kwargs):
             return {
                 "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"],
@@ -182,7 +182,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
             remote_shas[branch] = swift_repo.refs.read_loose_ref(f"refs/heads/{branch}")
         self.assertDictEqual(local_shas, remote_shas)
 
-    def test_push_data_branch(self):
+    def test_push_data_branch(self) -> None:
         def determine_wants(*args, **kwargs):
             return {"refs/heads/master": local_repo.refs["HEAD"]}
 
@@ -216,7 +216,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         # Tree
         self.assertEqual(objs_[0][0], 2)
 
-    def test_clone_then_push_data(self):
+    def test_clone_then_push_data(self) -> None:
         self.test_push_data_branch()
         shutil.rmtree(self.temp_d)
         local_repo = repo.Repo.init(self.temp_d, mkdir=True)
@@ -250,7 +250,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
             "/fakerepo", determine_wants, local_repo.generate_pack_data
         )
 
-    def test_push_remove_branch(self):
+    def test_push_remove_branch(self) -> None:
         def determine_wants(*args, **kwargs):
             return {
                 "refs/heads/pullr-108": objects.ZERO_SHA,
@@ -267,7 +267,7 @@ class SwiftRepoSmokeTest(unittest.TestCase):
         swift_repo = swift.SwiftRepo("fakerepo", self.conf)
         self.assertNotIn("refs/heads/pullr-108", swift_repo.refs.allkeys())
 
-    def test_push_annotated_tag(self):
+    def test_push_annotated_tag(self) -> None:
         def determine_wants(*args, **kwargs):
             return {
                 "refs/heads/master": local_repo.refs["HEAD"],

+ 6 - 6
tests/test_archive.py

@@ -39,7 +39,7 @@ except ImportError:
 
 
 class ArchiveTests(TestCase):
-    def test_empty(self):
+    def test_empty(self) -> None:
         store = MemoryObjectStore()
         c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]])
         tree = store[c3.tree]
@@ -59,13 +59,13 @@ class ArchiveTests(TestCase):
         stream = b"".join(tar_stream(store, t1, *tar_stream_args, **tar_stream_kwargs))
         return BytesIO(stream)
 
-    def test_simple(self):
+    def test_simple(self) -> None:
         stream = self._get_example_tar_stream(mtime=0)
         tf = tarfile.TarFile(fileobj=stream)
         self.addCleanup(tf.close)
         self.assertEqual(["somename"], tf.getnames())
 
-    def test_unicode(self):
+    def test_unicode(self) -> None:
         store = MemoryObjectStore()
         b1 = Blob.from_string(b"somedata")
         store.add_object(b1)
@@ -77,19 +77,19 @@ class ArchiveTests(TestCase):
         self.addCleanup(tf.close)
         self.assertEqual(["ő"], tf.getnames())
 
-    def test_prefix(self):
+    def test_prefix(self) -> None:
         stream = self._get_example_tar_stream(mtime=0, prefix=b"blah")
         tf = tarfile.TarFile(fileobj=stream)
         self.addCleanup(tf.close)
         self.assertEqual(["blah/somename"], tf.getnames())
 
-    def test_gzip_mtime(self):
+    def test_gzip_mtime(self) -> None:
         stream = self._get_example_tar_stream(mtime=1234, format="gz")
         expected_mtime = struct.pack("<L", 1234)
         self.assertEqual(stream.getvalue()[4:8], expected_mtime)
 
     @skipUnless(patch, "Required mock.patch")
-    def test_same_file(self):
+    def test_same_file(self) -> None:
         contents = [None, None]
         for format in ["", "gz", "bz2"]:
             for i in [0, 1]:

+ 5 - 5
tests/test_blackbox.py

@@ -31,19 +31,19 @@ from . import BlackboxTestCase
 class GitReceivePackTests(BlackboxTestCase):
     """Blackbox tests for dul-receive-pack."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.path = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.path)
         self.repo = Repo.init(self.path)
 
-    def test_basic(self):
+    def test_basic(self) -> None:
         process = self.run_command("dul-receive-pack", [self.path])
         (stdout, stderr) = process.communicate(b"0000")
         self.assertEqual(b"0000", stdout[-4:])
         self.assertEqual(0, process.returncode)
 
-    def test_missing_arg(self):
+    def test_missing_arg(self) -> None:
         process = self.run_command("dul-receive-pack", [])
         (stdout, stderr) = process.communicate()
         self.assertEqual(
@@ -56,13 +56,13 @@ class GitReceivePackTests(BlackboxTestCase):
 class GitUploadPackTests(BlackboxTestCase):
     """Blackbox tests for dul-upload-pack."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.path = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.path)
         self.repo = Repo.init(self.path)
 
-    def test_missing_arg(self):
+    def test_missing_arg(self) -> None:
         process = self.run_command("dul-upload-pack", [])
         (stdout, stderr) = process.communicate()
         self.assertEqual(

+ 1 - 1
tests/test_bundle.py

@@ -31,7 +31,7 @@ from . import TestCase
 
 
 class BundleTests(TestCase):
-    def test_roundtrip_bundle(self):
+    def test_roundtrip_bundle(self) -> None:
         origbundle = Bundle()
         origbundle.version = 3
         origbundle.capabilities = {"foo": None}

File diff suppressed because it is too large
+ 121 - 120
tests/test_client.py


+ 63 - 63
tests/test_config.py

@@ -46,13 +46,13 @@ class ConfigFileTests(TestCase):
     def from_file(self, text):
         return ConfigFile.from_file(BytesIO(text))
 
-    def test_empty(self):
+    def test_empty(self) -> None:
         ConfigFile()
 
-    def test_eq(self):
+    def test_eq(self) -> None:
         self.assertEqual(ConfigFile(), ConfigFile())
 
-    def test_default_config(self):
+    def test_default_config(self) -> None:
         cf = self.from_file(
             b"""[core]
 \trepositoryformatversion = 0
@@ -75,127 +75,127 @@ class ConfigFileTests(TestCase):
             cf,
         )
 
-    def test_from_file_empty(self):
+    def test_from_file_empty(self) -> None:
         cf = self.from_file(b"")
         self.assertEqual(ConfigFile(), cf)
 
-    def test_empty_line_before_section(self):
+    def test_empty_line_before_section(self) -> None:
         cf = self.from_file(b"\n[section]\n")
         self.assertEqual(ConfigFile({(b"section",): {}}), cf)
 
-    def test_comment_before_section(self):
+    def test_comment_before_section(self) -> None:
         cf = self.from_file(b"# foo\n[section]\n")
         self.assertEqual(ConfigFile({(b"section",): {}}), cf)
 
-    def test_comment_after_section(self):
+    def test_comment_after_section(self) -> None:
         cf = self.from_file(b"[section] # foo\n")
         self.assertEqual(ConfigFile({(b"section",): {}}), cf)
 
-    def test_comment_after_variable(self):
+    def test_comment_after_variable(self) -> None:
         cf = self.from_file(b"[section]\nbar= foo # a comment\n")
         self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo"}}), cf)
 
-    def test_comment_character_within_value_string(self):
+    def test_comment_character_within_value_string(self) -> None:
         cf = self.from_file(b'[section]\nbar= "foo#bar"\n')
         self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo#bar"}}), cf)
 
-    def test_comment_character_within_section_string(self):
+    def test_comment_character_within_section_string(self) -> None:
         cf = self.from_file(b'[branch "foo#bar"] # a comment\nbar= foo\n')
         self.assertEqual(ConfigFile({(b"branch", b"foo#bar"): {b"bar": b"foo"}}), cf)
 
-    def test_closing_bracket_within_section_string(self):
+    def test_closing_bracket_within_section_string(self) -> None:
         cf = self.from_file(b'[branch "foo]bar"] # a comment\nbar= foo\n')
         self.assertEqual(ConfigFile({(b"branch", b"foo]bar"): {b"bar": b"foo"}}), cf)
 
-    def test_from_file_section(self):
+    def test_from_file_section(self) -> None:
         cf = self.from_file(b"[core]\nfoo = bar\n")
         self.assertEqual(b"bar", cf.get((b"core",), b"foo"))
         self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo"))
 
-    def test_from_file_multiple(self):
+    def test_from_file_multiple(self) -> None:
         cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
         self.assertEqual([b"bar", b"blah"], list(cf.get_multivar((b"core",), b"foo")))
         self.assertEqual([], list(cf.get_multivar((b"core",), b"blah")))
 
-    def test_from_file_utf8_bom(self):
+    def test_from_file_utf8_bom(self) -> None:
         text = "[core]\nfoo = b\u00e4r\n".encode("utf-8-sig")
         cf = self.from_file(text)
         self.assertEqual(b"b\xc3\xa4r", cf.get((b"core",), b"foo"))
 
-    def test_from_file_section_case_insensitive_lower(self):
+    def test_from_file_section_case_insensitive_lower(self) -> None:
         cf = self.from_file(b"[cOre]\nfOo = bar\n")
         self.assertEqual(b"bar", cf.get((b"core",), b"foo"))
         self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo"))
 
-    def test_from_file_section_case_insensitive_mixed(self):
+    def test_from_file_section_case_insensitive_mixed(self) -> None:
         cf = self.from_file(b"[cOre]\nfOo = bar\n")
         self.assertEqual(b"bar", cf.get((b"core",), b"fOo"))
         self.assertEqual(b"bar", cf.get((b"cOre", b"fOo"), b"fOo"))
 
-    def test_from_file_with_mixed_quoted(self):
+    def test_from_file_with_mixed_quoted(self) -> None:
         cf = self.from_file(b'[core]\nfoo = "bar"la\n')
         self.assertEqual(b"barla", cf.get((b"core",), b"foo"))
 
-    def test_from_file_section_with_open_brackets(self):
+    def test_from_file_section_with_open_brackets(self) -> None:
         self.assertRaises(ValueError, self.from_file, b"[core\nfoo = bar\n")
 
-    def test_from_file_value_with_open_quoted(self):
+    def test_from_file_value_with_open_quoted(self) -> None:
         self.assertRaises(ValueError, self.from_file, b'[core]\nfoo = "bar\n')
 
-    def test_from_file_with_quotes(self):
+    def test_from_file_with_quotes(self) -> None:
         cf = self.from_file(b"[core]\n" b'foo = " bar"\n')
         self.assertEqual(b" bar", cf.get((b"core",), b"foo"))
 
-    def test_from_file_with_interrupted_line(self):
+    def test_from_file_with_interrupted_line(self) -> None:
         cf = self.from_file(b"[core]\n" b"foo = bar\\\n" b" la\n")
         self.assertEqual(b"barla", cf.get((b"core",), b"foo"))
 
-    def test_from_file_with_boolean_setting(self):
+    def test_from_file_with_boolean_setting(self) -> None:
         cf = self.from_file(b"[core]\n" b"foo\n")
         self.assertEqual(b"true", cf.get((b"core",), b"foo"))
 
-    def test_from_file_subsection(self):
+    def test_from_file_subsection(self) -> None:
         cf = self.from_file(b'[branch "foo"]\nfoo = bar\n')
         self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))
 
-    def test_from_file_subsection_invalid(self):
+    def test_from_file_subsection_invalid(self) -> None:
         self.assertRaises(ValueError, self.from_file, b'[branch "foo]\nfoo = bar\n')
 
-    def test_from_file_subsection_not_quoted(self):
+    def test_from_file_subsection_not_quoted(self) -> None:
         cf = self.from_file(b"[branch.foo]\nfoo = bar\n")
         self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))
 
-    def test_write_preserve_multivar(self):
+    def test_write_preserve_multivar(self) -> None:
         cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
         f = BytesIO()
         cf.write_to_file(f)
         self.assertEqual(b"[core]\n\tfoo = bar\n\tfoo = blah\n", f.getvalue())
 
-    def test_write_to_file_empty(self):
+    def test_write_to_file_empty(self) -> None:
         c = ConfigFile()
         f = BytesIO()
         c.write_to_file(f)
         self.assertEqual(b"", f.getvalue())
 
-    def test_write_to_file_section(self):
+    def test_write_to_file_section(self) -> None:
         c = ConfigFile()
         c.set((b"core",), b"foo", b"bar")
         f = BytesIO()
         c.write_to_file(f)
         self.assertEqual(b"[core]\n\tfoo = bar\n", f.getvalue())
 
-    def test_write_to_file_subsection(self):
+    def test_write_to_file_subsection(self) -> None:
         c = ConfigFile()
         c.set((b"branch", b"blie"), b"foo", b"bar")
         f = BytesIO()
         c.write_to_file(f)
         self.assertEqual(b'[branch "blie"]\n\tfoo = bar\n', f.getvalue())
 
-    def test_same_line(self):
+    def test_same_line(self) -> None:
         cf = self.from_file(b"[branch.foo] foo = bar\n")
         self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo"))
 
-    def test_quoted_newlines_windows(self):
+    def test_quoted_newlines_windows(self) -> None:
         cf = self.from_file(
             b"[alias]\r\n"
             b"c = '!f() { \\\r\n"
@@ -209,7 +209,7 @@ class ConfigFileTests(TestCase):
             cf.get((b"alias",), b"c"),
         )
 
-    def test_quoted(self):
+    def test_quoted(self) -> None:
         cf = self.from_file(
             b"""[gui]
 \tfontdiff = -family \\\"Ubuntu Mono\\\" -size 11 -overstrike 0
@@ -226,7 +226,7 @@ class ConfigFileTests(TestCase):
             cf,
         )
 
-    def test_quoted_multiline(self):
+    def test_quoted_multiline(self) -> None:
         cf = self.from_file(
             b"""[alias]
 who = \"!who() {\\
@@ -249,7 +249,7 @@ who\"
             cf,
         )
 
-    def test_set_hash_gets_quoted(self):
+    def test_set_hash_gets_quoted(self) -> None:
         c = ConfigFile()
         c.set(b"xandikos", b"color", b"#665544")
         f = BytesIO()
@@ -258,7 +258,7 @@ who\"
 
 
 class ConfigDictTests(TestCase):
-    def test_get_set(self):
+    def test_get_set(self) -> None:
         cd = ConfigDict()
         self.assertRaises(KeyError, cd.get, b"foo", b"core")
         cd.set((b"core",), b"foo", b"bla")
@@ -266,7 +266,7 @@ class ConfigDictTests(TestCase):
         cd.set((b"core",), b"foo", b"bloe")
         self.assertEqual(b"bloe", cd.get((b"core",), b"foo"))
 
-    def test_get_boolean(self):
+    def test_get_boolean(self) -> None:
         cd = ConfigDict()
         cd.set((b"core",), b"foo", b"true")
         self.assertTrue(cd.get_boolean((b"core",), b"foo"))
@@ -275,7 +275,7 @@ class ConfigDictTests(TestCase):
         cd.set((b"core",), b"foo", b"invalid")
         self.assertRaises(ValueError, cd.get_boolean, (b"core",), b"foo")
 
-    def test_dict(self):
+    def test_dict(self) -> None:
         cd = ConfigDict()
         cd.set((b"core",), b"foo", b"bla")
         cd.set((b"core2",), b"foo", b"bloe")
@@ -286,20 +286,20 @@ class ConfigDictTests(TestCase):
         cd[b"a"] = b"b"
         self.assertEqual(cd[b"a"], b"b")
 
-    def test_items(self):
+    def test_items(self) -> None:
         cd = ConfigDict()
         cd.set((b"core",), b"foo", b"bla")
         cd.set((b"core2",), b"foo", b"bloe")
 
         self.assertEqual([(b"foo", b"bla")], list(cd.items((b"core",))))
 
-    def test_items_nonexistant(self):
+    def test_items_nonexistant(self) -> None:
         cd = ConfigDict()
         cd.set((b"core2",), b"foo", b"bloe")
 
         self.assertEqual([], list(cd.items((b"core",))))
 
-    def test_sections(self):
+    def test_sections(self) -> None:
         cd = ConfigDict()
         cd.set((b"core2",), b"foo", b"bloe")
 
@@ -307,11 +307,11 @@ class ConfigDictTests(TestCase):
 
 
 class StackedConfigTests(TestCase):
-    def test_default_backends(self):
+    def test_default_backends(self) -> None:
         StackedConfig.default_backends()
 
     @skipIf(sys.platform != "win32", "Windows specific config location.")
-    def test_windows_config_from_path(self):
+    def test_windows_config_from_path(self) -> None:
         from dulwich.config import get_win_system_paths
 
         install_dir = os.path.join("C:", "foo", "Git")
@@ -327,7 +327,7 @@ class StackedConfigTests(TestCase):
         )
 
     @skipIf(sys.platform != "win32", "Windows specific config location.")
-    def test_windows_config_from_reg(self):
+    def test_windows_config_from_reg(self) -> None:
         import winreg
 
         from dulwich.config import get_win_system_paths
@@ -350,66 +350,66 @@ class StackedConfigTests(TestCase):
 
 
 class EscapeValueTests(TestCase):
-    def test_nothing(self):
+    def test_nothing(self) -> None:
         self.assertEqual(b"foo", _escape_value(b"foo"))
 
-    def test_backslash(self):
+    def test_backslash(self) -> None:
         self.assertEqual(b"foo\\\\", _escape_value(b"foo\\"))
 
-    def test_newline(self):
+    def test_newline(self) -> None:
         self.assertEqual(b"foo\\n", _escape_value(b"foo\n"))
 
 
 class FormatStringTests(TestCase):
-    def test_quoted(self):
+    def test_quoted(self) -> None:
         self.assertEqual(b'" foo"', _format_string(b" foo"))
         self.assertEqual(b'"\\tfoo"', _format_string(b"\tfoo"))
 
-    def test_not_quoted(self):
+    def test_not_quoted(self) -> None:
         self.assertEqual(b"foo", _format_string(b"foo"))
         self.assertEqual(b"foo bar", _format_string(b"foo bar"))
 
 
 class ParseStringTests(TestCase):
-    def test_quoted(self):
+    def test_quoted(self) -> None:
         self.assertEqual(b" foo", _parse_string(b'" foo"'))
         self.assertEqual(b"\tfoo", _parse_string(b'"\\tfoo"'))
 
-    def test_not_quoted(self):
+    def test_not_quoted(self) -> None:
         self.assertEqual(b"foo", _parse_string(b"foo"))
         self.assertEqual(b"foo bar", _parse_string(b"foo bar"))
 
-    def test_nothing(self):
+    def test_nothing(self) -> None:
         self.assertEqual(b"", _parse_string(b""))
 
-    def test_tab(self):
+    def test_tab(self) -> None:
         self.assertEqual(b"\tbar\t", _parse_string(b"\\tbar\\t"))
 
-    def test_newline(self):
+    def test_newline(self) -> None:
         self.assertEqual(b"\nbar\t", _parse_string(b"\\nbar\\t\t"))
 
-    def test_quote(self):
+    def test_quote(self) -> None:
         self.assertEqual(b'"foo"', _parse_string(b'\\"foo\\"'))
 
 
 class CheckVariableNameTests(TestCase):
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         self.assertFalse(_check_variable_name(b"foo "))
         self.assertFalse(_check_variable_name(b"bar,bar"))
         self.assertFalse(_check_variable_name(b"bar.bar"))
 
-    def test_valid(self):
+    def test_valid(self) -> None:
         self.assertTrue(_check_variable_name(b"FOO"))
         self.assertTrue(_check_variable_name(b"foo"))
         self.assertTrue(_check_variable_name(b"foo-bar"))
 
 
 class CheckSectionNameTests(TestCase):
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         self.assertFalse(_check_section_name(b"foo "))
         self.assertFalse(_check_section_name(b"bar,bar"))
 
-    def test_valid(self):
+    def test_valid(self) -> None:
         self.assertTrue(_check_section_name(b"FOO"))
         self.assertTrue(_check_section_name(b"foo"))
         self.assertTrue(_check_section_name(b"foo-bar"))
@@ -417,7 +417,7 @@ class CheckSectionNameTests(TestCase):
 
 
 class SubmodulesTests(TestCase):
-    def testSubmodules(self):
+    def testSubmodules(self) -> None:
         cf = ConfigFile.from_file(
             BytesIO(
                 b"""\
@@ -439,7 +439,7 @@ class SubmodulesTests(TestCase):
             got,
         )
 
-    def testMalformedSubmodules(self):
+    def testMalformedSubmodules(self) -> None:
         cf = ConfigFile.from_file(
             BytesIO(
                 b"""\
@@ -466,20 +466,20 @@ class SubmodulesTests(TestCase):
 
 
 class ApplyInsteadOfTests(TestCase):
-    def test_none(self):
+    def test_none(self) -> None:
         config = ConfigDict()
         self.assertEqual(
             "https://example.com/", apply_instead_of(config, "https://example.com/")
         )
 
-    def test_apply(self):
+    def test_apply(self) -> None:
         config = ConfigDict()
         config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
         self.assertEqual(
             "https://samba.org/", apply_instead_of(config, "https://example.com/")
         )
 
-    def test_apply_multiple(self):
+    def test_apply_multiple(self) -> None:
         config = ConfigDict()
         config.set(("url", "https://samba.org/"), "insteadOf", "https://blah.com/")
         config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")

+ 3 - 3
tests/test_credentials.py

@@ -32,7 +32,7 @@ from . import TestCase
 
 
 class TestCredentialHelpersUtils(TestCase):
-    def test_match_urls(self):
+    def test_match_urls(self) -> None:
         url = urlparse("https://github.com/jelmer/dulwich/")
         url_1 = urlparse("https://github.com/jelmer/dulwich")
         url_2 = urlparse("https://github.com/jelmer")
@@ -44,7 +44,7 @@ class TestCredentialHelpersUtils(TestCase):
         non_matching = urlparse("https://git.sr.ht/")
         self.assertFalse(match_urls(url, non_matching))
 
-    def test_match_partial_url(self):
+    def test_match_partial_url(self) -> None:
         url = urlparse("https://github.com/jelmer/dulwich/")
         self.assertTrue(match_partial_url(url, "github.com"))
         self.assertFalse(match_partial_url(url, "github.com/jelmer/"))
@@ -52,7 +52,7 @@ class TestCredentialHelpersUtils(TestCase):
         self.assertFalse(match_partial_url(url, "github.com/jel"))
         self.assertFalse(match_partial_url(url, "github.com/jel/"))
 
-    def test_urlmatch_credential_sections(self):
+    def test_urlmatch_credential_sections(self) -> None:
         config = ConfigDict()
         config.set((b"credential", "https://github.com"), b"helper", "foo")
         config.set((b"credential", "git.sr.ht"), b"helper", "foo")

+ 68 - 68
tests/test_diff_tree.py

@@ -49,7 +49,7 @@ from . import TestCase
 
 
 class DiffTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.empty_tree = self.commit_tree([])
@@ -72,16 +72,16 @@ class DiffTestCase(TestCase):
 
 
 class TreeChangesTest(DiffTestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.detector = RenameDetector(self.store)
 
-    def assertMergeFails(self, merge_entries, name, mode, sha):
+    def assertMergeFails(self, merge_entries, name, mode, sha) -> None:
         t = Tree()
         t[name] = (mode, sha)
         self.assertRaises((TypeError, ValueError), merge_entries, "", t, t)
 
-    def _do_test_merge_entries(self, merge_entries):
+    def _do_test_merge_entries(self, merge_entries) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_b1 = make_object(Blob, data=b"b1")
@@ -140,7 +140,7 @@ class TreeChangesTest(DiffTestCase):
         _do_test_merge_entries, _merge_entries
     )
 
-    def _do_test_is_tree(self, is_tree):
+    def _do_test_is_tree(self, is_tree) -> None:
         self.assertFalse(is_tree(TreeEntry(None, None, None)))
         self.assertFalse(is_tree(TreeEntry(b"a", 0o100644, b"a" * 40)))
         self.assertFalse(is_tree(TreeEntry(b"a", 0o100755, b"a" * 40)))
@@ -152,16 +152,16 @@ class TreeChangesTest(DiffTestCase):
     test_is_tree = functest_builder(_do_test_is_tree, _is_tree_py)
     test_is_tree_extension = ext_functest_builder(_do_test_is_tree, _is_tree)
 
-    def assertChangesEqual(self, expected, tree1, tree2, **kwargs):
+    def assertChangesEqual(self, expected, tree1, tree2, **kwargs) -> None:
         actual = list(tree_changes(self.store, tree1.id, tree2.id, **kwargs))
         self.assertEqual(expected, actual)
 
     # For brevity, the following tests use tuples instead of TreeEntry objects.
 
-    def test_tree_changes_empty(self):
+    def test_tree_changes_empty(self) -> None:
         self.assertChangesEqual([], self.empty_tree, self.empty_tree)
 
-    def test_tree_changes_no_changes(self):
+    def test_tree_changes_no_changes(self) -> None:
         blob = make_object(Blob, data=b"blob")
         tree = self.commit_tree([(b"a", blob), (b"b/c", blob)])
         self.assertChangesEqual([], self.empty_tree, self.empty_tree)
@@ -180,7 +180,7 @@ class TreeChangesTest(DiffTestCase):
             want_unchanged=True,
         )
 
-    def test_tree_changes_add_delete(self):
+    def test_tree_changes_add_delete(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
         tree = self.commit_tree([(b"a", blob_a, 0o100644), (b"x/b", blob_b, 0o100755)])
@@ -201,7 +201,7 @@ class TreeChangesTest(DiffTestCase):
             self.empty_tree,
         )
 
-    def test_tree_changes_modify_contents(self):
+    def test_tree_changes_modify_contents(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_a2 = make_object(Blob, data=b"a2")
         tree1 = self.commit_tree([(b"a", blob_a1)])
@@ -212,7 +212,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_modify_mode(self):
+    def test_tree_changes_modify_mode(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         tree1 = self.commit_tree([(b"a", blob_a, 0o100644)])
         tree2 = self.commit_tree([(b"a", blob_a, 0o100755)])
@@ -228,7 +228,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_change_type(self):
+    def test_tree_changes_change_type(self) -> None:
         blob_a1 = make_object(Blob, data=b"a")
         blob_a2 = make_object(Blob, data=b"/foo/bar")
         tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)])
@@ -242,7 +242,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_change_type_same(self):
+    def test_tree_changes_change_type_same(self) -> None:
         blob_a1 = make_object(Blob, data=b"a")
         blob_a2 = make_object(Blob, data=b"/foo/bar")
         tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)])
@@ -260,7 +260,7 @@ class TreeChangesTest(DiffTestCase):
             change_type_same=True,
         )
 
-    def test_tree_changes_to_tree(self):
+    def test_tree_changes_to_tree(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_x = make_object(Blob, data=b"x")
         tree1 = self.commit_tree([(b"a", blob_a)])
@@ -274,7 +274,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_complex(self):
+    def test_tree_changes_complex(self) -> None:
         blob_a_1 = make_object(Blob, data=b"a1_1")
         blob_bx1_1 = make_object(Blob, data=b"bx1_1")
         blob_bx2_1 = make_object(Blob, data=b"bx2_1")
@@ -320,7 +320,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_name_order(self):
+    def test_tree_changes_name_order(self) -> None:
         blob = make_object(Blob, data=b"a")
         tree1 = self.commit_tree([(b"a", blob), (b"a.", blob), (b"a..", blob)])
         # Tree order is the reverse of this, so if we used tree order, 'a..'
@@ -338,7 +338,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_prune(self):
+    def test_tree_changes_prune(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_x = make_object(Blob, data=b"x")
@@ -356,7 +356,7 @@ class TreeChangesTest(DiffTestCase):
             tree2,
         )
 
-    def test_tree_changes_rename_detector(self):
+    def test_tree_changes_rename_detector(self) -> None:
         blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob_a2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         blob_b = make_object(Blob, data=b"b")
@@ -407,7 +407,7 @@ class TreeChangesTest(DiffTestCase):
             want_unchanged=True,
         )
 
-    def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs):
+    def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs) -> None:
         parent_tree_ids = [t.id for t in parent_trees]
         actual = list(
             tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs)
@@ -421,14 +421,14 @@ class TreeChangesTest(DiffTestCase):
         )
         self.assertEqual(expected, actual)
 
-    def test_tree_changes_for_merge_add_no_conflict(self):
+    def test_tree_changes_for_merge_add_no_conflict(self) -> None:
         blob = make_object(Blob, data=b"blob")
         parent1 = self.commit_tree([])
         parent2 = merge = self.commit_tree([(b"a", blob)])
         self.assertChangesForMergeEqual([], [parent1, parent2], merge)
         self.assertChangesForMergeEqual([], [parent2, parent2], merge)
 
-    def test_tree_changes_for_merge_add_modify_conflict(self):
+    def test_tree_changes_for_merge_add_modify_conflict(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         parent1 = self.commit_tree([])
@@ -445,7 +445,7 @@ class TreeChangesTest(DiffTestCase):
             merge,
         )
 
-    def test_tree_changes_for_merge_modify_modify_conflict(self):
+    def test_tree_changes_for_merge_modify_modify_conflict(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         blob3 = make_object(Blob, data=b"3")
@@ -463,14 +463,14 @@ class TreeChangesTest(DiffTestCase):
             merge,
         )
 
-    def test_tree_changes_for_merge_modify_no_conflict(self):
+    def test_tree_changes_for_merge_modify_no_conflict(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         parent1 = self.commit_tree([(b"a", blob1)])
         parent2 = merge = self.commit_tree([(b"a", blob2)])
         self.assertChangesForMergeEqual([], [parent1, parent2], merge)
 
-    def test_tree_changes_for_merge_delete_delete_conflict(self):
+    def test_tree_changes_for_merge_delete_delete_conflict(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         parent1 = self.commit_tree([(b"a", blob1)])
@@ -487,14 +487,14 @@ class TreeChangesTest(DiffTestCase):
             merge,
         )
 
-    def test_tree_changes_for_merge_delete_no_conflict(self):
+    def test_tree_changes_for_merge_delete_no_conflict(self) -> None:
         blob = make_object(Blob, data=b"blob")
         has = self.commit_tree([(b"a", blob)])
         doesnt_have = self.commit_tree([])
         self.assertChangesForMergeEqual([], [has, has], doesnt_have)
         self.assertChangesForMergeEqual([], [has, doesnt_have], doesnt_have)
 
-    def test_tree_changes_for_merge_octopus_no_conflict(self):
+    def test_tree_changes_for_merge_octopus_no_conflict(self) -> None:
         r = list(range(5))
         blobs = [make_object(Blob, data=bytes(i)) for i in r]
         parents = [self.commit_tree([(b"a", blobs[i])]) for i in r]
@@ -502,7 +502,7 @@ class TreeChangesTest(DiffTestCase):
             # Take the SHA from each of the parents.
             self.assertChangesForMergeEqual([], parents, parents[i])
 
-    def test_tree_changes_for_merge_octopus_modify_conflict(self):
+    def test_tree_changes_for_merge_octopus_modify_conflict(self) -> None:
         # Because the octopus merge strategy is limited, I doubt it's possible
         # to create this with the git command line. But the output is well-
         # defined, so test it anyway.
@@ -523,7 +523,7 @@ class TreeChangesTest(DiffTestCase):
         ]
         self.assertChangesForMergeEqual(expected, parents, merge)
 
-    def test_tree_changes_for_merge_octopus_delete(self):
+    def test_tree_changes_for_merge_octopus_delete(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"3")
         parent1 = self.commit_tree([(b"a", blob1)])
@@ -544,7 +544,7 @@ class TreeChangesTest(DiffTestCase):
             merge,
         )
 
-    def test_tree_changes_for_merge_add_add_same_conflict(self):
+    def test_tree_changes_for_merge_add_add_same_conflict(self) -> None:
         blob = make_object(Blob, data=b"a\nb\nc\nd\n")
         parent1 = self.commit_tree([(b"a", blob)])
         parent2 = self.commit_tree([])
@@ -552,7 +552,7 @@ class TreeChangesTest(DiffTestCase):
         add = TreeChange.add((b"b", F, blob.id))
         self.assertChangesForMergeEqual([[add, add]], [parent1, parent2], merge)
 
-    def test_tree_changes_for_merge_add_exact_rename_conflict(self):
+    def test_tree_changes_for_merge_add_exact_rename_conflict(self) -> None:
         blob = make_object(Blob, data=b"a\nb\nc\nd\n")
         parent1 = self.commit_tree([(b"a", blob)])
         parent2 = self.commit_tree([])
@@ -569,7 +569,7 @@ class TreeChangesTest(DiffTestCase):
             rename_detector=self.detector,
         )
 
-    def test_tree_changes_for_merge_add_content_rename_conflict(self):
+    def test_tree_changes_for_merge_add_content_rename_conflict(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         parent1 = self.commit_tree([(b"a", blob1)])
@@ -587,7 +587,7 @@ class TreeChangesTest(DiffTestCase):
             rename_detector=self.detector,
         )
 
-    def test_tree_changes_for_merge_modify_rename_conflict(self):
+    def test_tree_changes_for_merge_modify_rename_conflict(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         parent1 = self.commit_tree([(b"a", blob1)])
@@ -607,7 +607,7 @@ class TreeChangesTest(DiffTestCase):
 
 
 class RenameDetectionTest(DiffTestCase):
-    def _do_test_count_blocks(self, count_blocks):
+    def _do_test_count_blocks(self, count_blocks) -> None:
         blob = make_object(Blob, data=b"a\nb\na\n")
         self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, count_blocks(blob))
 
@@ -616,7 +616,7 @@ class RenameDetectionTest(DiffTestCase):
         _do_test_count_blocks, _count_blocks
     )
 
-    def _do_test_count_blocks_no_newline(self, count_blocks):
+    def _do_test_count_blocks_no_newline(self, count_blocks) -> None:
         blob = make_object(Blob, data=b"a\na")
         self.assertBlockCountEqual({b"a\n": 2, b"a": 1}, _count_blocks(blob))
 
@@ -627,13 +627,13 @@ class RenameDetectionTest(DiffTestCase):
         _do_test_count_blocks_no_newline, _count_blocks
     )
 
-    def assertBlockCountEqual(self, expected, got):
+    def assertBlockCountEqual(self, expected, got) -> None:
         self.assertEqual(
             {(hash(block) & 0xFFFFFFFF): count for (block, count) in expected.items()},
             {(block & 0xFFFFFFFF): count for (block, count) in got.items()},
         )
 
-    def _do_test_count_blocks_chunks(self, count_blocks):
+    def _do_test_count_blocks_chunks(self, count_blocks) -> None:
         blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"])
         self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, _count_blocks(blob))
 
@@ -644,7 +644,7 @@ class RenameDetectionTest(DiffTestCase):
         _do_test_count_blocks_chunks, _count_blocks
     )
 
-    def _do_test_count_blocks_long_lines(self, count_blocks):
+    def _do_test_count_blocks_long_lines(self, count_blocks) -> None:
         a = b"a" * 64
         data = a + b"xxx\ny\n" + a + b"zzz\n"
         blob = make_object(Blob, data=data)
@@ -660,11 +660,11 @@ class RenameDetectionTest(DiffTestCase):
         _do_test_count_blocks_long_lines, _count_blocks
     )
 
-    def assertSimilar(self, expected_score, blob1, blob2):
+    def assertSimilar(self, expected_score, blob1, blob2) -> None:
         self.assertEqual(expected_score, _similarity_score(blob1, blob2))
         self.assertEqual(expected_score, _similarity_score(blob2, blob1))
 
-    def test_similarity_score(self):
+    def test_similarity_score(self) -> None:
         blob0 = make_object(Blob, data=b"")
         blob1 = make_object(Blob, data=b"ab\ncd\ncd\n")
         blob2 = make_object(Blob, data=b"ab\n")
@@ -679,7 +679,7 @@ class RenameDetectionTest(DiffTestCase):
         self.assertSimilar(0, blob2, blob3)
         self.assertSimilar(50, blob3, blob4)
 
-    def test_similarity_score_cache(self):
+    def test_similarity_score_cache(self) -> None:
         blob1 = make_object(Blob, data=b"ab\ncd\n")
         blob2 = make_object(Blob, data=b"ab\n")
 
@@ -687,7 +687,7 @@ class RenameDetectionTest(DiffTestCase):
         self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache))
         self.assertEqual({blob1.id, blob2.id}, set(block_cache))
 
-        def fail_chunks():
+        def fail_chunks() -> None:
             self.fail("Unexpected call to as_raw_chunks()")
 
         blob1.as_raw_chunks = blob2.as_raw_chunks = fail_chunks
@@ -695,7 +695,7 @@ class RenameDetectionTest(DiffTestCase):
         blob2.raw_length = lambda: 3
         self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache))
 
-    def test_tree_entry_sort(self):
+    def test_tree_entry_sort(self) -> None:
         sha = "abcd" * 10
         expected_entries = [
             TreeChange.add(TreeEntry(b"aaa", F, sha)),
@@ -726,7 +726,7 @@ class RenameDetectionTest(DiffTestCase):
             tree1.id, tree2.id, want_unchanged=want_unchanged
         )
 
-    def test_no_renames(self):
+    def test_no_renames(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\ne\nf\n")
         blob3 = make_object(Blob, data=b"a\nb\ng\nh\n")
@@ -737,7 +737,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_one_to_one(self):
+    def test_exact_rename_one_to_one(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
@@ -750,7 +750,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_split_different_type(self):
+    def test_exact_rename_split_different_type(self) -> None:
         blob = make_object(Blob, data=b"/foo")
         tree1 = self.commit_tree([(b"a", blob, 0o100644)])
         tree2 = self.commit_tree([(b"a", blob, 0o120000)])
@@ -762,7 +762,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_and_different_type(self):
+    def test_exact_rename_and_different_type(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         tree1 = self.commit_tree([(b"a", blob1)])
@@ -775,7 +775,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_one_to_many(self):
+    def test_exact_rename_one_to_many(self) -> None:
         blob = make_object(Blob, data=b"1")
         tree1 = self.commit_tree([(b"a", blob)])
         tree2 = self.commit_tree([(b"b", blob), (b"c", blob)])
@@ -787,7 +787,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_many_to_one(self):
+    def test_exact_rename_many_to_one(self) -> None:
         blob = make_object(Blob, data=b"1")
         tree1 = self.commit_tree([(b"a", blob), (b"b", blob)])
         tree2 = self.commit_tree([(b"c", blob)])
@@ -799,7 +799,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_many_to_many(self):
+    def test_exact_rename_many_to_many(self) -> None:
         blob = make_object(Blob, data=b"1")
         tree1 = self.commit_tree([(b"a", blob), (b"b", blob)])
         tree2 = self.commit_tree([(b"c", blob), (b"d", blob), (b"e", blob)])
@@ -812,7 +812,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_copy_modify(self):
+    def test_exact_copy_modify(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         tree1 = self.commit_tree([(b"a", blob1)])
@@ -825,7 +825,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_copy_change_mode(self):
+    def test_exact_copy_change_mode(self) -> None:
         blob = make_object(Blob, data=b"a\nb\nc\nd\n")
         tree1 = self.commit_tree([(b"a", blob)])
         tree2 = self.commit_tree([(b"a", blob, 0o100755), (b"b", blob)])
@@ -841,7 +841,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_rename_threshold(self):
+    def test_rename_threshold(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\n")
         blob2 = make_object(Blob, data=b"a\nb\nd\n")
         tree1 = self.commit_tree([(b"a", blob1)])
@@ -858,7 +858,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, rename_threshold=75),
         )
 
-    def test_content_rename_max_files(self):
+    def test_content_rename_max_files(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd")
         blob4 = make_object(Blob, data=b"a\nb\nc\ne\n")
         blob2 = make_object(Blob, data=b"e\nf\ng\nh\n")
@@ -882,7 +882,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, max_files=1),
         )
 
-    def test_content_rename_one_to_one(self):
+    def test_content_rename_one_to_one(self) -> None:
         b11 = make_object(Blob, data=b"a\nb\nc\nd\n")
         b12 = make_object(Blob, data=b"a\nb\nc\ne\n")
         b21 = make_object(Blob, data=b"e\nf\ng\n\nh")
@@ -897,7 +897,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_content_rename_one_to_one_ordering(self):
+    def test_content_rename_one_to_one_ordering(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\ne\nf\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\nd\ng\nh\n")
         # 6/10 match to blob1, 8/10 match to blob2
@@ -922,7 +922,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree3, tree4),
         )
 
-    def test_content_rename_one_to_many(self):
+    def test_content_rename_one_to_many(self) -> None:
         blob1 = make_object(Blob, data=b"aa\nb\nc\nd\ne\n")
         blob2 = make_object(Blob, data=b"ab\nb\nc\nd\ne\n")  # 8/11 match
         blob3 = make_object(Blob, data=b"aa\nb\nc\nd\nf\n")  # 9/11 match
@@ -936,7 +936,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_content_rename_many_to_one(self):
+    def test_content_rename_many_to_one(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         blob3 = make_object(Blob, data=b"a\nb\nc\nf\n")
@@ -950,7 +950,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_content_rename_many_to_many(self):
+    def test_content_rename_many_to_many(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         blob3 = make_object(Blob, data=b"a\nb\nc\nf\n")
@@ -968,7 +968,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_content_rename_with_more_deletions(self):
+    def test_content_rename_with_more_deletions(self) -> None:
         blob1 = make_object(Blob, data=b"")
         tree1 = self.commit_tree(
             [(b"a", blob1), (b"b", blob1), (b"c", blob1), (b"d", blob1)]
@@ -985,7 +985,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_content_rename_gitlink(self):
+    def test_content_rename_gitlink(self) -> None:
         blob1 = make_object(Blob, data=b"blob1")
         blob2 = make_object(Blob, data=b"blob2")
         link1 = b"1" * 40
@@ -1002,7 +1002,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2),
         )
 
-    def test_exact_rename_swap(self):
+    def test_exact_rename_swap(self) -> None:
         blob1 = make_object(Blob, data=b"1")
         blob2 = make_object(Blob, data=b"2")
         tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)])
@@ -1022,7 +1022,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, rewrite_threshold=50),
         )
 
-    def test_content_rename_swap(self):
+    def test_content_rename_swap(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"e\nf\ng\nh\n")
         blob3 = make_object(Blob, data=b"a\nb\nc\ne\n")
@@ -1037,7 +1037,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, rewrite_threshold=60),
         )
 
-    def test_rewrite_threshold(self):
+    def test_rewrite_threshold(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         blob3 = make_object(Blob, data=b"a\nb\nf\ng\n")
@@ -1061,7 +1061,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, rewrite_threshold=80),
         )
 
-    def test_find_copies_harder_exact(self):
+    def test_find_copies_harder_exact(self) -> None:
         blob = make_object(Blob, data=b"blob")
         tree1 = self.commit_tree([(b"a", blob)])
         tree2 = self.commit_tree([(b"a", blob), (b"b", blob)])
@@ -1074,7 +1074,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, find_copies_harder=True),
         )
 
-    def test_find_copies_harder_content(self):
+    def test_find_copies_harder_content(self) -> None:
         blob1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob2 = make_object(Blob, data=b"a\nb\nc\ne\n")
         tree1 = self.commit_tree([(b"a", blob1)])
@@ -1088,7 +1088,7 @@ class RenameDetectionTest(DiffTestCase):
             self.detect_renames(tree1, tree2, find_copies_harder=True),
         )
 
-    def test_find_copies_harder_with_rewrites(self):
+    def test_find_copies_harder_with_rewrites(self) -> None:
         blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob_a2 = make_object(Blob, data=b"f\ng\nh\ni\n")
         blob_b2 = make_object(Blob, data=b"a\nb\nc\ne\n")
@@ -1111,7 +1111,7 @@ class RenameDetectionTest(DiffTestCase):
             ),
         )
 
-    def test_reuse_detector(self):
+    def test_reuse_detector(self) -> None:
         blob = make_object(Blob, data=b"blob")
         tree1 = self.commit_tree([(b"a", blob)])
         tree2 = self.commit_tree([(b"b", blob)])
@@ -1120,7 +1120,7 @@ class RenameDetectionTest(DiffTestCase):
         self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id))
         self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id))
 
-    def test_want_unchanged(self):
+    def test_want_unchanged(self) -> None:
         blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n")
         blob_b = make_object(Blob, data=b"b")
         blob_c2 = make_object(Blob, data=b"a\nb\nc\ne\n")
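Taken together, the hunks above all make the same mechanical change: test methods and helpers that return nothing gain an explicit "-> None" return annotation. A minimal before/after sketch of that pattern (the class and test names are illustrative, and the exact ruff rule selection, flake8-annotations/ANN, is an assumption):

    import unittest

    class RenameDetectionExample(unittest.TestCase):
        # Before: no return annotation, which ruff's ANN rules can flag.
        # def test_no_renames(self):
        #     ...

        # After: the (trivial) return type is spelled out explicitly.
        def test_no_renames(self) -> None:
            self.assertEqual([], [])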

+ 15 - 15
tests/test_fastexport.py

@@ -32,7 +32,7 @@ from . import SkipTest, TestCase
 class GitFastExporterTests(TestCase):
     """Tests for the GitFastExporter tests."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.stream = BytesIO()
@@ -42,13 +42,13 @@ class GitFastExporterTests(TestCase):
             raise SkipTest("python-fastimport not available") from exc
         self.fastexporter = GitFastExporter(self.stream, self.store)
 
-    def test_emit_blob(self):
+    def test_emit_blob(self) -> None:
         b = Blob()
         b.data = b"fooBAR"
         self.fastexporter.emit_blob(b)
         self.assertEqual(b"blob\nmark :1\ndata 6\nfooBAR\n", self.stream.getvalue())
 
-    def test_emit_commit(self):
+    def test_emit_commit(self) -> None:
         b = Blob()
         b.data = b"FOO"
         t = Tree()
@@ -81,7 +81,7 @@ M 644 :1 foo
 class GitImportProcessorTests(TestCase):
     """Tests for the GitImportProcessor tests."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.repo = MemoryRepo()
         try:
@@ -90,7 +90,7 @@ class GitImportProcessorTests(TestCase):
             raise SkipTest("python-fastimport not available") from exc
         self.processor = GitImportProcessor(self.repo)
 
-    def test_reset_handler(self):
+    def test_reset_handler(self) -> None:
         from fastimport import commands
 
         [c1] = build_commit_graph(self.repo.object_store, [[1]])
@@ -99,7 +99,7 @@ class GitImportProcessorTests(TestCase):
         self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"])
         self.assertEqual(c1.id, self.processor.last_commit)
 
-    def test_reset_handler_marker(self):
+    def test_reset_handler_marker(self) -> None:
         from fastimport import commands
 
         [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
@@ -108,7 +108,7 @@ class GitImportProcessorTests(TestCase):
         self.processor.reset_handler(cmd)
         self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"])
 
-    def test_reset_handler_default(self):
+    def test_reset_handler_default(self) -> None:
         from fastimport import commands
 
         [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]])
@@ -116,7 +116,7 @@ class GitImportProcessorTests(TestCase):
         self.processor.reset_handler(cmd)
         self.assertEqual(ZERO_SHA, self.repo.get_refs()[b"refs/heads/foo"])
 
-    def test_commit_handler(self):
+    def test_commit_handler(self) -> None:
         from fastimport import commands
 
         cmd = commands.CommitCommand(
@@ -141,7 +141,7 @@ class GitImportProcessorTests(TestCase):
         self.assertEqual(3600, commit.author_timezone)
         self.assertEqual(commit, self.repo[b"refs/heads/foo"])
 
-    def test_commit_handler_markers(self):
+    def test_commit_handler_markers(self) -> None:
         from fastimport import commands
 
         [c1, c2, c3] = build_commit_graph(self.repo.object_store, [[1], [2], [3]])
@@ -164,7 +164,7 @@ class GitImportProcessorTests(TestCase):
         self.assertEqual(c2.id, commit.parents[1])
         self.assertEqual(c3.id, commit.parents[2])
 
-    def test_import_stream(self):
+    def test_import_stream(self) -> None:
         markers = self.processor.import_stream(
             BytesIO(
                 b"""blob
@@ -186,7 +186,7 @@ M 100644 :1 a
         self.assertIsInstance(self.repo[markers[b"1"]], Blob)
         self.assertIsInstance(self.repo[markers[b"2"]], Commit)
 
-    def test_file_add(self):
+    def test_file_add(self) -> None:
         from fastimport import commands
 
         cmd = commands.BlobCommand(b"23", b"data")
@@ -249,7 +249,7 @@ M 100644 :1 a
         self.processor.commit_handler(cmd)
         return self.repo[self.processor.last_commit]
 
-    def test_file_copy(self):
+    def test_file_copy(self) -> None:
         from fastimport import commands
 
         self.simple_commit()
@@ -270,7 +270,7 @@ M 100644 :1 a
             self.repo[commit.tree].items(),
         )
 
-    def test_file_move(self):
+    def test_file_move(self) -> None:
         from fastimport import commands
 
         self.simple_commit()
@@ -288,14 +288,14 @@ M 100644 :1 a
             self.repo[commit.tree].items(),
         )
 
-    def test_file_delete(self):
+    def test_file_delete(self) -> None:
         from fastimport import commands
 
         self.simple_commit()
         commit = self.make_file_commit([commands.FileDeleteCommand(b"path")])
         self.assertEqual([], self.repo[commit.tree].items())
 
-    def test_file_deleteall(self):
+    def test_file_deleteall(self) -> None:
         from fastimport import commands
 
         self.simple_commit()
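For context on the API these tests drive, a minimal sketch of using GitFastExporter directly, mirroring test_emit_blob above (requires python-fastimport; the import paths are the ones the tests appear to use):

    from io import BytesIO

    from dulwich.fastexport import GitFastExporter  # needs python-fastimport
    from dulwich.object_store import MemoryObjectStore
    from dulwich.objects import Blob

    store = MemoryObjectStore()
    stream = BytesIO()
    exporter = GitFastExporter(stream, store)

    blob = Blob()
    blob.data = b"fooBAR"
    exporter.emit_blob(blob)

    # The stream now holds a fast-import blob command:
    # b"blob\nmark :1\ndata 6\nfooBAR\n"
    print(stream.getvalue())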

+ 16 - 16
tests/test_file.py

@@ -30,26 +30,26 @@ from . import SkipTest, TestCase
 
 
 class FancyRenameTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._tempdir = tempfile.mkdtemp()
         self.foo = self.path("foo")
         self.bar = self.path("bar")
         self.create(self.foo, b"foo contents")
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         shutil.rmtree(self._tempdir)
         super().tearDown()
 
     def path(self, filename):
         return os.path.join(self._tempdir, filename)
 
-    def create(self, path, contents):
+    def create(self, path, contents) -> None:
         f = open(path, "wb")
         f.write(contents)
         f.close()
 
-    def test_no_dest_exists(self):
+    def test_no_dest_exists(self) -> None:
         self.assertFalse(os.path.exists(self.bar))
         _fancy_rename(self.foo, self.bar)
         self.assertFalse(os.path.exists(self.foo))
@@ -58,7 +58,7 @@ class FancyRenameTests(TestCase):
         self.assertEqual(b"foo contents", new_f.read())
         new_f.close()
 
-    def test_dest_exists(self):
+    def test_dest_exists(self) -> None:
         self.create(self.bar, b"bar contents")
         _fancy_rename(self.foo, self.bar)
         self.assertFalse(os.path.exists(self.foo))
@@ -67,7 +67,7 @@ class FancyRenameTests(TestCase):
         self.assertEqual(b"foo contents", new_f.read())
         new_f.close()
 
-    def test_dest_opened(self):
+    def test_dest_opened(self) -> None:
         if sys.platform != "win32":
             raise SkipTest("platform allows overwriting open files")
         self.create(self.bar, b"bar contents")
@@ -86,21 +86,21 @@ class FancyRenameTests(TestCase):
 
 
 class GitFileTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._tempdir = tempfile.mkdtemp()
         f = open(self.path("foo"), "wb")
         f.write(b"foo contents")
         f.close()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         shutil.rmtree(self._tempdir)
         super().tearDown()
 
     def path(self, filename):
         return os.path.join(self._tempdir, filename)
 
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         foo = self.path("foo")
         self.assertRaises(IOError, GitFile, foo, mode="r")
         self.assertRaises(IOError, GitFile, foo, mode="ab")
@@ -108,7 +108,7 @@ class GitFileTests(TestCase):
         self.assertRaises(IOError, GitFile, foo, mode="w+b")
         self.assertRaises(IOError, GitFile, foo, mode="a+bU")
 
-    def test_readonly(self):
+    def test_readonly(self) -> None:
         f = GitFile(self.path("foo"), "rb")
         self.assertIsInstance(f, io.IOBase)
         self.assertEqual(b"foo contents", f.read())
@@ -117,12 +117,12 @@ class GitFileTests(TestCase):
         self.assertEqual(b"contents", f.read())
         f.close()
 
-    def test_default_mode(self):
+    def test_default_mode(self) -> None:
         f = GitFile(self.path("foo"))
         self.assertEqual(b"foo contents", f.read())
         f.close()
 
-    def test_write(self):
+    def test_write(self) -> None:
         foo = self.path("foo")
         foo_lock = f"{foo}.lock"
 
@@ -146,7 +146,7 @@ class GitFileTests(TestCase):
         self.assertEqual(b"new contents", new_f.read())
         new_f.close()
 
-    def test_open_twice(self):
+    def test_open_twice(self) -> None:
         foo = self.path("foo")
         f1 = GitFile(foo, "wb")
         f1.write(b"new")
@@ -165,7 +165,7 @@ class GitFileTests(TestCase):
         self.assertEqual(b"new contents", f.read())
         f.close()
 
-    def test_abort(self):
+    def test_abort(self) -> None:
         foo = self.path("foo")
         foo_lock = f"{foo}.lock"
 
@@ -183,7 +183,7 @@ class GitFileTests(TestCase):
         self.assertEqual(new_orig_f.read(), b"foo contents")
         new_orig_f.close()
 
-    def test_abort_close(self):
+    def test_abort_close(self) -> None:
         foo = self.path("foo")
         f = GitFile(foo, "wb")
         f.abort()
@@ -199,7 +199,7 @@ class GitFileTests(TestCase):
         except OSError:
             self.fail()
 
-    def test_abort_close_removed(self):
+    def test_abort_close_removed(self) -> None:
         foo = self.path("foo")
         f = GitFile(foo, "wb")
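The GitFile tests above rely on lock-file semantics: opening for write creates "<path>.lock", close() renames the lock file into place, and abort() discards it and leaves the original untouched. A small sketch of that behaviour (paths are illustrative):

    import os
    import tempfile

    from dulwich.file import GitFile

    path = os.path.join(tempfile.mkdtemp(), "foo")
    with open(path, "wb") as f:
        f.write(b"foo contents")

    gf = GitFile(path, "wb")                 # writes go to path + ".lock"
    assert os.path.exists(path + ".lock")
    gf.write(b"new contents")
    gf.close()                               # atomically replaces path

    with open(path, "rb") as f:
        assert f.read() == b"new contents"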
 

+ 20 - 20
tests/test_grafts.py

@@ -35,22 +35,22 @@ def makesha(digit):
 
 
 class GraftParserTests(TestCase):
-    def assertParse(self, expected, graftpoints):
+    def assertParse(self, expected, graftpoints) -> None:
         self.assertEqual(expected, parse_graftpoints(iter(graftpoints)))
 
-    def test_no_grafts(self):
+    def test_no_grafts(self) -> None:
         self.assertParse({}, [])
 
-    def test_no_parents(self):
+    def test_no_parents(self) -> None:
         self.assertParse({makesha(0): []}, [makesha(0)])
 
-    def test_parents(self):
+    def test_parents(self) -> None:
         self.assertParse(
             {makesha(0): [makesha(1), makesha(2)]},
             [b" ".join([makesha(0), makesha(1), makesha(2)])],
         )
 
-    def test_multiple_hybrid(self):
+    def test_multiple_hybrid(self) -> None:
         self.assertParse(
             {
                 makesha(0): [],
@@ -66,22 +66,22 @@ class GraftParserTests(TestCase):
 
 
 class GraftSerializerTests(TestCase):
-    def assertSerialize(self, expected, graftpoints):
+    def assertSerialize(self, expected, graftpoints) -> None:
         self.assertEqual(sorted(expected), sorted(serialize_graftpoints(graftpoints)))
 
-    def test_no_grafts(self):
+    def test_no_grafts(self) -> None:
         self.assertSerialize(b"", {})
 
-    def test_no_parents(self):
+    def test_no_parents(self) -> None:
         self.assertSerialize(makesha(0), {makesha(0): []})
 
-    def test_parents(self):
+    def test_parents(self) -> None:
         self.assertSerialize(
             b" ".join([makesha(0), makesha(1), makesha(2)]),
             {makesha(0): [makesha(1), makesha(2)]},
         )
 
-    def test_multiple_hybrid(self):
+    def test_multiple_hybrid(self) -> None:
         self.assertSerialize(
             b"\n".join(
                 [
@@ -99,7 +99,7 @@ class GraftSerializerTests(TestCase):
 
 
 class GraftsInRepositoryBase:
-    def tearDown(self):
+    def tearDown(self) -> None:
         super().tearDown()
 
     def get_repo_with_grafts(self, grafts):
@@ -107,18 +107,18 @@ class GraftsInRepositoryBase:
         r._add_graftpoints(grafts)
         return r
 
-    def test_no_grafts(self):
+    def test_no_grafts(self) -> None:
         r = self.get_repo_with_grafts({})
 
         shas = [e.commit.id for e in r.get_walker()]
         self.assertEqual(shas, self._shas[::-1])
 
-    def test_no_parents_graft(self):
+    def test_no_parents_graft(self) -> None:
         r = self.get_repo_with_grafts({self._repo.head(): []})
 
         self.assertEqual([e.commit.id for e in r.get_walker()], [r.head()])
 
-    def test_existing_parent_graft(self):
+    def test_existing_parent_graft(self) -> None:
         r = self.get_repo_with_grafts({self._shas[-1]: [self._shas[0]]})
 
         self.assertEqual(
@@ -126,13 +126,13 @@ class GraftsInRepositoryBase:
             [self._shas[-1], self._shas[0]],
         )
 
-    def test_remove_graft(self):
+    def test_remove_graft(self) -> None:
         r = self.get_repo_with_grafts({self._repo.head(): []})
         r._remove_graftpoints([self._repo.head()])
 
         self.assertEqual([e.commit.id for e in r.get_walker()], self._shas[::-1])
 
-    def test_object_store_fail_invalid_parents(self):
+    def test_object_store_fail_invalid_parents(self) -> None:
         r = self._repo
 
         self.assertRaises(
@@ -141,7 +141,7 @@ class GraftsInRepositoryBase:
 
 
 class GraftsInRepoTests(GraftsInRepositoryBase, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._repo_dir = os.path.join(tempfile.mkdtemp())
         r = self._repo = Repo.init(self._repo_dir)
@@ -162,14 +162,14 @@ class GraftsInRepoTests(GraftsInRepositoryBase, TestCase):
         self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
         self._shas.append(r.do_commit(b"empty commit", **commit_kwargs))
 
-    def test_init_with_empty_info_grafts(self):
+    def test_init_with_empty_info_grafts(self) -> None:
         r = self._repo
         r._put_named_file(os.path.join("info", "grafts"), b"")
 
         r = Repo(self._repo_dir)
         self.assertEqual({}, r._graftpoints)
 
-    def test_init_with_info_grafts(self):
+    def test_init_with_info_grafts(self) -> None:
         r = self._repo
         r._put_named_file(
             os.path.join("info", "grafts"),
@@ -181,7 +181,7 @@ class GraftsInRepoTests(GraftsInRepositoryBase, TestCase):
 
 
 class GraftsInMemoryRepoTests(GraftsInRepositoryBase, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         r = self._repo = MemoryRepo()
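The graft tests above round-trip a simple line-oriented format: one grafted commit per line, the commit SHA followed by its replacement parent SHAs, separated by spaces. A hedged sketch (the dulwich.repo import location for these helpers is an assumption):

    from dulwich.repo import parse_graftpoints, serialize_graftpoints

    sha0, sha1, sha2 = b"0" * 40, b"1" * 40, b"2" * 40

    # b"<commit> <parent> <parent>" parses to {commit: [parents]}
    grafts = parse_graftpoints([sha0 + b" " + sha1 + b" " + sha2])
    assert grafts == {sha0: [sha1, sha2]}

    # serializing produces the same space-separated line
    assert serialize_graftpoints(grafts) == b" ".join([sha0, sha1, sha2])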
 

+ 12 - 12
tests/test_graph.py

@@ -32,7 +32,7 @@ class FindMergeBaseTests(TestCase):
         def lookup_parents(commit_id):
             return dag[commit_id]
 
-        def lookup_stamp(commit_id):
+        def lookup_stamp(commit_id) -> int:
             # any constant timestamp value here will work to force
             # this test to test the same behaviour as done previously
             return 100
@@ -41,7 +41,7 @@ class FindMergeBaseTests(TestCase):
         c2s = inputs[1:]
         return set(_find_lcas(lookup_parents, c1, c2s, lookup_stamp))
 
-    def test_multiple_lca(self):
+    def test_multiple_lca(self) -> None:
         # two lowest common ancestors
         graph = {
             "5": ["1", "2"],
@@ -53,7 +53,7 @@ class FindMergeBaseTests(TestCase):
         }
         self.assertEqual(self.run_test(graph, ["4", "5"]), {"1", "2"})
 
-    def test_no_common_ancestor(self):
+    def test_no_common_ancestor(self) -> None:
         # no common ancestor
         graph = {
             "4": ["2"],
@@ -64,7 +64,7 @@ class FindMergeBaseTests(TestCase):
         }
         self.assertEqual(self.run_test(graph, ["4", "3"]), set())
 
-    def test_ancestor(self):
+    def test_ancestor(self) -> None:
         # ancestor
         graph = {
             "G": ["D", "F"],
@@ -77,7 +77,7 @@ class FindMergeBaseTests(TestCase):
         }
         self.assertEqual(self.run_test(graph, ["D", "C"]), {"C"})
 
-    def test_direct_parent(self):
+    def test_direct_parent(self) -> None:
         # parent
         graph = {
             "G": ["D", "F"],
@@ -90,7 +90,7 @@ class FindMergeBaseTests(TestCase):
         }
         self.assertEqual(self.run_test(graph, ["G", "D"]), {"D"})
 
-    def test_another_crossover(self):
+    def test_another_crossover(self) -> None:
         # Another cross over
         graph = {
             "G": ["D", "F"],
@@ -103,7 +103,7 @@ class FindMergeBaseTests(TestCase):
         }
         self.assertEqual(self.run_test(graph, ["D", "F"]), {"E", "C"})
 
-    def test_three_way_merge_lca(self):
+    def test_three_way_merge_lca(self) -> None:
         # three way merge commit straight from git docs
         graph = {
             "C": ["C1"],
@@ -126,7 +126,7 @@ class FindMergeBaseTests(TestCase):
         # which actually means find the first LCA from either of B OR C with A
         self.assertEqual(self.run_test(graph, ["A", "B", "C"]), {"1"})
 
-    def test_octopus(self):
+    def test_octopus(self) -> None:
         # octopus algorithm test
         # test straight from git docs of A, B, and C
         # but this time use octopus to find lcas of A, B, and C simultaneously
@@ -151,7 +151,7 @@ class FindMergeBaseTests(TestCase):
         def lookup_parents(cid):
             return graph[cid]
 
-        def lookup_stamp(commit_id):
+        def lookup_stamp(commit_id) -> int:
             # any constant timestamp value here will work to force
             # this test to test the same behaviour as done previously
             return 100
@@ -168,7 +168,7 @@ class FindMergeBaseTests(TestCase):
 
 
 class CanFastForwardTests(TestCase):
-    def test_ff(self):
+    def test_ff(self) -> None:
         r = MemoryRepo()
         base = make_commit()
         c1 = make_commit(parents=[base.id])
@@ -179,7 +179,7 @@ class CanFastForwardTests(TestCase):
         self.assertTrue(can_fast_forward(r, c1.id, c2.id))
         self.assertFalse(can_fast_forward(r, c2.id, c1.id))
 
-    def test_diverged(self):
+    def test_diverged(self) -> None:
         r = MemoryRepo()
         base = make_commit()
         c1 = make_commit(parents=[base.id])
@@ -193,7 +193,7 @@ class CanFastForwardTests(TestCase):
 
 
 class WorkListTest(TestCase):
-    def test_WorkList(self):
+    def test_WorkList(self) -> None:
         # tuples of (timestamp, value) are stored in a Priority MaxQueue
         # repeated use of get should return them in maxheap timestamp
         # order: largest time value (most recent in time) first then earlier/older
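The merge-base tests in this file all follow one recipe: describe the history as a dict of commit -> parents, supply a parent lookup plus a (here constant) timestamp lookup, and ask _find_lcas for the lowest common ancestors. A condensed sketch of that recipe with a tiny made-up graph (_find_lcas is a private helper of dulwich.graph, so this usage is only a sketch):

    from dulwich.graph import _find_lcas

    # B and C are two branches that both descend from A
    graph = {
        "C": ["A"],
        "B": ["A"],
        "A": [],
    }

    def lookup_parents(commit_id):
        return graph[commit_id]

    def lookup_stamp(commit_id) -> int:
        return 100  # constant timestamp, as in the tests above

    lcas = set(_find_lcas(lookup_parents, "B", ["C"], lookup_stamp))
    assert lcas == {"A"}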

+ 2 - 2
tests/test_greenthreads.py

@@ -66,13 +66,13 @@ def init_store(store, count=1):
 
 @skipIf(not gevent_support, skipmsg)
 class TestGreenThreadsMissingObjectFinder(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.cmt_amount = 10
         self.objs = init_store(self.store, self.cmt_amount)
 
-    def test_finder(self):
+    def test_finder(self) -> None:
         wants = [sha.id for sha in self.objs if isinstance(sha, Commit)]
         finder = GreenThreadsMissingObjectFinder(self.store, (), wants)
         self.assertEqual(len(finder.sha_done), 0)
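For reference, the finder exercised here walks an object store using gevent greenlets; a hedged usage sketch (requires gevent, and build_commit_graph is dulwich's test utility for fabricating commits, so both imports are assumptions):

    from dulwich.greenthreads import GreenThreadsMissingObjectFinder  # needs gevent
    from dulwich.object_store import MemoryObjectStore
    from dulwich.tests.utils import build_commit_graph

    store = MemoryObjectStore()
    [c1] = build_commit_graph(store, [[1]])

    # haves=() and wants=[c1.id]: everything reachable from c1 is still missing
    finder = GreenThreadsMissingObjectFinder(store, (), [c1.id])
    assert len(finder.sha_done) == 0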

+ 4 - 4
tests/test_hooks.py

@@ -32,13 +32,13 @@ from . import TestCase
 
 
 class ShellHookTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         if os.name != "posix":
             self.skipTest("shell hook tests requires POSIX shell")
         self.assertTrue(os.path.exists("/bin/sh"))
 
-    def test_hook_pre_commit(self):
+    def test_hook_pre_commit(self) -> None:
         repo_dir = os.path.join(tempfile.mkdtemp())
         os.mkdir(os.path.join(repo_dir, "hooks"))
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -89,7 +89,7 @@ exit 0
 
         hook.execute()
 
-    def test_hook_commit_msg(self):
+    def test_hook_commit_msg(self) -> None:
         repo_dir = os.path.join(tempfile.mkdtemp())
         os.mkdir(os.path.join(repo_dir, "hooks"))
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -133,7 +133,7 @@ if [ "$(pwd)" = '"""
 
         hook.execute(b"empty commit")
 
-    def test_hook_post_commit(self):
+    def test_hook_post_commit(self) -> None:
         (fd, path) = tempfile.mkstemp()
         os.close(fd)
 

+ 15 - 15
tests/test_ignore.py

@@ -89,7 +89,7 @@ TRANSLATE_TESTS = [
 
 
 class TranslateTests(TestCase):
-    def test_translate(self):
+    def test_translate(self) -> None:
         for pattern, regex in TRANSLATE_TESTS:
             if re.escape(b"/") == b"/":
                 # Slash is no longer escaped in Python3.7, so undo the escaping
@@ -103,7 +103,7 @@ class TranslateTests(TestCase):
 
 
 class ReadIgnorePatterns(TestCase):
-    def test_read_file(self):
+    def test_read_file(self) -> None:
         f = BytesIO(
             b"""
 # a comment
@@ -128,14 +128,14 @@ with escaped trailing whitespace\\
 
 
 class MatchPatternTests(TestCase):
-    def test_matches(self):
+    def test_matches(self) -> None:
         for path, pattern in POSITIVE_MATCH_TESTS:
             self.assertTrue(
                 match_pattern(path, pattern),
                 f"path: {path!r}, pattern: {pattern!r}",
             )
 
-    def test_no_matches(self):
+    def test_no_matches(self) -> None:
         for path, pattern in NEGATIVE_MATCH_TESTS:
             self.assertFalse(
                 match_pattern(path, pattern),
@@ -144,14 +144,14 @@ class MatchPatternTests(TestCase):
 
 
 class IgnoreFilterTests(TestCase):
-    def test_included(self):
+    def test_included(self) -> None:
         filter = IgnoreFilter([b"a.c", b"b.c"])
         self.assertTrue(filter.is_ignored(b"a.c"))
         self.assertIs(None, filter.is_ignored(b"c.c"))
         self.assertEqual([Pattern(b"a.c")], list(filter.find_matching(b"a.c")))
         self.assertEqual([], list(filter.find_matching(b"c.c")))
 
-    def test_included_ignorecase(self):
+    def test_included_ignorecase(self) -> None:
         filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=False)
         self.assertTrue(filter.is_ignored(b"a.c"))
         self.assertFalse(filter.is_ignored(b"A.c"))
@@ -160,14 +160,14 @@ class IgnoreFilterTests(TestCase):
         self.assertTrue(filter.is_ignored(b"A.c"))
         self.assertTrue(filter.is_ignored(b"A.C"))
 
-    def test_excluded(self):
+    def test_excluded(self) -> None:
         filter = IgnoreFilter([b"a.c", b"b.c", b"!c.c"])
         self.assertFalse(filter.is_ignored(b"c.c"))
         self.assertIs(None, filter.is_ignored(b"d.c"))
         self.assertEqual([Pattern(b"!c.c")], list(filter.find_matching(b"c.c")))
         self.assertEqual([], list(filter.find_matching(b"d.c")))
 
-    def test_include_exclude_include(self):
+    def test_include_exclude_include(self) -> None:
         filter = IgnoreFilter([b"a.c", b"!a.c", b"a.c"])
         self.assertTrue(filter.is_ignored(b"a.c"))
         self.assertEqual(
@@ -175,7 +175,7 @@ class IgnoreFilterTests(TestCase):
             list(filter.find_matching(b"a.c")),
         )
 
-    def test_manpage(self):
+    def test_manpage(self) -> None:
         # A specific example from the gitignore manpage
         filter = IgnoreFilter([b"/*", b"!/foo", b"/foo/*", b"!/foo/bar"])
         self.assertTrue(filter.is_ignored(b"a.c"))
@@ -185,7 +185,7 @@ class IgnoreFilterTests(TestCase):
         self.assertFalse(filter.is_ignored(b"foo/bar/"))
         self.assertFalse(filter.is_ignored(b"foo/bar/bloe"))
 
-    def test_regex_special(self):
+    def test_regex_special(self) -> None:
         # See https://github.com/dulwich/dulwich/issues/930#issuecomment-1026166429
         filter = IgnoreFilter([b"/foo\\[bar\\]", b"/foo"])
         self.assertTrue(filter.is_ignored("foo"))
@@ -193,7 +193,7 @@ class IgnoreFilterTests(TestCase):
 
 
 class IgnoreFilterStackTests(TestCase):
-    def test_stack_first(self):
+    def test_stack_first(self) -> None:
         filter1 = IgnoreFilter([b"[a].c", b"[b].c", b"![d].c"])
         filter2 = IgnoreFilter([b"[a].c", b"![b],c", b"[c].c", b"[d].c"])
         stack = IgnoreFilterStack([filter1, filter2])
@@ -205,7 +205,7 @@ class IgnoreFilterStackTests(TestCase):
 
 
 class IgnoreFilterManagerTests(TestCase):
-    def test_load_ignore(self):
+    def test_load_ignore(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init(tmp_dir)
@@ -232,7 +232,7 @@ class IgnoreFilterManagerTests(TestCase):
         self.assertTrue(m.is_ignored("dir3/"))
         self.assertTrue(m.is_ignored("dir3/bla"))
 
-    def test_nested_gitignores(self):
+    def test_nested_gitignores(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init(tmp_dir)
@@ -251,7 +251,7 @@ class IgnoreFilterManagerTests(TestCase):
         m = IgnoreFilterManager.from_repo(repo)
         self.assertTrue(m.is_ignored("foo/bar"))
 
-    def test_load_ignore_ignorecase(self):
+    def test_load_ignore_ignorecase(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init(tmp_dir)
@@ -265,7 +265,7 @@ class IgnoreFilterManagerTests(TestCase):
         self.assertTrue(m.is_ignored(os.path.join("dir", "blie")))
         self.assertTrue(m.is_ignored(os.path.join("DIR", "blie")))
 
-    def test_ignored_contents(self):
+    def test_ignored_contents(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init(tmp_dir)
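The ignore tests above build on a small matching API: IgnoreFilter holds a list of gitignore-style byte patterns and answers per-path queries, while IgnoreFilterManager stacks the .gitignore files of a repository. A brief sketch of the filter half, mirroring test_included and test_excluded:

    from dulwich.ignore import IgnoreFilter, Pattern

    ignore = IgnoreFilter([b"a.c", b"b.c", b"!c.c"])

    assert ignore.is_ignored(b"a.c")            # matched by a.c
    assert not ignore.is_ignored(b"c.c")        # negated by !c.c
    assert ignore.is_ignored(b"d.c") is None    # no pattern matched at all

    # find_matching yields the Pattern objects that applied
    assert list(ignore.find_matching(b"a.c")) == [Pattern(b"a.c")]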

+ 47 - 47
tests/test_index.py

@@ -54,7 +54,7 @@ from dulwich.repo import Repo
 from . import TestCase, skipIf
 
 
-def can_symlink():
+def can_symlink() -> bool:
     """Return whether running process can create symlinks."""
     if sys.platform != "win32":
         # Platforms other than Windows should allow symlinks without issues.
@@ -77,19 +77,19 @@ class IndexTestCase(TestCase):
 
 
 class SimpleIndexTestCase(IndexTestCase):
-    def test_len(self):
+    def test_len(self) -> None:
         self.assertEqual(1, len(self.get_simple_index("index")))
 
-    def test_iter(self):
+    def test_iter(self) -> None:
         self.assertEqual([b"bla"], list(self.get_simple_index("index")))
 
-    def test_iterobjects(self):
+    def test_iterobjects(self) -> None:
         self.assertEqual(
             [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)],
             list(self.get_simple_index("index").iterobjects()),
         )
 
-    def test_getitem(self):
+    def test_getitem(self) -> None:
         self.assertEqual(
             IndexEntry(
                 (1230680220, 0),
@@ -105,12 +105,12 @@ class SimpleIndexTestCase(IndexTestCase):
             self.get_simple_index("index")[b"bla"],
         )
 
-    def test_empty(self):
+    def test_empty(self) -> None:
         i = self.get_simple_index("notanindex")
         self.assertEqual(0, len(i))
         self.assertFalse(os.path.exists(i._filename))
 
-    def test_against_empty_tree(self):
+    def test_against_empty_tree(self) -> None:
         i = self.get_simple_index("index")
         changes = list(i.changes_from_tree(MemoryObjectStore(), None))
         self.assertEqual(1, len(changes))
@@ -120,15 +120,15 @@ class SimpleIndexTestCase(IndexTestCase):
 
 
 class SimpleIndexWriterTestCase(IndexTestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         IndexTestCase.setUp(self)
         self.tempdir = tempfile.mkdtemp()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         IndexTestCase.tearDown(self)
         shutil.rmtree(self.tempdir)
 
-    def test_simple_write(self):
+    def test_simple_write(self) -> None:
         entries = [
             (
                 SerializedIndexEntry(
@@ -156,15 +156,15 @@ class SimpleIndexWriterTestCase(IndexTestCase):
 
 
 class ReadIndexDictTests(IndexTestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         IndexTestCase.setUp(self)
         self.tempdir = tempfile.mkdtemp()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         IndexTestCase.tearDown(self)
         shutil.rmtree(self.tempdir)
 
-    def test_simple_write(self):
+    def test_simple_write(self) -> None:
         entries = {
             b"barbla": IndexEntry(
                 (1230680220, 0),
@@ -187,11 +187,11 @@ class ReadIndexDictTests(IndexTestCase):
 
 
 class CommitTreeTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
 
-    def test_single_blob(self):
+    def test_single_blob(self) -> None:
         blob = Blob()
         blob.data = b"foo"
         self.store.add_object(blob)
@@ -201,7 +201,7 @@ class CommitTreeTests(TestCase):
         self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"])
         self.assertEqual({rootid, blob.id}, set(self.store._data.keys()))
 
-    def test_nested(self):
+    def test_nested(self) -> None:
         blob = Blob()
         blob.data = b"foo"
         self.store.add_object(blob)
@@ -216,49 +216,49 @@ class CommitTreeTests(TestCase):
 
 
 class CleanupModeTests(TestCase):
-    def assertModeEqual(self, expected, got):
+    def assertModeEqual(self, expected, got) -> None:
         self.assertEqual(expected, got, f"{expected:o} != {got:o}")
 
-    def test_file(self):
+    def test_file(self) -> None:
         self.assertModeEqual(0o100644, cleanup_mode(0o100000))
 
-    def test_executable(self):
+    def test_executable(self) -> None:
         self.assertModeEqual(0o100755, cleanup_mode(0o100711))
         self.assertModeEqual(0o100755, cleanup_mode(0o100700))
 
-    def test_symlink(self):
+    def test_symlink(self) -> None:
         self.assertModeEqual(0o120000, cleanup_mode(0o120711))
 
-    def test_dir(self):
+    def test_dir(self) -> None:
         self.assertModeEqual(0o040000, cleanup_mode(0o40531))
 
-    def test_submodule(self):
+    def test_submodule(self) -> None:
         self.assertModeEqual(0o160000, cleanup_mode(0o160744))
 
 
 class WriteCacheTimeTests(TestCase):
-    def test_write_string(self):
+    def test_write_string(self) -> None:
         f = BytesIO()
         self.assertRaises(TypeError, write_cache_time, f, "foo")
 
-    def test_write_int(self):
+    def test_write_int(self) -> None:
         f = BytesIO()
         write_cache_time(f, 434343)
         self.assertEqual(struct.pack(">LL", 434343, 0), f.getvalue())
 
-    def test_write_tuple(self):
+    def test_write_tuple(self) -> None:
         f = BytesIO()
         write_cache_time(f, (434343, 21))
         self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
 
-    def test_write_float(self):
+    def test_write_float(self) -> None:
         f = BytesIO()
         write_cache_time(f, 434343.000000021)
         self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue())
 
 
 class IndexEntryFromStatTests(TestCase):
-    def test_simple(self):
+    def test_simple(self) -> None:
         st = os.stat_result(
             (
                 16877,
@@ -289,7 +289,7 @@ class IndexEntryFromStatTests(TestCase):
             ),
         )
 
-    def test_override_mode(self):
+    def test_override_mode(self) -> None:
         st = os.stat_result(
             (
                 stat.S_IFREG + 0o644,
@@ -322,19 +322,19 @@ class IndexEntryFromStatTests(TestCase):
 
 
 class BuildIndexTests(TestCase):
-    def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha):
+    def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha) -> None:
         self.assertEqual(index_entry.mode, mode)  # mode
         self.assertEqual(index_entry.size, filesize)  # filesize
         self.assertEqual(index_entry.sha, sha)  # sha
 
-    def assertFileContents(self, path, contents, symlink=False):
+    def assertFileContents(self, path, contents, symlink=False) -> None:
         if symlink:
             self.assertEqual(os.readlink(path), contents)
         else:
             with open(path, "rb") as f:
                 self.assertEqual(f.read(), contents)
 
-    def test_empty(self):
+    def test_empty(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -352,7 +352,7 @@ class BuildIndexTests(TestCase):
             # Verify no files
             self.assertEqual([".git"], os.listdir(repo.path))
 
-    def test_git_dir(self):
+    def test_git_dir(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -386,7 +386,7 @@ class BuildIndexTests(TestCase):
             )
             self.assertFileContents(epath, b"d")
 
-    def test_nonempty(self):
+    def test_nonempty(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -441,7 +441,7 @@ class BuildIndexTests(TestCase):
             self.assertEqual(["d"], sorted(os.listdir(os.path.join(repo.path, "c"))))
 
     @skipIf(not getattr(os, "sync", None), "Requires sync support")
-    def test_norewrite(self):
+    def test_norewrite(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -483,7 +483,7 @@ class BuildIndexTests(TestCase):
                 self.assertEqual(b"file a", fh.read())
 
     @skipIf(not can_symlink(), "Requires symlink support")
-    def test_symlink(self):
+    def test_symlink(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -515,7 +515,7 @@ class BuildIndexTests(TestCase):
             )
             self.assertFileContents(epath, "d", symlink=True)
 
-    def test_no_decode_encode(self):
+    def test_no_decode_encode(self) -> None:
         repo_dir = tempfile.mkdtemp()
         repo_dir_bytes = os.fsencode(repo_dir)
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -560,7 +560,7 @@ class BuildIndexTests(TestCase):
 
             self.assertTrue(os.path.exists(utf8_path))
 
-    def test_git_submodule(self):
+    def test_git_submodule(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -600,7 +600,7 @@ class BuildIndexTests(TestCase):
             self.assertEqual(index[b"c"].mode, S_IFGITLINK)  # mode
             self.assertEqual(index[b"c"].sha, c.id)  # sha
 
-    def test_git_submodule_exists(self):
+    def test_git_submodule_exists(self) -> None:
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
@@ -643,7 +643,7 @@ class BuildIndexTests(TestCase):
 
 
 class GetUnstagedChangesTests(TestCase):
-    def test_get_unstaged_changes(self):
+    def test_get_unstaged_changes(self) -> None:
         """Unit test for get_unstaged_changes."""
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -674,7 +674,7 @@ class GetUnstagedChangesTests(TestCase):
 
             self.assertEqual(list(changes), [b"foo1"])
 
-    def test_get_unstaged_deleted_changes(self):
+    def test_get_unstaged_deleted_changes(self) -> None:
         """Unit test for get_unstaged_changes."""
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -697,7 +697,7 @@ class GetUnstagedChangesTests(TestCase):
 
             self.assertEqual(list(changes), [b"foo1"])
 
-    def test_get_unstaged_changes_removed_replaced_by_directory(self):
+    def test_get_unstaged_changes_removed_replaced_by_directory(self) -> None:
         """Unit test for get_unstaged_changes."""
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -722,7 +722,7 @@ class GetUnstagedChangesTests(TestCase):
             self.assertEqual(list(changes), [b"foo1"])
 
     @skipIf(not can_symlink(), "Requires symlink support")
-    def test_get_unstaged_changes_removed_replaced_by_link(self):
+    def test_get_unstaged_changes_removed_replaced_by_link(self) -> None:
         """Unit test for get_unstaged_changes."""
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -748,7 +748,7 @@ class GetUnstagedChangesTests(TestCase):
 
 
 class TestValidatePathElement(TestCase):
-    def test_default(self):
+    def test_default(self) -> None:
         self.assertTrue(validate_path_element_default(b"bla"))
         self.assertTrue(validate_path_element_default(b".bla"))
         self.assertFalse(validate_path_element_default(b".git"))
@@ -756,7 +756,7 @@ class TestValidatePathElement(TestCase):
         self.assertFalse(validate_path_element_default(b".."))
         self.assertTrue(validate_path_element_default(b"git~1"))
 
-    def test_ntfs(self):
+    def test_ntfs(self) -> None:
         self.assertTrue(validate_path_element_ntfs(b"bla"))
         self.assertTrue(validate_path_element_ntfs(b".bla"))
         self.assertFalse(validate_path_element_ntfs(b".git"))
@@ -766,7 +766,7 @@ class TestValidatePathElement(TestCase):
 
 
 class TestTreeFSPathConversion(TestCase):
-    def test_tree_to_fs_path(self):
+    def test_tree_to_fs_path(self) -> None:
         tree_path = "délwíçh/foo".encode()
         fs_path = _tree_to_fs_path(b"/prefix/path", tree_path)
         self.assertEqual(
@@ -774,12 +774,12 @@ class TestTreeFSPathConversion(TestCase):
             os.fsencode(os.path.join("/prefix/path", "délwíçh", "foo")),
         )
 
-    def test_fs_to_tree_path_str(self):
+    def test_fs_to_tree_path_str(self) -> None:
         fs_path = os.path.join(os.path.join("délwíçh", "foo"))
         tree_path = _fs_to_tree_path(fs_path)
         self.assertEqual(tree_path, "délwíçh/foo".encode())
 
-    def test_fs_to_tree_path_bytes(self):
+    def test_fs_to_tree_path_bytes(self) -> None:
         fs_path = os.path.join(os.fsencode(os.path.join("délwíçh", "foo")))
         tree_path = _fs_to_tree_path(fs_path)
         self.assertEqual(tree_path, "délwíçh/foo".encode())
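Two of the index helpers exercised earlier in this file are easy to show in isolation: cleanup_mode normalizes raw filesystem modes to the canonical git tree modes, and write_cache_time serializes an index timestamp as two big-endian 32-bit words. A short sketch with values copied from the tests:

    import struct
    from io import BytesIO

    from dulwich.index import cleanup_mode, write_cache_time

    # executable bit patterns collapse to 0o100755, plain files to 0o100644
    assert cleanup_mode(0o100711) == 0o100755
    assert cleanup_mode(0o100000) == 0o100644
    assert cleanup_mode(0o120711) == 0o120000   # symlink

    # timestamps may be int, float or (seconds, nanoseconds) tuples
    f = BytesIO()
    write_cache_time(f, (434343, 21))
    assert f.getvalue() == struct.pack(">LL", 434343, 21)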

+ 3 - 3
tests/test_lfs.py

@@ -29,16 +29,16 @@ from . import TestCase
 
 
 class LFSTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.test_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.test_dir)
         self.lfs = LFSStore.create(self.test_dir)
 
-    def test_create(self):
+    def test_create(self) -> None:
         sha = self.lfs.write_object([b"a", b"b"])
         with self.lfs.open_object(sha) as f:
             self.assertEqual(b"ab", f.read())
 
-    def test_missing(self):
+    def test_missing(self) -> None:
         self.assertRaises(KeyError, self.lfs.open_object, "abcdeabcdeabcdeabcde")
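The LFS tests above cover the whole surface shown here: create a store under a directory, write content as chunks, and read it back by SHA. A compact sketch mirroring test_create and test_missing:

    import tempfile

    from dulwich.lfs import LFSStore

    lfs = LFSStore.create(tempfile.mkdtemp())

    sha = lfs.write_object([b"a", b"b"])      # content is passed as chunks
    with lfs.open_object(sha) as f:
        assert f.read() == b"ab"

    # unknown objects raise KeyError
    try:
        lfs.open_object("abcdeabcdeabcdeabcde")
    except KeyError:
        pass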

+ 18 - 18
tests/test_line_ending.py

@@ -35,59 +35,59 @@ from . import TestCase
 class LineEndingConversion(TestCase):
     """Test the line ending conversion functions in various cases."""
 
-    def test_convert_crlf_to_lf_no_op(self):
+    def test_convert_crlf_to_lf_no_op(self) -> None:
         self.assertEqual(convert_crlf_to_lf(b"foobar"), b"foobar")
 
-    def test_convert_crlf_to_lf(self):
+    def test_convert_crlf_to_lf(self) -> None:
         self.assertEqual(convert_crlf_to_lf(b"line1\r\nline2"), b"line1\nline2")
 
-    def test_convert_crlf_to_lf_mixed(self):
+    def test_convert_crlf_to_lf_mixed(self) -> None:
         self.assertEqual(convert_crlf_to_lf(b"line1\r\n\nline2"), b"line1\n\nline2")
 
-    def test_convert_lf_to_crlf_no_op(self):
+    def test_convert_lf_to_crlf_no_op(self) -> None:
         self.assertEqual(convert_lf_to_crlf(b"foobar"), b"foobar")
 
-    def test_convert_lf_to_crlf(self):
+    def test_convert_lf_to_crlf(self) -> None:
         self.assertEqual(convert_lf_to_crlf(b"line1\nline2"), b"line1\r\nline2")
 
-    def test_convert_lf_to_crlf_mixed(self):
+    def test_convert_lf_to_crlf_mixed(self) -> None:
         self.assertEqual(convert_lf_to_crlf(b"line1\r\n\nline2"), b"line1\r\n\r\nline2")
 
 
 class GetLineEndingAutocrlfFilters(TestCase):
-    def test_get_checkin_filter_autocrlf_default(self):
+    def test_get_checkin_filter_autocrlf_default(self) -> None:
         checkin_filter = get_checkin_filter_autocrlf(b"false")
 
         self.assertEqual(checkin_filter, None)
 
-    def test_get_checkin_filter_autocrlf_true(self):
+    def test_get_checkin_filter_autocrlf_true(self) -> None:
         checkin_filter = get_checkin_filter_autocrlf(b"true")
 
         self.assertEqual(checkin_filter, convert_crlf_to_lf)
 
-    def test_get_checkin_filter_autocrlf_input(self):
+    def test_get_checkin_filter_autocrlf_input(self) -> None:
         checkin_filter = get_checkin_filter_autocrlf(b"input")
 
         self.assertEqual(checkin_filter, convert_crlf_to_lf)
 
-    def test_get_checkout_filter_autocrlf_default(self):
+    def test_get_checkout_filter_autocrlf_default(self) -> None:
         checkout_filter = get_checkout_filter_autocrlf(b"false")
 
         self.assertEqual(checkout_filter, None)
 
-    def test_get_checkout_filter_autocrlf_true(self):
+    def test_get_checkout_filter_autocrlf_true(self) -> None:
         checkout_filter = get_checkout_filter_autocrlf(b"true")
 
         self.assertEqual(checkout_filter, convert_lf_to_crlf)
 
-    def test_get_checkout_filter_autocrlf_input(self):
+    def test_get_checkout_filter_autocrlf_input(self) -> None:
         checkout_filter = get_checkout_filter_autocrlf(b"input")
 
         self.assertEqual(checkout_filter, None)
 
 
 class NormalizeBlobTestCase(TestCase):
-    def test_normalize_to_lf_no_op(self):
+    def test_normalize_to_lf_no_op(self) -> None:
         base_content = b"line1\nline2"
         base_sha = "f8be7bb828880727816015d21abcbc37d033f233"
 
@@ -104,7 +104,7 @@ class NormalizeBlobTestCase(TestCase):
         self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
         self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
 
-    def test_normalize_to_lf(self):
+    def test_normalize_to_lf(self) -> None:
         base_content = b"line1\r\nline2"
         base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
 
@@ -124,7 +124,7 @@ class NormalizeBlobTestCase(TestCase):
         self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content])
         self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha)
 
-    def test_normalize_to_lf_binary(self):
+    def test_normalize_to_lf_binary(self) -> None:
         base_content = b"line1\r\nline2\0"
         base_sha = "b44504193b765f7cd79673812de8afb55b372ab2"
 
@@ -141,7 +141,7 @@ class NormalizeBlobTestCase(TestCase):
         self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
         self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
 
-    def test_normalize_to_crlf_no_op(self):
+    def test_normalize_to_crlf_no_op(self) -> None:
         base_content = b"line1\r\nline2"
         base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96"
 
@@ -158,7 +158,7 @@ class NormalizeBlobTestCase(TestCase):
         self.assertEqual(filtered_blob.as_raw_chunks(), [base_content])
         self.assertEqual(filtered_blob.sha().hexdigest(), base_sha)
 
-    def test_normalize_to_crlf(self):
+    def test_normalize_to_crlf(self) -> None:
         base_content = b"line1\nline2"
         base_sha = "f8be7bb828880727816015d21abcbc37d033f233"
 
@@ -178,7 +178,7 @@ class NormalizeBlobTestCase(TestCase):
         self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content])
         self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha)
 
-    def test_normalize_to_crlf_binary(self):
+    def test_normalize_to_crlf_binary(self) -> None:
         base_content = b"line1\r\nline2\0"
         base_sha = "b44504193b765f7cd79673812de8afb55b372ab2"
 

+ 34 - 34
tests/test_lru_cache.py

@@ -27,7 +27,7 @@ from . import TestCase
 class TestLRUCache(TestCase):
     """Test that LRU cache properly keeps track of entries."""
 
-    def test_cache_size(self):
+    def test_cache_size(self) -> None:
         cache = lru_cache.LRUCache(max_cache=10)
         self.assertEqual(10, cache.cache_size())
 
@@ -37,7 +37,7 @@ class TestLRUCache(TestCase):
         cache.resize(512)
         self.assertEqual(512, cache.cache_size())
 
-    def test_missing(self):
+    def test_missing(self) -> None:
         cache = lru_cache.LRUCache(max_cache=10)
 
         self.assertNotIn("foo", cache)
@@ -48,7 +48,7 @@ class TestLRUCache(TestCase):
         self.assertIn("foo", cache)
         self.assertNotIn("bar", cache)
 
-    def test_map_None(self):
+    def test_map_None(self) -> None:
         # Make sure that we can properly map None as a key.
         cache = lru_cache.LRUCache(max_cache=10)
         self.assertNotIn(None, cache)
@@ -65,11 +65,11 @@ class TestLRUCache(TestCase):
         cache[None]
         self.assertEqual([None, 1], [n.key for n in cache._walk_lru()])
 
-    def test_add__null_key(self):
+    def test_add__null_key(self) -> None:
         cache = lru_cache.LRUCache(max_cache=10)
         self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
 
-    def test_overflow(self):
+    def test_overflow(self) -> None:
         """Adding extra entries will pop out old ones."""
         cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)
 
@@ -82,7 +82,7 @@ class TestLRUCache(TestCase):
 
         self.assertEqual("biz", cache["baz"])
 
-    def test_by_usage(self):
+    def test_by_usage(self) -> None:
         """Accessing entries bumps them up in priority."""
         cache = lru_cache.LRUCache(max_cache=2)
 
@@ -96,11 +96,11 @@ class TestLRUCache(TestCase):
 
         self.assertNotIn("foo", cache)
 
-    def test_cleanup(self):
+    def test_cleanup(self) -> None:
         """Test that we can use a cleanup function."""
         cleanup_called = []
 
-        def cleanup_func(key, val):
+        def cleanup_func(key, val) -> None:
             cleanup_called.append((key, val))
 
         cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2)
@@ -116,11 +116,11 @@ class TestLRUCache(TestCase):
         cache.clear()
         self.assertEqual([("baz", "1"), ("biz", "3"), ("foo", "2")], cleanup_called)
 
-    def test_cleanup_on_replace(self):
+    def test_cleanup_on_replace(self) -> None:
         """Replacing an object should cleanup the old value."""
         cleanup_called = []
 
-        def cleanup_func(key, val):
+        def cleanup_func(key, val) -> None:
             cleanup_called.append((key, val))
 
         cache = lru_cache.LRUCache(max_cache=2)
@@ -135,7 +135,7 @@ class TestLRUCache(TestCase):
         cache[2] = 26
         self.assertEqual([(2, 20), (2, 25)], cleanup_called)
 
-    def test_len(self):
+    def test_len(self) -> None:
         cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)
 
         cache[1] = 10
@@ -167,7 +167,7 @@ class TestLRUCache(TestCase):
             [n.key for n in cache._walk_lru()],
         )
 
-    def test_cleanup_shrinks_to_after_clean_count(self):
+    def test_cleanup_shrinks_to_after_clean_count(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)
 
         cache.add(1, 10)
@@ -182,16 +182,16 @@ class TestLRUCache(TestCase):
         cache.add(6, 40)
         self.assertEqual(3, len(cache))
 
-    def test_after_cleanup_larger_than_max(self):
+    def test_after_cleanup_larger_than_max(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
         self.assertEqual(5, cache._after_cleanup_count)
 
-    def test_after_cleanup_none(self):
+    def test_after_cleanup_none(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
         # By default _after_cleanup_count is 80% of max_cache
         self.assertEqual(4, cache._after_cleanup_count)
 
-    def test_cleanup_2(self):
+    def test_cleanup_2(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)
 
         # Add these in order
@@ -206,7 +206,7 @@ class TestLRUCache(TestCase):
         cache.cleanup()
         self.assertEqual(2, len(cache))
 
-    def test_preserve_last_access_order(self):
+    def test_preserve_last_access_order(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5)
 
         # Add these in order
@@ -225,7 +225,7 @@ class TestLRUCache(TestCase):
         cache[2]
         self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()])
 
-    def test_get(self):
+    def test_get(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5)
 
         cache.add(1, 10)
@@ -238,7 +238,7 @@ class TestLRUCache(TestCase):
         self.assertEqual(10, cache.get(1))
         self.assertEqual([1, 2], [n.key for n in cache._walk_lru()])
 
-    def test_keys(self):
+    def test_keys(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)
 
         cache[1] = 2
@@ -250,7 +250,7 @@ class TestLRUCache(TestCase):
         cache[6] = 7
         self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
 
-    def test_resize_smaller(self):
+    def test_resize_smaller(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
         cache[1] = 2
         cache[2] = 3
@@ -269,7 +269,7 @@ class TestLRUCache(TestCase):
         cache[8] = 9
         self.assertEqual([7, 8], sorted(cache.keys()))
 
-    def test_resize_larger(self):
+    def test_resize_larger(self) -> None:
         cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
         cache[1] = 2
         cache[2] = 3
@@ -291,23 +291,23 @@ class TestLRUCache(TestCase):
 
 
 class TestLRUSizeCache(TestCase):
-    def test_basic_init(self):
+    def test_basic_init(self) -> None:
         cache = lru_cache.LRUSizeCache()
         self.assertEqual(2048, cache._max_cache)
         self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size)
         self.assertEqual(0, cache._value_size)
 
-    def test_add__null_key(self):
+    def test_add__null_key(self) -> None:
         cache = lru_cache.LRUSizeCache()
         self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
 
-    def test_add_tracks_size(self):
+    def test_add_tracks_size(self) -> None:
         cache = lru_cache.LRUSizeCache()
         self.assertEqual(0, cache._value_size)
         cache.add("my key", "my value text")
         self.assertEqual(13, cache._value_size)
 
-    def test_remove_tracks_size(self):
+    def test_remove_tracks_size(self) -> None:
         cache = lru_cache.LRUSizeCache()
         self.assertEqual(0, cache._value_size)
         cache.add("my key", "my value text")
@@ -316,7 +316,7 @@ class TestLRUSizeCache(TestCase):
         cache._remove_node(node)
         self.assertEqual(0, cache._value_size)
 
-    def test_no_add_over_size(self):
+    def test_no_add_over_size(self) -> None:
         """Adding a large value may not be cached at all."""
         cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
         self.assertEqual(0, cache._value_size)
@@ -337,11 +337,11 @@ class TestLRUSizeCache(TestCase):
         self.assertEqual(3, cache._value_size)
         self.assertEqual({"test": "key"}, cache.items())
 
-    def test_no_add_over_size_cleanup(self):
+    def test_no_add_over_size_cleanup(self) -> None:
         """If a large value is not cached, we will call cleanup right away."""
         cleanup_calls = []
 
-        def cleanup(key, value):
+        def cleanup(key, value) -> None:
             cleanup_calls.append((key, value))
 
         cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
@@ -354,7 +354,7 @@ class TestLRUSizeCache(TestCase):
         # and cleanup was called
         self.assertEqual([("test", "key that is too big")], cleanup_calls)
 
-    def test_adding_clears_cache_based_on_size(self):
+    def test_adding_clears_cache_based_on_size(self) -> None:
         """The cache is cleared in LRU order until small enough."""
         cache = lru_cache.LRUSizeCache(max_size=20)
         cache.add("key1", "value")  # 5 chars
@@ -367,7 +367,7 @@ class TestLRUSizeCache(TestCase):
         self.assertEqual(6 + 8, cache._value_size)
         self.assertEqual({"key2": "value2", "key4": "value234"}, cache.items())
 
-    def test_adding_clears_to_after_cleanup_size(self):
+    def test_adding_clears_to_after_cleanup_size(self) -> None:
         cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
         cache.add("key1", "value")  # 5 chars
         cache.add("key2", "value2")  # 6 chars
@@ -379,7 +379,7 @@ class TestLRUSizeCache(TestCase):
         self.assertEqual(8, cache._value_size)
         self.assertEqual({"key4": "value234"}, cache.items())
 
-    def test_custom_sizes(self):
+    def test_custom_sizes(self) -> None:
         def size_of_list(lst):
             return sum(len(x) for x in lst)
 
@@ -397,7 +397,7 @@ class TestLRUSizeCache(TestCase):
         self.assertEqual(8, cache._value_size)
         self.assertEqual({"key4": ["value", "234"]}, cache.items())
 
-    def test_cleanup(self):
+    def test_cleanup(self) -> None:
         cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
 
         # Add these in order
@@ -410,7 +410,7 @@ class TestLRUSizeCache(TestCase):
         # Only the most recent fits after cleaning up
         self.assertEqual(7, cache._value_size)
 
-    def test_keys(self):
+    def test_keys(self) -> None:
         cache = lru_cache.LRUSizeCache(max_size=10)
 
         cache[1] = "a"
@@ -418,7 +418,7 @@ class TestLRUSizeCache(TestCase):
         cache[3] = "cdef"
         self.assertEqual([1, 2, 3], sorted(cache.keys()))
 
-    def test_resize_smaller(self):
+    def test_resize_smaller(self) -> None:
         cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
         cache[1] = "abc"
         cache[2] = "def"
@@ -435,7 +435,7 @@ class TestLRUSizeCache(TestCase):
         cache[6] = "pqr"
         self.assertEqual([6], sorted(cache.keys()))
 
-    def test_resize_larger(self):
+    def test_resize_larger(self) -> None:
         cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
         cache[1] = "abc"
         cache[2] = "def"
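
Note: the hunks above all follow one mechanical pattern: test methods and local callbacks that return nothing gain an explicit -> None return annotation, the kind of omission ruff reports (presumably via its flake8-annotations / ANN rules). A minimal, hypothetical sketch of the before/after shape, not taken from dulwich:

    import unittest


    class AnnotationPatternExample(unittest.TestCase):
        """Hypothetical illustration of the pattern applied throughout this commit."""

        def test_cleanup_callback(self) -> None:  # was: def test_cleanup_callback(self):
            calls = []

            def cleanup(key, value) -> None:  # nested helpers get the same treatment
                calls.append((key, value))

            cleanup("key", 20)
            self.assertEqual([("key", 20)], calls)


    if __name__ == "__main__":
        unittest.main()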

+ 2 - 2
tests/test_mailmap.py

@@ -27,7 +27,7 @@ from dulwich.mailmap import Mailmap, read_mailmap
 
 
 class ReadMailmapTests(TestCase):
-    def test_read(self):
+    def test_read(self) -> None:
         b = BytesIO(
             b"""\
 Jane Doe         <jane@desktop.(none)>
@@ -67,7 +67,7 @@ Santa Claus <santa.claus@northpole.xx> <me@company.xx>
 
 
 class MailmapTests(TestCase):
-    def test_lookup(self):
+    def test_lookup(self) -> None:
         m = Mailmap()
         m.add_entry((b"Jane Doe", b"jane@desktop.(none)"), (None, None))
         m.add_entry((b"Joe R. Developer", b"joe@example.com"), None)

+ 21 - 21
tests/test_missing_obj_finder.py

@@ -26,7 +26,7 @@ from . import TestCase
 
 
 class MissingObjectFinderTest(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.commits = []
@@ -34,7 +34,7 @@ class MissingObjectFinderTest(TestCase):
     def cmt(self, n):
         return self.commits[n - 1]
 
-    def assertMissingMatch(self, haves, wants, expected):
+    def assertMissingMatch(self, haves, wants, expected) -> None:
         for sha, path in MissingObjectFinder(self.store, haves, wants, shallow=set()):
             self.assertIn(
                 sha, expected, f"({sha},{path}) erroneously reported as missing"
@@ -49,7 +49,7 @@ class MissingObjectFinderTest(TestCase):
 
 
 class MOFLinearRepoTest(MissingObjectFinderTest):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         # present in 1, removed in 3
         f1_1 = make_object(Blob, data=b"f1")
@@ -84,23 +84,23 @@ class MOFLinearRepoTest(MissingObjectFinderTest):
             f2_3.id,
         ]
 
-    def test_1_to_2(self):
+    def test_1_to_2(self) -> None:
         self.assertMissingMatch([self.cmt(1).id], [self.cmt(2).id], self.missing_1_2)
 
-    def test_2_to_3(self):
+    def test_2_to_3(self) -> None:
         self.assertMissingMatch([self.cmt(2).id], [self.cmt(3).id], self.missing_2_3)
 
-    def test_1_to_3(self):
+    def test_1_to_3(self) -> None:
         self.assertMissingMatch([self.cmt(1).id], [self.cmt(3).id], self.missing_1_3)
 
-    def test_bogus_haves(self):
+    def test_bogus_haves(self) -> None:
         """Ensure non-existent SHA in haves are tolerated."""
         bogus_sha = self.cmt(2).id[::-1]
         haves = [self.cmt(1).id, bogus_sha]
         wants = [self.cmt(3).id]
         self.assertMissingMatch(haves, wants, self.missing_1_3)
 
-    def test_bogus_wants_failure(self):
+    def test_bogus_wants_failure(self) -> None:
         """Ensure non-existent SHA in wants are not tolerated."""
         bogus_sha = self.cmt(2).id[::-1]
         haves = [self.cmt(1).id]
@@ -109,7 +109,7 @@ class MOFLinearRepoTest(MissingObjectFinderTest):
             KeyError, MissingObjectFinder, self.store, haves, wants, shallow=set()
         )
 
-    def test_no_changes(self):
+    def test_no_changes(self) -> None:
         self.assertMissingMatch([self.cmt(3).id], [self.cmt(3).id], [])
 
 
@@ -120,7 +120,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
     #            \
     #             5
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         f1_1 = make_object(Blob, data=b"f1")
         f1_2 = make_object(Blob, data=b"f1-2")
@@ -153,7 +153,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
 
         self.assertEqual(f1_2.id, f1_7.id, "[sanity]")
 
-    def test_have6_want7(self):
+    def test_have6_want7(self) -> None:
         # have 6, want 7. Ideally, shall not report f1_7 as it's the same as
         # f1_2, however, to do so, MissingObjectFinder shall not record trees
         # of common commits only, but also all parent trees and tree items,
@@ -166,7 +166,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
             [self.cmt(7).id, self.cmt(7).tree, self.f1_7_id],
         )
 
-    def test_have4_want7(self):
+    def test_have4_want7(self) -> None:
         # have 4, want 7. Shall not include rev5 as it is not in the tree
         # between 4 and 7 (well, it is, but its SHA's are irrelevant for 4..7
         # commit hierarchy)
@@ -185,7 +185,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
             ],
         )
 
-    def test_have1_want6(self):
+    def test_have1_want6(self) -> None:
         # have 1, want 6. Shall not include rev5
         self.assertMissingMatch(
             [self.cmt(1).id],
@@ -206,7 +206,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
             ],
         )
 
-    def test_have3_want6(self):
+    def test_have3_want6(self) -> None:
         # have 3, want 7. Shall not report rev2 and its tree, because
         # haves(3) means has parents, i.e. rev2, too
         # BUT shall report any changes descending rev2 (excluding rev3)
@@ -225,7 +225,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
             ],
         )
 
-    def test_have5_want7(self):
+    def test_have5_want7(self) -> None:
         # have 5, want 7. Common parent is rev2, hence children of rev2 from
         # a descent line other than rev5 shall be reported
         # expects f1_4 from rev6. f3_5 is known in rev5;
@@ -246,7 +246,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest):
 
 
 class MOFTagsTest(MissingObjectFinderTest):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         f1_1 = make_object(Blob, data=b"f1")
         commit_spec = [[1]]
@@ -270,7 +270,7 @@ class MOFTagsTest(MissingObjectFinderTest):
 
         self.f1_1_id = f1_1.id
 
-    def test_tagged_commit(self):
+    def test_tagged_commit(self) -> None:
         # The user already has the tagged commit, all they want is the tag,
         # so send them only the tag object.
         self.assertMissingMatch(
@@ -278,7 +278,7 @@ class MOFTagsTest(MissingObjectFinderTest):
         )
 
     # The remaining cases are unusual, but do happen in the wild.
-    def test_tagged_tag(self):
+    def test_tagged_tag(self) -> None:
         # User already has tagged tag, send only tag of tag
         self.assertMissingMatch(
             [self._normal_tag.id], [self._tag_of_tag.id], [self._tag_of_tag.id]
@@ -290,19 +290,19 @@ class MOFTagsTest(MissingObjectFinderTest):
             [self._normal_tag.id, self._tag_of_tag.id],
         )
 
-    def test_tagged_tree(self):
+    def test_tagged_tree(self) -> None:
         self.assertMissingMatch(
             [],
             [self._tag_of_tree.id],
             [self._tag_of_tree.id, self.cmt(1).tree, self.f1_1_id],
         )
 
-    def test_tagged_blob(self):
+    def test_tagged_blob(self) -> None:
         self.assertMissingMatch(
             [], [self._tag_of_blob.id], [self._tag_of_blob.id, self.f1_1_id]
         )
 
-    def test_tagged_tagged_blob(self):
+    def test_tagged_tagged_blob(self) -> None:
         self.assertMissingMatch(
             [],
             [self._tag_of_tag_of_blob.id],

+ 39 - 39
tests/test_object_store.py

@@ -58,18 +58,18 @@ testobject = make_object(Blob, data=b"yummy data")
 
 
 class OverlayObjectStoreTests(ObjectStoreTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.bases = [MemoryObjectStore(), MemoryObjectStore()]
         self.store = OverlayObjectStore(self.bases, self.bases[0])
 
 
 class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.store = MemoryObjectStore()
 
-    def test_add_pack(self):
+    def test_add_pack(self) -> None:
         o = MemoryObjectStore()
         f, commit, abort = o.add_pack()
         try:
@@ -81,12 +81,12 @@ class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
         else:
             commit()
 
-    def test_add_pack_emtpy(self):
+    def test_add_pack_emtpy(self) -> None:
         o = MemoryObjectStore()
         f, commit, abort = o.add_pack()
         commit()
 
-    def test_add_thin_pack(self):
+    def test_add_thin_pack(self) -> None:
         o = MemoryObjectStore()
         blob = make_object(Blob, data=b"yummy data")
         o.add_object(blob)
@@ -105,7 +105,7 @@ class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
             (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha)
         )
 
-    def test_add_thin_pack_empty(self):
+    def test_add_thin_pack_empty(self) -> None:
         o = MemoryObjectStore()
 
         f = BytesIO()
@@ -115,24 +115,24 @@ class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
 
 
 class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.store_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.store_dir)
         self.store = DiskObjectStore.init(self.store_dir)
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         TestCase.tearDown(self)
         PackBasedObjectStoreTests.tearDown(self)
 
-    def test_loose_compression_level(self):
+    def test_loose_compression_level(self) -> None:
         alternate_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, alternate_dir)
         alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6)
         b2 = make_object(Blob, data=b"yummy data")
         alternate_store.add_object(b2)
 
-    def test_alternates(self):
+    def test_alternates(self) -> None:
         alternate_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, alternate_dir)
         alternate_store = DiskObjectStore(alternate_dir)
@@ -144,7 +144,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         self.assertIn(b2.id, store)
         self.assertEqual(b2, store[b2.id])
 
-    def test_read_alternate_paths(self):
+    def test_read_alternate_paths(self) -> None:
         store = DiskObjectStore(self.store_dir)
 
         abs_path = os.path.abspath(os.path.normpath("/abspath"))
@@ -164,7 +164,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         for alt_path in store._read_alternate_paths():
             self.assertNotIn("#", alt_path)
 
-    def test_file_modes(self):
+    def test_file_modes(self) -> None:
         self.store.add_object(testobject)
         path = self.store._get_shafile_path(testobject.id)
         mode = os.stat(path).st_mode
@@ -172,7 +172,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         packmode = "0o100444" if sys.platform != "win32" else "0o100666"
         self.assertEqual(oct(mode), packmode)
 
-    def test_corrupted_object_raise_exception(self):
+    def test_corrupted_object_raise_exception(self) -> None:
         """Corrupted sha1 disk file should raise specific exception."""
         self.store.add_object(testobject)
         self.assertEqual(
@@ -202,7 +202,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         # this does not change iteration on loose objects though
         self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
 
-    def test_tempfile_in_loose_store(self):
+    def test_tempfile_in_loose_store(self) -> None:
         self.store.add_object(testobject)
         self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
 
@@ -216,7 +216,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
 
         self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))
 
-    def test_add_alternate_path(self):
+    def test_add_alternate_path(self) -> None:
         store = DiskObjectStore(self.store_dir)
         self.assertEqual([], list(store._read_alternate_paths()))
         store.add_alternate_path(os.path.abspath("/foo/path"))
@@ -239,7 +239,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
                 list(store._read_alternate_paths()),
             )
 
-    def test_rel_alternative_path(self):
+    def test_rel_alternative_path(self) -> None:
         alternate_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, alternate_dir)
         alternate_store = DiskObjectStore(alternate_dir)
@@ -252,11 +252,11 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         self.assertIn(b2.id, store)
         self.assertEqual(b2, store[b2.id])
 
-    def test_pack_dir(self):
+    def test_pack_dir(self) -> None:
         o = DiskObjectStore(self.store_dir)
         self.assertEqual(os.path.join(self.store_dir, "pack"), o.pack_dir)
 
-    def test_add_pack(self):
+    def test_add_pack(self) -> None:
         o = DiskObjectStore(self.store_dir)
         self.addCleanup(o.close)
         f, commit, abort = o.add_pack()
@@ -269,7 +269,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         else:
             commit()
 
-    def test_add_thin_pack(self):
+    def test_add_thin_pack(self) -> None:
         o = DiskObjectStore(self.store_dir)
         try:
             blob = make_object(Blob, data=b"yummy data")
@@ -297,7 +297,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         finally:
             o.close()
 
-    def test_add_thin_pack_empty(self):
+    def test_add_thin_pack_empty(self) -> None:
         with closing(DiskObjectStore(self.store_dir)) as o:
             f = BytesIO()
             entries = build_pack(f, [], store=o)
@@ -306,7 +306,7 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
 
 
 class TreeLookupPathTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.store = MemoryObjectStore()
         blob_a = make_object(Blob, data=b"a")
@@ -328,11 +328,11 @@ class TreeLookupPathTests(TestCase):
     def get_object(self, sha):
         return self.store[sha]
 
-    def test_lookup_blob(self):
+    def test_lookup_blob(self) -> None:
         o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")[1]
         self.assertIsInstance(self.store[o_id], Blob)
 
-    def test_lookup_tree(self):
+    def test_lookup_tree(self) -> None:
         o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad")[1]
         self.assertIsInstance(self.store[o_id], Tree)
         o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd")[1]
@@ -340,7 +340,7 @@ class TreeLookupPathTests(TestCase):
         o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd/")[1]
         self.assertIsInstance(self.store[o_id], Tree)
 
-    def test_lookup_submodule(self):
+    def test_lookup_submodule(self) -> None:
         tree_lookup_path(self.get_object, self.tree_id, b"d")[1]
         self.assertRaises(
             SubmoduleEncountered,
@@ -350,12 +350,12 @@ class TreeLookupPathTests(TestCase):
             b"d/a",
         )
 
-    def test_lookup_nonexistent(self):
+    def test_lookup_nonexistent(self) -> None:
         self.assertRaises(
             KeyError, tree_lookup_path, self.get_object, self.tree_id, b"j"
         )
 
-    def test_lookup_not_tree(self):
+    def test_lookup_not_tree(self) -> None:
         self.assertRaises(
             NotTreeError,
             tree_lookup_path,
@@ -374,40 +374,40 @@ class ObjectStoreGraphWalkerTests(TestCase):
             [x * 40 for x in heads], new_parent_map.__getitem__
         )
 
-    def test_ack_invalid_value(self):
+    def test_ack_invalid_value(self) -> None:
         gw = self.get_walker([], {})
         self.assertRaises(ValueError, gw.ack, "tooshort")
 
-    def test_empty(self):
+    def test_empty(self) -> None:
         gw = self.get_walker([], {})
         self.assertIs(None, next(gw))
         gw.ack(b"a" * 40)
         self.assertIs(None, next(gw))
 
-    def test_descends(self):
+    def test_descends(self) -> None:
         gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
         self.assertEqual(b"a" * 40, next(gw))
         self.assertEqual(b"b" * 40, next(gw))
 
-    def test_present(self):
+    def test_present(self) -> None:
         gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
         gw.ack(b"a" * 40)
         self.assertIs(None, next(gw))
 
-    def test_parent_present(self):
+    def test_parent_present(self) -> None:
         gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
         self.assertEqual(b"a" * 40, next(gw))
         gw.ack(b"a" * 40)
         self.assertIs(None, next(gw))
 
-    def test_child_ack_later(self):
+    def test_child_ack_later(self) -> None:
         gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": [b"c"], b"c": []})
         self.assertEqual(b"a" * 40, next(gw))
         self.assertEqual(b"b" * 40, next(gw))
         gw.ack(b"a" * 40)
         self.assertIs(None, next(gw))
 
-    def test_only_once(self):
+    def test_only_once(self) -> None:
         # a  b
         # |  |
         # c  d
@@ -447,7 +447,7 @@ class ObjectStoreGraphWalkerTests(TestCase):
 
 
 class CommitTreeChangesTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.blob_a = make_object(Blob, data=b"a")
@@ -465,13 +465,13 @@ class CommitTreeChangesTests(TestCase):
         ]
         self.tree_id = commit_tree(self.store, blobs)
 
-    def test_no_changes(self):
+    def test_no_changes(self) -> None:
         self.assertEqual(
             self.store[self.tree_id],
             commit_tree_changes(self.store, self.store[self.tree_id], []),
         )
 
-    def test_add_blob(self):
+    def test_add_blob(self) -> None:
         blob_d = make_object(Blob, data=b"d")
         new_tree = commit_tree_changes(
             self.store, self.store[self.tree_id], [(b"d", 0o100644, blob_d.id)]
@@ -481,7 +481,7 @@ class CommitTreeChangesTests(TestCase):
             (33188, b"c59d9b6344f1af00e504ba698129f07a34bbed8d"),
         )
 
-    def test_add_blob_in_dir(self):
+    def test_add_blob_in_dir(self) -> None:
         blob_d = make_object(Blob, data=b"d")
         new_tree = commit_tree_changes(
             self.store,
@@ -522,7 +522,7 @@ class CommitTreeChangesTests(TestCase):
             [TreeEntry(path=b"d", mode=stat.S_IFREG | 0o100644, sha=blob_d.id)],
         )
 
-    def test_delete_blob(self):
+    def test_delete_blob(self) -> None:
         new_tree = commit_tree_changes(
             self.store, self.store[self.tree_id], [(b"ad/bd/c", None, None)]
         )
@@ -532,7 +532,7 @@ class CommitTreeChangesTests(TestCase):
 
 
 class TestReadPacksFile(TestCase):
-    def test_read_packs(self):
+    def test_read_packs(self) -> None:
         self.assertEqual(
             ["pack-1.pack"],
             list(
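
The setUp and tearDown hunks in tests/test_object_store.py above get the same treatment. unittest.TestCase declares both hooks as returning None, so annotating the overrides keeps the signatures consistent for a type checker. A minimal sketch under that assumption (hypothetical class, not dulwich code):

    import shutil
    import tempfile
    import unittest


    class TempDirExample(unittest.TestCase):
        """Hypothetical test case mirroring the annotated lifecycle hooks above."""

        def setUp(self) -> None:  # matches unittest.TestCase.setUp, which returns None
            super().setUp()
            self.tmpdir = tempfile.mkdtemp()
            self.addCleanup(shutil.rmtree, self.tmpdir)

        def tearDown(self) -> None:  # matches unittest.TestCase.tearDown
            super().tearDown()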

+ 95 - 95
tests/test_objects.py

@@ -73,10 +73,10 @@ tag_sha = b"71033db03a03c6a36721efcf1968dd8f8e0cf023"
 
 
 class TestHexToSha(TestCase):
-    def test_simple(self):
+    def test_simple(self) -> None:
         self.assertEqual(b"\xab\xcd" * 10, hex_to_sha(b"abcd" * 10))
 
-    def test_reverse(self):
+    def test_reverse(self) -> None:
         self.assertEqual(b"abcd" * 10, sha_to_hex(b"\xab\xcd" * 10))
 
 
@@ -100,46 +100,46 @@ class BlobReadTests(TestCase):
     def commit(self, sha):
         return self.get_sha_file(Commit, "commits", sha)
 
-    def test_decompress_simple_blob(self):
+    def test_decompress_simple_blob(self) -> None:
         b = self.get_blob(a_sha)
         self.assertEqual(b.data, b"test 1\n")
         self.assertEqual(b.sha().hexdigest().encode("ascii"), a_sha)
 
-    def test_hash(self):
+    def test_hash(self) -> None:
         b = self.get_blob(a_sha)
         self.assertEqual(hash(b.id), hash(b))
 
-    def test_parse_empty_blob_object(self):
+    def test_parse_empty_blob_object(self) -> None:
         sha = b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
         b = self.get_blob(sha)
         self.assertEqual(b.data, b"")
         self.assertEqual(b.id, sha)
         self.assertEqual(b.sha().hexdigest().encode("ascii"), sha)
 
-    def test_create_blob_from_string(self):
+    def test_create_blob_from_string(self) -> None:
         string = b"test 2\n"
         b = Blob.from_string(string)
         self.assertEqual(b.data, string)
         self.assertEqual(b.sha().hexdigest().encode("ascii"), b_sha)
 
-    def test_legacy_from_file(self):
+    def test_legacy_from_file(self) -> None:
         b1 = Blob.from_string(b"foo")
         b_raw = b1.as_legacy_object()
         b2 = b1.from_file(BytesIO(b_raw))
         self.assertEqual(b1, b2)
 
-    def test_legacy_from_file_compression_level(self):
+    def test_legacy_from_file_compression_level(self) -> None:
         b1 = Blob.from_string(b"foo")
         b_raw = b1.as_legacy_object(compression_level=6)
         b2 = b1.from_file(BytesIO(b_raw))
         self.assertEqual(b1, b2)
 
-    def test_chunks(self):
+    def test_chunks(self) -> None:
         string = b"test 5\n"
         b = Blob.from_string(string)
         self.assertEqual([string], b.chunked)
 
-    def test_splitlines(self):
+    def test_splitlines(self) -> None:
         for case in [
             [],
             [b"foo\nbar\n"],
@@ -155,7 +155,7 @@ class BlobReadTests(TestCase):
             b.chunked = case
             self.assertEqual(b.data.splitlines(True), b.splitlines())
 
-    def test_set_chunks(self):
+    def test_set_chunks(self) -> None:
         b = Blob()
         b.chunked = [b"te", b"st", b" 5\n"]
         self.assertEqual(b"test 5\n", b.data)
@@ -163,26 +163,26 @@ class BlobReadTests(TestCase):
         self.assertEqual(b"test 6\n", b.as_raw_string())
         self.assertEqual(b"test 6\n", bytes(b))
 
-    def test_parse_legacy_blob(self):
+    def test_parse_legacy_blob(self) -> None:
         string = b"test 3\n"
         b = self.get_blob(c_sha)
         self.assertEqual(b.data, string)
         self.assertEqual(b.sha().hexdigest().encode("ascii"), c_sha)
 
-    def test_eq(self):
+    def test_eq(self) -> None:
         blob1 = self.get_blob(a_sha)
         blob2 = self.get_blob(a_sha)
         self.assertEqual(blob1, blob2)
 
-    def test_read_tree_from_file(self):
+    def test_read_tree_from_file(self) -> None:
         t = self.get_tree(tree_sha)
         self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
         self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
 
-    def test_read_tree_from_file_parse_count(self):
+    def test_read_tree_from_file_parse_count(self) -> None:
         old_deserialize = Tree._deserialize
 
-        def reset_deserialize():
+        def reset_deserialize() -> None:
             Tree._deserialize = old_deserialize
 
         self.addCleanup(reset_deserialize)
@@ -198,7 +198,7 @@ class BlobReadTests(TestCase):
         self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
         self.assertEqual(self.deserialize_count, 1)
 
-    def test_read_tag_from_file(self):
+    def test_read_tag_from_file(self) -> None:
         t = self.get_tag(tag_sha)
         self.assertEqual(
             t.object, (Commit, b"51b668fd5bf7061b7d6fa525f88803e6cfadaa51")
@@ -219,7 +219,7 @@ class BlobReadTests(TestCase):
             b"-----END PGP SIGNATURE-----\n",
         )
 
-    def test_read_commit_from_file(self):
+    def test_read_commit_from_file(self) -> None:
         sha = b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"
         c = self.commit(sha)
         self.assertEqual(c.tree, tree_sha)
@@ -231,7 +231,7 @@ class BlobReadTests(TestCase):
         self.assertEqual(c.author_timezone, 0)
         self.assertEqual(c.message, b"Test commit\n")
 
-    def test_read_commit_no_parents(self):
+    def test_read_commit_no_parents(self) -> None:
         sha = b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"
         c = self.commit(sha)
         self.assertEqual(c.tree, b"90182552c4a85a45ec2a835cadc3451bebdfe870")
@@ -243,7 +243,7 @@ class BlobReadTests(TestCase):
         self.assertEqual(c.author_timezone, 0)
         self.assertEqual(c.message, b"Test commit\n")
 
-    def test_read_commit_two_parents(self):
+    def test_read_commit_two_parents(self) -> None:
         sha = b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc"
         c = self.commit(sha)
         self.assertEqual(c.tree, b"d80c186a03f423a81b39df39dc87fd269736ca86")
@@ -261,7 +261,7 @@ class BlobReadTests(TestCase):
         self.assertEqual(c.author_timezone, 0)
         self.assertEqual(c.message, b"Merge ../b\n")
 
-    def test_stub_sha(self):
+    def test_stub_sha(self) -> None:
         sha = b"5" * 40
         c = make_commit(id=sha, message=b"foo")
         self.assertIsInstance(c, Commit)
@@ -270,16 +270,16 @@ class BlobReadTests(TestCase):
 
 
 class ShaFileCheckTests(TestCase):
-    def assertCheckFails(self, cls, data):
+    def assertCheckFails(self, cls, data) -> None:
         obj = cls()
 
-        def do_check():
+        def do_check() -> None:
             obj.set_raw_string(data)
             obj.check()
 
         self.assertRaises(ObjectFormatException, do_check)
 
-    def assertCheckSucceeds(self, cls, data):
+    def assertCheckSucceeds(self, cls, data) -> None:
         obj = cls()
         obj.set_raw_string(data)
         self.assertEqual(None, obj.check())
@@ -300,7 +300,7 @@ small_buffer_zlib_object = (
 
 
 class ShaFileTests(TestCase):
-    def test_deflated_smaller_window_buffer(self):
+    def test_deflated_smaller_window_buffer(self) -> None:
         # zlib on some systems uses smaller buffers,
         # resulting in a different header.
         # See https://github.com/libgit2/libgit2/pull/464
@@ -328,17 +328,17 @@ class CommitSerializationTests(TestCase):
         attrs.update(kwargs)
         return make_commit(**attrs)
 
-    def test_encoding(self):
+    def test_encoding(self) -> None:
         c = self.make_commit(encoding=b"iso8859-1")
         self.assertIn(b"encoding iso8859-1\n", c.as_raw_string())
 
-    def test_short_timestamp(self):
+    def test_short_timestamp(self) -> None:
         c = self.make_commit(commit_time=30)
         c1 = Commit()
         c1.set_raw_string(c.as_raw_string())
         self.assertEqual(30, c1.commit_time)
 
-    def test_full_tree(self):
+    def test_full_tree(self) -> None:
         c = self.make_commit(commit_time=30)
         t = Tree()
         t.add(b"data-x", 0o644, Blob().id)
@@ -348,11 +348,11 @@ class CommitSerializationTests(TestCase):
         self.assertEqual(t.id, c1.tree)
         self.assertEqual(c.as_raw_string(), c1.as_raw_string())
 
-    def test_raw_length(self):
+    def test_raw_length(self) -> None:
         c = self.make_commit()
         self.assertEqual(len(c.as_raw_string()), c.raw_length())
 
-    def test_simple(self):
+    def test_simple(self) -> None:
         c = self.make_commit()
         self.assertEqual(c.id, b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc")
         self.assertEqual(
@@ -368,21 +368,21 @@ class CommitSerializationTests(TestCase):
             c.as_raw_string(),
         )
 
-    def test_timezone(self):
+    def test_timezone(self) -> None:
         c = self.make_commit(commit_timezone=(5 * 60))
         self.assertIn(b" +0005\n", c.as_raw_string())
 
-    def test_neg_timezone(self):
+    def test_neg_timezone(self) -> None:
         c = self.make_commit(commit_timezone=(-1 * 3600))
         self.assertIn(b" -0100\n", c.as_raw_string())
 
-    def test_deserialize(self):
+    def test_deserialize(self) -> None:
         c = self.make_commit()
         d = Commit()
         d._deserialize(c.as_raw_chunks())
         self.assertEqual(c, d)
 
-    def test_serialize_gpgsig(self):
+    def test_serialize_gpgsig(self) -> None:
         commit = self.make_commit(
             gpgsig=b"""-----BEGIN PGP SIGNATURE-----
 Version: GnuPG v1
@@ -433,7 +433,7 @@ Merge ../b
             commit.as_raw_string(),
         )
 
-    def test_serialize_mergetag(self):
+    def test_serialize_mergetag(self) -> None:
         tag = make_object(
             Tag,
             object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
@@ -471,7 +471,7 @@ Merge ../b
             commit.as_raw_string(),
         )
 
-    def test_serialize_mergetags(self):
+    def test_serialize_mergetags(self) -> None:
         tag = make_object(
             Tag,
             object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
@@ -522,7 +522,7 @@ Merge ../b
             commit.as_raw_string(),
         )
 
-    def test_deserialize_mergetag(self):
+    def test_deserialize_mergetag(self) -> None:
         tag = make_object(
             Tag,
             object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
@@ -539,7 +539,7 @@ Merge ../b
         d._deserialize(commit.as_raw_chunks())
         self.assertEqual(commit, d)
 
-    def test_deserialize_mergetags(self):
+    def test_deserialize_mergetags(self) -> None:
         tag = make_object(
             Tag,
             object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
@@ -596,7 +596,7 @@ class CommitParseTests(ShaFileCheckTests):
     def make_commit_text(self, **kwargs):
         return b"\n".join(self.make_commit_lines(**kwargs))
 
-    def test_simple(self):
+    def test_simple(self) -> None:
         c = Commit.from_string(self.make_commit_text())
         self.assertEqual(b"Merge ../b\n", c.message)
         self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.author)
@@ -620,15 +620,15 @@ class CommitParseTests(ShaFileCheckTests):
         self.assertEqual(0, c.author_timezone)
         self.assertEqual(None, c.encoding)
 
-    def test_custom(self):
+    def test_custom(self) -> None:
         c = Commit.from_string(self.make_commit_text(extra={b"extra-field": b"data"}))
         self.assertEqual([(b"extra-field", b"data")], c._extra)
 
-    def test_encoding(self):
+    def test_encoding(self) -> None:
         c = Commit.from_string(self.make_commit_text(encoding=b"UTF-8"))
         self.assertEqual(b"UTF-8", c.encoding)
 
-    def test_check(self):
+    def test_check(self) -> None:
         self.assertCheckSucceeds(Commit, self.make_commit_text())
         self.assertCheckSucceeds(Commit, self.make_commit_text(parents=None))
         self.assertCheckSucceeds(Commit, self.make_commit_text(encoding=b"UTF-8"))
@@ -644,7 +644,7 @@ class CommitParseTests(ShaFileCheckTests):
             Commit, self.make_commit_text(author=None, committer=None)
         )
 
-    def test_check_duplicates(self):
+    def test_check_duplicates(self) -> None:
         # duplicate each of the header fields
         for i in range(5):
             lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
@@ -656,7 +656,7 @@ class CommitParseTests(ShaFileCheckTests):
             else:
                 self.assertCheckFails(Commit, text)
 
-    def test_check_order(self):
+    def test_check_order(self) -> None:
         lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
         headers = lines[:5]
         rest = lines[5:]
@@ -669,7 +669,7 @@ class CommitParseTests(ShaFileCheckTests):
             else:
                 self.assertCheckFails(Commit, text)
 
-    def test_check_commit_with_unparseable_time(self):
+    def test_check_commit_with_unparseable_time(self) -> None:
         identity_with_wrong_time = (
             b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614+42707004"
         )
@@ -688,7 +688,7 @@ class CommitParseTests(ShaFileCheckTests):
             ),
         )
 
-    def test_check_commit_with_overflow_date(self):
+    def test_check_commit_with_overflow_date(self) -> None:
         """Date with overflow should raise an ObjectFormatException when checked."""
         identity_with_wrong_time = (
             b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614 +42707004"
@@ -709,7 +709,7 @@ class CommitParseTests(ShaFileCheckTests):
             with self.assertRaises(ObjectFormatException):
                 commit.check()
 
-    def test_mangled_author_line(self):
+    def test_mangled_author_line(self) -> None:
         """Mangled author line should successfully parse."""
         author_line = (
             b'Karl MacMillan <kmacmill@redhat.com> <"Karl MacMillan '
@@ -728,7 +728,7 @@ class CommitParseTests(ShaFileCheckTests):
         with self.assertRaises(ObjectFormatException):
             commit.check()
 
-    def test_parse_gpgsig(self):
+    def test_parse_gpgsig(self) -> None:
         c = Commit.from_string(
             b"""tree aaff74984cccd156a469afa7d9ab10e4777beb24
 author Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
@@ -777,7 +777,7 @@ fDeF1m4qYs+cUXKNUZ03
             c.gpgsig,
         )
 
-    def test_parse_header_trailing_newline(self):
+    def test_parse_header_trailing_newline(self) -> None:
         c = Commit.from_string(
             b"""\
 tree a7d6277f78d3ecd0230a1a5df6db00b1d9c521ac
@@ -852,40 +852,40 @@ _SORTED_TREE_ITEMS_BUG_1325 = [
 
 
 class TreeTests(ShaFileCheckTests):
-    def test_add(self):
+    def test_add(self) -> None:
         myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
         x = Tree()
         x.add(b"myname", 0o100755, myhexsha)
         self.assertEqual(x[b"myname"], (0o100755, myhexsha))
         self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
 
-    def test_simple(self):
+    def test_simple(self) -> None:
         myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
         x = Tree()
         x[b"myname"] = (0o100755, myhexsha)
         self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
         self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), bytes(x))
 
-    def test_tree_update_id(self):
+    def test_tree_update_id(self) -> None:
         x = Tree()
         x[b"a.c"] = (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86")
         self.assertEqual(b"0c5c6bc2c081accfbc250331b19e43b904ab9cdd", x.id)
         x[b"a.b"] = (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86")
         self.assertEqual(b"07bfcb5f3ada15bbebdfa3bbb8fd858a363925c8", x.id)
 
-    def test_tree_iteritems_dir_sort(self):
+    def test_tree_iteritems_dir_sort(self) -> None:
         x = Tree()
         for name, item in _TREE_ITEMS.items():
             x[name] = item
         self.assertEqual(_SORTED_TREE_ITEMS, x.items())
 
-    def test_tree_items_dir_sort(self):
+    def test_tree_items_dir_sort(self) -> None:
         x = Tree()
         for name, item in _TREE_ITEMS.items():
             x[name] = item
         self.assertEqual(_SORTED_TREE_ITEMS, x.items())
 
-    def _do_test_parse_tree(self, parse_tree):
+    def _do_test_parse_tree(self, parse_tree) -> None:
         dir = os.path.join(os.path.dirname(__file__), "..", "testdata", "trees")
         o = Tree.from_path(hex_to_filename(dir, tree_sha))
         self.assertEqual(
@@ -908,7 +908,7 @@ class TreeTests(ShaFileCheckTests):
         _do_test_parse_tree, _parse_tree_rs
     )
 
-    def _do_test_sorted_tree_items(self, sorted_tree_items):
+    def _do_test_sorted_tree_items(self, sorted_tree_items) -> None:
         def do_sort(entries, name_order):
             return list(sorted_tree_items(entries, name_order))
 
@@ -946,7 +946,7 @@ class TreeTests(ShaFileCheckTests):
             _do_test_sorted_tree_items, _sorted_tree_items_rs
         )
 
-    def _do_test_sorted_tree_items_name_order(self, sorted_tree_items):
+    def _do_test_sorted_tree_items_name_order(self, sorted_tree_items) -> None:
         self.assertEqual(
             [
                 TreeEntry(
@@ -986,7 +986,7 @@ class TreeTests(ShaFileCheckTests):
             _do_test_sorted_tree_items_name_order, _sorted_tree_items_rs
         )
 
-    def test_check(self):
+    def test_check(self) -> None:
         t = Tree
         sha = hex_to_sha(a_sha)
 
@@ -1020,14 +1020,14 @@ class TreeTests(ShaFileCheckTests):
         self.assertCheckFails(t, b"100644 a\0" + sha + b"100755 a\0" + sha2)
         self.assertCheckFails(t, b"100644 b\0" + sha2 + b"100644 a\0" + sha)
 
-    def test_iter(self):
+    def test_iter(self) -> None:
         t = Tree()
         t[b"foo"] = (0o100644, a_sha)
         self.assertEqual({b"foo"}, set(t))
 
 
 class TagSerializeTests(TestCase):
-    def test_serialize_simple(self):
+    def test_serialize_simple(self) -> None:
         x = make_object(
             Tag,
             tagger=b"Jelmer Vernooij <jelmer@samba.org>",
@@ -1050,7 +1050,7 @@ class TagSerializeTests(TestCase):
             x.as_raw_string(),
         )
 
-    def test_serialize_none_message(self):
+    def test_serialize_none_message(self) -> None:
         x = make_object(
             Tag,
             tagger=b"Jelmer Vernooij <jelmer@samba.org>",
@@ -1112,7 +1112,7 @@ class TagParseTests(ShaFileCheckTests):
     def make_tag_text(self, **kwargs):
         return b"\n".join(self.make_tag_lines(**kwargs))
 
-    def test_parse(self):
+    def test_parse(self) -> None:
         x = Tag()
         x.set_raw_string(self.make_tag_text())
         self.assertEqual(
@@ -1128,14 +1128,14 @@ class TagParseTests(ShaFileCheckTests):
         )
         self.assertEqual(-25200, x.tag_timezone)
 
-    def test_parse_no_tagger(self):
+    def test_parse_no_tagger(self) -> None:
         x = Tag()
         x.set_raw_string(self.make_tag_text(tagger=None))
         self.assertEqual(None, x.tagger)
         self.assertEqual(b"v2.6.22-rc7", x.name)
         self.assertEqual(None, x.tag_time)
 
-    def test_parse_no_message(self):
+    def test_parse_no_message(self) -> None:
         x = Tag()
         x.set_raw_string(self.make_tag_text(message=None))
         self.assertEqual(None, x.message)
@@ -1149,7 +1149,7 @@ class TagParseTests(ShaFileCheckTests):
         self.assertEqual(-25200, x.tag_timezone)
         self.assertEqual(b"v2.6.22-rc7", x.name)
 
-    def test_check(self):
+    def test_check(self) -> None:
         self.assertCheckSucceeds(Tag, self.make_tag_text())
         self.assertCheckFails(Tag, self.make_tag_text(object_sha=None))
         self.assertCheckFails(Tag, self.make_tag_text(object_type_name=None))
@@ -1173,7 +1173,7 @@ class TagParseTests(ShaFileCheckTests):
         )
         self.assertCheckFails(Tag, self.make_tag_text(object_sha=b"xxx"))
 
-    def test_check_tag_with_unparseable_field(self):
+    def test_check_tag_with_unparseable_field(self) -> None:
         self.assertCheckFails(
             Tag,
             self.make_tag_text(
@@ -1184,21 +1184,21 @@ class TagParseTests(ShaFileCheckTests):
             ),
         )
 
-    def test_check_tag_with_overflow_time(self):
+    def test_check_tag_with_overflow_time(self) -> None:
         """Date with overflow should raise an ObjectFormatException when checked."""
         author = f"Some Dude <some@dude.org> {MAX_TIME + 1} +0000"
         tag = Tag.from_string(self.make_tag_text(tagger=(author.encode())))
         with self.assertRaises(ObjectFormatException):
             tag.check()
 
-    def test_check_duplicates(self):
+    def test_check_duplicates(self) -> None:
         # duplicate each of the header fields
         for i in range(4):
             lines = self.make_tag_lines()
             lines.insert(i, lines[i])
             self.assertCheckFails(Tag, b"\n".join(lines))
 
-    def test_check_order(self):
+    def test_check_order(self) -> None:
         lines = self.make_tag_lines()
         headers = lines[:4]
         rest = lines[4:]
@@ -1211,7 +1211,7 @@ class TagParseTests(ShaFileCheckTests):
             else:
                 self.assertCheckFails(Tag, text)
 
-    def test_tree_copy_after_update(self):
+    def test_tree_copy_after_update(self) -> None:
         """Check Tree.id is correctly updated when the tree is copied after updated."""
         shas = []
         tree = Tree()
@@ -1226,7 +1226,7 @@ class TagParseTests(ShaFileCheckTests):
 
 
 class CheckTests(TestCase):
-    def test_check_hexsha(self):
+    def test_check_hexsha(self) -> None:
         check_hexsha(a_sha, "failed to check good sha")
         self.assertRaises(
             ObjectFormatException, check_hexsha, b"1" * 39, "sha too short"
@@ -1241,7 +1241,7 @@ class CheckTests(TestCase):
             "invalid characters",
         )
 
-    def test_check_identity(self):
+    def test_check_identity(self) -> None:
         check_identity(
             b"Dave Borowitz <dborowitz@google.com>",
             "failed to check good identity",
@@ -1319,46 +1319,46 @@ class CheckTests(TestCase):
 
 
 class TimezoneTests(TestCase):
-    def test_parse_timezone_utc(self):
+    def test_parse_timezone_utc(self) -> None:
         self.assertEqual((0, False), parse_timezone(b"+0000"))
 
-    def test_parse_timezone_utc_negative(self):
+    def test_parse_timezone_utc_negative(self) -> None:
         self.assertEqual((0, True), parse_timezone(b"-0000"))
 
-    def test_generate_timezone_utc(self):
+    def test_generate_timezone_utc(self) -> None:
         self.assertEqual(b"+0000", format_timezone(0))
 
-    def test_generate_timezone_utc_negative(self):
+    def test_generate_timezone_utc_negative(self) -> None:
         self.assertEqual(b"-0000", format_timezone(0, True))
 
-    def test_parse_timezone_cet(self):
+    def test_parse_timezone_cet(self) -> None:
         self.assertEqual((60 * 60, False), parse_timezone(b"+0100"))
 
-    def test_format_timezone_cet(self):
+    def test_format_timezone_cet(self) -> None:
         self.assertEqual(b"+0100", format_timezone(60 * 60))
 
-    def test_format_timezone_pdt(self):
+    def test_format_timezone_pdt(self) -> None:
         self.assertEqual(b"-0400", format_timezone(-4 * 60 * 60))
 
-    def test_parse_timezone_pdt(self):
+    def test_parse_timezone_pdt(self) -> None:
         self.assertEqual((-4 * 60 * 60, False), parse_timezone(b"-0400"))
 
-    def test_format_timezone_pdt_half(self):
+    def test_format_timezone_pdt_half(self) -> None:
         self.assertEqual(b"-0440", format_timezone(int(((-4 * 60) - 40) * 60)))
 
-    def test_format_timezone_double_negative(self):
+    def test_format_timezone_double_negative(self) -> None:
         self.assertEqual(b"--700", format_timezone(int((7 * 60) * 60), True))
 
-    def test_parse_timezone_pdt_half(self):
+    def test_parse_timezone_pdt_half(self) -> None:
         self.assertEqual((((-4 * 60) - 40) * 60, False), parse_timezone(b"-0440"))
 
-    def test_parse_timezone_double_negative(self):
+    def test_parse_timezone_double_negative(self) -> None:
         self.assertEqual((int((7 * 60) * 60), False), parse_timezone(b"+700"))
         self.assertEqual((int((7 * 60) * 60), True), parse_timezone(b"--700"))
 
 
 class ShaFileCopyTests(TestCase):
-    def assert_copy(self, orig):
+    def assert_copy(self, orig) -> None:
         oclass = object_class(orig.type_num)
 
         copy = orig.copy()
@@ -1366,7 +1366,7 @@ class ShaFileCopyTests(TestCase):
         self.assertEqual(copy, orig)
         self.assertIsNot(copy, orig)
 
-    def test_commit_copy(self):
+    def test_commit_copy(self) -> None:
         attrs = {
             "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
             "parents": [
@@ -1384,17 +1384,17 @@ class ShaFileCopyTests(TestCase):
         commit = make_commit(**attrs)
         self.assert_copy(commit)
 
-    def test_blob_copy(self):
+    def test_blob_copy(self) -> None:
         blob = make_object(Blob, data=b"i am a blob")
         self.assert_copy(blob)
 
-    def test_tree_copy(self):
+    def test_tree_copy(self) -> None:
         blob = make_object(Blob, data=b"i am a blob")
         tree = Tree()
         tree[b"blob"] = (stat.S_IFREG, blob.id)
         self.assert_copy(tree)
 
-    def test_tag_copy(self):
+    def test_tag_copy(self) -> None:
         tag = make_object(
             Tag,
             name=b"tag",
@@ -1427,7 +1427,7 @@ class ShaFileSerializeTests(TestCase):
         self.assertFalse(obj._needs_serialization)
         self.assertNotEqual(old_id, new_id)
 
-    def test_commit_serialize(self):
+    def test_commit_serialize(self) -> None:
         attrs = {
             "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
             "parents": [
@@ -1447,7 +1447,7 @@ class ShaFileSerializeTests(TestCase):
         with self.assert_serialization_on_change(commit):
             commit.parents = [b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd"]
 
-    def test_blob_serialize(self):
+    def test_blob_serialize(self) -> None:
         blob = make_object(Blob, data=b"i am a blob")
 
         with self.assert_serialization_on_change(
@@ -1455,7 +1455,7 @@ class ShaFileSerializeTests(TestCase):
         ):
             blob.data = b"i am another blob"
 
-    def test_tree_serialize(self):
+    def test_tree_serialize(self) -> None:
         blob = make_object(Blob, data=b"i am a blob")
         tree = Tree()
         tree[b"blob"] = (stat.S_IFREG, blob.id)
@@ -1463,7 +1463,7 @@ class ShaFileSerializeTests(TestCase):
         with self.assert_serialization_on_change(tree):
             tree[b"blob2"] = (stat.S_IFREG, blob.id)
 
-    def test_tag_serialize(self):
+    def test_tag_serialize(self) -> None:
         tag = make_object(
             Tag,
             name=b"tag",
@@ -1477,7 +1477,7 @@ class ShaFileSerializeTests(TestCase):
         with self.assert_serialization_on_change(tag):
             tag.message = b"new message"
 
-    def test_tag_serialize_time_error(self):
+    def test_tag_serialize_time_error(self) -> None:
         with self.assertRaises(ObjectFormatException):
             tag = make_object(
                 Tag,
@@ -1490,7 +1490,7 @@ class ShaFileSerializeTests(TestCase):
 
 
 class PrettyFormatTreeEntryTests(TestCase):
-    def test_format(self):
+    def test_format(self) -> None:
         self.assertEqual(
             "40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n",
             pretty_format_tree_entry(

+ 32 - 32
tests/test_objectspec.py

@@ -42,11 +42,11 @@ from . import TestCase
 class ParseObjectTests(TestCase):
     """Test parse_object."""
 
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = MemoryRepo()
         self.assertRaises(KeyError, parse_object, r, "thisdoesnotexist")
 
-    def test_blob_by_sha(self):
+    def test_blob_by_sha(self) -> None:
         r = MemoryRepo()
         b = Blob.from_string(b"Blah")
         r.object_store.add_object(b)
@@ -56,11 +56,11 @@ class ParseObjectTests(TestCase):
 class ParseCommitRangeTests(TestCase):
     """Test parse_commit_range."""
 
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = MemoryRepo()
         self.assertRaises(KeyError, parse_commit_range, r, "thisdoesnotexist")
 
-    def test_commit_by_sha(self):
+    def test_commit_by_sha(self) -> None:
         r = MemoryRepo()
         c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
         self.assertEqual([c1], list(parse_commit_range(r, c1.id)))
@@ -69,27 +69,27 @@ class ParseCommitRangeTests(TestCase):
 class ParseCommitTests(TestCase):
     """Test parse_commit."""
 
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = MemoryRepo()
         self.assertRaises(KeyError, parse_commit, r, "thisdoesnotexist")
 
-    def test_commit_by_sha(self):
+    def test_commit_by_sha(self) -> None:
         r = MemoryRepo()
         [c1] = build_commit_graph(r.object_store, [[1]])
         self.assertEqual(c1, parse_commit(r, c1.id))
 
-    def test_commit_by_short_sha(self):
+    def test_commit_by_short_sha(self) -> None:
         r = MemoryRepo()
         [c1] = build_commit_graph(r.object_store, [[1]])
         self.assertEqual(c1, parse_commit(r, c1.id[:10]))
 
 
 class ParseRefTests(TestCase):
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = {}
         self.assertRaises(KeyError, parse_ref, r, b"thisdoesnotexist")
 
-    def test_ambiguous_ref(self):
+    def test_ambiguous_ref(self) -> None:
         r = {
             b"ambig1": "bla",
             b"refs/ambig1": "bla",
@@ -100,7 +100,7 @@ class ParseRefTests(TestCase):
         }
         self.assertEqual(b"ambig1", parse_ref(r, b"ambig1"))
 
-    def test_ambiguous_ref2(self):
+    def test_ambiguous_ref2(self) -> None:
         r = {
             b"refs/ambig2": "bla",
             b"refs/tags/ambig2": "bla",
@@ -110,7 +110,7 @@ class ParseRefTests(TestCase):
         }
         self.assertEqual(b"refs/ambig2", parse_ref(r, b"ambig2"))
 
-    def test_ambiguous_tag(self):
+    def test_ambiguous_tag(self) -> None:
         r = {
             b"refs/tags/ambig3": "bla",
             b"refs/heads/ambig3": "bla",
@@ -119,7 +119,7 @@ class ParseRefTests(TestCase):
         }
         self.assertEqual(b"refs/tags/ambig3", parse_ref(r, b"ambig3"))
 
-    def test_ambiguous_head(self):
+    def test_ambiguous_head(self) -> None:
         r = {
             b"refs/heads/ambig4": "bla",
             b"refs/remotes/ambig4": "bla",
@@ -127,47 +127,47 @@ class ParseRefTests(TestCase):
         }
         self.assertEqual(b"refs/heads/ambig4", parse_ref(r, b"ambig4"))
 
-    def test_ambiguous_remote(self):
+    def test_ambiguous_remote(self) -> None:
         r = {b"refs/remotes/ambig5": "bla", b"refs/remotes/ambig5/HEAD": "bla"}
         self.assertEqual(b"refs/remotes/ambig5", parse_ref(r, b"ambig5"))
 
-    def test_ambiguous_remote_head(self):
+    def test_ambiguous_remote_head(self) -> None:
         r = {b"refs/remotes/ambig6/HEAD": "bla"}
         self.assertEqual(b"refs/remotes/ambig6/HEAD", parse_ref(r, b"ambig6"))
 
-    def test_heads_full(self):
+    def test_heads_full(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(b"refs/heads/foo", parse_ref(r, b"refs/heads/foo"))
 
-    def test_heads_partial(self):
+    def test_heads_partial(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(b"refs/heads/foo", parse_ref(r, b"heads/foo"))
 
-    def test_tags_partial(self):
+    def test_tags_partial(self) -> None:
         r = {b"refs/tags/foo": "bla"}
         self.assertEqual(b"refs/tags/foo", parse_ref(r, b"tags/foo"))
 
 
 class ParseRefsTests(TestCase):
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = {}
         self.assertRaises(KeyError, parse_refs, r, [b"thisdoesnotexist"])
 
-    def test_head(self):
+    def test_head(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual([b"refs/heads/foo"], parse_refs(r, [b"foo"]))
 
-    def test_full(self):
+    def test_full(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual([b"refs/heads/foo"], parse_refs(r, b"refs/heads/foo"))
 
 
 class ParseReftupleTests(TestCase):
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = {}
         self.assertRaises(KeyError, parse_reftuple, r, r, b"thisdoesnotexist")
 
-    def test_head(self):
+    def test_head(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             (b"refs/heads/foo", b"refs/heads/foo", False),
@@ -186,28 +186,28 @@ class ParseReftupleTests(TestCase):
             parse_reftuple(r, {}, b"foo", True),
         )
 
-    def test_full(self):
+    def test_full(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             (b"refs/heads/foo", b"refs/heads/foo", False),
             parse_reftuple(r, r, b"refs/heads/foo"),
         )
 
-    def test_no_left_ref(self):
+    def test_no_left_ref(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             (None, b"refs/heads/foo", False),
             parse_reftuple(r, r, b":refs/heads/foo"),
         )
 
-    def test_no_right_ref(self):
+    def test_no_right_ref(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             (b"refs/heads/foo", None, False),
             parse_reftuple(r, r, b"refs/heads/foo:"),
         )
 
-    def test_default_with_string(self):
+    def test_default_with_string(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             (b"refs/heads/foo", b"refs/heads/foo", False),
@@ -216,18 +216,18 @@ class ParseReftupleTests(TestCase):
 
 
 class ParseReftuplesTests(TestCase):
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = {}
         self.assertRaises(KeyError, parse_reftuples, r, r, [b"thisdoesnotexist"])
 
-    def test_head(self):
+    def test_head(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             [(b"refs/heads/foo", b"refs/heads/foo", False)],
             parse_reftuples(r, r, [b"foo"]),
         )
 
-    def test_full(self):
+    def test_full(self) -> None:
         r = {b"refs/heads/foo": "bla"}
         self.assertEqual(
             [(b"refs/heads/foo", b"refs/heads/foo", False)],
@@ -243,17 +243,17 @@ class ParseReftuplesTests(TestCase):
 class ParseTreeTests(TestCase):
     """Test parse_tree."""
 
-    def test_nonexistent(self):
+    def test_nonexistent(self) -> None:
         r = MemoryRepo()
         self.assertRaises(KeyError, parse_tree, r, "thisdoesnotexist")
 
-    def test_from_commit(self):
+    def test_from_commit(self) -> None:
         r = MemoryRepo()
         c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
         self.assertEqual(r[c1.tree], parse_tree(r, c1.id))
         self.assertEqual(r[c1.tree], parse_tree(r, c1.tree))
 
-    def test_from_ref(self):
+    def test_from_ref(self) -> None:
         r = MemoryRepo()
         c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
         r.refs[b"refs/heads/foo"] = c1.id
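
The hunks above, and most of the hunks that follow, apply one mechanical change: test methods that implicitly return None gain an explicit -> None return annotation, the kind of annotation ruff's flake8-annotations (ANN) rules flag as missing. A minimal sketch of the before/after shape, using an invented test class rather than dulwich's own:

import unittest


class ExampleTests(unittest.TestCase):
    # Before: def test_roundtrip(self): ...
    # With no annotation the method is untyped, so checkers such as mypy
    # skip it or treat its return type as Any by default.

    def test_roundtrip(self) -> None:
        # After: the explicit annotation states that the test returns nothing.
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    unittest.main()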

+ 109 - 108
tests/test_pack.py

@@ -28,6 +28,7 @@ import tempfile
 import zlib
 from hashlib import sha1
 from io import BytesIO
+from typing import NoReturn
 
 from dulwich.errors import ApplyDeltaError, ChecksumMismatch
 from dulwich.file import GitFile
@@ -73,7 +74,7 @@ indexmode = "0o100644" if sys.platform != "win32" else "0o100666"
 class PackTests(TestCase):
     """Base class for testing packs."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.tempdir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.tempdir)
@@ -97,7 +98,7 @@ class PackTests(TestCase):
     def get_pack(self, sha):
         return Pack(os.path.join(self.datadir, "pack-{}".format(sha.decode("ascii"))))
 
-    def assertSucceeds(self, func, *args, **kwargs):
+    def assertSucceeds(self, func, *args, **kwargs) -> None:
         try:
             func(*args, **kwargs)
         except ChecksumMismatch as e:
@@ -107,7 +108,7 @@ class PackTests(TestCase):
 class PackIndexTests(PackTests):
     """Class that tests the index of packfiles."""
 
-    def test_object_offset(self):
+    def test_object_offset(self) -> None:
         """Tests that the correct object offset is returned from the index."""
         p = self.get_pack_index(pack1_sha)
         self.assertRaises(KeyError, p.object_offset, pack1_sha)
@@ -115,7 +116,7 @@ class PackIndexTests(PackTests):
         self.assertEqual(p.object_offset(tree_sha), 138)
         self.assertEqual(p.object_offset(commit_sha), 12)
 
-    def test_object_sha1(self):
+    def test_object_sha1(self) -> None:
         """Tests that the correct object offset is returned from the index."""
         p = self.get_pack_index(pack1_sha)
         self.assertRaises(KeyError, p.object_sha1, 876)
@@ -123,7 +124,7 @@ class PackIndexTests(PackTests):
         self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha))
         self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha))
 
-    def test_iter_prefix(self):
+    def test_iter_prefix(self) -> None:
         p = self.get_pack_index(pack1_sha)
         self.assertEqual([p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha))))
         self.assertEqual(
@@ -133,11 +134,11 @@ class PackIndexTests(PackTests):
             [p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha)[:2]))
         )
 
-    def test_index_len(self):
+    def test_index_len(self) -> None:
         p = self.get_pack_index(pack1_sha)
         self.assertEqual(3, len(p))
 
-    def test_get_stored_checksum(self):
+    def test_get_stored_checksum(self) -> None:
         p = self.get_pack_index(pack1_sha)
         self.assertEqual(
             b"f2848e2ad16f329ae1c92e3b95e91888daa5bd01",
@@ -148,11 +149,11 @@ class PackIndexTests(PackTests):
             sha_to_hex(p.get_pack_checksum()),
         )
 
-    def test_index_check(self):
+    def test_index_check(self) -> None:
         p = self.get_pack_index(pack1_sha)
         self.assertSucceeds(p.check)
 
-    def test_iterentries(self):
+    def test_iterentries(self) -> None:
         p = self.get_pack_index(pack1_sha)
         entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()]
         self.assertEqual(
@@ -164,7 +165,7 @@ class PackIndexTests(PackTests):
             entries,
         )
 
-    def test_iter(self):
+    def test_iter(self) -> None:
         p = self.get_pack_index(pack1_sha)
         self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
 
@@ -178,36 +179,36 @@ class TestPackDeltas(TestCase):
     test_string_big = b"Z" * 8192
     test_string_huge = b"Z" * 100000
 
-    def _test_roundtrip(self, base, target):
+    def _test_roundtrip(self, base, target) -> None:
         self.assertEqual(
             target, b"".join(apply_delta(base, list(create_delta(base, target))))
         )
 
-    def test_nochange(self):
+    def test_nochange(self) -> None:
         self._test_roundtrip(self.test_string1, self.test_string1)
 
-    def test_nochange_huge(self):
+    def test_nochange_huge(self) -> None:
         self._test_roundtrip(self.test_string_huge, self.test_string_huge)
 
-    def test_change(self):
+    def test_change(self) -> None:
         self._test_roundtrip(self.test_string1, self.test_string2)
 
-    def test_rewrite(self):
+    def test_rewrite(self) -> None:
         self._test_roundtrip(self.test_string1, self.test_string3)
 
-    def test_empty_to_big(self):
+    def test_empty_to_big(self) -> None:
         self._test_roundtrip(self.test_string_empty, self.test_string_big)
 
-    def test_empty_to_huge(self):
+    def test_empty_to_huge(self) -> None:
         self._test_roundtrip(self.test_string_empty, self.test_string_huge)
 
-    def test_huge_copy(self):
+    def test_huge_copy(self) -> None:
         self._test_roundtrip(
             self.test_string_huge + self.test_string1,
             self.test_string_huge + self.test_string2,
         )
 
-    def test_dest_overflow(self):
+    def test_dest_overflow(self) -> None:
         self.assertRaises(
             ApplyDeltaError,
             apply_delta,
@@ -218,7 +219,7 @@ class TestPackDeltas(TestCase):
             ApplyDeltaError, apply_delta, b"", b"\x00\x80\x02\xb0\x11\x11"
         )
 
-    def test_pypy_issue(self):
+    def test_pypy_issue(self) -> None:
         # Test for https://github.com/jelmer/dulwich/issues/509 /
         # https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work
         chunks = [
@@ -261,25 +262,25 @@ class TestPackDeltas(TestCase):
 class TestPackData(PackTests):
     """Tests getting the data from the packfile."""
 
-    def test_create_pack(self):
+    def test_create_pack(self) -> None:
         self.get_pack_data(pack1_sha).close()
 
-    def test_from_file(self):
+    def test_from_file(self) -> None:
         path = os.path.join(
             self.datadir, "pack-{}.pack".format(pack1_sha.decode("ascii"))
         )
         with open(path, "rb") as f:
             PackData.from_file(f, os.path.getsize(path))
 
-    def test_pack_len(self):
+    def test_pack_len(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
             self.assertEqual(3, len(p))
 
-    def test_index_check(self):
+    def test_index_check(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
             self.assertSucceeds(p.check)
 
-    def test_iter_unpacked(self):
+    def test_iter_unpacked(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
             commit_data = (
                 b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n"
@@ -317,7 +318,7 @@ class TestPackData(PackTests):
                 actual,
             )
 
-    def test_iterentries(self):
+    def test_iterentries(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
             entries = {(sha_to_hex(s), o, c) for s, o, c in p.iterentries()}
             self.assertEqual(
@@ -341,7 +342,7 @@ class TestPackData(PackTests):
                 entries,
             )
 
-    def test_create_index_v1(self):
+    def test_create_index_v1(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
             filename = os.path.join(self.tempdir, "v1test.idx")
             p.create_index_v1(filename)
@@ -350,7 +351,7 @@ class TestPackData(PackTests):
             self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
             self.assertEqual(idx1, idx2)
 
-    def test_create_index_v2(self):
+    def test_create_index_v2(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
             filename = os.path.join(self.tempdir, "v2test.idx")
             p.create_index_v2(filename)
@@ -359,7 +360,7 @@ class TestPackData(PackTests):
             self.assertEqual(oct(os.stat(filename).st_mode), indexmode)
             self.assertEqual(idx1, idx2)
 
-    def test_compute_file_sha(self):
+    def test_compute_file_sha(self) -> None:
         f = BytesIO(b"abcd1234wxyz")
         self.assertEqual(
             sha1(b"abcd1234wxyz").hexdigest(), compute_file_sha(f).hexdigest()
@@ -381,7 +382,7 @@ class TestPackData(PackTests):
             compute_file_sha(f, start_ofs=4, end_ofs=-4).hexdigest(),
         )
 
-    def test_compute_file_sha_short_file(self):
+    def test_compute_file_sha_short_file(self) -> None:
         f = BytesIO(b"abcd1234wxyz")
         self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=-20)
         self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=20)
@@ -391,28 +392,28 @@ class TestPackData(PackTests):
 
 
 class TestPack(PackTests):
-    def test_len(self):
+    def test_len(self) -> None:
         with self.get_pack(pack1_sha) as p:
             self.assertEqual(3, len(p))
 
-    def test_contains(self):
+    def test_contains(self) -> None:
         with self.get_pack(pack1_sha) as p:
             self.assertIn(tree_sha, p)
 
-    def test_get(self):
+    def test_get(self) -> None:
         with self.get_pack(pack1_sha) as p:
             self.assertEqual(type(p[tree_sha]), Tree)
 
-    def test_iter(self):
+    def test_iter(self) -> None:
         with self.get_pack(pack1_sha) as p:
             self.assertEqual({tree_sha, commit_sha, a_sha}, set(p))
 
-    def test_iterobjects(self):
+    def test_iterobjects(self) -> None:
         with self.get_pack(pack1_sha) as p:
             expected = {p[s] for s in [commit_sha, tree_sha, a_sha]}
             self.assertEqual(expected, set(list(p.iterobjects())))
 
-    def test_pack_tuples(self):
+    def test_pack_tuples(self) -> None:
         with self.get_pack(pack1_sha) as p:
             tuples = p.pack_tuples()
             expected = {(p[s], None) for s in [commit_sha, tree_sha, a_sha]}
@@ -420,7 +421,7 @@ class TestPack(PackTests):
             self.assertEqual(expected, set(list(tuples)))
             self.assertEqual(3, len(tuples))
 
-    def test_get_object_at(self):
+    def test_get_object_at(self) -> None:
         """Tests random access for non-delta objects."""
         with self.get_pack(pack1_sha) as p:
             obj = p[a_sha]
@@ -433,7 +434,7 @@ class TestPack(PackTests):
             self.assertEqual(obj.type_name, b"commit")
             self.assertEqual(obj.sha().hexdigest().encode("ascii"), commit_sha)
 
-    def test_copy(self):
+    def test_copy(self) -> None:
         with self.get_pack(pack1_sha) as origpack:
             self.assertSucceeds(origpack.index.check)
             basename = os.path.join(self.tempdir, "Elch")
@@ -453,7 +454,7 @@ class TestPack(PackTests):
                 new_checksum = newpack.index.get_stored_checksum()
                 self.assertTrue(wrong_version or orig_checksum == new_checksum)
 
-    def test_commit_obj(self):
+    def test_commit_obj(self) -> None:
         with self.get_pack(pack1_sha) as p:
             commit = p[commit_sha]
             self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", commit.author)
@@ -464,7 +465,7 @@ class TestPack(PackTests):
         write_pack(basename, origpack.pack_tuples())
         return Pack(basename)
 
-    def test_keep_no_message(self):
+    def test_keep_no_message(self) -> None:
         with self.get_pack(pack1_sha) as p:
             p = self._copy_pack(p)
 
@@ -478,7 +479,7 @@ class TestPack(PackTests):
             buf = f.read()
             self.assertEqual("", buf)
 
-    def test_keep_message(self):
+    def test_keep_message(self) -> None:
         with self.get_pack(pack1_sha) as p:
             p = self._copy_pack(p)
 
@@ -494,11 +495,11 @@ class TestPack(PackTests):
             buf = f.read()
             self.assertEqual(msg + b"\n", buf)
 
-    def test_name(self):
+    def test_name(self) -> None:
         with self.get_pack(pack1_sha) as p:
             self.assertEqual(pack1_sha, p.name())
 
-    def test_length_mismatch(self):
+    def test_length_mismatch(self) -> None:
         with self.get_pack_data(pack1_sha) as data:
             index = self.get_pack_index(pack1_sha)
             Pack.from_objects(data, index).check_length_and_checksum()
@@ -513,7 +514,7 @@ class TestPack(PackTests):
             self.assertRaises(AssertionError, lambda: bad_pack.data)
             self.assertRaises(AssertionError, bad_pack.check_length_and_checksum)
 
-    def test_checksum_mismatch(self):
+    def test_checksum_mismatch(self) -> None:
         with self.get_pack_data(pack1_sha) as data:
             index = self.get_pack_index(pack1_sha)
             Pack.from_objects(data, index).check_length_and_checksum()
@@ -525,7 +526,7 @@ class TestPack(PackTests):
             self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
             self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum)
 
-    def test_iterobjects_2(self):
+    def test_iterobjects_2(self) -> None:
         with self.get_pack(pack1_sha) as p:
             objs = {o.id: o for o in p.iterobjects()}
             self.assertEqual(3, len(objs))
@@ -534,7 +535,7 @@ class TestPack(PackTests):
             self.assertIsInstance(objs[tree_sha], Tree)
             self.assertIsInstance(objs[commit_sha], Commit)
 
-    def test_iterobjects_subset(self):
+    def test_iterobjects_subset(self) -> None:
         with self.get_pack(pack1_sha) as p:
             objs = {o.id: o for o in p.iterobjects_subset([commit_sha])}
             self.assertEqual(1, len(objs))
@@ -542,7 +543,7 @@ class TestPack(PackTests):
 
 
 class TestThinPack(PackTests):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.blobs = {}
@@ -583,13 +584,13 @@ class TestThinPack(PackTests):
             resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None,
         )
 
-    def test_get_raw(self):
+    def test_get_raw(self) -> None:
         with self.make_pack(False) as p:
             self.assertRaises(KeyError, p.get_raw, self.blobs[b"foo1234"].id)
         with self.make_pack(True) as p:
             self.assertEqual((3, b"foo1234"), p.get_raw(self.blobs[b"foo1234"].id))
 
-    def test_get_unpacked_object(self):
+    def test_get_unpacked_object(self) -> None:
         self.maxDiff = None
         with self.make_pack(False) as p:
             expected = UnpackedObject(
@@ -613,7 +614,7 @@ class TestThinPack(PackTests):
                 got,
             )
 
-    def test_iterobjects(self):
+    def test_iterobjects(self) -> None:
         with self.make_pack(False) as p:
             self.assertRaises(UnresolvedDeltas, list, p.iterobjects())
         with self.make_pack(True) as p:
@@ -630,12 +631,12 @@ class TestThinPack(PackTests):
 
 
 class WritePackTests(TestCase):
-    def test_write_pack_header(self):
+    def test_write_pack_header(self) -> None:
         f = BytesIO()
         write_pack_header(f.write, 42)
         self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00*", f.getvalue())
 
-    def test_write_pack_object(self):
+    def test_write_pack_object(self) -> None:
         f = BytesIO()
         f.write(b"header")
         offset = f.tell()
@@ -651,7 +652,7 @@ class WritePackTests(TestCase):
         self.assertEqual(crc32, unpacked.crc32)
         self.assertEqual(b"x", unused)
 
-    def test_write_pack_object_sha(self):
+    def test_write_pack_object_sha(self) -> None:
         f = BytesIO()
         f.write(b"header")
         offset = f.tell()
@@ -662,7 +663,7 @@ class WritePackTests(TestCase):
         sha_b.update(f.getvalue()[offset:])
         self.assertEqual(sha_a.digest(), sha_b.digest())
 
-    def test_write_pack_object_compression_level(self):
+    def test_write_pack_object_compression_level(self) -> None:
         f = BytesIO()
         f.write(b"header")
         offset = f.tell()
@@ -680,21 +681,21 @@ pack_checksum = hex_to_sha("721980e866af9a5f93ad674144e1459b8ba3e7b7")
 
 
 class BaseTestPackIndexWriting:
-    def assertSucceeds(self, func, *args, **kwargs):
+    def assertSucceeds(self, func, *args, **kwargs) -> None:
         try:
             func(*args, **kwargs)
         except ChecksumMismatch as e:
             self.fail(e)
 
-    def index(self, filename, entries, pack_checksum):
+    def index(self, filename, entries, pack_checksum) -> NoReturn:
         raise NotImplementedError(self.index)
 
-    def test_empty(self):
+    def test_empty(self) -> None:
         idx = self.index("empty.idx", [], pack_checksum)
         self.assertEqual(idx.get_pack_checksum(), pack_checksum)
         self.assertEqual(0, len(idx))
 
-    def test_large(self):
+    def test_large(self) -> None:
         entry1_sha = hex_to_sha("4e6388232ec39792661e2e75db8fb117fc869ce6")
         entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2")
         entries = [
@@ -721,7 +722,7 @@ class BaseTestPackIndexWriting:
             else:
                 self.assertIsNone(actual_crc)
 
-    def test_single(self):
+    def test_single(self) -> None:
         entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8")
         my_entries = [(entry_sha, 178, 42)]
         idx = self.index("single.idx", my_entries, pack_checksum)
@@ -741,10 +742,10 @@ class BaseTestPackIndexWriting:
 
 
 class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting):
-    def setUp(self):
+    def setUp(self) -> None:
         self.tempdir = tempfile.mkdtemp()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         shutil.rmtree(self.tempdir)
 
     def index(self, filename, entries, pack_checksum):
@@ -755,14 +756,14 @@ class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting):
         self.assertEqual(idx.version, self._expected_version)
         return idx
 
-    def writeIndex(self, filename, entries, pack_checksum):
+    def writeIndex(self, filename, entries, pack_checksum) -> None:
         # FIXME: Write to BytesIO instead rather than hitting disk ?
         with GitFile(filename, "wb") as f:
             self._write_fn(f, entries, pack_checksum)
 
 
 class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self._has_crc32_checksum = True
         self._supports_large = True
@@ -770,12 +771,12 @@ class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):
     def index(self, filename, entries, pack_checksum):
         return MemoryPackIndex(entries, pack_checksum)
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         TestCase.tearDown(self)
 
 
 class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         BaseTestFilePackIndexWriting.setUp(self)
         self._has_crc32_checksum = False
@@ -783,13 +784,13 @@ class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting):
         self._supports_large = False
         self._write_fn = write_pack_index_v1
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         TestCase.tearDown(self)
         BaseTestFilePackIndexWriting.tearDown(self)
 
 
 class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         BaseTestFilePackIndexWriting.setUp(self)
         self._has_crc32_checksum = True
@@ -797,7 +798,7 @@ class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):
         self._expected_version = 2
         self._write_fn = write_pack_index_v2
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         TestCase.tearDown(self)
         BaseTestFilePackIndexWriting.tearDown(self)
 
@@ -814,14 +815,14 @@ class ReadZlibTests(TestCase):
     comp = zlib.compress(decomp)
     extra = b"nextobject"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.read = BytesIO(self.comp + self.extra).read
         self.unpacked = UnpackedObject(
             Tree.type_num, decomp_len=len(self.decomp), crc32=0
         )
 
-    def test_decompress_size(self):
+    def test_decompress_size(self) -> None:
         good_decomp_len = len(self.decomp)
         self.unpacked.decomp_len = -1
         self.assertRaises(ValueError, read_zlib_chunks, self.read, self.unpacked)
@@ -830,14 +831,14 @@ class ReadZlibTests(TestCase):
         self.unpacked.decomp_len = good_decomp_len + 1
         self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked)
 
-    def test_decompress_truncated(self):
+    def test_decompress_truncated(self) -> None:
         read = BytesIO(self.comp[:10]).read
         self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)
 
         read = BytesIO(self.comp).read
         self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked)
 
-    def test_decompress_empty(self):
+    def test_decompress_empty(self) -> None:
         unpacked = UnpackedObject(Tree.type_num, decomp_len=0)
         comp = zlib.compress(b"")
         read = BytesIO(comp + self.extra).read
@@ -846,12 +847,12 @@ class ReadZlibTests(TestCase):
         self.assertNotEqual(b"", unused)
         self.assertEqual(self.extra, unused + read())
 
-    def test_decompress_no_crc32(self):
+    def test_decompress_no_crc32(self) -> None:
         self.unpacked.crc32 = None
         read_zlib_chunks(self.read, self.unpacked)
         self.assertEqual(None, self.unpacked.crc32)
 
-    def _do_decompress_test(self, buffer_size, **kwargs):
+    def _do_decompress_test(self, buffer_size, **kwargs) -> None:
         unused = read_zlib_chunks(
             self.read, self.unpacked, buffer_size=buffer_size, **kwargs
         )
@@ -860,34 +861,34 @@ class ReadZlibTests(TestCase):
         self.assertNotEqual(b"", unused)
         self.assertEqual(self.extra, unused + self.read())
 
-    def test_simple_decompress(self):
+    def test_simple_decompress(self) -> None:
         self._do_decompress_test(4096)
         self.assertEqual(None, self.unpacked.comp_chunks)
 
     # These buffer sizes are not intended to be realistic, but rather simulate
     # larger buffer sizes that may end at various places.
-    def test_decompress_buffer_size_1(self):
+    def test_decompress_buffer_size_1(self) -> None:
         self._do_decompress_test(1)
 
-    def test_decompress_buffer_size_2(self):
+    def test_decompress_buffer_size_2(self) -> None:
         self._do_decompress_test(2)
 
-    def test_decompress_buffer_size_3(self):
+    def test_decompress_buffer_size_3(self) -> None:
         self._do_decompress_test(3)
 
-    def test_decompress_buffer_size_4(self):
+    def test_decompress_buffer_size_4(self) -> None:
         self._do_decompress_test(4)
 
-    def test_decompress_include_comp(self):
+    def test_decompress_include_comp(self) -> None:
         self._do_decompress_test(4096, include_comp=True)
         self.assertEqual(self.comp, b"".join(self.unpacked.comp_chunks))
 
 
 class DeltifyTests(TestCase):
-    def test_empty(self):
+    def test_empty(self) -> None:
         self.assertEqual([], list(deltify_pack_objects([])))
 
-    def test_single(self):
+    def test_single(self) -> None:
         b = Blob.from_string(b"foo")
         self.assertEqual(
             [
@@ -901,7 +902,7 @@ class DeltifyTests(TestCase):
             list(deltify_pack_objects([(b, b"")])),
         )
 
-    def test_simple_delta(self):
+    def test_simple_delta(self) -> None:
         b1 = Blob.from_string(b"a" * 101)
         b2 = Blob.from_string(b"a" * 100)
         delta = list(create_delta(b1.as_raw_chunks(), b2.as_raw_chunks()))
@@ -925,13 +926,13 @@ class DeltifyTests(TestCase):
 
 
 class TestPackStreamReader(TestCase):
-    def test_read_objects_emtpy(self):
+    def test_read_objects_emtpy(self) -> None:
         f = BytesIO()
         build_pack(f, [])
         reader = PackStreamReader(f.read)
         self.assertEqual(0, len(list(reader.read_objects())))
 
-    def test_read_objects(self):
+    def test_read_objects(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -964,7 +965,7 @@ class TestPackStreamReader(TestCase):
         self.assertEqual(b"".join(delta), b"".join(unpacked_delta.decomp_chunks))
         self.assertEqual(entries[1][4], unpacked_delta.crc32)
 
-    def test_read_objects_buffered(self):
+    def test_read_objects_buffered(self) -> None:
         f = BytesIO()
         build_pack(
             f,
@@ -976,7 +977,7 @@ class TestPackStreamReader(TestCase):
         reader = PackStreamReader(f.read, zlib_bufsize=4)
         self.assertEqual(2, len(list(reader.read_objects())))
 
-    def test_read_objects_empty(self):
+    def test_read_objects_empty(self) -> None:
         reader = PackStreamReader(BytesIO().read)
         self.assertRaises(AssertionError, list, reader.read_objects())
 
@@ -1007,7 +1008,7 @@ class TestPackIterator(DeltaChainIterator):
 
 
 class DeltaChainIteratorTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
         self.fetched = set()
@@ -1048,11 +1049,11 @@ class DeltaChainIteratorTests(TestCase):
             pack, subset, resolve_ext_ref=resolve_ext_ref
         )
 
-    def assertEntriesMatch(self, expected_indexes, entries, pack_iter):
+    def assertEntriesMatch(self, expected_indexes, entries, pack_iter) -> None:
         expected = [entries[i] for i in expected_indexes]
         self.assertEqual(expected, list(pack_iter._walk_all_chains()))
 
-    def test_no_deltas(self):
+    def test_no_deltas(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -1080,7 +1081,7 @@ class DeltaChainIteratorTests(TestCase):
             ),
         )
 
-    def test_ofs_deltas(self):
+    def test_ofs_deltas(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -1099,7 +1100,7 @@ class DeltaChainIteratorTests(TestCase):
             self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]),
         )
 
-    def test_ofs_deltas_chain(self):
+    def test_ofs_deltas_chain(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -1111,7 +1112,7 @@ class DeltaChainIteratorTests(TestCase):
         )
         self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f))
 
-    def test_ref_deltas(self):
+    def test_ref_deltas(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -1124,7 +1125,7 @@ class DeltaChainIteratorTests(TestCase):
         # Delta resolution changed to DFS
         self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))
 
-    def test_ref_deltas_chain(self):
+    def test_ref_deltas_chain(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -1136,7 +1137,7 @@ class DeltaChainIteratorTests(TestCase):
         )
         self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f))
 
-    def test_ofs_and_ref_deltas(self):
+    def test_ofs_and_ref_deltas(self) -> None:
         # Deltas pending on this offset are popped before deltas depending on
         # this ref.
         f = BytesIO()
@@ -1152,7 +1153,7 @@ class DeltaChainIteratorTests(TestCase):
         # Delta resolution changed to DFS
         self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f))
 
-    def test_mixed_chain(self):
+    def test_mixed_chain(self) -> None:
         f = BytesIO()
         entries = build_pack(
             f,
@@ -1167,7 +1168,7 @@ class DeltaChainIteratorTests(TestCase):
         # Delta resolution changed to DFS
         self.assertEntriesMatch([0, 4, 2, 1, 3], entries, self.make_pack_iter(f))
 
-    def test_long_chain(self):
+    def test_long_chain(self) -> None:
         n = 100
         objects_spec = [(Blob.type_num, b"blob")]
         for i in range(n):
@@ -1176,7 +1177,7 @@ class DeltaChainIteratorTests(TestCase):
         entries = build_pack(f, objects_spec)
         self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f))
 
-    def test_branchy_chain(self):
+    def test_branchy_chain(self) -> None:
         n = 100
         objects_spec = [(Blob.type_num, b"blob")]
         for i in range(n):
@@ -1187,7 +1188,7 @@ class DeltaChainIteratorTests(TestCase):
         indices = [0, *list(range(100, 0, -1))]
         self.assertEntriesMatch(indices, entries, self.make_pack_iter(f))
 
-    def test_ext_ref(self):
+    def test_ext_ref(self) -> None:
         (blob,) = self.store_blobs([b"blob"])
         f = BytesIO()
         entries = build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
@@ -1195,7 +1196,7 @@ class DeltaChainIteratorTests(TestCase):
         self.assertEntriesMatch([0], entries, pack_iter)
         self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
 
-    def test_ext_ref_chain(self):
+    def test_ext_ref_chain(self) -> None:
         (blob,) = self.store_blobs([b"blob"])
         f = BytesIO()
         entries = build_pack(
@@ -1210,7 +1211,7 @@ class DeltaChainIteratorTests(TestCase):
         self.assertEntriesMatch([1, 0], entries, pack_iter)
         self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
 
-    def test_ext_ref_chain_degenerate(self):
+    def test_ext_ref_chain_degenerate(self) -> None:
         # Test a degenerate case where the sender is sending a REF_DELTA
         # object that expands to an object already in the repository.
         (blob,) = self.store_blobs([b"blob"])
@@ -1230,7 +1231,7 @@ class DeltaChainIteratorTests(TestCase):
         self.assertEntriesMatch([0, 1], entries, pack_iter)
         self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
 
-    def test_ext_ref_multiple_times(self):
+    def test_ext_ref_multiple_times(self) -> None:
         (blob,) = self.store_blobs([b"blob"])
         f = BytesIO()
         entries = build_pack(
@@ -1245,7 +1246,7 @@ class DeltaChainIteratorTests(TestCase):
         self.assertEntriesMatch([0, 1], entries, pack_iter)
         self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
 
-    def test_multiple_ext_refs(self):
+    def test_multiple_ext_refs(self) -> None:
         b1, b2 = self.store_blobs([b"foo", b"bar"])
         f = BytesIO()
         entries = build_pack(
@@ -1260,7 +1261,7 @@ class DeltaChainIteratorTests(TestCase):
         self.assertEntriesMatch([0, 1], entries, pack_iter)
         self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)], pack_iter.ext_refs())
 
-    def test_bad_ext_ref_non_thin_pack(self):
+    def test_bad_ext_ref_non_thin_pack(self) -> None:
         (blob,) = self.store_blobs([b"blob"])
         f = BytesIO()
         build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store)
@@ -1271,7 +1272,7 @@ class DeltaChainIteratorTests(TestCase):
         except UnresolvedDeltas as e:
             self.assertEqual([blob.id], e.shas)
 
-    def test_bad_ext_ref_thin_pack(self):
+    def test_bad_ext_ref_thin_pack(self) -> None:
         b1, b2, b3 = self.store_blobs([b"foo", b"bar", b"baz"])
         f = BytesIO()
         build_pack(
@@ -1293,7 +1294,7 @@ class DeltaChainIteratorTests(TestCase):
         except UnresolvedDeltas as e:
             self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.shas),))
 
-    def test_ext_ref_deltified_object_based_on_itself(self):
+    def test_ext_ref_deltified_object_based_on_itself(self) -> None:
         b1_content = b"foo"
         (b1,) = self.store_blobs([b1_content])
         f = BytesIO()
@@ -1323,7 +1324,7 @@ class DeltaChainIteratorTests(TestCase):
 
 
 class DeltaEncodeSizeTests(TestCase):
-    def test_basic(self):
+    def test_basic(self) -> None:
         self.assertEqual(b"\x00", _delta_encode_size(0))
         self.assertEqual(b"\x01", _delta_encode_size(1))
         self.assertEqual(b"\xfa\x01", _delta_encode_size(250))
@@ -1332,7 +1333,7 @@ class DeltaEncodeSizeTests(TestCase):
 
 
 class EncodeCopyOperationTests(TestCase):
-    def test_basic(self):
+    def test_basic(self) -> None:
         self.assertEqual(b"\x80", _encode_copy_operation(0, 0))
         self.assertEqual(b"\x91\x01\x0a", _encode_copy_operation(1, 10))
         self.assertEqual(b"\xb1\x64\xe8\x03", _encode_copy_operation(100, 1000))
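
tests/test_pack.py also adds "from typing import NoReturn" and uses it for BaseTestPackIndexWriting.index, a stub that unconditionally raises NotImplementedError; tests/test_patch.py below does the same for test_extract_mercurial, which unconditionally raises SkipTest. For code paths that never return normally, NoReturn is the precise annotation rather than None. A small illustrative sketch with invented names, not dulwich's:

import unittest
from typing import NoReturn


class SketchTests(unittest.TestCase):
    def _index(self, filename: str) -> NoReturn:
        # Stub that subclasses must override; it always raises, so it
        # never returns normally and NoReturn is more accurate than None.
        raise NotImplementedError(self._index)

    def test_not_handled_yet(self) -> NoReturn:
        # Unconditionally skipping also never returns normally.
        raise unittest.SkipTest("not handled yet")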

+ 22 - 21
tests/test_patch.py

@@ -21,6 +21,7 @@
 """Tests for patch.py."""
 
 from io import BytesIO, StringIO
+from typing import NoReturn
 
 from dulwich.object_store import MemoryObjectStore
 from dulwich.objects import S_IFGITLINK, Blob, Commit, Tree
@@ -37,7 +38,7 @@ from . import SkipTest, TestCase
 
 
 class WriteCommitPatchTests(TestCase):
-    def test_simple_bytesio(self):
+    def test_simple_bytesio(self) -> None:
         f = BytesIO()
         c = Commit()
         c.committer = c.author = b"Jelmer <jelmer@samba.org>"
@@ -70,7 +71,7 @@ class WriteCommitPatchTests(TestCase):
 
 
 class ReadGitAmPatch(TestCase):
-    def test_extract_string(self):
+    def test_extract_string(self) -> None:
         text = b"""\
 From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
 From: Jelmer Vernooij <jelmer@samba.org>
@@ -102,7 +103,7 @@ Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning).
         )
         self.assertEqual(b"1.7.0.4", version)
 
-    def test_extract_bytes(self):
+    def test_extract_bytes(self) -> None:
         text = b"""\
 From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
 From: Jelmer Vernooij <jelmer@samba.org>
@@ -134,7 +135,7 @@ Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning).
         )
         self.assertEqual(b"1.7.0.4", version)
 
-    def test_extract_spaces(self):
+    def test_extract_spaces(self) -> None:
         text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
 From: Jelmer Vernooij <jelmer@samba.org>
 Date: Thu, 15 Apr 2010 15:40:28 +0200
@@ -164,7 +165,7 @@ Added unit tests for dulwich.object_store.tree_lookup_path.
             c.message,
         )
 
-    def test_extract_pseudo_from_header(self):
+    def test_extract_pseudo_from_header(self) -> None:
         text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
 From: Jelmer Vernooij <jelmer@samba.org>
 Date: Thu, 15 Apr 2010 15:40:28 +0200
@@ -197,7 +198,7 @@ Added unit tests for dulwich.object_store.tree_lookup_path.
             c.message,
         )
 
-    def test_extract_no_version_tail(self):
+    def test_extract_no_version_tail(self) -> None:
         text = b"""\
 From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001
 From: Jelmer Vernooij <jelmer@samba.org>
@@ -216,7 +217,7 @@ From: Jelmer Vernooij <jelmer@debian.org>
         c, diff, version = git_am_patch_split(BytesIO(text), "utf-8")
         self.assertEqual(None, version)
 
-    def test_extract_mercurial(self):
+    def test_extract_mercurial(self) -> NoReturn:
         raise SkipTest(
             "git_am_patch_split doesn't handle Mercurial patches " "properly yet"
         )
@@ -262,7 +263,7 @@ More help   : https://help.launchpad.net/ListHelp
 class DiffTests(TestCase):
     """Tests for write_blob_diff and write_tree_diff."""
 
-    def test_blob_diff(self):
+    def test_blob_diff(self) -> None:
         f = BytesIO()
         write_blob_diff(
             f,
@@ -283,7 +284,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_blob_add(self):
+    def test_blob_add(self) -> None:
         f = BytesIO()
         write_blob_diff(
             f,
@@ -304,7 +305,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_blob_remove(self):
+    def test_blob_remove(self) -> None:
         f = BytesIO()
         write_blob_diff(
             f,
@@ -325,7 +326,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_tree_diff(self):
+    def test_tree_diff(self) -> None:
         f = BytesIO()
         store = MemoryObjectStore()
         added = Blob.from_string(b"add\n")
@@ -384,7 +385,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_tree_diff_submodule(self):
+    def test_tree_diff_submodule(self) -> None:
         f = BytesIO()
         store = MemoryObjectStore()
         tree1 = Tree()
@@ -414,7 +415,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_blob(self):
+    def test_object_diff_blob(self) -> None:
         f = BytesIO()
         b1 = Blob.from_string(b"old\nsame\n")
         b2 = Blob.from_string(b"new\nsame\n")
@@ -437,7 +438,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_add_blob(self):
+    def test_object_diff_add_blob(self) -> None:
         f = BytesIO()
         store = MemoryObjectStore()
         b2 = Blob.from_string(b"new\nsame\n")
@@ -457,7 +458,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_remove_blob(self):
+    def test_object_diff_remove_blob(self) -> None:
         f = BytesIO()
         b1 = Blob.from_string(b"new\nsame\n")
         store = MemoryObjectStore()
@@ -477,7 +478,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_bin_blob_force(self):
+    def test_object_diff_bin_blob_force(self) -> None:
         f = BytesIO()
         # Prepare two slightly different PNG headers
         b1 = Blob.from_string(
@@ -521,7 +522,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_bin_blob(self):
+    def test_object_diff_bin_blob(self) -> None:
         f = BytesIO()
         # Prepare two slightly different PNG headers
         b1 = Blob.from_string(
@@ -550,7 +551,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_add_bin_blob(self):
+    def test_object_diff_add_bin_blob(self) -> None:
         f = BytesIO()
         b2 = Blob.from_string(
             b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
@@ -571,7 +572,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_remove_bin_blob(self):
+    def test_object_diff_remove_bin_blob(self) -> None:
         f = BytesIO()
         b1 = Blob.from_string(
             b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"
@@ -592,7 +593,7 @@ class DiffTests(TestCase):
             f.getvalue().splitlines(),
         )
 
-    def test_object_diff_kind_change(self):
+    def test_object_diff_kind_change(self) -> None:
         f = BytesIO()
         b1 = Blob.from_string(b"new\nsame\n")
         store = MemoryObjectStore()
@@ -625,7 +626,7 @@ class DiffTests(TestCase):
 
 
 class GetSummaryTests(TestCase):
-    def test_simple(self):
+    def test_simple(self) -> None:
         c = Commit()
         c.committer = c.author = b"Jelmer <jelmer@samba.org>"
         c.commit_time = c.author_time = 1271350201

+ 117 - 117
tests/test_porcelain.py

File diff suppressed because it is too large

+ 39 - 39
tests/test_protocol.py

@@ -44,17 +44,17 @@ from . import TestCase
 
 
 class PktLineTests(TestCase):
-    def test_pkt_line(self):
+    def test_pkt_line(self) -> None:
         self.assertEqual(b"0007bla", pkt_line(b"bla"))
         self.assertEqual(b"0000", pkt_line(None))
 
-    def test_pkt_seq(self):
+    def test_pkt_seq(self) -> None:
         self.assertEqual(b"0007bla0007foo0000", pkt_seq(b"bla", b"foo"))
         self.assertEqual(b"0000", pkt_seq())
 
 
 class FilterRefPrefixTests(TestCase):
-    def test_filter_ref_prefix(self):
+    def test_filter_ref_prefix(self) -> None:
         self.assertEqual(
             {b"refs/heads/foo": b"0123456789", b"refs/heads/bar": b"0123456789"},
             filter_ref_prefix(
@@ -69,20 +69,20 @@ class FilterRefPrefixTests(TestCase):
 
 
 class BaseProtocolTests:
-    def test_write_pkt_line_none(self):
+    def test_write_pkt_line_none(self) -> None:
         self.proto.write_pkt_line(None)
         self.assertEqual(self.rout.getvalue(), b"0000")
 
-    def test_write_pkt_line(self):
+    def test_write_pkt_line(self) -> None:
         self.proto.write_pkt_line(b"bla")
         self.assertEqual(self.rout.getvalue(), b"0007bla")
 
-    def test_read_pkt_line(self):
+    def test_read_pkt_line(self) -> None:
         self.rin.write(b"0008cmd ")
         self.rin.seek(0)
         self.assertEqual(b"cmd ", self.proto.read_pkt_line())
 
-    def test_eof(self):
+    def test_eof(self) -> None:
         self.rin.write(b"0000")
         self.rin.seek(0)
         self.assertFalse(self.proto.eof())
@@ -90,7 +90,7 @@ class BaseProtocolTests:
         self.assertTrue(self.proto.eof())
         self.assertRaises(HangupException, self.proto.read_pkt_line)
 
-    def test_unread_pkt_line(self):
+    def test_unread_pkt_line(self) -> None:
         self.rin.write(b"0007foo0000")
         self.rin.seek(0)
         self.assertEqual(b"foo", self.proto.read_pkt_line())
@@ -100,42 +100,42 @@ class BaseProtocolTests:
         self.proto.unread_pkt_line(b"baz1")
         self.assertRaises(ValueError, self.proto.unread_pkt_line, b"baz2")
 
-    def test_read_pkt_seq(self):
+    def test_read_pkt_seq(self) -> None:
         self.rin.write(b"0008cmd 0005l0000")
         self.rin.seek(0)
         self.assertEqual([b"cmd ", b"l"], list(self.proto.read_pkt_seq()))
 
-    def test_read_pkt_line_none(self):
+    def test_read_pkt_line_none(self) -> None:
         self.rin.write(b"0000")
         self.rin.seek(0)
         self.assertEqual(None, self.proto.read_pkt_line())
 
-    def test_read_pkt_line_wrong_size(self):
+    def test_read_pkt_line_wrong_size(self) -> None:
         self.rin.write(b"0100too short")
         self.rin.seek(0)
         self.assertRaises(GitProtocolError, self.proto.read_pkt_line)
 
-    def test_write_sideband(self):
+    def test_write_sideband(self) -> None:
         self.proto.write_sideband(3, b"bloe")
         self.assertEqual(self.rout.getvalue(), b"0009\x03bloe")
 
-    def test_send_cmd(self):
+    def test_send_cmd(self) -> None:
         self.proto.send_cmd(b"fetch", b"a", b"b")
         self.assertEqual(self.rout.getvalue(), b"000efetch a\x00b\x00")
 
-    def test_read_cmd(self):
+    def test_read_cmd(self) -> None:
         self.rin.write(b"0012cmd arg1\x00arg2\x00")
         self.rin.seek(0)
         self.assertEqual((b"cmd", [b"arg1", b"arg2"]), self.proto.read_cmd())
 
-    def test_read_cmd_noend0(self):
+    def test_read_cmd_noend0(self) -> None:
         self.rin.write(b"0011cmd arg1\x00arg2")
         self.rin.seek(0)
         self.assertRaises(AssertionError, self.proto.read_cmd)
 
 
 class ProtocolTests(BaseProtocolTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.rout = BytesIO()
         self.rin = BytesIO()
@@ -161,21 +161,21 @@ class ReceivableBytesIO(BytesIO):
 
 
 class ReceivableProtocolTests(BaseProtocolTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.rout = BytesIO()
         self.rin = ReceivableBytesIO()
         self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
         self.proto._rbufsize = 8
 
-    def test_eof(self):
+    def test_eof(self) -> None:
         # Allow blocking reads past EOF just for this test. The only parts of
         # the protocol that might check for EOF do not depend on the recv()
         # semantics anyway.
         self.rin.allow_read_past_eof = True
         BaseProtocolTests.test_eof(self)
 
-    def test_recv(self):
+    def test_recv(self) -> None:
         all_data = b"1234567" * 10  # not a multiple of bufsize
         self.rin.write(all_data)
         self.rin.seek(0)
@@ -188,7 +188,7 @@ class ReceivableProtocolTests(BaseProtocolTests, TestCase):
         self.assertRaises(GitProtocolError, self.proto.recv, 10)
         self.assertEqual(all_data, data)
 
-    def test_recv_read(self):
+    def test_recv_read(self) -> None:
         all_data = b"1234567"  # recv exactly in one call
         self.rin.write(all_data)
         self.rin.seek(0)
@@ -196,7 +196,7 @@ class ReceivableProtocolTests(BaseProtocolTests, TestCase):
         self.assertEqual(b"567", self.proto.read(3))
         self.assertRaises(GitProtocolError, self.proto.recv, 10)
 
-    def test_read_recv(self):
+    def test_read_recv(self) -> None:
         all_data = b"12345678abcdefg"
         self.rin.write(all_data)
         self.rin.seek(0)
@@ -205,7 +205,7 @@ class ReceivableProtocolTests(BaseProtocolTests, TestCase):
         self.assertEqual(b"defg", self.proto.read(4))
         self.assertRaises(GitProtocolError, self.proto.recv, 10)
 
-    def test_mixed(self):
+    def test_mixed(self) -> None:
         # arbitrary non-repeating string
         all_data = b",".join(str(i).encode("ascii") for i in range(100))
         self.rin.write(all_data)
@@ -231,18 +231,18 @@ class ReceivableProtocolTests(BaseProtocolTests, TestCase):
 
 
 class CapabilitiesTestCase(TestCase):
-    def test_plain(self):
+    def test_plain(self) -> None:
         self.assertEqual((b"bla", []), extract_capabilities(b"bla"))
 
-    def test_caps(self):
+    def test_caps(self) -> None:
         self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la"))
         self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la\n"))
         self.assertEqual((b"bla", [b"la", b"la"]), extract_capabilities(b"bla\0la la"))
 
-    def test_plain_want_line(self):
+    def test_plain_want_line(self) -> None:
         self.assertEqual((b"want bla", []), extract_want_line_capabilities(b"want bla"))
 
-    def test_caps_want_line(self):
+    def test_caps_want_line(self) -> None:
         self.assertEqual(
             (b"want bla", [b"la"]),
             extract_want_line_capabilities(b"want bla la"),
@@ -256,7 +256,7 @@ class CapabilitiesTestCase(TestCase):
             extract_want_line_capabilities(b"want bla la la"),
         )
 
-    def test_ack_type(self):
+    def test_ack_type(self) -> None:
         self.assertEqual(SINGLE_ACK, ack_type([b"foo", b"bar"]))
         self.assertEqual(MULTI_ACK, ack_type([b"foo", b"bar", b"multi_ack"]))
         self.assertEqual(
@@ -271,42 +271,42 @@ class CapabilitiesTestCase(TestCase):
 
 
 class BufferedPktLineWriterTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self._output = BytesIO()
         self._writer = BufferedPktLineWriter(self._output.write, bufsize=16)
 
-    def assertOutputEquals(self, expected):
+    def assertOutputEquals(self, expected) -> None:
         self.assertEqual(expected, self._output.getvalue())
 
-    def _truncate(self):
+    def _truncate(self) -> None:
         self._output.seek(0)
         self._output.truncate()
 
-    def test_write(self):
+    def test_write(self) -> None:
         self._writer.write(b"foo")
         self.assertOutputEquals(b"")
         self._writer.flush()
         self.assertOutputEquals(b"0007foo")
 
-    def test_write_none(self):
+    def test_write_none(self) -> None:
         self._writer.write(None)
         self.assertOutputEquals(b"")
         self._writer.flush()
         self.assertOutputEquals(b"0000")
 
-    def test_flush_empty(self):
+    def test_flush_empty(self) -> None:
         self._writer.flush()
         self.assertOutputEquals(b"")
 
-    def test_write_multiple(self):
+    def test_write_multiple(self) -> None:
         self._writer.write(b"foo")
         self._writer.write(b"bar")
         self.assertOutputEquals(b"")
         self._writer.flush()
         self.assertOutputEquals(b"0007foo0007bar")
 
-    def test_write_across_boundary(self):
+    def test_write_across_boundary(self) -> None:
         self._writer.write(b"foo")
         self._writer.write(b"barbaz")
         self.assertOutputEquals(b"0007foo000abarba")
@@ -314,7 +314,7 @@ class BufferedPktLineWriterTests(TestCase):
         self._writer.flush()
         self.assertOutputEquals(b"z")
 
-    def test_write_to_boundary(self):
+    def test_write_to_boundary(self) -> None:
         self._writer.write(b"foo")
         self._writer.write(b"barba")
         self.assertOutputEquals(b"0007foo0009barba")
@@ -325,14 +325,14 @@ class BufferedPktLineWriterTests(TestCase):
 
 
 class PktLineParserTests(TestCase):
-    def test_none(self):
+    def test_none(self) -> None:
         pktlines = []
         parser = PktLineParser(pktlines.append)
         parser.parse(b"0000")
         self.assertEqual(pktlines, [None])
         self.assertEqual(b"", parser.get_tail())
 
-    def test_small_fragments(self):
+    def test_small_fragments(self) -> None:
         pktlines = []
         parser = PktLineParser(pktlines.append)
         parser.parse(b"00")
@@ -341,7 +341,7 @@ class PktLineParserTests(TestCase):
         self.assertEqual(pktlines, [b"z", None])
         self.assertEqual(b"", parser.get_tail())
 
-    def test_multiple_packets(self):
+    def test_multiple_packets(self) -> None:
         pktlines = []
         parser = PktLineParser(pktlines.append)
         parser.parse(b"0005z0006aba")
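
Fixture and helper methods get the same treatment as the tests themselves: setUp, tearDown, and assertion helpers such as assertOutputEquals are annotated -> None, which matches the signatures unittest.TestCase declares in typeshed, so the overrides stay consistent with the base class. An illustrative sketch (the fixture contents are invented):

import shutil
import tempfile
import unittest


class FixtureTests(unittest.TestCase):
    def setUp(self) -> None:
        # Matches the base class signature: unittest.TestCase.setUp(self) -> None.
        super().setUp()
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self) -> None:
        shutil.rmtree(self.tempdir)
        super().tearDown()

    def assertTempdirSet(self) -> None:
        # Custom assertion helpers are annotated the same way.
        self.assertTrue(self.tempdir)

    def test_tempdir(self) -> None:
        self.assertTempdirSet()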

+ 6 - 6
tests/test_reflog.py

@@ -34,7 +34,7 @@ from . import TestCase
 
 
 class ReflogLineTests(TestCase):
-    def test_format(self):
+    def test_format(self) -> None:
         self.assertEqual(
             b"0000000000000000000000000000000000000000 "
             b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
@@ -65,7 +65,7 @@ class ReflogLineTests(TestCase):
             ),
         )
 
-    def test_parse(self):
+    def test_parse(self) -> None:
         reflog_line = (
             b"0000000000000000000000000000000000000000 "
             b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij "
@@ -102,7 +102,7 @@ _TEST_REFLOG = (
 
 
 class ReflogDropTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self.f = BytesIO(_TEST_REFLOG)
         self.original_log = list(read_reflog(self.f))
@@ -112,10 +112,10 @@ class ReflogDropTests(TestCase):
         self.f.seek(0)
         return list(read_reflog(self.f))
 
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         self.assertRaises(ValueError, drop_reflog_entry, self.f, -1)
 
-    def test_drop_entry(self):
+    def test_drop_entry(self) -> None:
         drop_reflog_entry(self.f, 0)
         log = self._read_log()
         self.assertEqual(len(log), 2)
@@ -127,7 +127,7 @@ class ReflogDropTests(TestCase):
         self.assertEqual(len(log), 1)
         self.assertEqual(self.original_log[1], log[0])
 
-    def test_drop_entry_with_rewrite(self):
+    def test_drop_entry_with_rewrite(self) -> None:
         drop_reflog_entry(self.f, 1, True)
         log = self._read_log()
         self.assertEqual(len(log), 2)

+ 58 - 58
tests/test_refs.py

@@ -54,7 +54,7 @@ class CheckRefFormatTests(TestCase):
     These are the same tests as in the git test suite.
     """
 
-    def test_valid(self):
+    def test_valid(self) -> None:
         self.assertTrue(check_ref_format(b"heads/foo"))
         self.assertTrue(check_ref_format(b"foo/bar/baz"))
         self.assertTrue(check_ref_format(b"refs///heads/foo"))
@@ -62,7 +62,7 @@ class CheckRefFormatTests(TestCase):
         self.assertTrue(check_ref_format(b"heads/foo@bar"))
         self.assertTrue(check_ref_format(b"heads/fix.lock.error"))
 
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         self.assertFalse(check_ref_format(b"foo"))
         self.assertFalse(check_ref_format(b"heads/foo/"))
         self.assertFalse(check_ref_format(b"./foo"))
@@ -81,7 +81,7 @@ FOURS = b"4" * 40
 
 
 class PackedRefsFileTests(TestCase):
-    def test_split_ref_line_errors(self):
+    def test_split_ref_line_errors(self) -> None:
         self.assertRaises(errors.PackedRefsException, _split_ref_line, b"singlefield")
         self.assertRaises(errors.PackedRefsException, _split_ref_line, b"badsha name")
         self.assertRaises(
@@ -90,17 +90,17 @@ class PackedRefsFileTests(TestCase):
             ONES + b" bad/../refname",
         )
 
-    def test_read_without_peeled(self):
+    def test_read_without_peeled(self) -> None:
         f = BytesIO(b"\n".join([b"# comment", ONES + b" ref/1", TWOS + b" ref/2"]))
         self.assertEqual(
             [(ONES, b"ref/1"), (TWOS, b"ref/2")], list(read_packed_refs(f))
         )
 
-    def test_read_without_peeled_errors(self):
+    def test_read_without_peeled_errors(self) -> None:
         f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS]))
         self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))
 
-    def test_read_with_peeled(self):
+    def test_read_with_peeled(self) -> None:
         f = BytesIO(
             b"\n".join(
                 [
@@ -120,14 +120,14 @@ class PackedRefsFileTests(TestCase):
             list(read_packed_refs_with_peeled(f)),
         )
 
-    def test_read_with_peeled_errors(self):
+    def test_read_with_peeled_errors(self) -> None:
         f = BytesIO(b"\n".join([b"^" + TWOS, ONES + b" ref/1"]))
         self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))
 
         f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS, b"^" + THREES]))
         self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f))
 
-    def test_write_with_peeled(self):
+    def test_write_with_peeled(self) -> None:
         f = BytesIO()
         write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}, {b"ref/1": THREES})
         self.assertEqual(
@@ -143,7 +143,7 @@ class PackedRefsFileTests(TestCase):
             f.getvalue(),
         )
 
-    def test_write_without_peeled(self):
+    def test_write_without_peeled(self) -> None:
         f = BytesIO()
         write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS})
         self.assertEqual(
@@ -165,7 +165,7 @@ _TEST_REFS = {
 
 
 class RefsContainerTests:
-    def test_keys(self):
+    def test_keys(self) -> None:
         actual_keys = set(self._refs.keys())
         self.assertEqual(set(self._refs.allkeys()), actual_keys)
         self.assertEqual(set(_TEST_REFS.keys()), actual_keys)
@@ -180,18 +180,18 @@ class RefsContainerTests:
             [b"refs-0.1", b"refs-0.2"], sorted(self._refs.keys(b"refs/tags"))
         )
 
-    def test_iter(self):
+    def test_iter(self) -> None:
         actual_keys = set(self._refs.keys())
         self.assertEqual(set(self._refs), actual_keys)
         self.assertEqual(set(_TEST_REFS.keys()), actual_keys)
 
-    def test_as_dict(self):
+    def test_as_dict(self) -> None:
         # refs/heads/loop does not show up even if it exists
         expected_refs = dict(_TEST_REFS)
         del expected_refs[b"refs/heads/loop"]
         self.assertEqual(expected_refs, self._refs.as_dict())
 
-    def test_get_symrefs(self):
+    def test_get_symrefs(self) -> None:
         self._refs.set_symbolic_ref(b"refs/heads/src", b"refs/heads/dst")
         symrefs = self._refs.get_symrefs()
         if b"HEAD" in symrefs:
@@ -204,7 +204,7 @@ class RefsContainerTests:
             symrefs,
         )
 
-    def test_setitem(self):
+    def test_setitem(self) -> None:
         self._refs[b"refs/some/ref"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
         self.assertEqual(
             b"42d06bd4b77fed026b154d16493e5deab78f02ec",
@@ -234,7 +234,7 @@ class RefsContainerTests:
             b"42d06bd",
         )
 
-    def test_set_if_equals(self):
+    def test_set_if_equals(self) -> None:
         nines = b"9" * 40
         self.assertFalse(self._refs.set_if_equals(b"HEAD", b"c0ffee", nines))
         self.assertEqual(
@@ -260,7 +260,7 @@ class RefsContainerTests:
         )
         self.assertEqual(nines, self._refs[b"refs/heads/nonexistent"])
 
-    def test_add_if_new(self):
+    def test_add_if_new(self) -> None:
         nines = b"9" * 40
         self.assertFalse(self._refs.add_if_new(b"refs/heads/master", nines))
         self.assertEqual(
@@ -271,7 +271,7 @@ class RefsContainerTests:
         self.assertTrue(self._refs.add_if_new(b"refs/some/ref", nines))
         self.assertEqual(nines, self._refs[b"refs/some/ref"])
 
-    def test_set_symbolic_ref(self):
+    def test_set_symbolic_ref(self) -> None:
         self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master")
         self.assertEqual(
             b"ref: refs/heads/master",
@@ -282,7 +282,7 @@ class RefsContainerTests:
             self._refs[b"refs/heads/symbolic"],
         )
 
-    def test_set_symbolic_ref_overwrite(self):
+    def test_set_symbolic_ref_overwrite(self) -> None:
         nines = b"9" * 40
         self.assertNotIn(b"refs/heads/symbolic", self._refs)
         self._refs[b"refs/heads/symbolic"] = nines
@@ -297,7 +297,7 @@ class RefsContainerTests:
             self._refs[b"refs/heads/symbolic"],
         )
 
-    def test_check_refname(self):
+    def test_check_refname(self) -> None:
         self._refs._check_refname(b"HEAD")
         self._refs._check_refname(b"refs/stash")
         self._refs._check_refname(b"refs/heads/foo")
@@ -307,11 +307,11 @@ class RefsContainerTests:
             errors.RefFormatError, self._refs._check_refname, b"notrefs/foo"
         )
 
-    def test_contains(self):
+    def test_contains(self) -> None:
         self.assertIn(b"refs/heads/master", self._refs)
         self.assertNotIn(b"refs/heads/bar", self._refs)
 
-    def test_delitem(self):
+    def test_delitem(self) -> None:
         self.assertEqual(
             b"42d06bd4b77fed026b154d16493e5deab78f02ec",
             self._refs[b"refs/heads/master"],
@@ -319,7 +319,7 @@ class RefsContainerTests:
         del self._refs[b"refs/heads/master"]
         self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"])
 
-    def test_remove_if_equals(self):
+    def test_remove_if_equals(self) -> None:
         self.assertFalse(self._refs.remove_if_equals(b"HEAD", b"c0ffee"))
         self.assertEqual(
             b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"]
@@ -333,7 +333,7 @@ class RefsContainerTests:
         self.assertTrue(self._refs.remove_if_equals(b"refs/tags/refs-0.2", ZERO_SHA))
         self.assertNotIn(b"refs/tags/refs-0.2", self._refs)
 
-    def test_import_refs_name(self):
+    def test_import_refs_name(self) -> None:
         self._refs[b"refs/remotes/origin/other"] = (
             b"48d01bd4b77fed026b154d16493e5deab78f02ec"
         )
@@ -350,7 +350,7 @@ class RefsContainerTests:
             self._refs[b"refs/remotes/origin/other"],
         )
 
-    def test_import_refs_name_prune(self):
+    def test_import_refs_name_prune(self) -> None:
         self._refs[b"refs/remotes/origin/other"] = (
             b"48d01bd4b77fed026b154d16493e5deab78f02ec"
         )
@@ -367,11 +367,11 @@ class RefsContainerTests:
 
 
 class DictRefsContainerTests(RefsContainerTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self._refs = DictRefsContainer(dict(_TEST_REFS))
 
-    def test_invalid_refname(self):
+    def test_invalid_refname(self) -> None:
         # FIXME: Move this test into RefsContainerTests, but requires
         # some way of injecting invalid refs.
         self._refs._refs[b"refs/stash"] = b"00" * 20
@@ -382,13 +382,13 @@ class DictRefsContainerTests(RefsContainerTests, TestCase):
 
 
 class DiskRefsContainerTests(RefsContainerTests, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         self._repo = open_repo("refs.git")
         self.addCleanup(tear_down_repo, self._repo)
         self._refs = self._repo.refs
 
-    def test_get_packed_refs(self):
+    def test_get_packed_refs(self) -> None:
         self.assertEqual(
             {
                 b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec",
@@ -397,7 +397,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             self._refs.get_packed_refs(),
         )
 
-    def test_get_peeled_not_packed(self):
+    def test_get_peeled_not_packed(self) -> None:
         # not packed
         self.assertEqual(None, self._refs.get_peeled(b"refs/tags/refs-0.2"))
         self.assertEqual(
@@ -417,7 +417,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             self._refs.get_peeled(b"refs/tags/refs-0.1"),
         )
 
-    def test_setitem(self):
+    def test_setitem(self) -> None:
         RefsContainerTests.test_setitem(self)
         path = os.path.join(self._refs.path, b"refs", b"some", b"ref")
         with open(path, "rb") as f:
@@ -430,14 +430,14 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             b"42d06bd4b77fed026b154d16493e5deab78f02ec",
         )
 
-    def test_delete_refs_container(self):
+    def test_delete_refs_container(self) -> None:
         # We shouldn't delete the refs directory
         self._refs[b"refs/heads/blah"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
         for ref in self._refs.allkeys():
             del self._refs[ref]
         self.assertTrue(os.path.exists(os.path.join(self._refs.path, b"refs")))
 
-    def test_setitem_packed(self):
+    def test_setitem_packed(self) -> None:
         with open(os.path.join(self._refs.path, b"packed-refs"), "w") as f:
             f.write("# pack-refs with: peeled fully-peeled sorted \n")
             f.write("42d06bd4b77fed026b154d16493e5deab78f02ec refs/heads/packed\n")
@@ -462,7 +462,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             self._refs.get_packed_refs(),
         )
 
-    def test_add_packed_refs(self):
+    def test_add_packed_refs(self) -> None:
         # first, create a non-packed ref
         self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8"
 
@@ -516,7 +516,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
 
         self.assertFalse(os.path.exists(packed_refs_file_path))
 
-    def test_setitem_symbolic(self):
+    def test_setitem_symbolic(self) -> None:
         ones = b"1" * 40
         self._refs[b"HEAD"] = ones
         self.assertEqual(ones, self._refs[b"HEAD"])
@@ -532,7 +532,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         self.assertEqual(ones, f.read()[:40])
         f.close()
 
-    def test_set_if_equals(self):
+    def test_set_if_equals(self) -> None:
         RefsContainerTests.test_set_if_equals(self)
 
         # ensure symref was followed
@@ -546,7 +546,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         )
         self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock")))
 
-    def test_add_if_new_packed(self):
+    def test_add_if_new_packed(self) -> None:
         # don't overwrite packed ref
         self.assertFalse(self._refs.add_if_new(b"refs/tags/refs-0.1", b"9" * 40))
         self.assertEqual(
@@ -554,7 +554,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             self._refs[b"refs/tags/refs-0.1"],
         )
 
-    def test_add_if_new_symbolic(self):
+    def test_add_if_new_symbolic(self) -> None:
         # Use an empty repo instead of the default.
         repo_dir = os.path.join(tempfile.mkdtemp(), "test")
         os.makedirs(repo_dir)
@@ -573,7 +573,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         self.assertEqual(nines, refs[b"HEAD"])
         self.assertEqual(nines, refs[b"refs/heads/master"])
 
-    def test_follow(self):
+    def test_follow(self) -> None:
         self.assertEqual(
             (
                 [b"HEAD", b"refs/heads/master"],
@@ -590,7 +590,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         )
         self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
 
-    def test_set_overwrite_loop(self):
+    def test_set_overwrite_loop(self) -> None:
         self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
         self._refs[b"refs/heads/loop"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
         self.assertEqual(
@@ -598,13 +598,13 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             self._refs.follow(b"refs/heads/loop"),
         )
 
-    def test_delitem(self):
+    def test_delitem(self) -> None:
         RefsContainerTests.test_delitem(self)
         ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"master")
         self.assertFalse(os.path.exists(ref_file))
         self.assertNotIn(b"refs/heads/master", self._refs.get_packed_refs())
 
-    def test_delitem_symbolic(self):
+    def test_delitem_symbolic(self) -> None:
         self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD"))
         del self._refs[b"HEAD"]
         self.assertRaises(KeyError, lambda: self._refs[b"HEAD"])
@@ -614,7 +614,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         )
         self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD")))
 
-    def test_remove_if_equals_symref(self):
+    def test_remove_if_equals_symref(self) -> None:
         # HEAD is a symref, so shouldn't equal its dereferenced value
         self.assertFalse(
             self._refs.remove_if_equals(
@@ -640,7 +640,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         )
         self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock")))
 
-    def test_remove_packed_without_peeled(self):
+    def test_remove_packed_without_peeled(self) -> None:
         refs_file = os.path.join(self._repo.path, "packed-refs")
         f = GitFile(refs_file)
         refs_data = f.read()
@@ -663,7 +663,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
             )
         )
 
-    def test_remove_if_equals_packed(self):
+    def test_remove_if_equals_packed(self) -> None:
         # test removing ref that is only packed
         self.assertEqual(
             b"df6800012397fb85c56e7418dd4eb9405dee075c",
@@ -677,7 +677,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         )
         self.assertRaises(KeyError, lambda: self._refs[b"refs/tags/refs-0.1"])
 
-    def test_remove_parent(self):
+    def test_remove_parent(self) -> None:
         self._refs[b"refs/heads/foo/bar"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
         del self._refs[b"refs/heads/foo/bar"]
         ref_file = os.path.join(
@@ -694,7 +694,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         self.assertTrue(os.path.exists(ref_file))
         self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
 
-    def test_read_ref(self):
+    def test_read_ref(self) -> None:
         self.assertEqual(b"ref: refs/heads/master", self._refs.read_ref(b"HEAD"))
         self.assertEqual(
             b"42d06bd4b77fed026b154d16493e5deab78f02ec",
@@ -702,12 +702,12 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
         )
         self.assertEqual(None, self._refs.read_ref(b"nonexistent"))
 
-    def test_read_loose_ref(self):
+    def test_read_loose_ref(self) -> None:
         self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c"
 
         self.assertEqual(None, self._refs.read_ref(b"refs/heads/foo/bar"))
 
-    def test_non_ascii(self):
+    def test_non_ascii(self) -> None:
         try:
             encoded_ref = os.fsencode("refs/tags/schön")
         except UnicodeEncodeError as exc:
@@ -724,7 +724,7 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):
 
         self.assertEqual(expected_refs, self._repo.get_refs())
 
-    def test_cyrillic(self):
+    def test_cyrillic(self) -> None:
         if sys.platform in ("darwin", "win32"):
             raise SkipTest("filesystem encoding doesn't support arbitrary bytes")
         # reported in https://github.com/dulwich/dulwich/issues/608
@@ -757,7 +757,7 @@ _TEST_REFS_SERIALIZED = (
 
 
 class InfoRefsContainerTests(TestCase):
-    def test_invalid_refname(self):
+    def test_invalid_refname(self) -> None:
         text = _TEST_REFS_SERIALIZED + b"00" * 20 + b"\trefs/stash\n"
         refs = InfoRefsContainer(BytesIO(text))
         expected_refs = dict(_TEST_REFS)
@@ -766,7 +766,7 @@ class InfoRefsContainerTests(TestCase):
         del expected_refs[b"refs/heads/loop"]
         self.assertEqual(expected_refs, refs.as_dict())
 
-    def test_keys(self):
+    def test_keys(self) -> None:
         refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
         actual_keys = set(refs.keys())
         self.assertEqual(set(refs.allkeys()), actual_keys)
@@ -783,7 +783,7 @@ class InfoRefsContainerTests(TestCase):
         )
         self.assertEqual([b"refs-0.1", b"refs-0.2"], sorted(refs.keys(b"refs/tags")))
 
-    def test_as_dict(self):
+    def test_as_dict(self) -> None:
         refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
         # refs/heads/loop does not show up even if it exists
         expected_refs = dict(_TEST_REFS)
@@ -791,12 +791,12 @@ class InfoRefsContainerTests(TestCase):
         del expected_refs[b"refs/heads/loop"]
         self.assertEqual(expected_refs, refs.as_dict())
 
-    def test_contains(self):
+    def test_contains(self) -> None:
         refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
         self.assertIn(b"refs/heads/master", refs)
         self.assertNotIn(b"refs/heads/bar", refs)
 
-    def test_get_peeled(self):
+    def test_get_peeled(self) -> None:
         refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED))
         # refs/heads/loop does not show up even if it exists
         self.assertEqual(
@@ -806,10 +806,10 @@ class InfoRefsContainerTests(TestCase):
 
 
 class ParseSymrefValueTests(TestCase):
-    def test_valid(self):
+    def test_valid(self) -> None:
         self.assertEqual(b"refs/heads/foo", parse_symref_value(b"ref: refs/heads/foo"))
 
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         self.assertRaises(ValueError, parse_symref_value, b"foobar")
 
 
@@ -829,11 +829,11 @@ class StripPeeledRefsTests(TestCase):
         b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e",
     }
 
-    def test_strip_peeled_refs(self):
+    def test_strip_peeled_refs(self) -> None:
         # Simple check of two dicts
         self.assertEqual(strip_peeled_refs(self.all_refs), self.non_peeled_refs)
 
-    def test_split_peeled_refs(self):
+    def test_split_peeled_refs(self) -> None:
         (regular, peeled) = split_peeled_refs(self.all_refs)
         self.assertEqual(regular, self.non_peeled_refs)
         self.assertEqual(
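
A minimal, illustrative sketch of the annotation pattern running through the hunks above (the class, fixture, and test names below are invented for illustration and do not appear in the test suite): test methods and setUp fixtures return nothing, so each gains an explicit -> None return type.

    from io import BytesIO
    from unittest import TestCase


    class AnnotatedExampleTests(TestCase):
        def setUp(self) -> None:
            # The fixture exists only for its side effect, so it returns None.
            self.buf = BytesIO(b"ref: refs/heads/master")

        def test_read(self) -> None:
            # Assertion-only body; the explicit None return type lets a type
            # checker flag any accidental `return value` in a test.
            self.assertEqual(b"ref: refs/heads/master", self.buf.read())

One practical effect of annotating tests this way is that a type checker can catch a test that accidentally returns a value, which unittest does not treat as a failure.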

tests/test_repository.py (+100 -100)

@@ -49,7 +49,7 @@ missing_sha = b"b91fa4d900e17e99b433218e988c4eb4a3e9a097"
 
 
 class CreateRepositoryTests(TestCase):
-    def assertFileContentsEqual(self, expected, repo, path):
+    def assertFileContentsEqual(self, expected, repo, path) -> None:
         f = repo.get_named_file(path)
         if not f:
             self.assertEqual(expected, None)
@@ -57,7 +57,7 @@ class CreateRepositoryTests(TestCase):
             with f:
                 self.assertEqual(expected, f.read())
 
-    def _check_repo_contents(self, repo, expect_bare):
+    def _check_repo_contents(self, repo, expect_bare) -> None:
         self.assertEqual(expect_bare, repo.bare)
         self.assertFileContentsEqual(b"Unnamed repository", repo, "description")
         self.assertFileContentsEqual(b"", repo, os.path.join("info", "exclude"))
@@ -87,25 +87,25 @@ class CreateRepositoryTests(TestCase):
 
             self.assertEqual(expected, actual)
 
-    def test_create_memory(self):
+    def test_create_memory(self) -> None:
         repo = MemoryRepo.init_bare([], {})
         self._check_repo_contents(repo, True)
 
-    def test_create_disk_bare(self):
+    def test_create_disk_bare(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init_bare(tmp_dir)
         self.assertEqual(tmp_dir, repo._controldir)
         self._check_repo_contents(repo, True)
 
-    def test_create_disk_non_bare(self):
+    def test_create_disk_non_bare(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init(tmp_dir)
         self.assertEqual(os.path.join(tmp_dir, ".git"), repo._controldir)
         self._check_repo_contents(repo, False)
 
-    def test_create_disk_non_bare_mkdir(self):
+    def test_create_disk_non_bare_mkdir(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         target_dir = os.path.join(tmp_dir, "target")
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -113,7 +113,7 @@ class CreateRepositoryTests(TestCase):
         self.assertEqual(os.path.join(target_dir, ".git"), repo._controldir)
         self._check_repo_contents(repo, False)
 
-    def test_create_disk_bare_mkdir(self):
+    def test_create_disk_bare_mkdir(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         target_dir = os.path.join(tmp_dir, "target")
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -123,13 +123,13 @@ class CreateRepositoryTests(TestCase):
 
 
 class MemoryRepoTests(TestCase):
-    def test_set_description(self):
+    def test_set_description(self) -> None:
         r = MemoryRepo.init_bare([], {})
         description = b"Some description"
         r.set_description(description)
         self.assertEqual(description, r.get_description())
 
-    def test_pull_into(self):
+    def test_pull_into(self) -> None:
         r = MemoryRepo.init_bare([], {})
         repo = open_repo("a.git")
         self.addCleanup(tear_down_repo, repo)
@@ -146,18 +146,18 @@ class RepositoryRootTests(TestCase):
         self.addCleanup(tear_down_repo, repo)
         return repo
 
-    def test_simple_props(self):
+    def test_simple_props(self) -> None:
         r = self.open_repo("a.git")
         self.assertEqual(r.controldir(), r.path)
 
-    def test_setitem(self):
+    def test_setitem(self) -> None:
         r = self.open_repo("a.git")
         r[b"refs/tags/foo"] = b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"
         self.assertEqual(
             b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", r[b"refs/tags/foo"].id
         )
 
-    def test_getitem_unicode(self):
+    def test_getitem_unicode(self) -> None:
         r = self.open_repo("a.git")
 
         test_keys = [
@@ -182,7 +182,7 @@ class RepositoryRootTests(TestCase):
                 12,
             )
 
-    def test_delitem(self):
+    def test_delitem(self) -> None:
         r = self.open_repo("a.git")
 
         del r[b"refs/heads/master"]
@@ -193,7 +193,7 @@ class RepositoryRootTests(TestCase):
 
         self.assertRaises(ValueError, r.__delitem__, b"notrefs/foo")
 
-    def test_get_refs(self):
+    def test_get_refs(self) -> None:
         r = self.open_repo("a.git")
         self.assertEqual(
             {
@@ -205,49 +205,49 @@ class RepositoryRootTests(TestCase):
             r.get_refs(),
         )
 
-    def test_head(self):
+    def test_head(self) -> None:
         r = self.open_repo("a.git")
         self.assertEqual(r.head(), b"a90fa2d900a17e99b433217e988c4eb4a2e9a097")
 
-    def test_get_object(self):
+    def test_get_object(self) -> None:
         r = self.open_repo("a.git")
         obj = r.get_object(r.head())
         self.assertEqual(obj.type_name, b"commit")
 
-    def test_get_object_non_existant(self):
+    def test_get_object_non_existant(self) -> None:
         r = self.open_repo("a.git")
         self.assertRaises(KeyError, r.get_object, missing_sha)
 
-    def test_contains_object(self):
+    def test_contains_object(self) -> None:
         r = self.open_repo("a.git")
         self.assertIn(r.head(), r)
         self.assertNotIn(b"z" * 40, r)
 
-    def test_contains_ref(self):
+    def test_contains_ref(self) -> None:
         r = self.open_repo("a.git")
         self.assertIn(b"HEAD", r)
 
-    def test_get_no_description(self):
+    def test_get_no_description(self) -> None:
         r = self.open_repo("a.git")
         self.assertIs(None, r.get_description())
 
-    def test_get_description(self):
+    def test_get_description(self) -> None:
         r = self.open_repo("a.git")
         with open(os.path.join(r.path, "description"), "wb") as f:
             f.write(b"Some description")
         self.assertEqual(b"Some description", r.get_description())
 
-    def test_set_description(self):
+    def test_set_description(self) -> None:
         r = self.open_repo("a.git")
         description = b"Some description"
         r.set_description(description)
         self.assertEqual(description, r.get_description())
 
-    def test_contains_missing(self):
+    def test_contains_missing(self) -> None:
         r = self.open_repo("a.git")
         self.assertNotIn(b"bar", r)
 
-    def test_get_peeled(self):
+    def test_get_peeled(self) -> None:
         # unpacked ref
         r = self.open_repo("a.git")
         tag_sha = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a"
@@ -262,11 +262,11 @@ class RepositoryRootTests(TestCase):
 
         # TODO: add more corner cases to test repo
 
-    def test_get_peeled_not_tag(self):
+    def test_get_peeled_not_tag(self) -> None:
         r = self.open_repo("a.git")
         self.assertEqual(r.get_peeled(b"HEAD"), r.head())
 
-    def test_get_parents(self):
+    def test_get_parents(self) -> None:
         r = self.open_repo("a.git")
         self.assertEqual(
             [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
@@ -275,7 +275,7 @@ class RepositoryRootTests(TestCase):
         r.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None)
         self.assertEqual([], r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"))
 
-    def test_get_walker(self):
+    def test_get_walker(self) -> None:
         r = self.open_repo("a.git")
         # include defaults to [r.head()]
         self.assertEqual(
@@ -297,7 +297,7 @@ class RepositoryRootTests(TestCase):
             [b"2a72d929692c41d8554c07f6301757ba18a65d91"],
         )
 
-    def assertFilesystemHidden(self, path):
+    def assertFilesystemHidden(self, path) -> None:
         if sys.platform != "win32":
             return
         import ctypes
@@ -308,7 +308,7 @@ class RepositoryRootTests(TestCase):
         )
         self.assertTrue(2 & GetFileAttributesW(path))
 
-    def test_init_existing(self):
+    def test_init_existing(self) -> None:
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         t = Repo.init(tmp_dir)
@@ -316,7 +316,7 @@ class RepositoryRootTests(TestCase):
         self.assertEqual(os.listdir(tmp_dir), [".git"])
         self.assertFilesystemHidden(os.path.join(tmp_dir, ".git"))
 
-    def test_init_mkdir(self):
+    def test_init_mkdir(self) -> None:
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo_dir = os.path.join(tmp_dir, "a-repo")
@@ -326,7 +326,7 @@ class RepositoryRootTests(TestCase):
         self.assertEqual(os.listdir(repo_dir), [".git"])
         self.assertFilesystemHidden(os.path.join(repo_dir, ".git"))
 
-    def test_init_mkdir_unicode(self):
+    def test_init_mkdir_unicode(self) -> None:
         repo_name = "\xa7"
         try:
             os.fsencode(repo_name)
@@ -342,7 +342,7 @@ class RepositoryRootTests(TestCase):
         self.assertFilesystemHidden(os.path.join(repo_dir, ".git"))
 
     @skipIf(sys.platform == "win32", "fails on Windows")
-    def test_fetch(self):
+    def test_fetch(self) -> None:
         r = self.open_repo("a.git")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -356,7 +356,7 @@ class RepositoryRootTests(TestCase):
         self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t)
 
     @skipIf(sys.platform == "win32", "fails on Windows")
-    def test_fetch_ignores_missing_refs(self):
+    def test_fetch_ignores_missing_refs(self) -> None:
         r = self.open_repo("a.git")
         missing = b"1234566789123456789123567891234657373833"
         r.refs[b"refs/heads/blah"] = missing
@@ -372,7 +372,7 @@ class RepositoryRootTests(TestCase):
         self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t)
         self.assertNotIn(missing, t)
 
-    def test_clone(self):
+    def test_clone(self) -> None:
         r = self.open_repo("a.git")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -402,7 +402,7 @@ class RepositoryRootTests(TestCase):
                 c.get((b"remote", b"origin"), b"fetch"),
             )
 
-    def test_clone_no_head(self):
+    def test_clone_no_head(self) -> None:
         temp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, temp_dir)
         repo_dir = os.path.join(os.path.dirname(__file__), "..", "testdata", "repos")
@@ -422,7 +422,7 @@ class RepositoryRootTests(TestCase):
             t.refs.as_dict(),
         )
 
-    def test_clone_empty(self):
+    def test_clone_empty(self) -> None:
         """Test clone() doesn't crash if HEAD points to a non-existing ref.
 
         This simulates cloning server-side bare repository either when it is
@@ -435,7 +435,7 @@ class RepositoryRootTests(TestCase):
         self.addCleanup(shutil.rmtree, tmp_dir)
         r.clone(tmp_dir, mkdir=False, bare=True)
 
-    def test_reset_index_symlink_enabled(self):
+    def test_reset_index_symlink_enabled(self) -> None:
         if sys.platform == "win32":
             self.skipTest("symlinks are not supported on Windows")
         tmp_dir = self.mkdtemp()
@@ -456,7 +456,7 @@ class RepositoryRootTests(TestCase):
             self.assertEqual("foo", os.readlink(bar_path))
         t.close()
 
-    def test_reset_index_symlink_disabled(self):
+    def test_reset_index_symlink_disabled(self) -> None:
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
 
@@ -472,14 +472,14 @@ class RepositoryRootTests(TestCase):
 
         t.close()
 
-    def test_clone_bare(self):
+    def test_clone_bare(self) -> None:
         r = self.open_repo("a.git")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         t = r.clone(tmp_dir, mkdir=False)
         t.close()
 
-    def test_clone_checkout_and_bare(self):
+    def test_clone_checkout_and_bare(self) -> None:
         r = self.open_repo("a.git")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -487,7 +487,7 @@ class RepositoryRootTests(TestCase):
             ValueError, r.clone, tmp_dir, mkdir=False, checkout=True, bare=True
         )
 
-    def test_clone_branch(self):
+    def test_clone_branch(self) -> None:
         r = self.open_repo("a.git")
         r.refs[b"refs/heads/mybranch"] = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a"
         tmp_dir = self.mkdtemp()
@@ -502,7 +502,7 @@ class RepositoryRootTests(TestCase):
                 b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
             )
 
-    def test_clone_tag(self):
+    def test_clone_tag(self) -> None:
         r = self.open_repo("a.git")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -517,7 +517,7 @@ class RepositoryRootTests(TestCase):
                 b"a90fa2d900a17e99b433217e988c4eb4a2e9a097",
             )
 
-    def test_clone_invalid_branch(self):
+    def test_clone_invalid_branch(self) -> None:
         r = self.open_repo("a.git")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -529,7 +529,7 @@ class RepositoryRootTests(TestCase):
             branch=b"mybranch",
         )
 
-    def test_merge_history(self):
+    def test_merge_history(self) -> None:
         r = self.open_repo("simple_merge.git")
         shas = [e.commit.id for e in r.get_walker()]
         self.assertEqual(
@@ -543,7 +543,7 @@ class RepositoryRootTests(TestCase):
             ],
         )
 
-    def test_out_of_order_merge(self):
+    def test_out_of_order_merge(self) -> None:
         """Test that revision history is ordered by date, not parent order."""
         r = self.open_repo("ooo_merge.git")
         shas = [e.commit.id for e in r.get_walker()]
@@ -557,19 +557,19 @@ class RepositoryRootTests(TestCase):
             ],
         )
 
-    def test_get_tags_empty(self):
+    def test_get_tags_empty(self) -> None:
         r = self.open_repo("ooo_merge.git")
         self.assertEqual({}, r.refs.as_dict(b"refs/tags"))
 
-    def test_get_config(self):
+    def test_get_config(self) -> None:
         r = self.open_repo("ooo_merge.git")
         self.assertIsInstance(r.get_config(), Config)
 
-    def test_get_config_stack(self):
+    def test_get_config_stack(self) -> None:
         r = self.open_repo("ooo_merge.git")
         self.assertIsInstance(r.get_config_stack(), Config)
 
-    def test_common_revisions(self):
+    def test_common_revisions(self) -> None:
         """This test demonstrates that ``find_common_revisions()`` actually
         returns common heads, not revisions; dulwich already uses
         ``find_common_revisions()`` in such a manner (see
@@ -618,7 +618,7 @@ class RepositoryRootTests(TestCase):
         shas = r1.object_store.find_common_revisions(r2.get_graph_walker())
         self.assertEqual(set(shas), expected_shas)
 
-    def test_shell_hook_pre_commit(self):
+    def test_shell_hook_pre_commit(self) -> None:
         if os.name != "posix":
             self.skipTest("shell hook tests requires POSIX shell")
 
@@ -668,7 +668,7 @@ exit 0
         )
         self.assertEqual([], r[commit_sha].parents)
 
-    def test_shell_hook_commit_msg(self):
+    def test_shell_hook_commit_msg(self) -> None:
         if os.name != "posix":
             self.skipTest("shell hook tests requires POSIX shell")
 
@@ -718,7 +718,7 @@ exit 0
         )
         self.assertEqual([], r[commit_sha].parents)
 
-    def test_shell_hook_pre_commit_add_files(self):
+    def test_shell_hook_pre_commit_add_files(self) -> None:
         if os.name != "posix":
             self.skipTest("shell hook tests requires POSIX shell")
 
@@ -767,7 +767,7 @@ r.stage(['foo'])
         tree = r[r[commit_sha].tree]
         self.assertEqual({b"blah", b"foo"}, set(tree))
 
-    def test_shell_hook_post_commit(self):
+    def test_shell_hook_post_commit(self) -> None:
         if os.name != "posix":
             self.skipTest("shell hook tests requires POSIX shell")
 
@@ -851,8 +851,8 @@ exit 1
             )
         self.assertEqual([commit_sha], r[commit_sha2].parents)
 
-    def test_as_dict(self):
-        def check(repo):
+    def test_as_dict(self) -> None:
+        def check(repo) -> None:
             self.assertEqual(
                 repo.refs.subkeys(b"refs/tags"),
                 repo.refs.subkeys(b"refs/tags/"),
@@ -873,7 +873,7 @@ exit 1
             check(nonbare)
             check(bare)
 
-    def test_working_tree(self):
+    def test_working_tree(self) -> None:
         temp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, temp_dir)
         worktree_temp_dir = tempfile.mkdtemp()
@@ -919,7 +919,7 @@ class BuildRepoRootTests(TestCase):
     def get_repo_dir(self):
         return os.path.join(tempfile.mkdtemp(), "test")
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._repo_dir = self.get_repo_dir()
         os.makedirs(self._repo_dir)
@@ -944,7 +944,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual([], r[commit_sha].parents)
         self._root_commit = commit_sha
 
-    def test_get_shallow(self):
+    def test_get_shallow(self) -> None:
         self.assertEqual(set(), self._repo.get_shallow())
         with open(os.path.join(self._repo.path, ".git", "shallow"), "wb") as f:
             f.write(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097\n")
@@ -953,7 +953,7 @@ class BuildRepoRootTests(TestCase):
             self._repo.get_shallow(),
         )
 
-    def test_update_shallow(self):
+    def test_update_shallow(self) -> None:
         self._repo.update_shallow(None, None)  # no op
         self.assertEqual(set(), self._repo.get_shallow())
         self._repo.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None)
@@ -976,7 +976,7 @@ class BuildRepoRootTests(TestCase):
             os.path.exists(os.path.join(self._repo.controldir(), "shallow")),
         )
 
-    def test_build_repo(self):
+    def test_build_repo(self) -> None:
         r = self._repo
         self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD"))
         self.assertEqual(self._root_commit, r.refs[b"refs/heads/master"])
@@ -985,7 +985,7 @@ class BuildRepoRootTests(TestCase):
         actual_commit = r[self._root_commit]
         self.assertEqual(b"msg", actual_commit.message)
 
-    def test_commit_modified(self):
+    def test_commit_modified(self) -> None:
         r = self._repo
         with open(os.path.join(r.path, "a"), "wb") as f:
             f.write(b"new contents")
@@ -1005,7 +1005,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(b"new contents", r[a_id].data)
 
     @skipIf(not getattr(os, "symlink", None), "Requires symlink support")
-    def test_commit_symlink(self):
+    def test_commit_symlink(self) -> None:
         r = self._repo
         os.symlink("a", os.path.join(r.path, "b"))
         r.stage(["a", "b"])
@@ -1023,7 +1023,7 @@ class BuildRepoRootTests(TestCase):
         self.assertTrue(stat.S_ISLNK(b_mode))
         self.assertEqual(b"a", r[b_id].data)
 
-    def test_commit_merge_heads_file(self):
+    def test_commit_merge_heads_file(self) -> None:
         tmp_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
         r = Repo.init(tmp_dir)
@@ -1057,7 +1057,7 @@ class BuildRepoRootTests(TestCase):
             r[commit_sha].parents,
         )
 
-    def test_commit_deleted(self):
+    def test_commit_deleted(self) -> None:
         r = self._repo
         os.remove(os.path.join(r.path, "a"))
         r.stage(["a"])
@@ -1075,7 +1075,7 @@ class BuildRepoRootTests(TestCase):
         tree = r[r[commit_sha].tree]
         self.assertEqual([], list(tree.iteritems()))
 
-    def test_commit_follows(self):
+    def test_commit_follows(self) -> None:
         r = self._repo
         r.refs.set_symbolic_ref(b"HEAD", b"refs/heads/bla")
         commit_sha = r.do_commit(
@@ -1090,7 +1090,7 @@ class BuildRepoRootTests(TestCase):
         )
         self.assertEqual(commit_sha, r[b"refs/heads/bla"].id)
 
-    def test_commit_encoding(self):
+    def test_commit_encoding(self) -> None:
         r = self._repo
         commit_sha = r.do_commit(
             b"commit with strange character \xee",
@@ -1104,7 +1104,7 @@ class BuildRepoRootTests(TestCase):
         )
         self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
 
-    def test_compression_level(self):
+    def test_compression_level(self) -> None:
         r = self._repo
         c = r.get_config()
         c.set(("core",), "compression", "3")
@@ -1113,21 +1113,21 @@ class BuildRepoRootTests(TestCase):
         r = Repo(self._repo_dir)
         self.assertEqual(r.object_store.loose_compression_level, 4)
 
-    def test_repositoryformatversion_unsupported(self):
+    def test_repositoryformatversion_unsupported(self) -> None:
         r = self._repo
         c = r.get_config()
         c.set(("core",), "repositoryformatversion", "2")
         c.write_to_path()
         self.assertRaises(UnsupportedVersion, Repo, self._repo_dir)
 
-    def test_repositoryformatversion_1(self):
+    def test_repositoryformatversion_1(self) -> None:
         r = self._repo
         c = r.get_config()
         c.set(("core",), "repositoryformatversion", "1")
         c.write_to_path()
         Repo(self._repo_dir)
 
-    def test_worktreeconfig_extension(self):
+    def test_worktreeconfig_extension(self) -> None:
         r = self._repo
         c = r.get_config()
         c.set(("core",), "repositoryformatversion", "1")
@@ -1140,7 +1140,7 @@ class BuildRepoRootTests(TestCase):
         cs = r.get_config_stack()
         self.assertEqual(cs.get(("user",), "name"), b"Jelmer")
 
-    def test_worktreeconfig_extension_case(self):
+    def test_worktreeconfig_extension_case(self) -> None:
         """Test that worktree code does not error for alternate case format."""
         r = self._repo
         c = r.get_config()
@@ -1156,7 +1156,7 @@ class BuildRepoRootTests(TestCase):
         # https://github.com/jelmer/dulwich/issues/1285 was addressed
         Repo(self._repo_dir)
 
-    def test_repositoryformatversion_1_extension(self):
+    def test_repositoryformatversion_1_extension(self) -> None:
         r = self._repo
         c = r.get_config()
         c.set(("core",), "repositoryformatversion", "1")
@@ -1164,7 +1164,7 @@ class BuildRepoRootTests(TestCase):
         c.write_to_path()
         self.assertRaises(UnsupportedExtension, Repo, self._repo_dir)
 
-    def test_commit_encoding_from_config(self):
+    def test_commit_encoding_from_config(self) -> None:
         r = self._repo
         c = r.get_config()
         c.set(("i18n",), "commitEncoding", "iso8859-1")
@@ -1180,7 +1180,7 @@ class BuildRepoRootTests(TestCase):
         )
         self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
 
-    def test_commit_config_identity(self):
+    def test_commit_config_identity(self) -> None:
         # commit falls back to the users' identity if it wasn't specified
         r = self._repo
         c = r.get_config()
@@ -1191,7 +1191,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
 
-    def test_commit_config_identity_strips_than(self):
+    def test_commit_config_identity_strips_than(self) -> None:
         # commit falls back to the users' identity if it wasn't specified,
         # and strips superfluous <>
         r = self._repo
@@ -1203,7 +1203,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
 
-    def test_commit_config_identity_in_memoryrepo(self):
+    def test_commit_config_identity_in_memoryrepo(self) -> None:
         # commit falls back to the users' identity if it wasn't specified
         r = MemoryRepo.init_bare([], {})
         c = r.get_config()
@@ -1214,7 +1214,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].committer)
 
-    def test_commit_config_identity_from_env(self):
+    def test_commit_config_identity_from_env(self) -> None:
         # commit falls back to the users' identity if it wasn't specified
         self.overrideEnv("GIT_COMMITTER_NAME", "joe")
         self.overrideEnv("GIT_COMMITTER_EMAIL", "joe@example.com")
@@ -1227,15 +1227,15 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(b"Jelmer <jelmer@apache.org>", r[commit_sha].author)
         self.assertEqual(b"joe <joe@example.com>", r[commit_sha].committer)
 
-    def test_commit_fail_ref(self):
+    def test_commit_fail_ref(self) -> None:
         r = self._repo
 
-        def set_if_equals(name, old_ref, new_ref, **kwargs):
+        def set_if_equals(name, old_ref, new_ref, **kwargs) -> bool:
             return False
 
         r.refs.set_if_equals = set_if_equals
 
-        def add_if_new(name, new_ref, **kwargs):
+        def add_if_new(name, new_ref, **kwargs) -> None:
             self.fail("Unexpected call to add_if_new")
 
         r.refs.add_if_new = add_if_new
@@ -1259,7 +1259,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(r[self._root_commit].tree, new_commit.tree)
         self.assertEqual(b"failed commit", new_commit.message)
 
-    def test_commit_branch(self):
+    def test_commit_branch(self) -> None:
         r = self._repo
 
         commit_sha = r.do_commit(
@@ -1293,7 +1293,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
         self.assertEqual([new_branch_head], r[commit_sha].parents)
 
-    def test_commit_merge_heads(self):
+    def test_commit_merge_heads(self) -> None:
         r = self._repo
         merge_1 = r.do_commit(
             b"commit to branch 2",
@@ -1317,7 +1317,7 @@ class BuildRepoRootTests(TestCase):
         )
         self.assertEqual([self._root_commit, merge_1], r[commit_sha].parents)
 
-    def test_commit_dangling_commit(self):
+    def test_commit_dangling_commit(self) -> None:
         r = self._repo
 
         old_shas = set(r.object_store)
@@ -1341,7 +1341,7 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual([], r[commit_sha].parents)
         self.assertEqual(old_refs, r.get_refs())
 
-    def test_commit_dangling_commit_with_parents(self):
+    def test_commit_dangling_commit_with_parents(self) -> None:
         r = self._repo
 
         old_shas = set(r.object_store)
@@ -1366,32 +1366,32 @@ class BuildRepoRootTests(TestCase):
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         self.assertEqual(old_refs, r.get_refs())
 
-    def test_stage_absolute(self):
+    def test_stage_absolute(self) -> None:
         r = self._repo
         os.remove(os.path.join(r.path, "a"))
         self.assertRaises(ValueError, r.stage, [os.path.join(r.path, "a")])
 
-    def test_stage_deleted(self):
+    def test_stage_deleted(self) -> None:
         r = self._repo
         os.remove(os.path.join(r.path, "a"))
         r.stage(["a"])
         r.stage(["a"])  # double-stage a deleted path
         self.assertEqual([], list(r.open_index()))
 
-    def test_stage_directory(self):
+    def test_stage_directory(self) -> None:
         r = self._repo
         os.mkdir(os.path.join(r.path, "c"))
         r.stage(["c"])
         self.assertEqual([b"a"], list(r.open_index()))
 
-    def test_stage_submodule(self):
+    def test_stage_submodule(self) -> None:
         r = self._repo
         s = Repo.init(os.path.join(r.path, "sub"), mkdir=True)
         s.do_commit(b"message")
         r.stage(["sub"])
         self.assertEqual([b"a", b"sub"], list(r.open_index()))
 
-    def test_unstage_midify_file_with_dir(self):
+    def test_unstage_midify_file_with_dir(self) -> None:
         os.mkdir(os.path.join(self._repo.path, "new_dir"))
         full_path = os.path.join(self._repo.path, "new_dir", "foo")
 
@@ -1412,7 +1412,7 @@ class BuildRepoRootTests(TestCase):
             [{"add": [], "delete": [], "modify": []}, [b"new_dir/foo"], []], status
         )
 
-    def test_unstage_while_no_commit(self):
+    def test_unstage_while_no_commit(self) -> None:
         file = "foo"
         full_path = os.path.join(self._repo.path, file)
         with open(full_path, "w") as f:
@@ -1422,7 +1422,7 @@ class BuildRepoRootTests(TestCase):
         status = list(porcelain.status(self._repo))
         self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)
 
-    def test_unstage_add_file(self):
+    def test_unstage_add_file(self) -> None:
         file = "foo"
         full_path = os.path.join(self._repo.path, file)
         porcelain.commit(
@@ -1438,7 +1438,7 @@ class BuildRepoRootTests(TestCase):
         status = list(porcelain.status(self._repo))
         self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)
 
-    def test_unstage_modify_file(self):
+    def test_unstage_modify_file(self) -> None:
         file = "foo"
         full_path = os.path.join(self._repo.path, file)
         with open(full_path, "w") as f:
@@ -1460,7 +1460,7 @@ class BuildRepoRootTests(TestCase):
             [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
         )
 
-    def test_unstage_remove_file(self):
+    def test_unstage_remove_file(self) -> None:
         file = "foo"
         full_path = os.path.join(self._repo.path, file)
         with open(full_path, "w") as f:
@@ -1479,7 +1479,7 @@ class BuildRepoRootTests(TestCase):
             [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
         )
 
-    def test_reset_index(self):
+    def test_reset_index(self) -> None:
         r = self._repo
         with open(os.path.join(r.path, "a"), "wb") as f:
             f.write(b"changed")
@@ -1498,7 +1498,7 @@ class BuildRepoRootTests(TestCase):
         sys.platform in ("win32", "darwin"),
         "tries to implicitly decode as utf8",
     )
-    def test_commit_no_encode_decode(self):
+    def test_commit_no_encode_decode(self) -> None:
         r = self._repo
         repo_path_bytes = os.fsencode(r.path)
         encodings = ("utf8", "latin1")
@@ -1529,25 +1529,25 @@ class BuildRepoRootTests(TestCase):
             self.assertEqual(stat.S_IFREG | 0o644, mode)
             self.assertEqual(encoding.encode("ascii"), r[id].data)
 
-    def test_discover_intended(self):
+    def test_discover_intended(self) -> None:
         path = os.path.join(self._repo_dir, "b/c")
         r = Repo.discover(path)
         self.assertEqual(r.head(), self._repo.head())
 
-    def test_discover_isrepo(self):
+    def test_discover_isrepo(self) -> None:
         r = Repo.discover(self._repo_dir)
         self.assertEqual(r.head(), self._repo.head())
 
-    def test_discover_notrepo(self):
+    def test_discover_notrepo(self) -> None:
         with self.assertRaises(NotGitRepository):
             Repo.discover("/")
 
 
 class CheckUserIdentityTests(TestCase):
-    def test_valid(self):
+    def test_valid(self) -> None:
         check_user_identity(b"Me <me@example.com>")
 
-    def test_invalid(self):
+    def test_invalid(self) -> None:
         self.assertRaises(InvalidUserIdentity, check_user_identity, b"No Email")
         self.assertRaises(
             InvalidUserIdentity, check_user_identity, b"Fullname <missing"
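
Not every annotation added above is -> None; stubs that hand a result back to the caller carry that result's type instead. A hedged sketch of the distinction, modelled on the set_if_equals and add_if_new stubs in the test_commit_fail_ref hunk (the bodies below are illustrative stand-ins, not the repository's code):

    def set_if_equals(name, old_ref, new_ref, **kwargs) -> bool:
        # The caller branches on the result, so the return type is bool.
        return False


    def add_if_new(name, new_ref, **kwargs) -> None:
        # Invoked purely for its side effect; annotating it as returning None
        # makes any attempt to use its result a type error.
        raise AssertionError("unexpected call to add_if_new")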

tests/test_server.py (+79 -79)

@@ -69,7 +69,7 @@ class TestProto:
         self._output: list[bytes] = []
         self._received: dict[int, list[bytes]] = {0: [], 1: [], 2: [], 3: []}
 
-    def set_output(self, output_lines):
+    def set_output(self, output_lines) -> None:
         self._output = output_lines
 
     def read_pkt_line(self):
@@ -83,10 +83,10 @@ class TestProto:
         else:
             raise HangupException
 
-    def write_sideband(self, band, data):
+    def write_sideband(self, band, data) -> None:
         self._received[band].append(data)
 
-    def write_pkt_line(self, data):
+    def write_pkt_line(self, data) -> None:
         self._received[0].append(data)
 
     def get_received_line(self, band=0):
@@ -108,23 +108,23 @@ class TestGenericPackHandler(PackHandler):
 
 
 class HandlerTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._handler = TestGenericPackHandler()
 
-    def assertSucceeds(self, func, *args, **kwargs):
+    def assertSucceeds(self, func, *args, **kwargs) -> None:
         try:
             func(*args, **kwargs)
         except GitProtocolError as e:
             self.fail(e)
 
-    def test_capability_line(self):
+    def test_capability_line(self) -> None:
         self.assertEqual(
             b" cap1 cap2 cap3",
             format_capability_line([b"cap1", b"cap2", b"cap3"]),
         )
 
-    def test_set_client_capabilities(self):
+    def test_set_client_capabilities(self) -> None:
         set_caps = self._handler.set_client_capabilities
         self.assertSucceeds(set_caps, [b"cap2"])
         self.assertSucceeds(set_caps, [b"cap1", b"cap2"])
@@ -142,7 +142,7 @@ class HandlerTestCase(TestCase):
         self._handler.innocuous_capabilities = lambda: (b"ignoreme",)
         self.assertSucceeds(set_caps, [b"cap2", b"ignoreme"])
 
-    def test_has_capability(self):
+    def test_has_capability(self) -> None:
         self.assertRaises(GitProtocolError, self._handler.has_capability, b"cap")
         caps = self._handler.capabilities()
         self._handler.set_client_capabilities(caps)
@@ -152,7 +152,7 @@ class HandlerTestCase(TestCase):
 
 
 class UploadPackHandlerTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.path = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.path)
@@ -163,7 +163,7 @@ class UploadPackHandlerTestCase(TestCase):
             backend, [b"/", b"host=lolcathost"], TestProto()
         )
 
-    def test_progress(self):
+    def test_progress(self) -> None:
         caps = self._handler.required_capabilities()
         self._handler.set_client_capabilities(caps)
         self._handler._start_pack_send_phase()
@@ -173,14 +173,14 @@ class UploadPackHandlerTestCase(TestCase):
         self.assertEqual(b"second message", self._handler.proto.get_received_line(2))
         self.assertRaises(IndexError, self._handler.proto.get_received_line, 2)
 
-    def test_no_progress(self):
+    def test_no_progress(self) -> None:
         caps = [*list(self._handler.required_capabilities()), b"no-progress"]
         self._handler.set_client_capabilities(caps)
         self._handler.progress(b"first message")
         self._handler.progress(b"second message")
         self.assertRaises(IndexError, self._handler.proto.get_received_line, 2)
 
-    def test_get_tagged(self):
+    def test_get_tagged(self) -> None:
         refs = {
             b"refs/tags/tag1": ONE,
             b"refs/tags/tag2": TWO,
@@ -209,7 +209,7 @@ class UploadPackHandlerTestCase(TestCase):
         self._handler.set_client_capabilities(caps)
         self.assertEqual({}, self._handler.get_tagged(refs, repo=self._repo))
 
-    def test_nothing_to_do_but_wants(self):
+    def test_nothing_to_do_but_wants(self) -> None:
         # Just the fact that the client claims to want an object is enough
         # for sending a pack. Even if there turns out to be nothing.
         refs = {b"refs/tags/tag1": ONE}
@@ -231,7 +231,7 @@ class UploadPackHandlerTestCase(TestCase):
         # The server should always send a pack, even if it's empty.
         self.assertTrue(self._handler.proto.get_received_line(1).startswith(b"PACK"))
 
-    def test_nothing_to_do_no_wants(self):
+    def test_nothing_to_do_no_wants(self) -> None:
         # Don't send a pack if the client didn't ask for anything.
         refs = {b"refs/tags/tag1": ONE}
         tree = Tree()
@@ -247,7 +247,7 @@ class UploadPackHandlerTestCase(TestCase):
 
 
 class FindShallowTests(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._store = MemoryObjectStore()
 
@@ -264,10 +264,10 @@ class FindShallowTests(TestCase):
             parents = [commits[-1].id]
         return commits
 
-    def assertSameElements(self, expected, actual):
+    def assertSameElements(self, expected, actual) -> None:
         self.assertEqual(set(expected), set(actual))
 
-    def test_linear(self):
+    def test_linear(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
 
         self.assertEqual(({c3.id}, set()), _find_shallow(self._store, [c3.id], 1))
@@ -284,7 +284,7 @@ class FindShallowTests(TestCase):
             _find_shallow(self._store, [c3.id], 4),
         )
 
-    def test_multiple_independent(self):
+    def test_multiple_independent(self) -> None:
         a = self.make_linear_commits(2, message=b"a")
         b = self.make_linear_commits(2, message=b"b")
         c = self.make_linear_commits(2, message=b"c")
@@ -295,7 +295,7 @@ class FindShallowTests(TestCase):
             _find_shallow(self._store, heads, 2),
         )
 
-    def test_multiple_overlapping(self):
+    def test_multiple_overlapping(self) -> None:
         # Create the following commit tree:
         # 1--2
         #  \
@@ -310,7 +310,7 @@ class FindShallowTests(TestCase):
             _find_shallow(self._store, [c2.id, c4.id], 3),
         )
 
-    def test_merge(self):
+    def test_merge(self) -> None:
         c1 = self.make_commit()
         c2 = self.make_commit()
         c3 = self.make_commit(parents=[c1.id, c2.id])
@@ -320,7 +320,7 @@ class FindShallowTests(TestCase):
             _find_shallow(self._store, [c3.id], 2),
         )
 
-    def test_tag(self):
+    def test_tag(self) -> None:
         c1, c2 = self.make_linear_commits(2)
         tag = make_tag(c2, name=b"tag")
         self._store.add_object(tag)
@@ -338,7 +338,7 @@ class TestUploadPackHandler(UploadPackHandler):
 
 
 class ReceivePackHandlerTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._repo = MemoryRepo.init_bare([], {})
         backend = DictBackend({b"/": self._repo})
@@ -346,7 +346,7 @@ class ReceivePackHandlerTestCase(TestCase):
             backend, [b"/", b"host=lolcathost"], TestProto()
         )
 
-    def test_apply_pack_del_ref(self):
+    def test_apply_pack_del_ref(self) -> None:
         refs = {b"refs/heads/master": TWO, b"refs/heads/fake-branch": ONE}
         self._repo.refs._update(refs)
         update_refs = [
@@ -361,7 +361,7 @@ class ReceivePackHandlerTestCase(TestCase):
 
 
 class ProtocolGraphWalkerEmptyTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._repo = MemoryRepo.init_bare([], {})
         backend = DictBackend({b"/": self._repo})
@@ -372,7 +372,7 @@ class ProtocolGraphWalkerEmptyTestCase(TestCase):
             self._repo.refs.get_symrefs,
         )
 
-    def test_empty_repository(self):
+    def test_empty_repository(self) -> None:
         # The server should wait for a flush packet.
         self._walker.proto.set_output([])
         self.assertRaises(HangupException, self._walker.determine_wants, {})
@@ -384,7 +384,7 @@ class ProtocolGraphWalkerEmptyTestCase(TestCase):
 
 
 class ProtocolGraphWalkerTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         # Create the following commit tree:
         #   3---5
@@ -406,7 +406,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
             self._repo.refs.get_symrefs,
         )
 
-    def test_all_wants_satisfied_no_haves(self):
+    def test_all_wants_satisfied_no_haves(self) -> None:
         self._walker.set_wants([ONE])
         self.assertFalse(self._walker.all_wants_satisfied([]))
         self._walker.set_wants([TWO])
@@ -414,7 +414,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
         self._walker.set_wants([THREE])
         self.assertFalse(self._walker.all_wants_satisfied([]))
 
-    def test_all_wants_satisfied_have_root(self):
+    def test_all_wants_satisfied_have_root(self) -> None:
         self._walker.set_wants([ONE])
         self.assertTrue(self._walker.all_wants_satisfied([ONE]))
         self._walker.set_wants([TWO])
@@ -422,14 +422,14 @@ class ProtocolGraphWalkerTestCase(TestCase):
         self._walker.set_wants([THREE])
         self.assertTrue(self._walker.all_wants_satisfied([ONE]))
 
-    def test_all_wants_satisfied_have_branch(self):
+    def test_all_wants_satisfied_have_branch(self) -> None:
         self._walker.set_wants([TWO])
         self.assertTrue(self._walker.all_wants_satisfied([TWO]))
         # wrong branch
         self._walker.set_wants([THREE])
         self.assertFalse(self._walker.all_wants_satisfied([TWO]))
 
-    def test_all_wants_satisfied(self):
+    def test_all_wants_satisfied(self) -> None:
         self._walker.set_wants([FOUR, FIVE])
         # trivial case: wants == haves
         self.assertTrue(self._walker.all_wants_satisfied([FOUR, FIVE]))
@@ -439,7 +439,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
         self.assertFalse(self._walker.all_wants_satisfied([THREE]))
         self.assertTrue(self._walker.all_wants_satisfied([TWO, THREE]))
 
-    def test_split_proto_line(self):
+    def test_split_proto_line(self) -> None:
         allowed = (b"want", b"done", None)
         self.assertEqual(
             (b"want", ONE), _split_proto_line(b"want " + ONE + b"\n", allowed)
@@ -464,7 +464,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
         self.assertEqual((b"done", None), _split_proto_line(b"done\n", allowed))
         self.assertEqual((None, None), _split_proto_line(b"", allowed))
 
-    def test_determine_wants(self):
+    def test_determine_wants(self) -> None:
         self._walker.proto.set_output([None])
         self.assertEqual([], self._walker.determine_wants({}))
         self.assertEqual(None, self._walker.proto.get_received_line())
@@ -500,7 +500,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
         self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None])
         self.assertRaises(GitProtocolError, self._walker.determine_wants, heads)
 
-    def test_determine_wants_advertisement(self):
+    def test_determine_wants_advertisement(self) -> None:
         self._walker.proto.set_output([None])
         # advertise branch tips plus tag
         heads = {
@@ -539,16 +539,16 @@ class ProtocolGraphWalkerTestCase(TestCase):
 
     # TODO: test commit time cutoff
 
-    def _handle_shallow_request(self, lines, heads):
+    def _handle_shallow_request(self, lines, heads) -> None:
         self._walker.proto.set_output([*lines, None])
         self._walker._handle_shallow_request(heads)
 
-    def assertReceived(self, expected):
+    def assertReceived(self, expected) -> None:
         self.assertEqual(
             expected, list(iter(self._walker.proto.get_received_line, None))
         )
 
-    def test_handle_shallow_request_no_client_shallows(self):
+    def test_handle_shallow_request_no_client_shallows(self) -> None:
         self._handle_shallow_request([b"deepen 2\n"], [FOUR, FIVE])
         self.assertEqual({TWO, THREE}, self._walker.shallow)
         self.assertReceived(
@@ -558,7 +558,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
             ]
         )
 
-    def test_handle_shallow_request_no_new_shallows(self):
+    def test_handle_shallow_request_no_new_shallows(self) -> None:
         lines = [
             b"shallow " + TWO + b"\n",
             b"shallow " + THREE + b"\n",
@@ -568,7 +568,7 @@ class ProtocolGraphWalkerTestCase(TestCase):
         self.assertEqual({TWO, THREE}, self._walker.shallow)
         self.assertReceived([])
 
-    def test_handle_shallow_request_unshallows(self):
+    def test_handle_shallow_request_unshallows(self) -> None:
         lines = [
             b"shallow " + TWO + b"\n",
             b"deepen 3\n",
@@ -603,10 +603,10 @@ class TestProtocolGraphWalker:
             assert command in allowed
         return command, sha
 
-    def send_ack(self, sha, ack_type=b""):
+    def send_ack(self, sha, ack_type=b"") -> None:
         self.acks.append((sha, ack_type))
 
-    def send_nak(self):
+    def send_nak(self) -> None:
         self.acks.append((None, b"nak"))
 
     def all_wants_satisfied(self, haves):
@@ -626,14 +626,14 @@ class TestProtocolGraphWalker:
         self.pack_sent = self._impl.handle_done(self.done_required, self.done_received)
         return self.pack_sent
 
-    def notify_done(self):
+    def notify_done(self) -> None:
         self.done_received = True
 
 
 class AckGraphWalkerImplTestCase(TestCase):
     """Base setup and asserts for AckGraphWalker tests."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._walker = TestProtocolGraphWalker()
         self._walker.lines = [
@@ -645,24 +645,24 @@ class AckGraphWalkerImplTestCase(TestCase):
         self._impl = self.impl_cls(self._walker)
         self._walker._impl = self._impl
 
-    def assertNoAck(self):
+    def assertNoAck(self) -> None:
         self.assertEqual(None, self._walker.pop_ack())
 
-    def assertAcks(self, acks):
+    def assertAcks(self, acks) -> None:
         for sha, ack_type in acks:
             self.assertEqual((sha, ack_type), self._walker.pop_ack())
         self.assertNoAck()
 
-    def assertAck(self, sha, ack_type=b""):
+    def assertAck(self, sha, ack_type=b"") -> None:
         self.assertAcks([(sha, ack_type)])
 
-    def assertNak(self):
+    def assertNak(self) -> None:
         self.assertAck(None, b"nak")
 
-    def assertNextEquals(self, sha):
+    def assertNextEquals(self, sha) -> None:
         self.assertEqual(sha, next(self._impl))
 
-    def assertNextEmpty(self):
+    def assertNextEmpty(self) -> None:
         # This is necessary because of no-done - the assumption that it
         # is safe to immediately send out the final ACK is no longer
         # true but the test is still needed for it.  TestProtocolWalker
@@ -675,7 +675,7 @@ class AckGraphWalkerImplTestCase(TestCase):
 class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
     impl_cls = SingleAckGraphWalkerImpl
 
-    def test_single_ack(self):
+    def test_single_ack(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -690,7 +690,7 @@ class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEquals(None)
         self.assertNoAck()
 
-    def test_single_ack_flush(self):
+    def test_single_ack_flush(self) -> None:
         # same as ack test but ends with a flush-pkt instead of done
         self._walker.lines[-1] = (None, None)
 
@@ -707,7 +707,7 @@ class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEquals(None)
         self.assertNoAck()
 
-    def test_single_ack_nak(self):
+    def test_single_ack_nak(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -721,7 +721,7 @@ class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertNak()
 
-    def test_single_ack_nak_flush(self):
+    def test_single_ack_nak_flush(self) -> None:
         # same as nak test but ends with a flush-pkt instead of done
         self._walker.lines[-1] = (None, None)
 
@@ -742,7 +742,7 @@ class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
 class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
     impl_cls = MultiAckGraphWalkerImpl
 
-    def test_multi_ack(self):
+    def test_multi_ack(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -758,7 +758,7 @@ class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertAck(THREE)
 
-    def test_multi_ack_partial(self):
+    def test_multi_ack_partial(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -773,7 +773,7 @@ class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertAck(ONE)
 
-    def test_multi_ack_flush(self):
+    def test_multi_ack_flush(self) -> None:
         self._walker.lines = [
             (b"have", TWO),
             (None, None),
@@ -798,7 +798,7 @@ class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertAck(THREE)
 
-    def test_multi_ack_nak(self):
+    def test_multi_ack_nak(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -816,7 +816,7 @@ class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
 class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
     impl_cls = MultiAckDetailedGraphWalkerImpl
 
-    def test_multi_ack(self):
+    def test_multi_ack(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -837,7 +837,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         # PACK is sent
         self.assertTrue(self._walker.pack_sent)
 
-    def test_multi_ack_nodone(self):
+    def test_multi_ack_nodone(self) -> None:
         self._walker.done_required = False
         self.assertNextEquals(TWO)
         self.assertNoAck()
@@ -859,7 +859,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         # PACK is sent
         self.assertTrue(self._walker.pack_sent)
 
-    def test_multi_ack_flush_end(self):
+    def test_multi_ack_flush_end(self) -> None:
         # transmission ends with a flush-pkt without a done but no-done is
         # assumed.
         self._walker.lines[-1] = (None, None)
@@ -881,7 +881,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         # PACK is NOT sent
         self.assertFalse(self._walker.pack_sent)
 
-    def test_multi_ack_flush_end_nodone(self):
+    def test_multi_ack_flush_end_nodone(self) -> None:
         # transmission ends with a flush-pkt without a done but no-done is
         # assumed.
         self._walker.lines[-1] = (None, None)
@@ -904,7 +904,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         # PACK is sent
         self.assertTrue(self._walker.pack_sent)
 
-    def test_multi_ack_partial(self):
+    def test_multi_ack_partial(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -919,7 +919,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertAck(ONE)
 
-    def test_multi_ack_flush(self):
+    def test_multi_ack_flush(self) -> None:
         # same as ack test but contains a flush-pkt in the middle
         self._walker.lines = [
             (b"have", TWO),
@@ -947,7 +947,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")])
 
-    def test_multi_ack_nak(self):
+    def test_multi_ack_nak(self) -> None:
         self.assertNextEquals(TWO)
         self.assertNoAck()
 
@@ -964,7 +964,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertTrue(self._walker.pack_sent)
 
-    def test_multi_ack_nak_nodone(self):
+    def test_multi_ack_nak_nodone(self) -> None:
         self._walker.done_required = False
         self.assertNextEquals(TWO)
         self.assertNoAck()
@@ -983,7 +983,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNak()
         self.assertNextEmpty()
 
-    def test_multi_ack_nak_flush(self):
+    def test_multi_ack_nak_flush(self) -> None:
         # same as nak test but contains a flush-pkt in the middle
         self._walker.lines = [
             (b"have", TWO),
@@ -1005,7 +1005,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNextEmpty()
         self.assertNak()
 
-    def test_multi_ack_stateless(self):
+    def test_multi_ack_stateless(self) -> None:
         # transmission ends with a flush-pkt
         self._walker.lines[-1] = (None, None)
         self._walker.stateless_rpc = True
@@ -1027,7 +1027,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
         self.assertNoAck()
         self.assertFalse(self._walker.pack_sent)
 
-    def test_multi_ack_stateless_nodone(self):
+    def test_multi_ack_stateless_nodone(self) -> None:
         self._walker.done_required = False
         # transmission ends with a flush-pkt
         self._walker.lines[-1] = (None, None)
@@ -1055,7 +1055,7 @@ class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
 class FileSystemBackendTests(TestCase):
     """Tests for FileSystemBackend."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.path = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.path)
@@ -1065,14 +1065,14 @@ class FileSystemBackendTests(TestCase):
         else:
             self.backend = FileSystemBackend()
 
-    def test_nonexistant(self):
+    def test_nonexistant(self) -> None:
         self.assertRaises(
             NotGitRepository,
             self.backend.open_repository,
             "/does/not/exist/unless/foo",
         )
 
-    def test_absolute(self):
+    def test_absolute(self) -> None:
         repo = self.backend.open_repository(self.path)
         self.assertTrue(
             os.path.samefile(
@@ -1080,14 +1080,14 @@ class FileSystemBackendTests(TestCase):
             )
         )
 
-    def test_child(self):
+    def test_child(self) -> None:
         self.assertRaises(
             NotGitRepository,
             self.backend.open_repository,
             os.path.join(self.path, "foo"),
         )
 
-    def test_bad_repo_path(self):
+    def test_bad_repo_path(self) -> None:
         backend = FileSystemBackend()
 
         self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups"))
@@ -1096,7 +1096,7 @@ class FileSystemBackendTests(TestCase):
 class DictBackendTests(TestCase):
     """Tests for DictBackend."""
 
-    def test_nonexistant(self):
+    def test_nonexistant(self) -> None:
         repo = MemoryRepo.init_bare([], {})
         backend = DictBackend({b"/": repo})
         self.assertRaises(
@@ -1105,7 +1105,7 @@ class DictBackendTests(TestCase):
             "/does/not/exist/unless/foo",
         )
 
-    def test_bad_repo_path(self):
+    def test_bad_repo_path(self) -> None:
         repo = MemoryRepo.init_bare([], {})
         backend = DictBackend({b"/": repo})
 
@@ -1115,7 +1115,7 @@ class DictBackendTests(TestCase):
 class ServeCommandTests(TestCase):
     """Tests for serve_command."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.backend = DictBackend({})
 
@@ -1128,7 +1128,7 @@ class ServeCommandTests(TestCase):
             outf=outf,
         )
 
-    def test_receive_pack(self):
+    def test_receive_pack(self) -> None:
         commit = make_commit(id=ONE, parents=[], commit_time=111)
         self.backend.repos[b"/"] = MemoryRepo.init_bare(
             [commit], {b"refs/heads/master": commit.id}
@@ -1150,13 +1150,13 @@ class ServeCommandTests(TestCase):
 class UpdateServerInfoTests(TestCase):
     """Tests for update_server_info."""
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.path = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.path)
         self.repo = Repo.init(self.path)
 
-    def test_empty(self):
+    def test_empty(self) -> None:
         update_server_info(self.repo)
         with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f:
             self.assertEqual(b"", f.read())
@@ -1164,7 +1164,7 @@ class UpdateServerInfoTests(TestCase):
         with open(p, "rb") as f:
             self.assertEqual(b"", f.read())
 
-    def test_simple(self):
+    def test_simple(self) -> None:
         commit_id = self.repo.do_commit(
             message=b"foo",
             committer=b"Joe Example <joe@example.com>",

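The UpdateServerInfoTests hunks above exercise dulwich.server.update_server_info, which regenerates the files that "dumb" HTTP clients fetch (.git/info/refs and .git/objects/info/packs). A minimal sketch of that call outside the test harness follows; the temporary path and commit metadata are illustrative assumptions, not taken from the suite:

    # Minimal sketch: regenerate dumb-HTTP metadata for a fresh repository.
    # The path and commit details below are illustrative assumptions.
    import os
    import tempfile

    from dulwich.repo import Repo
    from dulwich.server import update_server_info

    path = tempfile.mkdtemp()
    repo = Repo.init(path)
    repo.do_commit(
        message=b"example",
        committer=b"Joe Example <joe@example.com>",
    )
    update_server_info(repo)

    # The ref listing consumed by dumb HTTP clients now exists on disk.
    with open(os.path.join(path, ".git", "info", "refs"), "rb") as f:
        print(f.read())
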
+ 1 - 1
tests/test_stash.py

@@ -29,7 +29,7 @@ from . import TestCase
 class StashTests(TestCase):
     """Tests for stash."""
 
-    def test_obtain(self):
+    def test_obtain(self) -> None:
         repo = MemoryRepo()
         stash = Stash.from_repo(repo)
         self.assertIsInstance(stash, Stash)

+ 7 - 7
tests/test_utils.py

@@ -28,11 +28,11 @@ from . import TestCase
 
 
 class BuildCommitGraphTest(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
 
-    def test_linear(self):
+    def test_linear(self) -> None:
         c1, c2 = build_commit_graph(self.store, [[1], [2, 1]])
         for obj_id in [c1.id, c2.id, c1.tree, c2.tree]:
             self.assertIn(obj_id, self.store)
@@ -42,7 +42,7 @@ class BuildCommitGraphTest(TestCase):
         self.assertEqual([], self.store[c1.tree].items())
         self.assertGreater(c2.commit_time, c1.commit_time)
 
-    def test_merge(self):
+    def test_merge(self) -> None:
         c1, c2, c3, c4 = build_commit_graph(
             self.store, [[1], [2, 1], [3, 1], [4, 2, 3]]
         )
@@ -50,12 +50,12 @@ class BuildCommitGraphTest(TestCase):
         self.assertGreater(c4.commit_time, c2.commit_time)
         self.assertGreater(c4.commit_time, c3.commit_time)
 
-    def test_missing_parent(self):
+    def test_missing_parent(self) -> None:
         self.assertRaises(
             ValueError, build_commit_graph, self.store, [[1], [3, 2], [2, 1]]
         )
 
-    def test_trees(self):
+    def test_trees(self) -> None:
         a1 = make_object(Blob, data=b"aaa1")
         a2 = make_object(Blob, data=b"aaa2")
         c1, c2 = build_commit_graph(
@@ -66,14 +66,14 @@ class BuildCommitGraphTest(TestCase):
         self.assertEqual((0o100644, a1.id), self.store[c1.tree][b"a"])
         self.assertEqual((0o100644, a2.id), self.store[c2.tree][b"a"])
 
-    def test_attrs(self):
+    def test_attrs(self) -> None:
         c1, c2 = build_commit_graph(
             self.store, [[1], [2, 1]], attrs={1: {"message": b"Hooray!"}}
         )
         self.assertEqual(b"Hooray!", c1.message)
         self.assertEqual(b"Commit 2", c2.message)
 
-    def test_commit_time(self):
+    def test_commit_time(self) -> None:
         c1, c2, c3 = build_commit_graph(
             self.store,
             [[1], [2, 1], [3, 2]],

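The BuildCommitGraphTest hunks above annotate tests for build_commit_graph, a helper that synthesizes a commit DAG in a MemoryObjectStore from [commit_number, *parent_numbers] specs, with optional trees and attrs overrides. A minimal sketch of the helper, assuming the dulwich.tests.utils import path these tests rely on; the blob contents and overrides are illustrative:

    # Minimal sketch of build_commit_graph, mirroring the tests above.
    # Blob data and the trees/attrs values are illustrative assumptions.
    from dulwich.object_store import MemoryObjectStore
    from dulwich.objects import Blob
    from dulwich.tests.utils import build_commit_graph, make_object

    store = MemoryObjectStore()
    blob = make_object(Blob, data=b"contents")

    # [[1], [2, 1]] means: commit 1 has no parents, commit 2 has parent 1.
    c1, c2 = build_commit_graph(
        store,
        [[1], [2, 1]],
        trees={2: [(b"a", blob)]},
        attrs={1: {"message": b"Hooray!"}},
    )

    assert c1.message == b"Hooray!"
    assert store[c2.tree][b"a"] == (0o100644, blob.id)
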
+ 38 - 38
tests/test_walk.py

@@ -50,7 +50,7 @@ class TestWalkEntry:
 
 
 class WalkerTest(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
 
@@ -70,7 +70,7 @@ class WalkerTest(TestCase):
             commit_spec.append(c)
         return self.make_commits(commit_spec, **kwargs)
 
-    def assertWalkYields(self, expected, *args, **kwargs):
+    def assertWalkYields(self, expected, *args, **kwargs) -> None:
         walker = Walker(self.store, *args, **kwargs)
         expected = list(expected)
         for i, entry in enumerate(expected):
@@ -79,13 +79,13 @@ class WalkerTest(TestCase):
         actual = list(walker)
         self.assertEqual(expected, actual)
 
-    def test_tag(self):
+    def test_tag(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         t2 = make_tag(target=c2)
         self.store.add_object(t2)
         self.assertWalkYields([c2, c1], [t2.id])
 
-    def test_linear(self):
+    def test_linear(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([c1], [c1.id])
         self.assertWalkYields([c2, c1], [c2.id])
@@ -95,7 +95,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c3, c2], [c3.id, c1.id], exclude=[c1.id])
         self.assertWalkYields([c3], [c3.id, c1.id], exclude=[c2.id])
 
-    def test_missing(self):
+    def test_missing(self) -> None:
         cs = list(reversed(self.make_linear_commits(20)))
         self.assertWalkYields(cs, [cs[0].id])
 
@@ -107,7 +107,7 @@ class WalkerTest(TestCase):
             self.assertWalkYields(cs[:i], [cs[0].id], max_entries=i)
         self.assertRaises(MissingCommitError, Walker, self.store, [cs[-1].id])
 
-    def test_branch(self):
+    def test_branch(self) -> None:
         c1, x2, x3, y4 = self.make_commits([[1], [2, 1], [3, 2], [4, 1]])
         self.assertWalkYields([x3, x2, c1], [x3.id])
         self.assertWalkYields([y4, c1], [y4.id])
@@ -117,7 +117,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([y4], [y4.id], exclude=[x3.id])
         self.assertWalkYields([x3, x2], [x3.id], exclude=[y4.id])
 
-    def test_merge(self):
+    def test_merge(self) -> None:
         c1, c2, c3, c4 = self.make_commits([[1], [2, 1], [3, 1], [4, 2, 3]])
         self.assertWalkYields([c4, c3, c2, c1], [c4.id])
         self.assertWalkYields([c3, c1], [c3.id])
@@ -125,7 +125,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c4, c3], [c4.id], exclude=[c2.id])
         self.assertWalkYields([c4, c2], [c4.id], exclude=[c3.id])
 
-    def test_merge_of_new_branch_from_old_base(self):
+    def test_merge_of_new_branch_from_old_base(self) -> None:
         # The commit on the branch was made at a time after any of the
         # commits on master, but the branch was from an older commit.
         # See also test_merge_of_old_branch
@@ -139,7 +139,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c2, c1], [c2.id])
 
     @expectedFailure
-    def test_merge_of_old_branch(self):
+    def test_merge_of_old_branch(self) -> None:
         # The commit on the branch was made at a time before any of
         # the commits on master, but it was merged into master after
         # those commits.
@@ -153,23 +153,23 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c3, c2, c1], [c3.id])
         self.assertWalkYields([c2, c1], [c2.id])
 
-    def test_reverse(self):
+    def test_reverse(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([c1, c2, c3], [c3.id], reverse=True)
 
-    def test_max_entries(self):
+    def test_max_entries(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([c3, c2, c1], [c3.id], max_entries=3)
         self.assertWalkYields([c3, c2], [c3.id], max_entries=2)
         self.assertWalkYields([c3], [c3.id], max_entries=1)
 
-    def test_reverse_after_max_entries(self):
+    def test_reverse_after_max_entries(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([c1, c2, c3], [c3.id], max_entries=3, reverse=True)
         self.assertWalkYields([c2, c3], [c3.id], max_entries=2, reverse=True)
         self.assertWalkYields([c3], [c3.id], max_entries=1, reverse=True)
 
-    def test_changes_one_parent(self):
+    def test_changes_one_parent(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_b2 = make_object(Blob, data=b"b2")
@@ -190,7 +190,7 @@ class WalkerTest(TestCase):
         )
         self.assertWalkYields([e2, e1], [c2.id])
 
-    def test_changes_multiple_parents(self):
+    def test_changes_multiple_parents(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_b2 = make_object(Blob, data=b"b2")
         blob_a3 = make_object(Blob, data=b"a3")
@@ -213,7 +213,7 @@ class WalkerTest(TestCase):
             [TestWalkEntry(c3, changes)], [c3.id], exclude=[c1.id, c2.id]
         )
 
-    def test_path_matches(self):
+    def test_path_matches(self) -> None:
         walker = Walker(None, [], paths=[b"foo", b"bar", b"baz/quux"])
         self.assertTrue(walker._path_matches(b"foo"))
         self.assertTrue(walker._path_matches(b"foo/a"))
@@ -228,7 +228,7 @@ class WalkerTest(TestCase):
         self.assertFalse(walker._path_matches(b"baz"))
         self.assertFalse(walker._path_matches(b"baz/quu"))
 
-    def test_paths(self):
+    def test_paths(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_b2 = make_object(Blob, data=b"b2")
         blob_a3 = make_object(Blob, data=b"a3")
@@ -255,7 +255,7 @@ class WalkerTest(TestCase):
             [TestWalkEntry(c3, changes)], [c3.id], max_entries=1, paths=[b"a"]
         )
 
-    def test_paths_subtree(self):
+    def test_paths_subtree(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
         c1, c2, c3 = self.make_linear_commits(
@@ -269,7 +269,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c2], [c3.id], paths=[b"b"])
         self.assertWalkYields([c3, c1], [c3.id], paths=[b"x"])
 
-    def test_paths_max_entries(self):
+    def test_paths_max_entries(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
         c1, c2 = self.make_linear_commits(
@@ -278,7 +278,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c2], [c2.id], paths=[b"b"], max_entries=1)
         self.assertWalkYields([c1], [c1.id], paths=[b"a"], max_entries=1)
 
-    def test_paths_merge(self):
+    def test_paths_merge(self) -> None:
         blob_a1 = make_object(Blob, data=b"a1")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_a3 = make_object(Blob, data=b"a3")
@@ -294,7 +294,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([m3, y2, x1], [m3.id], paths=[b"a"])
         self.assertWalkYields([y2, x1], [m4.id], paths=[b"a"])
 
-    def test_changes_with_renames(self):
+    def test_changes_with_renames(self) -> None:
         blob = make_object(Blob, data=b"blob")
         c1, c2 = self.make_linear_commits(
             2, trees={1: [(b"a", blob)], 2: [(b"b", blob)]}
@@ -319,7 +319,7 @@ class WalkerTest(TestCase):
             rename_detector=detector,
         )
 
-    def test_follow_rename(self):
+    def test_follow_rename(self) -> None:
         blob = make_object(Blob, data=b"blob")
         names = [b"a", b"a", b"b", b"b", b"c", b"c"]
 
@@ -341,7 +341,7 @@ class WalkerTest(TestCase):
             follow=True,
         )
 
-    def test_follow_rename_remove_path(self):
+    def test_follow_rename_remove_path(self) -> None:
         blob = make_object(Blob, data=b"blob")
         _, _, _, c4, c5, c6 = self.make_linear_commits(
             6,
@@ -370,7 +370,7 @@ class WalkerTest(TestCase):
             follow=True,
         )
 
-    def test_since(self):
+    def test_since(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([c3, c2, c1], [c3.id], since=-1)
         self.assertWalkYields([c3, c2, c1], [c3.id], since=0)
@@ -383,7 +383,7 @@ class WalkerTest(TestCase):
         self.assertWalkYields([], [c3.id], since=201)
         self.assertWalkYields([], [c3.id], since=300)
 
-    def test_until(self):
+    def test_until(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([], [c3.id], until=-1)
         self.assertWalkYields([c1], [c3.id], until=0)
@@ -396,14 +396,14 @@ class WalkerTest(TestCase):
         self.assertWalkYields([c3, c2, c1], [c3.id], until=201)
         self.assertWalkYields([c3, c2, c1], [c3.id], until=300)
 
-    def test_since_until(self):
+    def test_since_until(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([], [c3.id], since=100, until=99)
         self.assertWalkYields([c3, c2, c1], [c3.id], since=-1, until=201)
         self.assertWalkYields([c2], [c3.id], since=100, until=100)
         self.assertWalkYields([c2], [c3.id], since=50, until=150)
 
-    def test_since_over_scan(self):
+    def test_since_over_scan(self) -> None:
         commits = self.make_linear_commits(11, times=[9, 0, 1, 2, 3, 4, 5, 8, 6, 7, 9])
         c8, _, c10, c11 = commits[-4:]
         del self.store[commits[0].id]
@@ -413,18 +413,18 @@ class WalkerTest(TestCase):
         # even with over-scanning.
         self.assertWalkYields([c11, c10, c8], [c11.id], since=7)
 
-    def assertTopoOrderEqual(self, expected_commits, commits):
+    def assertTopoOrderEqual(self, expected_commits, commits) -> None:
         entries = [TestWalkEntry(c, None) for c in commits]
         actual_ids = [e.commit.id for e in list(_topo_reorder(entries))]
         self.assertEqual([c.id for c in expected_commits], actual_ids)
 
-    def test_topo_reorder_linear(self):
+    def test_topo_reorder_linear(self) -> None:
         commits = self.make_linear_commits(5)
         commits.reverse()
         for perm in permutations(commits):
             self.assertTopoOrderEqual(commits, perm)
 
-    def test_topo_reorder_multiple_parents(self):
+    def test_topo_reorder_multiple_parents(self) -> None:
         c1, c2, c3 = self.make_commits([[1], [2], [3, 1, 2]])
         # Already sorted, so totally FIFO.
         self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1])
@@ -438,7 +438,7 @@ class WalkerTest(TestCase):
         self.assertTopoOrderEqual([c3, c2, c1], [c1, c2, c3])
         self.assertTopoOrderEqual([c3, c2, c1], [c2, c1, c3])
 
-    def test_topo_reorder_multiple_children(self):
+    def test_topo_reorder_multiple_children(self) -> None:
         c1, c2, c3 = self.make_commits([[1], [2, 1], [3, 1]])
 
         # c2 and c3 are FIFO but c1 moves to the end.
@@ -450,14 +450,14 @@ class WalkerTest(TestCase):
         self.assertTopoOrderEqual([c2, c3, c1], [c2, c1, c3])
         self.assertTopoOrderEqual([c2, c3, c1], [c1, c2, c3])
 
-    def test_out_of_order_children(self):
+    def test_out_of_order_children(self) -> None:
         c1, c2, c3, c4, c5 = self.make_commits(
             [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[2, 1, 3, 4, 5]
         )
         self.assertWalkYields([c5, c4, c3, c1, c2], [c5.id])
         self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id], order=ORDER_TOPO)
 
-    def test_out_of_order_with_exclude(self):
+    def test_out_of_order_with_exclude(self) -> None:
         # Create the following graph:
         # c1-------x2---m6
         #   \          /
@@ -472,13 +472,13 @@ class WalkerTest(TestCase):
         # priority queue long before y5.
         self.assertWalkYields([m6, x2], [m6.id], exclude=[y5.id])
 
-    def test_empty_walk(self):
+    def test_empty_walk(self) -> None:
         c1, c2, c3 = self.make_linear_commits(3)
         self.assertWalkYields([], [c3.id], exclude=[c3.id])
 
 
 class WalkEntryTest(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.store = MemoryObjectStore()
 
@@ -498,7 +498,7 @@ class WalkEntryTest(TestCase):
             commit_spec.append(c)
         return self.make_commits(commit_spec, **kwargs)
 
-    def test_all_changes(self):
+    def test_all_changes(self) -> None:
         # Construct a commit with 2 files in different subdirectories.
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
@@ -520,7 +520,7 @@ class WalkEntryTest(TestCase):
             changes,
         )
 
-    def test_all_with_merge(self):
+    def test_all_with_merge(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_b = make_object(Blob, data=b"b")
@@ -560,7 +560,7 @@ class WalkEntryTest(TestCase):
             changes,
         )
 
-    def test_filter_changes(self):
+    def test_filter_changes(self) -> None:
         # Construct a commit with 2 files in different subdirectories.
         blob_a = make_object(Blob, data=b"a")
         blob_b = make_object(Blob, data=b"b")
@@ -581,7 +581,7 @@ class WalkEntryTest(TestCase):
             changes,
         )
 
-    def test_filter_with_merge(self):
+    def test_filter_with_merge(self) -> None:
         blob_a = make_object(Blob, data=b"a")
         blob_a2 = make_object(Blob, data=b"a2")
         blob_b = make_object(Blob, data=b"b")

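The WalkerTest hunks above annotate tests for dulwich.walk.Walker, the log-style history iterator: it takes an object store, the commit ids to include, and optional exclude/paths/ordering arguments, and yields entries exposing the commit and its tree changes. A minimal sketch of that traversal over a synthetic history; the commit spec is an illustrative assumption:

    # Minimal sketch of Walker over a synthetic three-commit history.
    # The commit spec below is an illustrative assumption.
    from dulwich.object_store import MemoryObjectStore
    from dulwich.tests.utils import build_commit_graph
    from dulwich.walk import Walker

    store = MemoryObjectStore()
    c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 2]])

    # Walk from c3 back to the root, newest first.
    for entry in Walker(store, [c3.id]):
        print(entry.commit.id)

    # Excluding everything reachable from c1 leaves only c2 and c3.
    assert [e.commit.id for e in Walker(store, [c3.id], exclude=[c1.id])] == [
        c3.id,
        c2.id,
    ]
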
+ 53 - 52
tests/test_web.py

@@ -24,6 +24,7 @@ import gzip
 import os
 import re
 from io import BytesIO
+from typing import NoReturn
 
 from dulwich.object_store import MemoryObjectStore
 from dulwich.objects import Blob
@@ -71,10 +72,10 @@ class MinimalistWSGIInputStream:
 class MinimalistWSGIInputStream2(MinimalistWSGIInputStream):
     """WSGI input stream with no *working* 'seek()' and 'tell()' methods."""
 
-    def seek(self, pos):
+    def seek(self, pos) -> NoReturn:
         raise NotImplementedError
 
-    def tell(self):
+    def tell(self) -> NoReturn:
         raise NotImplementedError
 
 
@@ -85,10 +86,10 @@ class TestHTTPGitRequest(HTTPGitRequest):
         HTTPGitRequest.__init__(self, *args, **kwargs)
         self.cached = None
 
-    def nocache(self):
+    def nocache(self) -> None:
         self.cached = False
 
-    def cache_forever(self):
+    def cache_forever(self) -> None:
         self.cached = True
 
 
@@ -97,7 +98,7 @@ class WebTestCase(TestCase):
 
     _req_class: type[HTTPGitRequest] = TestHTTPGitRequest
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._environ = {}
         self._req = self._req_class(
@@ -112,10 +113,10 @@ class WebTestCase(TestCase):
         self._headers = list(headers)
         return self._output.write
 
-    def _handlers(self):
+    def _handlers(self) -> None:
         return None
 
-    def assertContentTypeEquals(self, expected):
+    def assertContentTypeEquals(self, expected) -> None:
         self.assertIn(("Content-Type", expected), self._headers)
 
 
@@ -131,11 +132,11 @@ def _test_backend(objects, refs=None, named_files=None):
 
 
 class DumbHandlersTestCase(WebTestCase):
-    def test_send_file_not_found(self):
+    def test_send_file_not_found(self) -> None:
         list(send_file(self._req, None, "text/plain"))
         self.assertEqual(HTTP_NOT_FOUND, self._status)
 
-    def test_send_file(self):
+    def test_send_file(self) -> None:
         f = BytesIO(b"foobar")
         output = b"".join(send_file(self._req, f, "some/thing"))
         self.assertEqual(b"foobar", output)
@@ -143,7 +144,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("some/thing")
         self.assertTrue(f.closed)
 
-    def test_send_file_buffered(self):
+    def test_send_file_buffered(self) -> None:
         bufsize = 10240
         xs = b"x" * bufsize
         f = BytesIO(2 * xs)
@@ -152,16 +153,16 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("some/thing")
         self.assertTrue(f.closed)
 
-    def test_send_file_error(self):
+    def test_send_file_error(self) -> None:
         class TestFile:
             def __init__(self, exc_class) -> None:
                 self.closed = False
                 self._exc_class = exc_class
 
-            def read(self, size=-1):
+            def read(self, size=-1) -> NoReturn:
                 raise self._exc_class
 
-            def close(self):
+            def close(self) -> None:
                 self.closed = True
 
         f = TestFile(IOError)
@@ -176,7 +177,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertTrue(f.closed)
         self.assertFalse(self._req.cached)
 
-    def test_get_text_file(self):
+    def test_get_text_file(self) -> None:
         backend = _test_backend([], named_files={"description": b"foo"})
         mat = re.search(".*", "description")
         output = b"".join(get_text_file(self._req, backend, mat))
@@ -185,7 +186,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("text/plain")
         self.assertFalse(self._req.cached)
 
-    def test_get_loose_object(self):
+    def test_get_loose_object(self) -> None:
         blob = make_object(Blob, data=b"foo")
         backend = _test_backend([blob])
         mat = re.search("^(..)(.{38})$", blob.id.decode("ascii"))
@@ -195,17 +196,17 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("application/x-git-loose-object")
         self.assertTrue(self._req.cached)
 
-    def test_get_loose_object_missing(self):
+    def test_get_loose_object_missing(self) -> None:
         mat = re.search("^(..)(.{38})$", "1" * 40)
         list(get_loose_object(self._req, _test_backend([]), mat))
         self.assertEqual(HTTP_NOT_FOUND, self._status)
 
-    def test_get_loose_object_error(self):
+    def test_get_loose_object_error(self) -> None:
         blob = make_object(Blob, data=b"foo")
         backend = _test_backend([blob])
         mat = re.search("^(..)(.{38})$", blob.id.decode("ascii"))
 
-        def as_legacy_object_error(self):
+        def as_legacy_object_error(self) -> NoReturn:
             raise OSError
 
         self.addCleanup(setattr, Blob, "as_legacy_object", Blob.as_legacy_object)
@@ -213,7 +214,7 @@ class DumbHandlersTestCase(WebTestCase):
         list(get_loose_object(self._req, backend, mat))
         self.assertEqual(HTTP_ERROR, self._status)
 
-    def test_get_pack_file(self):
+    def test_get_pack_file(self) -> None:
         pack_name = os.path.join("objects", "pack", "pack-%s.pack" % ("1" * 40))
         backend = _test_backend([], named_files={pack_name: b"pack contents"})
         mat = re.search(".*", pack_name)
@@ -223,7 +224,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("application/x-git-packed-objects")
         self.assertTrue(self._req.cached)
 
-    def test_get_idx_file(self):
+    def test_get_idx_file(self) -> None:
         idx_name = os.path.join("objects", "pack", "pack-%s.idx" % ("1" * 40))
         backend = _test_backend([], named_files={idx_name: b"idx contents"})
         mat = re.search(".*", idx_name)
@@ -233,7 +234,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("application/x-git-packed-objects-toc")
         self.assertTrue(self._req.cached)
 
-    def test_get_info_refs(self):
+    def test_get_info_refs(self) -> None:
         self._environ["QUERY_STRING"] = ""
 
         blob1 = make_object(Blob, data=b"1")
@@ -265,7 +266,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertContentTypeEquals("text/plain")
         self.assertFalse(self._req.cached)
 
-    def test_get_info_refs_not_found(self):
+    def test_get_info_refs_not_found(self) -> None:
         self._environ["QUERY_STRING"] = ""
 
         objects = []
@@ -280,7 +281,7 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertEqual(HTTP_NOT_FOUND, self._status)
         self.assertContentTypeEquals("text/plain")
 
-    def test_get_info_packs(self):
+    def test_get_info_packs(self) -> None:
         class TestPackData:
             def __init__(self, sha) -> None:
                 self.filename = f"pack-{sha}.pack"
@@ -326,7 +327,7 @@ class SmartHandlersTestCase(WebTestCase):
             self.stateless_rpc = stateless_rpc
             self.advertise_refs = advertise_refs
 
-        def handle(self):
+        def handle(self) -> None:
             self.proto.write(b"handled input: " + self.proto.recv(1024))
 
     def _make_handler(self, *args, **kwargs):
@@ -336,21 +337,21 @@ class SmartHandlersTestCase(WebTestCase):
     def _handlers(self):
         return {b"git-upload-pack": self._make_handler}
 
-    def test_handle_service_request_unknown(self):
+    def test_handle_service_request_unknown(self) -> None:
         mat = re.search(".*", "/git-evil-handler")
         content = list(handle_service_request(self._req, "backend", mat))
         self.assertEqual(HTTP_FORBIDDEN, self._status)
         self.assertNotIn(b"git-evil-handler", b"".join(content))
         self.assertFalse(self._req.cached)
 
-    def _run_handle_service_request(self, content_length=None):
+    def _run_handle_service_request(self, content_length=None) -> None:
         self._environ["wsgi.input"] = BytesIO(b"foo")
         if content_length is not None:
             self._environ["CONTENT_LENGTH"] = content_length
         mat = re.search(".*", "/git-upload-pack")
 
         class Backend:
-            def open_repository(self, path):
+            def open_repository(self, path) -> None:
                 return None
 
         handler_output = b"".join(handle_service_request(self._req, Backend(), mat))
@@ -363,20 +364,20 @@ class SmartHandlersTestCase(WebTestCase):
         self.assertTrue(self._handler.stateless_rpc)
         self.assertFalse(self._req.cached)
 
-    def test_handle_service_request(self):
+    def test_handle_service_request(self) -> None:
         self._run_handle_service_request()
 
-    def test_handle_service_request_with_length(self):
+    def test_handle_service_request_with_length(self) -> None:
         self._run_handle_service_request(content_length="3")
 
-    def test_handle_service_request_empty_length(self):
+    def test_handle_service_request_empty_length(self) -> None:
         self._run_handle_service_request(content_length="")
 
-    def test_get_info_refs_unknown(self):
+    def test_get_info_refs_unknown(self) -> None:
         self._environ["QUERY_STRING"] = "service=git-evil-handler"
 
         class Backend:
-            def open_repository(self, url):
+            def open_repository(self, url) -> None:
                 return None
 
         mat = re.search(".*", "/git-evil-pack")
@@ -385,12 +386,12 @@ class SmartHandlersTestCase(WebTestCase):
         self.assertEqual(HTTP_FORBIDDEN, self._status)
         self.assertFalse(self._req.cached)
 
-    def test_get_info_refs(self):
+    def test_get_info_refs(self) -> None:
         self._environ["wsgi.input"] = BytesIO(b"foo")
         self._environ["QUERY_STRING"] = "service=git-upload-pack"
 
         class Backend:
-            def open_repository(self, url):
+            def open_repository(self, url) -> None:
                 return None
 
         mat = re.search(".*", "/git-upload-pack")
@@ -413,16 +414,16 @@ class SmartHandlersTestCase(WebTestCase):
 
 
 class LengthLimitedFileTestCase(TestCase):
-    def test_no_cutoff(self):
+    def test_no_cutoff(self) -> None:
         f = _LengthLimitedFile(BytesIO(b"foobar"), 1024)
         self.assertEqual(b"foobar", f.read())
 
-    def test_cutoff(self):
+    def test_cutoff(self) -> None:
         f = _LengthLimitedFile(BytesIO(b"foobar"), 3)
         self.assertEqual(b"foo", f.read())
         self.assertEqual(b"", f.read())
 
-    def test_multiple_reads(self):
+    def test_multiple_reads(self) -> None:
         f = _LengthLimitedFile(BytesIO(b"foobar"), 3)
         self.assertEqual(b"fo", f.read(2))
         self.assertEqual(b"o", f.read(2))
@@ -433,26 +434,26 @@ class HTTPGitRequestTestCase(WebTestCase):
     # This class tests the contents of the actual cache headers
     _req_class = HTTPGitRequest
 
-    def test_not_found(self):
+    def test_not_found(self) -> None:
         self._req.cache_forever()  # cache headers should be discarded
         message = "Something not found"
         self.assertEqual(message.encode("ascii"), self._req.not_found(message))
         self.assertEqual(HTTP_NOT_FOUND, self._status)
         self.assertEqual({("Content-Type", "text/plain")}, set(self._headers))
 
-    def test_forbidden(self):
+    def test_forbidden(self) -> None:
         self._req.cache_forever()  # cache headers should be discarded
         message = "Something not found"
         self.assertEqual(message.encode("ascii"), self._req.forbidden(message))
         self.assertEqual(HTTP_FORBIDDEN, self._status)
         self.assertEqual({("Content-Type", "text/plain")}, set(self._headers))
 
-    def test_respond_ok(self):
+    def test_respond_ok(self) -> None:
         self._req.respond()
         self.assertEqual([], self._headers)
         self.assertEqual(HTTP_OK, self._status)
 
-    def test_respond(self):
+    def test_respond(self) -> None:
         self._req.nocache()
         self._req.respond(
             status=402,
@@ -474,7 +475,7 @@ class HTTPGitRequestTestCase(WebTestCase):
 
 
 class HTTPGitApplicationTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._app = HTTPGitApplication("backend")
 
@@ -483,25 +484,25 @@ class HTTPGitApplicationTestCase(TestCase):
             "REQUEST_METHOD": "GET",
         }
 
-    def _test_handler(self, req, backend, mat):
+    def _test_handler(self, req, backend, mat) -> str:
         # tests interface used by all handlers
         self.assertEqual(self._environ, req.environ)
         self.assertEqual("backend", backend)
         self.assertEqual("/foo", mat.group(0))
         return "output"
 
-    def _add_handler(self, app):
+    def _add_handler(self, app) -> None:
         req = self._environ["REQUEST_METHOD"]
         app.services = {
             (req, re.compile("/foo$")): self._test_handler,
         }
 
-    def test_call(self):
+    def test_call(self) -> None:
         self._add_handler(self._app)
         self.assertEqual("output", self._app(self._environ, None))
 
-    def test_fallback_app(self):
-        def test_app(environ, start_response):
+    def test_fallback_app(self) -> None:
+        def test_app(environ, start_response) -> str:
             return "output"
 
         app = HTTPGitApplication("backend", fallback_app=test_app)
@@ -514,7 +515,7 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
     """
     example_text = __doc__.encode("ascii")
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self._app = GunzipFilter(self._app)
         self._environ["HTTP_CONTENT_ENCODING"] = "gzip"
@@ -529,7 +530,7 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
         zstream.seek(0)
         return zstream, zlength
 
-    def _test_call(self, orig, zstream, zlength):
+    def _test_call(self, orig, zstream, zlength) -> None:
         self._add_handler(self._app.app)
         self.assertLess(zlength, len(orig))
         self.assertEqual(self._environ["HTTP_CONTENT_ENCODING"], "gzip")
@@ -543,10 +544,10 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
         self.assertIs(None, self._environ.get("CONTENT_LENGTH"))
         self.assertNotIn("HTTP_CONTENT_ENCODING", self._environ)
 
-    def test_call(self):
+    def test_call(self) -> None:
         self._test_call(self.example_text, *self._get_zstream(self.example_text))
 
-    def test_call_no_seek(self):
+    def test_call_no_seek(self) -> None:
         """This ensures that the gunzipping code doesn't require any methods on
         'wsgi.input' except for '.read()'.  (In particular, it shouldn't
         require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.).
@@ -558,7 +559,7 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
             zlength,
         )
 
-    def test_call_no_working_seek(self):
+    def test_call_no_working_seek(self) -> None:
         """Similar to 'test_call_no_seek', but this time the methods are available
         (but defunct).  See https://github.com/jonashaag/klaus/issues/154.
         """

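The test_web.py hunks import typing.NoReturn and apply the return-annotation pattern this whole change follows, presumably driven by ruff's flake8-annotations (ANN) rules: methods and test helpers that return nothing get -> None, while stubs whose only body is a raise get -> NoReturn. A compact, self-contained sketch of that convention; the class and method names are illustrative, not from dulwich:

    # Illustrative sketch of the annotation pattern applied throughout this
    # change; the names here are assumptions, not dulwich code.
    from typing import NoReturn


    class ExampleStream:
        closed = False

        def close(self) -> None:
            # Side-effecting method that returns nothing: annotate -> None.
            self.closed = True

        def seek(self, pos: int) -> NoReturn:
            # Always raises and never returns normally: annotate -> NoReturn.
            raise NotImplementedError
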
Some files were not shown because too many files changed in this diff