
Reformat and add reformat make target

Jelmer Vernooij, 1 year ago
commit abb66b1b39

68 changed files with 1674 additions and 1167 deletions
1. Makefile (+3, -0)
2. docs/conf.py (+21, -22)
3. dulwich/archive.py (+1, -1)
4. dulwich/bundle.py (+12, -7)
5. dulwich/cli.py (+34, -33)
6. dulwich/client.py (+113, -77)
7. dulwich/cloud/gcs.py (+13, -14)
8. dulwich/config.py (+16, -38)
9. dulwich/contrib/diffstat.py (+4, -2)
10. dulwich/contrib/paramiko_vendor.py (+2, -3)
11. dulwich/contrib/requests_vendor.py (+3, -13)
12. dulwich/contrib/swift.py (+4, -3)
13. dulwich/contrib/test_paramiko_vendor.py (+40, -19)
14. dulwich/contrib/test_swift.py (+1, -3)
15. dulwich/diff_tree.py (+4, -2)
16. dulwich/errors.py (+2, -6)
17. dulwich/file.py (+2, -2)
18. dulwich/graph.py (+2, -2)
19. dulwich/greenthreads.py (+12, -4)
20. dulwich/hooks.py (+3, -3)
21. dulwich/ignore.py (+3, -1)
22. dulwich/index.py (+64, -42)
23. dulwich/line_ending.py (+1, -2)
24. dulwich/lru_cache.py (+18, -8)
25. dulwich/mailmap.py (+1, -1)
26. dulwich/object_store.py (+147, -74)
27. dulwich/objects.py (+72, -60)
28. dulwich/objectspec.py (+16, -9)
29. dulwich/pack.py (+311, -149)
30. dulwich/patch.py (+7, -5)
31. dulwich/porcelain.py (+47, -28)
32. dulwich/protocol.py (+2, -7)
33. dulwich/refs.py (+24, -23)
34. dulwich/repo.py (+93, -53)
35. dulwich/server.py (+37, -24)
36. dulwich/stash.py (+3, -5)
37. dulwich/tests/__init__.py (+6, -6)
38. dulwich/tests/compat/test_client.py (+3, -5)
39. dulwich/tests/compat/test_pack.py (+5, -3)
40. dulwich/tests/compat/test_porcelain.py (+9, -11)
41. dulwich/tests/compat/test_repository.py (+10, -2)
42. dulwich/tests/compat/test_utils.py (+1, -1)
43. dulwich/tests/compat/utils.py (+17, -10)
44. dulwich/tests/test_client.py (+30, -8)
45. dulwich/tests/test_config.py (+21, -21)
46. dulwich/tests/test_credentials.py (+11, -7)
47. dulwich/tests/test_hooks.py (+0, -2)
48. dulwich/tests/test_ignore.py (+12, -12)
49. dulwich/tests/test_index.py (+2, -11)
50. dulwich/tests/test_missing_obj_finder.py (+3, -4)
51. dulwich/tests/test_object_store.py (+9, -11)
52. dulwich/tests/test_objects.py (+12, -9)
53. dulwich/tests/test_objectspec.py (+2, -2)
54. dulwich/tests/test_pack.py (+78, -37)
55. dulwich/tests/test_porcelain.py (+170, -134)
56. dulwich/tests/test_refs.py (+6, -7)
57. dulwich/tests/test_repository.py (+70, -61)
58. dulwich/tests/test_server.py (+1, -6)
59. dulwich/tests/test_web.py (+0, -1)
60. dulwich/tests/utils.py (+3, -1)
61. dulwich/walk.py (+1, -1)
62. dulwich/web.py (+9, -5)
63. examples/diff.py (+1, -1)
64. examples/gcs.py (+2, -2)
65. examples/latest_change.py (+6, -3)
66. examples/memoryrepo.py (+8, -9)
67. examples/rename-branch.py (+5, -7)
68. setup.py (+23, -22)

Makefile (+3, -0)

@@ -79,6 +79,9 @@ apidocs:
 fix:
 	ruff check --fix .
 
+reformat:
+	ruff format .
+
 .PHONY: codespell
 
 codespell:
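
The new reformat target simply runs the ruff formatter over the whole tree, next to the existing fix target. A minimal usage sketch, assuming ruff is installed in the development environment (e.g. via pip install ruff):

$ make reformat    # runs: ruff format .
$ make fix         # existing target: runs ruff check --fix .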

docs/conf.py (+21, -22)

@@ -17,47 +17,47 @@ import sys
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath(".."))
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__))))
-dulwich = __import__('dulwich')
+dulwich = __import__("dulwich")
 
 # -- General configuration ----------------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.ifconfig',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.napoleon',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.ifconfig",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.napoleon",
 ]
 
 autoclass_content = "both"
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['templates']
+templates_path = ["templates"]
 
 # The suffix of source filenames.
-source_suffix = '.txt'
+source_suffix = ".txt"
 
 # The encoding of source files.
 #         source_encoding = 'utf-8'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = 'dulwich'
-copyright = '2011-2023 Jelmer Vernooij'
+project = "dulwich"
+copyright = "2011-2023 Jelmer Vernooij"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '.'.join(map(str, dulwich.__version__[:2]))
+version = ".".join(map(str, dulwich.__version__[:2]))
 # The full version, including alpha/beta/rc tags.
-release = '.'.join(map(str, dulwich.__version__))
+release = ".".join(map(str, dulwich.__version__))
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -74,7 +74,7 @@ release = '.'.join(map(str, dulwich.__version__))
 
 # List of directories, relative to source directory, that shouldn't be searched
 # for source files.
-exclude_trees = ['build']
+exclude_trees = ["build"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -92,7 +92,7 @@ exclude_trees = ['build']
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -103,7 +103,7 @@ pygments_style = 'sphinx'
 # The theme to use for HTML and HTML Help pages.  Major themes that come with
 # Sphinx are currently 'default' and 'sphinxdoc'.
 # html_theme = 'default'
-html_theme = 'agogo'
+html_theme = "agogo"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
@@ -111,7 +111,7 @@ html_theme = 'agogo'
 # html_theme_options = {}
 
 # Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['theme']
+html_theme_path = ["theme"]
 
 # The name for this set of Sphinx documents.  If None, it defaults to
 # "<project> v<release> documentation".
@@ -170,7 +170,7 @@ html_static_path = []
 # html_file_suffix = ''
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'dulwichdoc'
+htmlhelp_basename = "dulwichdoc"
 
 
 # -- Options for LaTeX output ------------------------------------------------
@@ -185,8 +185,7 @@ htmlhelp_basename = 'dulwichdoc'
 # (source start file, target name, title, author, documentclass
 # [howto/manual]).
 latex_documents = [
-    ('index', 'dulwich.tex', 'dulwich Documentation',
-     'Jelmer Vernooij', 'manual'),
+    ("index", "dulwich.tex", "dulwich Documentation", "Jelmer Vernooij", "manual"),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -208,6 +207,6 @@ latex_documents = [
 
 # Add mappings
 intersphinx_mapping = {
-    'urllib3': ('http://urllib3.readthedocs.org/en/latest', None),
-    'python': ('http://docs.python.org/3', None),
+    "urllib3": ("http://urllib3.readthedocs.org/en/latest", None),
+    "python": ("http://docs.python.org/3", None),
 }
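
The docs/conf.py hunks above (like most of the hunks that follow) are mechanical quote-style and line-wrapping changes; no configuration values change. A quick way to check whether a file is already formatter-clean, as a sketch assuming ruff is on PATH:

$ ruff format --check docs/conf.py   # exits non-zero if the file would be reformatted
$ ruff format --diff docs/conf.py    # prints the pending changes without applying them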

dulwich/archive.py (+1, -1)

@@ -108,7 +108,7 @@ def tar_stream(store, tree, mtime, prefix=b"", format=""):
 
             info = tarfile.TarInfo()
             # tarfile only works with ascii.
-            info.name = entry_abspath.decode('utf-8', 'surrogateescape')
+            info.name = entry_abspath.decode("utf-8", "surrogateescape")
             info.size = blob.raw_length()
             info.mode = entry.mode
             info.mtime = mtime

dulwich/bundle.py (+12, -7)

@@ -26,7 +26,6 @@ from .pack import PackData, write_pack_data
 
 
 class Bundle:
-
     version: Optional[int] = None
 
     capabilities: Dict[str, str] = {}
@@ -35,10 +34,12 @@ class Bundle:
     pack_data: Union[PackData, Sequence[bytes]] = []
 
     def __repr__(self) -> str:
-        return (f"<{type(self).__name__}(version={self.version}, "
-                f"capabilities={self.capabilities}, "
-                f"prerequisites={self.prerequisites}, "
-                f"references={self.references})>")
+        return (
+            f"<{type(self).__name__}(version={self.version}, "
+            f"capabilities={self.capabilities}, "
+            f"prerequisites={self.prerequisites}, "
+            f"references={self.references})>"
+        )
 
     def __eq__(self, other):
         if not isinstance(other, type(self)):
@@ -120,9 +121,13 @@ def write_bundle(f, bundle):
             if value is not None:
                 f.write(b"=" + value.encode("utf-8"))
             f.write(b"\n")
-    for (obj_id, comment) in bundle.prerequisites:
+    for obj_id, comment in bundle.prerequisites:
         f.write(b"-%s %s\n" % (obj_id, comment.encode("utf-8")))
     for ref, obj_id in bundle.references.items():
         f.write(b"%s %s\n" % (obj_id, ref))
     f.write(b"\n")
-    write_pack_data(f.write, num_records=len(bundle.pack_data), records=bundle.pack_data.iter_unpacked())
+    write_pack_data(
+        f.write,
+        num_records=len(bundle.pack_data),
+        records=bundle.pack_data.iter_unpacked(),
+    )

dulwich/cli.py (+34, -33)

@@ -72,7 +72,7 @@ class cmd_archive(Command):
             type=str,
             help="Retrieve archive from specified remote repo",
         )
-        parser.add_argument('committish', type=str, nargs='?')
+        parser.add_argument("committish", type=str, nargs="?")
         args = parser.parse_args(args)
         if args.remote:
             client, path = get_transport_and_path(args.remote)
@@ -84,8 +84,7 @@ class cmd_archive(Command):
             )
         else:
             porcelain.archive(
-                ".", args.committish, outstream=sys.stdout.buffer,
-                errstream=sys.stderr
+                ".", args.committish, outstream=sys.stdout.buffer, errstream=sys.stderr
             )
 
 
@@ -108,8 +107,8 @@ class cmd_rm(Command):
 class cmd_fetch_pack(Command):
     def run(self, argv):
         parser = argparse.ArgumentParser()
-        parser.add_argument('--all', action='store_true')
-        parser.add_argument('location', nargs='?', type=str)
+        parser.add_argument("--all", action="store_true")
+        parser.add_argument("location", nargs="?", type=str)
         args = parser.parse_args(argv)
         client, path = get_transport_and_path(args.location)
         r = Repo(".")
@@ -139,7 +138,7 @@ class cmd_fsck(Command):
     def run(self, args):
         opts, args = getopt(args, "", [])
         opts = dict(opts)
-        for (obj, msg) in porcelain.fsck("."):
+        for obj, msg in porcelain.fsck("."):
             print(f"{obj}: {msg}")
 
 
@@ -175,13 +174,14 @@ class cmd_diff(Command):
 
         r = Repo(".")
         if args == []:
-            commit_id = b'HEAD'
+            commit_id = b"HEAD"
         else:
             commit_id = args[0]
         commit = parse_commit(r, commit_id)
         parent_commit = r[commit.parents[0]]
         porcelain.diff_tree(
-            r, parent_commit.tree, commit.tree, outstream=sys.stdout.buffer)
+            r, parent_commit.tree, commit.tree, outstream=sys.stdout.buffer
+        )
 
 
 class cmd_dump_pack(Command):
@@ -249,9 +249,12 @@ class cmd_clone(Command):
             "--depth", dest="depth", type=int, help="Depth at which to fetch"
         )
         parser.add_option(
-            "-b", "--branch", dest="branch", type=str,
-            help=("Check out branch instead of branch pointed to by remote "
-                  "HEAD"))
+            "-b",
+            "--branch",
+            dest="branch",
+            type=str,
+            help=("Check out branch instead of branch pointed to by remote " "HEAD"),
+        )
         options, args = parser.parse_args(args)
 
         if args == []:
@@ -265,8 +268,13 @@ class cmd_clone(Command):
             target = None
 
         try:
-            porcelain.clone(source, target, bare=options.bare, depth=options.depth,
-                            branch=options.branch)
+            porcelain.clone(
+                source,
+                target,
+                bare=options.bare,
+                depth=options.depth,
+                branch=options.branch,
+            )
         except GitProtocolError as e:
             print("%s" % e)
 
@@ -307,9 +315,9 @@ class cmd_symbolic_ref(Command):
 class cmd_pack_refs(Command):
     def run(self, argv):
         parser = argparse.ArgumentParser()
-        parser.add_argument('--all', action='store_true')
+        parser.add_argument("--all", action="store_true")
         # ignored, we never prune
-        parser.add_argument('--no-prune', action='store_true')
+        parser.add_argument("--no-prune", action="store_true")
 
         args = parser.parse_args(argv)
 
@@ -319,7 +327,7 @@ class cmd_pack_refs(Command):
 class cmd_show(Command):
     def run(self, argv):
         parser = argparse.ArgumentParser()
-        parser.add_argument('objectish', type=str, nargs='*')
+        parser.add_argument("objectish", type=str, nargs="*")
         args = parser.parse_args(argv)
         porcelain.show(".", args.objectish or None)
 
@@ -562,12 +570,8 @@ class cmd_pack_objects(Command):
             idxf = open(basename + ".idx", "wb")
             close = [packf, idxf]
         porcelain.pack_objects(
-            ".",
-            object_ids,
-            packf,
-            idxf,
-            deltify=deltify,
-            reuse_deltas=reuse_deltas)
+            ".", object_ids, packf, idxf, deltify=deltify, reuse_deltas=reuse_deltas
+        )
         for f in close:
             f.close()
 
@@ -584,17 +588,18 @@ class cmd_pull(Command):
 
 
 class cmd_push(Command):
-
     def run(self, argv):
         parser = argparse.ArgumentParser()
-        parser.add_argument('-f', '--force', action='store_true', help='Force')
-        parser.add_argument('to_location', type=str)
-        parser.add_argument('refspec', type=str, nargs='*')
+        parser.add_argument("-f", "--force", action="store_true", help="Force")
+        parser.add_argument("to_location", type=str)
+        parser.add_argument("refspec", type=str, nargs="*")
         args = parser.parse_args(argv)
         try:
-            porcelain.push('.', args.to_location, args.refspec or None, force=args.force)
+            porcelain.push(
+                ".", args.to_location, args.refspec or None, force=args.force
+            )
         except porcelain.DivergedBranches:
-            sys.stderr.write('Diverged branches; specify --force to override')
+            sys.stderr.write("Diverged branches; specify --force to override")
             return 1
 
 
@@ -606,7 +611,6 @@ class cmd_remote_add(Command):
 
 
 class SuperCommand(Command):
-
     subcommands: Dict[str, Type[Command]] = {}
     default_command: Optional[Type[Command]] = None
 
@@ -624,7 +628,6 @@ class SuperCommand(Command):
 
 
 class cmd_remote(SuperCommand):
-
     subcommands = {
         "add": cmd_remote_add,
     }
@@ -635,7 +638,7 @@ class cmd_submodule_list(Command):
         parser = argparse.ArgumentParser()
         parser.parse_args(argv)
         for path, sha in porcelain.submodule_list("."):
-            sys.stdout.write(f' {sha} {path}\n')
+            sys.stdout.write(f" {sha} {path}\n")
 
 
 class cmd_submodule_init(Command):
@@ -646,7 +649,6 @@ class cmd_submodule_init(Command):
 
 
 class cmd_submodule(SuperCommand):
-
     subcommands = {
         "init": cmd_submodule_init,
    }
@@ -699,7 +701,6 @@ class cmd_stash_pop(Command):
 
 
 class cmd_stash(SuperCommand):
-
     subcommands = {
         "list": cmd_stash_list,
         "pop": cmd_stash_pop,

dulwich/client.py (+113, -77)

@@ -279,7 +279,9 @@ class FetchPackResult:
         "viewvalues",
     ]
 
-    def __init__(self, refs, symrefs, agent, new_shallow=None, new_unshallow=None) -> None:
+    def __init__(
+        self, refs, symrefs, agent, new_shallow=None, new_unshallow=None
+    ) -> None:
         self.refs = refs
         self.symrefs = symrefs
         self.agent = agent
@@ -427,7 +429,6 @@ def _read_shallow_updates(pkt_seq):
 
 
 class _v1ReceivePackHeader:
-
     def __init__(self, capabilities, old_refs, new_refs) -> None:
         self.want: List[bytes] = []
         self.have: List[bytes] = []
@@ -466,8 +467,8 @@ class _v1ReceivePackHeader:
 
             if old_sha1 != new_sha1:
                 logger.debug(
-                    'Sending updated ref %r: %r -> %r',
-                    refname, old_sha1, new_sha1)
+                    "Sending updated ref %r: %r -> %r", refname, old_sha1, new_sha1
+                )
                 if self.sent_capabilities:
                     yield old_sha1 + b" " + new_sha1 + b" " + refname
                 else:
@@ -499,9 +500,7 @@ def _read_side_band64k_data(pkt_seq: Iterable[bytes]) -> Iterator[Tuple[int, byt
         yield channel, pkt[1:]
 
 
-def _handle_upload_pack_head(
-    proto, capabilities, graph_walker, wants, can_read, depth
-):
+def _handle_upload_pack_head(proto, capabilities, graph_walker, wants, can_read, depth):
     """Handle the head of a 'git-upload-pack' request.
 
     Args:
@@ -515,12 +514,7 @@ def _handle_upload_pack_head(
     """
     assert isinstance(wants, list) and isinstance(wants[0], bytes)
     proto.write_pkt_line(
-        COMMAND_WANT
-        + b" "
-        + wants[0]
-        + b" "
-        + b" ".join(sorted(capabilities))
-        + b"\n"
+        COMMAND_WANT + b" " + wants[0] + b" " + b" ".join(sorted(capabilities)) + b"\n"
     )
     for want in wants[1:]:
         proto.write_pkt_line(COMMAND_WANT + b" " + want + b"\n")
@@ -609,8 +603,7 @@ def _handle_upload_pack_tail(
             elif chan == SIDE_BAND_CHANNEL_PROGRESS:
                 progress(data)
             else:
-                raise AssertionError(
-                    "Invalid sideband channel %d" % chan)
+                raise AssertionError("Invalid sideband channel %d" % chan)
     else:
         while True:
             data = proto.read(rbufsize)
@@ -678,7 +671,15 @@ class GitClient:
         """
         raise NotImplementedError(cls.from_parsedurl)
 
-    def send_pack(self, path, update_refs, generate_pack_data: Callable[[Set[bytes], Set[bytes], bool], Tuple[int, Iterator[UnpackedObject]]], progress=None):
+    def send_pack(
+        self,
+        path,
+        update_refs,
+        generate_pack_data: Callable[
+            [Set[bytes], Set[bytes], bool], Tuple[int, Iterator[UnpackedObject]]
+        ],
+        progress=None,
+    ):
         """Upload a pack to a remote repository.
 
         Args:
@@ -699,8 +700,18 @@ class GitClient:
         """
         raise NotImplementedError(self.send_pack)
 
-    def clone(self, path, target_path, mkdir: bool = True, bare=False, origin="origin",
-              checkout=None, branch=None, progress=None, depth=None):
+    def clone(
+        self,
+        path,
+        target_path,
+        mkdir: bool = True,
+        bare=False,
+        origin="origin",
+        checkout=None,
+        branch=None,
+        progress=None,
+        depth=None,
+    ):
         """Clone a repository."""
         from .refs import _set_default_branch, _set_head, _set_origin_head
 
@@ -720,35 +731,38 @@ class GitClient:
 
             # TODO(jelmer): abstract method for get_location?
             if isinstance(self, (LocalGitClient, SubprocessGitClient)):
-                encoded_path = path.encode('utf-8')
+                encoded_path = path.encode("utf-8")
             else:
-                encoded_path = self.get_url(path).encode('utf-8')
+                encoded_path = self.get_url(path).encode("utf-8")
 
             assert target is not None
             target_config = target.get_config()
-            target_config.set((b"remote", origin.encode('utf-8')), b"url", encoded_path)
+            target_config.set((b"remote", origin.encode("utf-8")), b"url", encoded_path)
             target_config.set(
-                (b"remote", origin.encode('utf-8')),
+                (b"remote", origin.encode("utf-8")),
                 b"fetch",
-                b"+refs/heads/*:refs/remotes/" + origin.encode('utf-8') + b"/*",
+                b"+refs/heads/*:refs/remotes/" + origin.encode("utf-8") + b"/*",
             )
             target_config.write_to_path()
 
             ref_message = b"clone: from " + encoded_path
             result = self.fetch(path, target, progress=progress, depth=depth)
-            _import_remote_refs(
-                target.refs, origin, result.refs, message=ref_message)
+            _import_remote_refs(target.refs, origin, result.refs, message=ref_message)
 
             origin_head = result.symrefs.get(b"HEAD")
-            origin_sha = result.refs.get(b'HEAD')
+            origin_sha = result.refs.get(b"HEAD")
             if origin_sha and not origin_head:
                 # set detached HEAD
                 target.refs[b"HEAD"] = origin_sha
                 head = origin_sha
             else:
-                _set_origin_head(target.refs, origin.encode('utf-8'), origin_head)
+                _set_origin_head(target.refs, origin.encode("utf-8"), origin_head)
                 head_ref = _set_default_branch(
-                    target.refs, origin.encode('utf-8'), origin_head, branch, ref_message
+                    target.refs,
+                    origin.encode("utf-8"),
+                    origin_head,
+                    branch,
+                    ref_message,
                 )
 
                 # Update target head
@@ -764,6 +778,7 @@ class GitClient:
                 target.close()
             if mkdir:
                 import shutil
+
                 shutil.rmtree(target_path)
             raise
         return target
@@ -776,7 +791,7 @@ class GitClient:
             Callable[[Dict[bytes, bytes], Optional[int]], List[bytes]]
         ] = None,
         progress: Optional[Callable[[bytes], None]] = None,
-        depth: Optional[int] = None
+        depth: Optional[int] = None,
     ) -> FetchPackResult:
         """Fetch into a target repository.
 
@@ -797,9 +812,12 @@ class GitClient:
             determine_wants = target.object_store.determine_wants_all
         if CAPABILITY_THIN_PACK in self._fetch_capabilities:
             from tempfile import SpooledTemporaryFile
+
             f: IO[bytes] = SpooledTemporaryFile(
-                max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix='incoming-',
-                dir=getattr(target.object_store, 'path', None))
+                max_size=PACK_SPOOL_FILE_MAX_SIZE,
+                prefix="incoming-",
+                dir=getattr(target.object_store, "path", None),
+            )
 
             def commit():
                 if f.tell():
@@ -917,8 +935,7 @@ class GitClient:
                 elif chan == SIDE_BAND_CHANNEL_PROGRESS:
                     progress(data)
                 else:
-                    raise AssertionError(
-                        "Invalid sideband channel %d" % chan)
+                    raise AssertionError("Invalid sideband channel %d" % chan)
         else:
             if CAPABILITY_REPORT_STATUS in capabilities:
                 assert self._report_status_parser
@@ -1078,7 +1095,9 @@ class TraditionalGitClient(GitClient):
                     ref_status = None
                 return SendPackResult(old_refs, agent=agent, ref_status=ref_status)
 
-            header_handler = _v1ReceivePackHeader(negotiated_capabilities, old_refs, new_refs)
+            header_handler = _v1ReceivePackHeader(
+                negotiated_capabilities, old_refs, new_refs
+            )
 
             for pkt in header_handler:
                 proto.write_pkt_line(pkt)
@@ -1091,7 +1110,9 @@ class TraditionalGitClient(GitClient):
             )
 
             if self._should_send_pack(new_refs):
-                for chunk in PackChunkGenerator(pack_data_count, pack_data, progress=progress):
+                for chunk in PackChunkGenerator(
+                    pack_data_count, pack_data, progress=progress
+                ):
                     proto.write(chunk)
 
             ref_status = self._handle_receive_pack_tail(
@@ -1260,7 +1281,7 @@ class TCPGitClient(TraditionalGitClient):
         )
         s = None
         err = OSError("no address found for %s" % self._host)
-        for (family, socktype, proto, canonname, sockaddr) in sockaddrs:
+        for family, socktype, proto, canonname, sockaddr in sockaddrs:
             s = socket.socket(family, socktype, proto)
             s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
             try:
@@ -1383,8 +1404,9 @@ class SubprocessGitClient(TraditionalGitClient):
 class LocalGitClient(GitClient):
     """Git Client that just uses a local Repo."""
 
-    def __init__(self, thin_packs=True, report_activity=None,
-                 config: Optional[Config] = None) -> None:
+    def __init__(
+        self, thin_packs=True, report_activity=None, config: Optional[Config] = None
+    ) -> None:
         """Create a new LocalGitClient instance.
 
         Args:
@@ -1404,7 +1426,6 @@ class LocalGitClient(GitClient):
 
     @classmethod
     def _open_repo(cls, path):
-
         if not isinstance(path, str):
             path = os.fsdecode(path)
         return closing(Repo(path))
@@ -1532,7 +1553,9 @@ class LocalGitClient(GitClient):
             # Note that the client still expects a 0-object pack in most cases.
             if object_ids is None:
                 return FetchPackResult(None, symrefs, agent)
-            write_pack_from_container(pack_data, r.object_store, object_ids, other_haves=other_haves)
+            write_pack_from_container(
+                pack_data, r.object_store, object_ids, other_haves=other_haves
+            )
             return FetchPackResult(r.get_refs(), symrefs, agent)
 
     def get_refs(self, path):
@@ -1595,7 +1618,6 @@ class SubprocessSSHVendor(SSHVendor):
         key_filename=None,
         ssh_command=None,
     ):
-
        if password is not None:
             raise NotImplementedError(
                 "Setting password not supported by SubprocessSSHVendor."
@@ -1603,8 +1625,8 @@ class SubprocessSSHVendor(SSHVendor):
 
         if ssh_command:
             import shlex
-            args = shlex.split(
-                ssh_command, posix=(sys.platform != 'win32')) + ["-x"]
+
+            args = shlex.split(ssh_command, posix=(sys.platform != "win32")) + ["-x"]
         else:
             args = ["ssh", "-x"]
 
@@ -1643,11 +1665,10 @@ class PLinkSSHVendor(SSHVendor):
         key_filename=None,
         ssh_command=None,
     ):
-
         if ssh_command:
             import shlex
-            args = shlex.split(
-                ssh_command, posix=(sys.platform != 'win32')) + ["-ssh"]
+
+            args = shlex.split(ssh_command, posix=(sys.platform != "win32")) + ["-ssh"]
         elif sys.platform == "win32":
             args = ["plink.exe", "-ssh"]
         else:
@@ -1711,7 +1732,7 @@ class SSHGitClient(TraditionalGitClient):
         password=None,
         key_filename=None,
         ssh_command=None,
-        **kwargs
+        **kwargs,
     ) -> None:
         self.host = host
         self.port = port
@@ -1744,7 +1765,7 @@ class SSHGitClient(TraditionalGitClient):
             host=parsedurl.hostname,
             port=parsedurl.port,
             username=parsedurl.username,
-            **kwargs
+            **kwargs,
        )
 
     def _get_cmd_path(self, cmd):
@@ -1794,8 +1815,12 @@ def default_user_agent_string():
     return "git/dulwich/%s" % ".".join([str(x) for x in dulwich.__version__])
 
 
-def default_urllib3_manager(   # noqa: C901
-    config, pool_manager_cls=None, proxy_manager_cls=None, base_url=None, **override_kwargs
+def default_urllib3_manager(  # noqa: C901
+    config,
+    pool_manager_cls=None,
+    proxy_manager_cls=None,
+    base_url=None,
+    **override_kwargs,
 ) -> Union["urllib3.ProxyManager", "urllib3.PoolManager"]:
     """Return urllib3 connection pool manager.
 
@@ -1823,7 +1848,7 @@ def default_urllib3_manager(   # noqa: C901
     if proxy_server:
         if check_for_proxy_bypass(base_url):
             proxy_server = None
-    
+
     if config is not None:
         if proxy_server is None:
             try:
@@ -1852,7 +1877,7 @@ def default_urllib3_manager(   # noqa: C901
     headers = {"User-agent": user_agent}
 
     kwargs = {
-        "ca_certs" : ca_certs,
+        "ca_certs": ca_certs,
     }
     if ssl_verify is True:
         kwargs["cert_reqs"] = "CERT_REQUIRED"
@@ -1899,30 +1924,36 @@ def check_for_proxy_bypass(base_url):
                 except ValueError:
                     hostname_ip = None
 
-                no_proxy_values = no_proxy_str.split(',')
+                no_proxy_values = no_proxy_str.split(",")
                 for no_proxy_value in no_proxy_values:
                     no_proxy_value = no_proxy_value.strip()
                     if no_proxy_value:
                         no_proxy_value = no_proxy_value.lower()
-                        no_proxy_value = no_proxy_value.lstrip('.')  # ignore leading dots
+                        no_proxy_value = no_proxy_value.lstrip(
+                            "."
+                        )  # ignore leading dots
 
                         if hostname_ip:
                             # check if no_proxy_value is a ip network
                             try:
-                                no_proxy_value_network = ipaddress.ip_network(no_proxy_value, strict=False)
+                                no_proxy_value_network = ipaddress.ip_network(
+                                    no_proxy_value, strict=False
+                                )
                             except ValueError:
                                 no_proxy_value_network = None
                             if no_proxy_value_network:
                                 # if hostname is a ip address and no_proxy_value is a ip network -> check if ip address is part of network
                                 if hostname_ip in no_proxy_value_network:
                                     return True
-                                
+
-                        if no_proxy_value == '*':
+                        if no_proxy_value == "*":
                             # '*' is special case for always bypass proxy
                             return True
                         if hostname == no_proxy_value:
                             return True
-                        no_proxy_value = '.' + no_proxy_value   # add a dot to only match complete domains
+                        no_proxy_value = (
+                            "." + no_proxy_value
+                        )  # add a dot to only match complete domains
                         if hostname.endswith(no_proxy_value):
                             return True
     return False
@@ -1979,9 +2010,9 @@ class AbstractHttpGitClient(GitClient):
             base_url = urljoin(url, resp.redirect_location[: -len(tail)])
 
         try:
-            self.dumb = (
-                resp.content_type is None
-                or not resp.content_type.startswith("application/x-git-"))
+            self.dumb = resp.content_type is None or not resp.content_type.startswith(
+                "application/x-git-"
+            )
             if not self.dumb:
                 proto = Protocol(read, None)
                 # The first line should mention the service
@@ -1989,7 +2020,8 @@ class AbstractHttpGitClient(GitClient):
                     [pkt] = list(proto.read_pkt_seq())
                 except ValueError as exc:
                     raise GitProtocolError(
-                        "unexpected number of packets received") from exc
+                        "unexpected number of packets received"
+                    ) from exc
                 if pkt.rstrip(b"\n") != (b"# service=" + service):
                     raise GitProtocolError(
                         "unexpected first line %r from smart server" % pkt
@@ -2016,7 +2048,7 @@ class AbstractHttpGitClient(GitClient):
         if isinstance(data, bytes):
             headers["Content-Length"] = str(len(data))
         resp, read = self._http_request(url, headers, data)
-        if resp.content_type.split(';')[0] != result_content_type:
+        if resp.content_type.split(";")[0] != result_content_type:
             raise GitProtocolError(
                 "Invalid content-type from server: %s" % resp.content_type
             )
@@ -2064,7 +2096,9 @@ class AbstractHttpGitClient(GitClient):
             raise NotImplementedError(self.fetch_pack)
 
         def body_generator():
-            header_handler = _v1ReceivePackHeader(negotiated_capabilities, old_refs, new_refs)
+            header_handler = _v1ReceivePackHeader(
+                negotiated_capabilities, old_refs, new_refs
+            )
             for pkt in header_handler:
                 yield pkt_line(pkt)
             pack_data_count, pack_data = generate_pack_data(
@@ -2075,9 +2109,7 @@ class AbstractHttpGitClient(GitClient):
             if self._should_send_pack(new_refs):
                 yield from PackChunkGenerator(pack_data_count, pack_data)
 
-        resp, read = self._smart_request(
-            "git-receive-pack", url, data=body_generator()
-        )
+        resp, read = self._smart_request("git-receive-pack", url, data=body_generator())
         try:
             resp_proto = Protocol(read, None)
             ref_status = self._handle_receive_pack_tail(
@@ -2146,7 +2178,8 @@ class AbstractHttpGitClient(GitClient):
             resp_proto = Protocol(read, None)
             if new_shallow is None and new_unshallow is None:
                 (new_shallow, new_unshallow) = _read_shallow_updates(
-                    resp_proto.read_pkt_seq())
+                    resp_proto.read_pkt_seq()
+                )
             _handle_upload_pack_tail(
                 resp_proto,
                 negotiated_capabilities,
@@ -2193,7 +2226,7 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
         config=None,
         username=None,
         password=None,
-        **kwargs
+        **kwargs,
     ) -> None:
         self._username = username
         self._password = password
@@ -2214,8 +2247,7 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
 
         self.config = config
 
-        super().__init__(
-            base_url=base_url, dumb=dumb, **kwargs)
+        super().__init__(base_url=base_url, dumb=dumb, **kwargs)
 
     def _get_url(self, path):
         if not isinstance(path, str):
@@ -2226,6 +2258,7 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
 
     def _http_request(self, url, headers=None, data=None):
         import urllib3.exceptions
+
         req_headers = self.pool_manager.headers.copy()
         if headers is not None:
             req_headers.update(headers)
@@ -2234,7 +2267,8 @@ class Urllib3HttpGitClient(AbstractHttpGitClient):
         try:
             if data is None:
                 resp = self.pool_manager.request(
-                    "GET", url, headers=req_headers, preload_content=False)
+                    "GET", url, headers=req_headers, preload_content=False
+                )
             else:
                 resp = self.pool_manager.request(
                     "POST", url, headers=req_headers, body=data, preload_content=False
@@ -2298,8 +2332,8 @@ def _win32_url_to_path(parsed) -> str:
 
 
 def get_transport_and_path_from_url(
-        url: str, config: Optional[Config] = None,
-        operation: Optional[str] = None, **kwargs) -> Tuple[GitClient, str]:
+    url: str, config: Optional[Config] = None, operation: Optional[str] = None, **kwargs
+) -> Tuple[GitClient, str]:
     """Obtain a git client from a URL.
 
     Args:
@@ -2318,7 +2352,8 @@ def get_transport_and_path_from_url(
         url = apply_instead_of(config, url, push=(operation == "push"))
 
     return _get_transport_and_path_from_url(
-        url, config=config, operation=operation, **kwargs)
+        url, config=config, operation=operation, **kwargs
+    )
 
 
 def _get_transport_and_path_from_url(url, config, operation, **kwargs):
@@ -2366,7 +2401,7 @@ def get_transport_and_path(
     location: str,
     config: Optional[Config] = None,
     operation: Optional[str] = None,
-    **kwargs
+    **kwargs,
 ) -> Tuple[GitClient, str]:
     """Obtain a git client from a URL.
 
@@ -2388,7 +2423,8 @@ def get_transport_and_path(
     # First, try to parse it as a URL
     try:
         return _get_transport_and_path_from_url(
-            location, config=config, operation=operation, **kwargs)
+            location, config=config, operation=operation, **kwargs
+        )
     except ValueError:
         pass
 

dulwich/cloud/gcs.py (+13, -14)

@@ -31,8 +31,7 @@ from ..pack import PACK_SPOOL_FILE_MAX_SIZE, Pack, PackData, load_pack_index_fil
 
 
 class GcsObjectStore(BucketBasedObjectStore):
-
-    def __init__(self, bucket, subpath='') -> None:
+    def __init__(self, bucket, subpath="") -> None:
         super().__init__()
         self.bucket = bucket
         self.subpath = subpath
@@ -41,9 +40,9 @@ class GcsObjectStore(BucketBasedObjectStore):
         return f"{type(self).__name__}({self.bucket!r}, subpath={self.subpath!r})"
 
     def _remove_pack(self, name):
-        self.bucket.delete_blobs([
-            posixpath.join(self.subpath, name) + '.' + ext
-            for ext in ['pack', 'idx']])
+        self.bucket.delete_blobs(
+            [posixpath.join(self.subpath, name) + "." + ext for ext in ["pack", "idx"]]
+        )
 
     def _iter_pack_names(self):
         packs = {}
@@ -51,30 +50,30 @@ class GcsObjectStore(BucketBasedObjectStore):
             name, ext = posixpath.splitext(posixpath.basename(blob.name))
             packs.setdefault(name, set()).add(ext)
         for name, exts in packs.items():
-            if exts == {'.pack', '.idx'}:
+            if exts == {".pack", ".idx"}:
                 yield name
 
     def _load_pack_data(self, name):
-        b = self.bucket.blob(posixpath.join(self.subpath, name + '.pack'))
+        b = self.bucket.blob(posixpath.join(self.subpath, name + ".pack"))
         f = tempfile.SpooledTemporaryFile(max_size=PACK_SPOOL_FILE_MAX_SIZE)
         b.download_to_file(f)
         f.seek(0)
-        return PackData(name + '.pack', f)
+        return PackData(name + ".pack", f)
 
     def _load_pack_index(self, name):
-        b = self.bucket.blob(posixpath.join(self.subpath, name + '.idx'))
+        b = self.bucket.blob(posixpath.join(self.subpath, name + ".idx"))
         f = tempfile.SpooledTemporaryFile(max_size=PACK_SPOOL_FILE_MAX_SIZE)
         b.download_to_file(f)
         f.seek(0)
-        return load_pack_index_file(name + '.idx', f)
+        return load_pack_index_file(name + ".idx", f)
 
     def _get_pack(self, name):
         return Pack.from_lazy_objects(
-            lambda: self._load_pack_data(name),
-            lambda: self._load_pack_index(name))
+            lambda: self._load_pack_data(name), lambda: self._load_pack_index(name)
+        )
 
     def _upload_pack(self, basename, pack_file, index_file):
-        idxblob = self.bucket.blob(posixpath.join(self.subpath, basename + '.idx'))
-        datablob = self.bucket.blob(posixpath.join(self.subpath, basename + '.pack'))
+        idxblob = self.bucket.blob(posixpath.join(self.subpath, basename + ".idx"))
+        datablob = self.bucket.blob(posixpath.join(self.subpath, basename + ".pack"))
         idxblob.upload_from_file(index_file)
         datablob.upload_from_file(pack_file)

+ 16 - 38
dulwich/config.py

@@ -60,14 +60,12 @@ def lower_key(key):
 
 
 class CaseInsensitiveOrderedMultiDict(MutableMapping):
-
     def __init__(self) -> None:
         self._real: List[Any] = []
         self._keyed: Dict[Any, Any] = {}
 
     @classmethod
     def make(cls, dict_in=None):
-
         if isinstance(dict_in, cls):
             return dict_in
 
@@ -208,10 +206,7 @@ class Config:
         raise ValueError("not a valid boolean string: %r" % value)
 
     def set(
-        self,
-        section: SectionLike,
-        name: NameLike,
-        value: Union[ValueLike, bool]
+        self, section: SectionLike, name: NameLike, value: Union[ValueLike, bool]
     ) -> None:
         """Set a configuration value.
 
@@ -259,7 +254,7 @@ class ConfigDict(Config, MutableMapping[Section, MutableMapping[Name, Value]]):
         values: Union[
             MutableMapping[Section, MutableMapping[Name, Value]], None
         ] = None,
-        encoding: Union[str, None] = None
+        encoding: Union[str, None] = None,
     ) -> None:
         """Create a new ConfigDict."""
         if encoding is None:
@@ -276,11 +271,7 @@ class ConfigDict(Config, MutableMapping[Section, MutableMapping[Name, Value]]):
     def __getitem__(self, key: Section) -> MutableMapping[Name, Value]:
         return self._values.__getitem__(key)
 
-    def __setitem__(
-        self,
-        key: Section,
-        value: MutableMapping[Name, Value]
-    ) -> None:
+    def __setitem__(self, key: Section, value: MutableMapping[Name, Value]) -> None:
         return self._values.__setitem__(key, value)
 
     def __delitem__(self, key: Section) -> None:
@@ -301,9 +292,7 @@ class ConfigDict(Config, MutableMapping[Section, MutableMapping[Name, Value]]):
             return (parts[0], None, parts[1])
 
     def _check_section_and_name(
-        self,
-        section: SectionLike,
-        name: NameLike
+        self, section: SectionLike, name: NameLike
     ) -> Tuple[Section, Name]:
         if not isinstance(section, tuple):
             section = (section,)
@@ -322,11 +311,7 @@ class ConfigDict(Config, MutableMapping[Section, MutableMapping[Name, Value]]):
 
         return checked_section, name
 
-    def get_multivar(
-        self,
-        section: SectionLike,
-        name: NameLike
-    ) -> Iterator[Value]:
+    def get_multivar(self, section: SectionLike, name: NameLike) -> Iterator[Value]:
         section, name = self._check_section_and_name(section, name)
 
         if len(section) > 1:
@@ -369,8 +354,7 @@ class ConfigDict(Config, MutableMapping[Section, MutableMapping[Name, Value]]):
         self._values.setdefault(section)[name] = value
 
     def items(  # type: ignore[override]
-        self,
-        section: Section
+        self, section: Section
     ) -> Iterator[Tuple[Name, Value]]:
         return self._values.get(section).items()
 
@@ -498,15 +482,15 @@ def _parse_section_header_line(line: bytes) -> Tuple[Section, bytes]:
             continue
         if c == ord(b'"'):
             in_quotes = not in_quotes
-        if c == ord(b'\\'):
+        if c == ord(b"\\"):
             escaped = True
-        if c == ord(b']') and not in_quotes:
+        if c == ord(b"]") and not in_quotes:
             last = i
             break
     else:
         raise ValueError("expected trailing ]")
     pts = line[1:last].split(b" ", 1)
-    line = line[last + 1:]
+    line = line[last + 1 :]
     section: Section
     if len(pts) == 2:
         if pts[1][:1] != b'"' or pts[1][-1:] != b'"':
@@ -535,7 +519,7 @@ class ConfigFile(ConfigDict):
         values: Union[
             MutableMapping[Section, MutableMapping[Name, Value]], None
         ] = None,
-        encoding: Union[str, None] = None
+        encoding: Union[str, None] = None,
     ) -> None:
         super().__init__(values=values, encoding=encoding)
         self.path: Optional[str] = None
@@ -548,7 +532,7 @@ class ConfigFile(ConfigDict):
         setting = None
         continuation = None
         for lineno, line in enumerate(f.readlines()):
-            if lineno == 0 and line.startswith(b'\xef\xbb\xbf'):
+            if lineno == 0 and line.startswith(b"\xef\xbb\xbf"):
                 line = line[3:]
             line = line.lstrip()
             if setting is None:
@@ -655,10 +639,7 @@ def _find_git_in_win_reg():
             "CurrentVersion\\Uninstall\\Git_is1"
         )
     else:
-        subkey = (
-            "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\"
-            "Uninstall\\Git_is1"
-        )
+        subkey = "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\" "Uninstall\\Git_is1"
 
     for key in (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE):  # type: ignore
         with suppress(OSError):
@@ -744,10 +725,7 @@ class StackedConfig(Config):
                 pass
 
     def set(
-        self,
-        section: SectionLike,
-        name: NameLike,
-        value: Union[ValueLike, bool]
+        self, section: SectionLike, name: NameLike, value: Union[ValueLike, bool]
     ) -> None:
         if self.writable is None:
             raise NotImplementedError(self.set)
@@ -788,7 +766,7 @@ def parse_submodules(config: ConfigFile) -> Iterator[Tuple[bytes, bytes, bytes]]
 def iter_instead_of(config: Config, push: bool = False) -> Iterable[Tuple[str, str]]:
     """Iterate over insteadOf / pushInsteadOf values."""
     for section in config.sections():
-        if section[0] != b'url':
+        if section[0] != b"url":
             continue
         replacement = section[1]
         try:
@@ -802,7 +780,7 @@ def iter_instead_of(config: Config, push: bool = False) -> Iterable[Tuple[str, s
                 pass
         for needle in needles:
             assert isinstance(needle, bytes)
-            yield needle.decode('utf-8'), replacement.decode('utf-8')
+            yield needle.decode("utf-8"), replacement.decode("utf-8")
 
 
 def apply_instead_of(config: Config, orig_url: str, push: bool = False) -> str:
@@ -814,5 +792,5 @@ def apply_instead_of(config: Config, orig_url: str, push: bool = False) -> str:
             continue
         if len(longest_needle) < len(needle):
             longest_needle = needle
-            updated_url = replacement + orig_url[len(needle):]
+            updated_url = replacement + orig_url[len(needle) :]
     return updated_url

+ 4 - 2
dulwich/contrib/diffstat.py

@@ -39,7 +39,7 @@ from typing import List, Optional, Tuple
 # only needs to detect git style diffs as this is for
 # use with dulwich
 
-_git_header_name = re.compile(br"diff --git a/(.*) b/(.*)")
+_git_header_name = re.compile(rb"diff --git a/(.*) b/(.*)")
 
 _GIT_HEADER_START = b"diff --git a/"
 _GIT_BINARY_START = b"Binary file"
@@ -55,7 +55,9 @@ _GIT_UNCHANGED_START = b" "
 # properly interface with diffstat routine
 
 
-def _parse_patch(lines: List[bytes]) -> Tuple[List[bytes], List[bool], List[Tuple[int, int]]]:
+def _parse_patch(
+    lines: List[bytes]
+) -> Tuple[List[bytes], List[bool], List[Tuple[int, int]]]:
     """Parse a git style diff or patch to generate diff stats.
 
     Args:

+ 2 - 3
dulwich/contrib/paramiko_vendor.py

@@ -44,7 +44,7 @@ class _ParamikoWrapper:
 
     @property
     def stderr(self):
-        return self.channel.makefile_stderr('rb')
+        return self.channel.makefile_stderr("rb")
 
     def can_read(self):
         return self.channel.recv_ready()
@@ -85,9 +85,8 @@ class ParamikoSSHVendor:
         password=None,
         pkey=None,
         key_filename=None,
-        **kwargs
+        **kwargs,
     ):
-
         client = paramiko.SSHClient()
 
         connection_kwargs = {"hostname": host}

+ 3 - 13
dulwich/contrib/requests_vendor.py

@@ -44,13 +44,7 @@ from ..errors import GitProtocolError, NotGitRepository
 
 class RequestsHttpGitClient(AbstractHttpGitClient):
     def __init__(
-            self,
-            base_url,
-            dumb=None,
-            config=None,
-            username=None,
-            password=None,
-            **kwargs
+        self, base_url, dumb=None, config=None, username=None, password=None, **kwargs
     ) -> None:
         self._username = username
         self._password = password
@@ -60,8 +54,7 @@ class RequestsHttpGitClient(AbstractHttpGitClient):
         if username is not None:
             self.session.auth = (username, password)
 
-        super().__init__(
-            base_url=base_url, dumb=dumb, **kwargs)
+        super().__init__(base_url=base_url, dumb=dumb, **kwargs)
 
     def _http_request(self, url, headers=None, data=None, allow_compression=False):
         req_headers = self.session.headers.copy()
@@ -143,8 +136,5 @@ def get_session(config):
         session.verify = ssl_verify
 
     if proxy_server:
-        session.proxies.update({
-            "http": proxy_server,
-            "https": proxy_server
-        })
+        session.proxies.update({"http": proxy_server, "https": proxy_server})
     return session

+ 4 - 3
dulwich/contrib/swift.py

@@ -134,8 +134,7 @@ def load_conf(path=None, file=None):
         try:
             confpath = os.environ["DULWICH_SWIFT_CFG"]
         except KeyError as exc:
-            raise Exception(
-                "You need to specify a configuration file") from exc
+            raise Exception("You need to specify a configuration file") from exc
     else:
         confpath = path
     if not os.path.isfile(confpath):
@@ -1014,7 +1013,9 @@ def main(argv=sys.argv):
     }
 
     if len(sys.argv) < 2:
-        print("Usage: {} <{}> [OPTIONS...]".format(sys.argv[0], "|".join(commands.keys())))
+        print(
+            "Usage: {} <{}> [OPTIONS...]".format(sys.argv[0], "|".join(commands.keys()))
+        )
         sys.exit(1)
 
     cmd = sys.argv[1]

+ 40 - 19
dulwich/contrib/test_paramiko_vendor.py

@@ -36,6 +36,7 @@ else:
 
     class Server(paramiko.ServerInterface):
         """http://docs.paramiko.org/en/2.4/api/server.html."""
+
         def __init__(self, commands, *args, **kwargs) -> None:
             super().__init__(*args, **kwargs)
             self.commands = commands
@@ -64,8 +65,8 @@ else:
             return "password,publickey"
 
 
-USER = 'testuser'
-PASSWORD = 'test'
+USER = "testuser"
+PASSWORD = "test"
 SERVER_KEY = """\
 -----BEGIN RSA PRIVATE KEY-----
 MIIEpAIBAAKCAQEAy/L1sSYAzxsMprtNXW4u/1jGXXkQmQ2xtmKVlR+RlIL3a1BH
@@ -126,7 +127,6 @@ WxtWBWHwxfSmqgTXilEA3ALJp0kNolLnEttnhENwJpZHlqtes0ZA4w==
 
 @skipIf(not has_paramiko, "paramiko is not installed")
 class ParamikoSSHVendorTests(TestCase):
-
     def setUp(self):
         import paramiko.transport
 
@@ -138,7 +138,7 @@ class ParamikoSSHVendorTests(TestCase):
         socket.setdefaulttimeout(10)
         self.addCleanup(socket.setdefaulttimeout, None)
         self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.socket.bind(('127.0.0.1', 0))
+        self.socket.bind(("127.0.0.1", 0))
         self.socket.listen(5)
         self.addCleanup(self.socket.close)
         self.port = self.socket.getsockname()[1]
@@ -161,40 +161,61 @@ class ParamikoSSHVendorTests(TestCase):
         self.transport.start_server(server=server)
 
     def test_run_command_password(self):
-        vendor = ParamikoSSHVendor(allow_agent=False, look_for_keys=False,)
+        vendor = ParamikoSSHVendor(
+            allow_agent=False,
+            look_for_keys=False,
+        )
         vendor.run_command(
-            '127.0.0.1', 'test_run_command_password',
-            username=USER, port=self.port, password=PASSWORD)
+            "127.0.0.1",
+            "test_run_command_password",
+            username=USER,
+            port=self.port,
+            password=PASSWORD,
+        )
 
-        self.assertIn(b'test_run_command_password', self.commands)
+        self.assertIn(b"test_run_command_password", self.commands)
 
     def test_run_command_with_privkey(self):
         key = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY))
 
-        vendor = ParamikoSSHVendor(allow_agent=False, look_for_keys=False,)
+        vendor = ParamikoSSHVendor(
+            allow_agent=False,
+            look_for_keys=False,
+        )
         vendor.run_command(
-            '127.0.0.1', 'test_run_command_with_privkey',
-            username=USER, port=self.port, pkey=key)
+            "127.0.0.1",
+            "test_run_command_with_privkey",
+            username=USER,
+            port=self.port,
+            pkey=key,
+        )
 
-        self.assertIn(b'test_run_command_with_privkey', self.commands)
+        self.assertIn(b"test_run_command_with_privkey", self.commands)
 
     def test_run_command_data_transfer(self):
-        vendor = ParamikoSSHVendor(allow_agent=False, look_for_keys=False,)
+        vendor = ParamikoSSHVendor(
+            allow_agent=False,
+            look_for_keys=False,
+        )
         con = vendor.run_command(
-            '127.0.0.1', 'test_run_command_data_transfer',
-            username=USER, port=self.port, password=PASSWORD)
+            "127.0.0.1",
+            "test_run_command_data_transfer",
+            username=USER,
+            port=self.port,
+            password=PASSWORD,
+        )
 
-        self.assertIn(b'test_run_command_data_transfer', self.commands)
+        self.assertIn(b"test_run_command_data_transfer", self.commands)
 
         channel = self.transport.accept(5)
-        channel.send(b'stdout\n')
-        channel.send_stderr(b'stderr\n')
+        channel.send(b"stdout\n")
+        channel.send_stderr(b"stderr\n")
         channel.close()
 
         # Fixme: it's return false
         # self.assertTrue(con.can_read())
 
-        self.assertEqual(b'stdout\n', con.read(4096))
+        self.assertEqual(b"stdout\n", con.read(4096))
 
         # Fixme: it's return empty string
         # self.assertEqual(b'stderr\n', con.read_stderr(4096))

+ 1 - 3
dulwich/contrib/test_swift.py

@@ -396,9 +396,7 @@ class TestSwiftConnector(TestCase):
     def test_get_container_objects(self):
         with patch(
             "geventhttpclient.HTTPClient.request",
-            lambda *args: Response(
-                content=json.dumps(({"name": "a"}, {"name": "b"}))
-            ),
+            lambda *args: Response(content=json.dumps(({"name": "a"}, {"name": "b"}))),
         ):
             self.assertEqual(len(self.conn.get_container_objects()), 2)
 

+ 4 - 2
dulwich/diff_tree.py

@@ -264,7 +264,9 @@ def tree_changes_for_merge(store, parent_tree_ids, tree_id, rename_detector=None
         for t in parent_tree_ids
     ]
     num_parents = len(parent_tree_ids)
-    changes_by_path: Dict[str, List[Optional[TreeChange]]] = defaultdict(lambda: [None] * num_parents)
+    changes_by_path: Dict[str, List[Optional[TreeChange]]] = defaultdict(
+        lambda: [None] * num_parents
+    )
 
     # Organize by path.
     for i, parent_changes in enumerate(all_parent_changes):
@@ -517,7 +519,7 @@ class RenameDetector:
         self._prune(add_paths, delete_paths)
 
     def _should_find_content_renames(self):
-        return len(self._adds) * len(self._deletes) <= self._max_files ** 2
+        return len(self._adds) * len(self._deletes) <= self._max_files**2
 
     def _rename_type(self, check_paths, delete, add):
         if check_paths and delete.old.path == add.new.path:

+ 2 - 6
dulwich/errors.py

@@ -145,9 +145,7 @@ class HangupException(GitProtocolError):
                 )
             )
         else:
-            super().__init__(
-                "The remote server unexpectedly closed the connection."
-            )
+            super().__init__("The remote server unexpectedly closed the connection.")
         self.stderr_lines = stderr_lines
 
     def __eq__(self, other):
@@ -162,9 +160,7 @@ class UnexpectedCommandError(GitProtocolError):
             command = "flush-pkt"
         else:
             command = "command %s" % command
-        super().__init__(
-            "Protocol got unexpected %s" % command
-        )
+        super().__init__("Protocol got unexpected %s" % command)
 
 
 class FileFormatException(Exception):

+ 2 - 2
dulwich/file.py

@@ -205,8 +205,8 @@ class _GitFile:
             self.abort()
 
     def __del__(self) -> None:
-        if not getattr(self, '_closed', True):
-            warnings.warn('unclosed %r' % self, ResourceWarning, stacklevel=2)
+        if not getattr(self, "_closed", True):
+            warnings.warn("unclosed %r" % self, ResourceWarning, stacklevel=2)
             self.abort()
 
     def __enter__(self):

+ 2 - 2
dulwich/graph.py

@@ -44,7 +44,7 @@ class WorkList:
         return None
 
     def iter(self):
-        for (pr, cmt) in self.pq:
+        for pr, cmt in self.pq:
            yield (-pr, cmt)
 
 
@@ -96,7 +96,7 @@ def _find_lcas(lookup_parents, c1, c2s, lookup_stamp, min_stamp=0):
                 pflags = cstates.get(pcmt, 0)
                 # if this parent was already visited with no new ancestry/flag information
                 # do not add it to the working list again
-                if ((pflags & cflags) == cflags):
+                if (pflags & cflags) == cflags:
                     continue
                 pdt = lookup_stamp(pcmt)
                 if pdt < min_stamp:

+ 12 - 4
dulwich/greenthreads.py

@@ -89,9 +89,15 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
         self.object_store = object_store
         p = pool.Pool(size=concurrency)
 
-        have_commits, have_tags = _split_commits_and_tags(object_store, haves, ignore_unknown=True, pool=p)
-        want_commits, want_tags = _split_commits_and_tags(object_store, wants, ignore_unknown=False, pool=p)
-        all_ancestors: FrozenSet[ObjectID] = frozenset(_collect_ancestors(object_store, have_commits)[0])
+        have_commits, have_tags = _split_commits_and_tags(
+            object_store, haves, ignore_unknown=True, pool=p
+        )
+        want_commits, want_tags = _split_commits_and_tags(
+            object_store, wants, ignore_unknown=False, pool=p
+        )
+        all_ancestors: FrozenSet[ObjectID] = frozenset(
+            _collect_ancestors(object_store, have_commits)[0]
+        )
         missing_commits, common_commits = _collect_ancestors(
             object_store, want_commits, all_ancestors
         )
@@ -103,7 +109,9 @@ class GreenThreadsMissingObjectFinder(MissingObjectFinder):
             self.sha_done.add(t)
         missing_tags = want_tags.difference(have_tags)
         wants = missing_commits.union(missing_tags)
-        self.objects_to_send: Set[Tuple[ObjectID, Optional[bytes], Optional[int], bool]] = {(w, None, 0, False) for w in wants}
+        self.objects_to_send: Set[
+            Tuple[ObjectID, Optional[bytes], Optional[int], bool]
+        ] = {(w, None, 0, False) for w in wants}
         if progress is None:
             self.progress = lambda x: None
         else:

+ 3 - 3
dulwich/hooks.py

@@ -98,8 +98,8 @@ class ShellHook(Hook):
 
         try:
             ret = subprocess.call(
-                [os.path.relpath(self.filepath, self.cwd)] + list(args),
-                cwd=self.cwd)
+                [os.path.relpath(self.filepath, self.cwd)] + list(args), cwd=self.cwd
+            )
             if ret != 0:
                 if self.post_exec_callback is not None:
                     self.post_exec_callback(0, *args)
@@ -193,7 +193,7 @@ class PostReceiveShellHook(ShellHook):
             if (p.returncode != 0) or err_data:
                 err_fmt = b"post-receive exit code: %d\n" + b"stdout:\n%s\nstderr:\n%s"
                 err_msg = err_fmt % (p.returncode, out_data, err_data)
-                raise HookError(err_msg.decode('utf-8', 'backslashreplace'))
+                raise HookError(err_msg.decode("utf-8", "backslashreplace"))
             return out_data
         except OSError as err:
             raise HookError(repr(err)) from err

+ 3 - 1
dulwich/ignore.py

@@ -190,7 +190,9 @@ class Pattern:
 
 
 class IgnoreFilter:
-    def __init__(self, patterns: Iterable[bytes], ignorecase: bool = False, path=None) -> None:
+    def __init__(
+        self, patterns: Iterable[bytes], ignorecase: bool = False, path=None
+    ) -> None:
         self._patterns: List[Pattern] = []
         self._ignorecase = ignorecase
         self._path = path

+ 64 - 42
dulwich/index.py

@@ -148,9 +148,12 @@ class ConflictedIndexEntry:
     this: Optional[IndexEntry]
     other: Optional[IndexEntry]
 
-    def __init__(self, ancestor: Optional[IndexEntry] = None,
-                 this: Optional[IndexEntry] = None,
-                 other: Optional[IndexEntry] = None) -> None:
+    def __init__(
+        self,
+        ancestor: Optional[IndexEntry] = None,
+        this: Optional[IndexEntry] = None,
+        other: Optional[IndexEntry] = None,
+    ) -> None:
         self.ancestor = ancestor
         self.this = this
         self.other = other
@@ -231,9 +234,8 @@ def read_cache_entry(f, version: int) -> SerializedIndexEntry:
     ) = struct.unpack(">LLLLLL20sH", f.read(20 + 4 * 6 + 2))
     if flags & FLAG_EXTENDED:
         if version < 3:
-            raise AssertionError(
-                'extended flag set in index with version < 3')
-        (extended_flags, ) = struct.unpack(">H", f.read(2))
+            raise AssertionError("extended flag set in index with version < 3")
+        (extended_flags,) = struct.unpack(">H", f.read(2))
     else:
         extended_flags = 0
     name = f.read(flags & FLAG_NAMEMASK)
@@ -271,7 +273,7 @@ def write_cache_entry(f, entry: SerializedIndexEntry, version: int) -> None:
     if entry.extended_flags:
         flags |= FLAG_EXTENDED
     if flags & FLAG_EXTENDED and version is not None and version < 3:
-        raise AssertionError('unable to use extended flags in version < 3')
+        raise AssertionError("unable to use extended flags in version < 3")
     f.write(
         struct.pack(
             b">LLLLLL20sH",
@@ -337,7 +339,9 @@ def read_index_dict(f) -> Dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]:
     return ret
 
 
-def write_index(f: BinaryIO, entries: List[SerializedIndexEntry], version: Optional[int] = None):
+def write_index(
+    f: BinaryIO, entries: List[SerializedIndexEntry], version: Optional[int] = None
+):
     """Write an index file.
 
     Args:
@@ -366,11 +370,17 @@ def write_index_dict(
         value = entries[key]
         if isinstance(value, ConflictedIndexEntry):
             if value.ancestor is not None:
-                entries_list.append(value.ancestor.serialize(key, Stage.MERGE_CONFLICT_ANCESTOR))
+                entries_list.append(
+                    value.ancestor.serialize(key, Stage.MERGE_CONFLICT_ANCESTOR)
+                )
             if value.this is not None:
-                entries_list.append(value.this.serialize(key, Stage.MERGE_CONFLICT_THIS))
+                entries_list.append(
+                    value.this.serialize(key, Stage.MERGE_CONFLICT_THIS)
+                )
             if value.other is not None:
-                entries_list.append(value.other.serialize(key, Stage.MERGE_CONFLICT_OTHER))
+                entries_list.append(
+                    value.other.serialize(key, Stage.MERGE_CONFLICT_OTHER)
+                )
         else:
             entries_list.append(value.serialize(key, Stage.NORMAL))
     write_index(f, entries_list, version=version)
@@ -499,14 +509,18 @@ class Index:
         """Remove all contents from this index."""
         self._byname = {}
 
-    def __setitem__(self, name: bytes, value: Union[IndexEntry, ConflictedIndexEntry]) -> None:
+    def __setitem__(
+        self, name: bytes, value: Union[IndexEntry, ConflictedIndexEntry]
+    ) -> None:
         assert isinstance(name, bytes)
         self._byname[name] = value
 
     def __delitem__(self, name: bytes) -> None:
         del self._byname[name]
 
-    def iteritems(self) -> Iterator[Tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]:
+    def iteritems(
+        self
+    ) -> Iterator[Tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]:
         return iter(self._byname.items())
 
     def items(self) -> Iterator[Tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]:
@@ -520,7 +534,8 @@ class Index:
         yield from self._byname.keys()
 
     def changes_from_tree(
-            self, object_store, tree: ObjectID, want_unchanged: bool = False):
+        self, object_store, tree: ObjectID, want_unchanged: bool = False
+    ):
         """Find the differences between the contents of this index and a tree.
 
         Args:
@@ -639,7 +654,7 @@ def changes_from_tree(
     other_names = set(names)
 
     if tree is not None:
-        for (name, mode, sha) in iter_tree_contents(object_store, tree):
+        for name, mode, sha in iter_tree_contents(object_store, tree):
             try:
                 (other_sha, other_mode) = lookup_entry(name)
             except KeyError:
@@ -661,7 +676,9 @@ def changes_from_tree(
 
 
 def index_entry_from_stat(
-    stat_val, hex_sha: bytes, mode: Optional[int] = None,
+    stat_val,
+    hex_sha: bytes,
+    mode: Optional[int] = None,
 ):
     """Create a new index entry from a stat value.
 
@@ -685,7 +702,7 @@ def index_entry_from_stat(
     )
 
 
-if sys.platform == 'win32':
+if sys.platform == "win32":
     # On Windows, creating symlinks either requires administrator privileges
     # or developer mode. Raise a more helpful error when we're unable to
     # create symlinks
@@ -693,28 +710,33 @@ if sys.platform == 'win32':
     # https://github.com/jelmer/dulwich/issues/1005
 
     class WindowsSymlinkPermissionError(PermissionError):
-
         def __init__(self, errno, msg, filename) -> None:
             super(PermissionError, self).__init__(
-                errno, "Unable to create symlink; "
+                errno,
+                "Unable to create symlink; "
                 "do you have developer mode enabled? %s" % msg,
-                filename)
+                filename,
+            )
 
     def symlink(src, dst, target_is_directory=False, *, dir_fd=None):
         try:
             return os.symlink(
-                src, dst, target_is_directory=target_is_directory,
-                dir_fd=dir_fd)
+                src, dst, target_is_directory=target_is_directory, dir_fd=dir_fd
+            )
         except PermissionError as e:
-            raise WindowsSymlinkPermissionError(
-                e.errno, e.strerror, e.filename) from e
+            raise WindowsSymlinkPermissionError(e.errno, e.strerror, e.filename) from e
 else:
     symlink = os.symlink
 
 
 def build_file_from_blob(
-        blob: Blob, mode: int, target_path: bytes, *, honor_filemode=True,
-        tree_encoding="utf-8", symlink_fn=None
+    blob: Blob,
+    mode: int,
+    target_path: bytes,
+    *,
+    honor_filemode=True,
+    tree_encoding="utf-8",
+    symlink_fn=None,
 ):
     """Build a file or symlink on disk based on a Git object.
 
@@ -772,8 +794,7 @@ def validate_path_element_ntfs(element: bytes) -> bool:
     return True
 
 
-def validate_path(path: bytes,
-                  element_validator=validate_path_element_default) -> bool:
+def validate_path(path: bytes, element_validator=validate_path_element_default) -> bool:
     """Default path validator that just checks for .git/."""
     parts = path.split(b"/")
     for p in parts:
@@ -790,7 +811,7 @@ def build_index_from_tree(
     tree_id: bytes,
     honor_filemode: bool = True,
     validate_path_element=validate_path_element_default,
-    symlink_fn=None
+    symlink_fn=None,
 ):
     """Generate and materialize index from a tree.
 
@@ -829,7 +850,9 @@ def build_index_from_tree(
             obj = object_store[entry.sha]
             assert isinstance(obj, Blob)
             st = build_file_from_blob(
-                obj, entry.mode, full_path,
+                obj,
+                entry.mode,
+                full_path,
                 honor_filemode=honor_filemode,
                 symlink_fn=symlink_fn,
             )
@@ -859,8 +882,7 @@ def build_index_from_tree(
     index.write()
 
 
-def blob_from_path_and_mode(fs_path: bytes, mode: int,
-                            tree_encoding="utf-8"):
+def blob_from_path_and_mode(fs_path: bytes, mode: int, tree_encoding="utf-8"):
     """Create a blob from a path and a stat object.
 
     Args:
@@ -943,8 +965,8 @@ def _has_directory_changed(tree_path: bytes, entry):
 
 
 def get_unstaged_changes(
-        index: Index, root_path: Union[str, bytes],
-        filter_blob_callback=None):
+    index: Index, root_path: Union[str, bytes], filter_blob_callback=None
+):
     """Walk through an index and check for differences against working tree.
 
     Args:
@@ -1035,7 +1057,7 @@ def index_entry_from_directory(st, path: bytes) -> Optional[IndexEntry]:
 
 
 def index_entry_from_path(
-        path: bytes, object_store: Optional[ObjectContainer] = None
+    path: bytes, object_store: Optional[ObjectContainer] = None
 ) -> Optional[IndexEntry]:
     """Create an index from a filesystem path.
 
@@ -1064,8 +1086,9 @@ def index_entry_from_path(
 
 
 def iter_fresh_entries(
-    paths: Iterable[bytes], root_path: bytes,
-    object_store: Optional[ObjectContainer] = None
+    paths: Iterable[bytes],
+    root_path: bytes,
+    object_store: Optional[ObjectContainer] = None,
 ) -> Iterator[Tuple[bytes, Optional[IndexEntry]]]:
     """Iterate over current versions of index entries on disk.
 
@@ -1085,9 +1108,8 @@ def iter_fresh_entries(
 
 
 def iter_fresh_objects(
-        paths: Iterable[bytes], root_path: bytes, include_deleted=False,
-        object_store=None) -> Iterator[
-            Tuple[bytes, Optional[bytes], Optional[int]]]:
+    paths: Iterable[bytes], root_path: bytes, include_deleted=False, object_store=None
+) -> Iterator[Tuple[bytes, Optional[bytes], Optional[int]]]:
     """Iterate over versions of objects on disk referenced by index.
 
     Args:
@@ -1097,8 +1119,7 @@ def iter_fresh_objects(
       object_store: Optional object store to report new items to
     Returns: Iterator over path, sha, mode
    """
-    for path, entry in iter_fresh_entries(
-            paths, root_path, object_store=object_store):
+    for path, entry in iter_fresh_entries(paths, root_path, object_store=object_store):
        if entry is None:
            if include_deleted:
                yield path, None, None
@@ -1125,6 +1146,7 @@ class locked_index:
 
     Works as a context manager.
     """
+
    def __init__(self, path: Union[bytes, str]) -> None:
        self._path = path
 

+ 1 - 2
dulwich/line_ending.py

@@ -288,8 +288,7 @@ class TreeBlobNormalizer(BlobNormalizer):
         super().__init__(config_stack, git_attributes)
         if tree:
             self.existing_paths = {
-                name
-                for name, _, _ in iter_tree_contents(object_store, tree)
+                name for name, _, _ in iter_tree_contents(object_store, tree)
             }
         else:
             self.existing_paths = set()

+ 18 - 8
dulwich/lru_cache.py

@@ -26,8 +26,8 @@ from typing import Callable, Dict, Generic, Iterable, Iterator, Optional, TypeVa
 _null_key = object()
 
 
-K = TypeVar('K')
-V = TypeVar('V')
+K = TypeVar("K")
+V = TypeVar("V")
 
 
 class _LRUNode(Generic[K, V]):
@@ -76,7 +76,9 @@ class LRUCache(Generic[K, V]):
     _least_recently_used: Optional[_LRUNode[K, V]]
     _most_recently_used: Optional[_LRUNode[K, V]]
 
-    def __init__(self, max_cache: int = 100, after_cleanup_count: Optional[int] = None) -> None:
+    def __init__(
+        self, max_cache: int = 100, after_cleanup_count: Optional[int] = None
+    ) -> None:
         self._cache: Dict[K, _LRUNode[K, V]] = {}
         # The "HEAD" of the lru linked list
         self._most_recently_used = None
@@ -160,7 +162,9 @@ class LRUCache(Generic[K, V]):
             yield node
             node = node_next
 
-    def add(self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None) -> None:
+    def add(
+        self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None
+    ) -> None:
         """Add a new value to the cache.
 
         Also, if the entry is ever removed from the cache, call
@@ -313,8 +317,10 @@ class LRUSizeCache(LRUCache[K, V]):
     _compute_size: Callable[[V], int]
 
     def __init__(
-            self, max_size: int = 1024 * 1024, after_cleanup_size: Optional[int] = None,
-            compute_size: Optional[Callable[[V], int]] = None
+        self,
+        max_size: int = 1024 * 1024,
+        after_cleanup_size: Optional[int] = None,
+        compute_size: Optional[Callable[[V], int]] = None,
     ) -> None:
         """Create a new LRUSizeCache.
 
@@ -338,7 +344,9 @@ class LRUSizeCache(LRUCache[K, V]):
         self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
         LRUCache.__init__(self, max_cache=max(int(max_size / 512), 1))
 
-    def add(self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None) -> None:
+    def add(
+        self, key: K, value: V, cleanup: Optional[Callable[[K, V], None]] = None
+    ) -> None:
         """Add a new value to the cache.
 
         Also, if the entry is ever removed from the cache, call
@@ -398,7 +406,9 @@ class LRUSizeCache(LRUCache[K, V]):
         max_cache = max(int(max_size / 512), 1)
         self._update_max_cache(max_cache)
 
-    def _update_max_size(self, max_size: int, after_cleanup_size: Optional[int] = None) -> None:
+    def _update_max_size(
+        self, max_size: int, after_cleanup_size: Optional[int] = None
+    ) -> None:
         self._max_size = max_size
         if after_cleanup_size is None:
             self._after_cleanup_size = self._max_size * 8 // 10

+ 1 - 1
dulwich/mailmap.py

@@ -66,7 +66,7 @@ class Mailmap:
     def __init__(self, map=None) -> None:
         self._table: Dict[Tuple[Optional[str], str], Tuple[str, str]] = {}
         if map:
-            for (canonical_identity, from_identity) in map:
+            for canonical_identity, from_identity in map:
                 self.add_entry(canonical_identity, from_identity)
 
     def add_entry(self, canonical_identity, from_identity=None):

+ 147 - 74
dulwich/object_store.py

@@ -99,10 +99,7 @@ PACK_MODE = 0o444 if sys.platform != "win32" else 0o644
 
 
 
 
 class PackContainer(Protocol):
 class PackContainer(Protocol):
-
-    def add_pack(
-        self
-    ) -> Tuple[BytesIO, Callable[[], None], Callable[[], None]]:
+    def add_pack(self) -> Tuple[BytesIO, Callable[[], None], Callable[[], None]]:
         """Add a new pack."""
         """Add a new pack."""
 
 
 
 
@@ -110,9 +107,7 @@ class BaseObjectStore:
     """Object store interface."""
     """Object store interface."""
 
 
     def determine_wants_all(
     def determine_wants_all(
-        self,
-        refs: Dict[Ref, ObjectID],
-        depth: Optional[int] = None
+        self, refs: Dict[Ref, ObjectID], depth: Optional[int] = None
     ) -> List[ObjectID]:
     ) -> List[ObjectID]:
         def _want_deepen(sha):
         def _want_deepen(sha):
             if not depth:
             if not depth:
@@ -197,6 +192,7 @@ class BaseObjectStore:
             (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
             (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
         """
         """
         from .diff_tree import tree_changes
         from .diff_tree import tree_changes
+
         for change in tree_changes(
         for change in tree_changes(
             self,
             self,
             source,
             source,
@@ -225,10 +221,14 @@ class BaseObjectStore:
         """
         """
         warnings.warn(
         warnings.warn(
             "Please use dulwich.object_store.iter_tree_contents",
             "Please use dulwich.object_store.iter_tree_contents",
-            DeprecationWarning, stacklevel=2)
+            DeprecationWarning,
+            stacklevel=2,
+        )
         return iter_tree_contents(self, tree_id, include_trees=include_trees)
         return iter_tree_contents(self, tree_id, include_trees=include_trees)
 
 
-    def iterobjects_subset(self, shas: Iterable[bytes], *, allow_missing: bool = False) -> Iterator[ShaFile]:
+    def iterobjects_subset(
+        self, shas: Iterable[bytes], *, allow_missing: bool = False
+    ) -> Iterator[ShaFile]:
         for sha in shas:
         for sha in shas:
             try:
             try:
                 yield self[sha]
                 yield self[sha]
@@ -259,8 +259,7 @@ class BaseObjectStore:
             commit.
             commit.
         Returns: Iterator over (sha, path) pairs.
         Returns: Iterator over (sha, path) pairs.
         """
         """
-        warnings.warn(
-            'Please use MissingObjectFinder(store)', DeprecationWarning)
+        warnings.warn("Please use MissingObjectFinder(store)", DeprecationWarning)
         finder = MissingObjectFinder(
         finder = MissingObjectFinder(
             self,
             self,
             haves=haves,
             haves=haves,
@@ -289,8 +288,7 @@ class BaseObjectStore:
         return haves
         return haves
 
 
     def generate_pack_data(
     def generate_pack_data(
-        self, have, want, shallow=None, progress=None,
-        ofs_delta=True
+        self, have, want, shallow=None, progress=None, ofs_delta=True
     ) -> Tuple[int, Iterator[UnpackedObject]]:
     ) -> Tuple[int, Iterator[UnpackedObject]]:
         """Generate pack data objects for a set of wants/haves.
         """Generate pack data objects for a set of wants/haves.
 
 
@@ -304,11 +302,14 @@ class BaseObjectStore:
         # Note that the pack-specific implementation below is more efficient,
         # Note that the pack-specific implementation below is more efficient,
         # as it reuses deltas
         # as it reuses deltas
         missing_objects = MissingObjectFinder(
         missing_objects = MissingObjectFinder(
-            self, haves=have, wants=want, shallow=shallow, progress=progress)
+            self, haves=have, wants=want, shallow=shallow, progress=progress
+        )
         object_ids = list(missing_objects)
         object_ids = list(missing_objects)
         return pack_objects_to_data(
         return pack_objects_to_data(
-            [(self[oid], path) for oid, path in object_ids], ofs_delta=ofs_delta,
-            progress=progress)
+            [(self[oid], path) for oid, path in object_ids],
+            ofs_delta=ofs_delta,
+            progress=progress,
+        )
 
 
     def peel_sha(self, sha):
     def peel_sha(self, sha):
         """Peel all tags from a SHA.
         """Peel all tags from a SHA.
@@ -321,11 +322,16 @@ class BaseObjectStore:
         """
         """
         warnings.warn(
         warnings.warn(
             "Please use dulwich.object_store.peel_sha()",
             "Please use dulwich.object_store.peel_sha()",
-            DeprecationWarning, stacklevel=2)
+            DeprecationWarning,
+            stacklevel=2,
+        )
         return peel_sha(self, sha)[1]
         return peel_sha(self, sha)[1]
 
 
     def _get_depth(
     def _get_depth(
-        self, head, get_parents=lambda commit: commit.parents, max_depth=None,
+        self,
+        head,
+        get_parents=lambda commit: commit.parents,
+        max_depth=None,
     ):
     ):
         """Return the current available depth for the given head.
         """Return the current available depth for the given head.
         For commits with multiple parents, the largest possible depth will be
         For commits with multiple parents, the largest possible depth will be
@@ -348,9 +354,7 @@ class BaseObjectStore:
                 _cls, sha = cmt.object
                 _cls, sha = cmt.object
                 cmt = self[sha]
                 cmt = self[sha]
             queue.extend(
             queue.extend(
-                (parent, depth + 1)
-                for parent in get_parents(cmt)
-                if parent in self
+                (parent, depth + 1) for parent in get_parents(cmt) if parent in self
             )
             )
         return current_depth
         return current_depth
 
 
@@ -364,13 +368,13 @@ class PackBasedObjectStore(BaseObjectStore):
         self._pack_cache: Dict[str, Pack] = {}
         self._pack_cache: Dict[str, Pack] = {}
         self.pack_compression_level = pack_compression_level
         self.pack_compression_level = pack_compression_level
 
 
-    def add_pack(
-        self
-    ) -> Tuple[BytesIO, Callable[[], None], Callable[[], None]]:
+    def add_pack(self) -> Tuple[BytesIO, Callable[[], None], Callable[[], None]]:
         """Add a new pack to this object store."""
         """Add a new pack to this object store."""
         raise NotImplementedError(self.add_pack)
         raise NotImplementedError(self.add_pack)
 
 
-    def add_pack_data(self, count: int, unpacked_objects: Iterator[UnpackedObject], progress=None) -> None:
+    def add_pack_data(
+        self, count: int, unpacked_objects: Iterator[UnpackedObject], progress=None
+    ) -> None:
         """Add pack data to this object store.
         """Add pack data to this object store.
 
 
         Args:
         Args:
@@ -433,8 +437,7 @@ class PackBasedObjectStore(BaseObjectStore):
                 prev_pack.close()
                 prev_pack.close()
 
 
     def generate_pack_data(
     def generate_pack_data(
-        self, have, want, shallow=None, progress=None,
-        ofs_delta=True
+        self, have, want, shallow=None, progress=None, ofs_delta=True
     ) -> Tuple[int, Iterator[UnpackedObject]]:
     ) -> Tuple[int, Iterator[UnpackedObject]]:
         """Generate pack data objects for a set of wants/haves.
         """Generate pack data objects for a set of wants/haves.
 
 
@@ -446,7 +449,8 @@ class PackBasedObjectStore(BaseObjectStore):
           progress: Optional progress reporting method
           progress: Optional progress reporting method
         """
         """
         missing_objects = MissingObjectFinder(
         missing_objects = MissingObjectFinder(
-            self, haves=have, wants=want, shallow=shallow, progress=progress)
+            self, haves=have, wants=want, shallow=shallow, progress=progress
+        )
         remote_has = missing_objects.get_remote_has()
         remote_has = missing_objects.get_remote_has()
         object_ids = list(missing_objects)
         object_ids = list(missing_objects)
         return len(object_ids), generate_unpacked_objects(
         return len(object_ids), generate_unpacked_objects(
@@ -454,7 +458,8 @@ class PackBasedObjectStore(BaseObjectStore):
             object_ids,
             object_ids,
             progress=progress,
             progress=progress,
             ofs_delta=ofs_delta,
             ofs_delta=ofs_delta,
-            other_haves=remote_has)
+            other_haves=remote_has,
+        )
 
 
     def _clear_cached_packs(self):
     def _clear_cached_packs(self):
         pack_cache = self._pack_cache
         pack_cache = self._pack_cache
@@ -595,27 +600,51 @@ class PackBasedObjectStore(BaseObjectStore):
                pass
        raise KeyError(hexsha)

-    def iter_unpacked_subset(self, shas, *, include_comp=False, allow_missing: bool = False, convert_ofs_delta: bool = True) -> Iterator[ShaFile]:
+    def iter_unpacked_subset(
+        self,
+        shas,
+        *,
+        include_comp=False,
+        allow_missing: bool = False,
+        convert_ofs_delta: bool = True,
+    ) -> Iterator[ShaFile]:
        todo: Set[bytes] = set(shas)
        for p in self._iter_cached_packs():
-            for unpacked in p.iter_unpacked_subset(todo, include_comp=include_comp, allow_missing=True, convert_ofs_delta=convert_ofs_delta):
+            for unpacked in p.iter_unpacked_subset(
+                todo,
+                include_comp=include_comp,
+                allow_missing=True,
+                convert_ofs_delta=convert_ofs_delta,
+            ):
                yield unpacked
                hexsha = sha_to_hex(unpacked.sha())
                todo.remove(hexsha)
        # Maybe something else has added a pack with the object
        # in the mean time?
        for p in self._update_pack_cache():
-            for unpacked in p.iter_unpacked_subset(todo, include_comp=include_comp, allow_missing=True, convert_ofs_delta=convert_ofs_delta):
+            for unpacked in p.iter_unpacked_subset(
+                todo,
+                include_comp=include_comp,
+                allow_missing=True,
+                convert_ofs_delta=convert_ofs_delta,
+            ):
                yield unpacked
                hexsha = sha_to_hex(unpacked.sha())
                todo.remove(hexsha)
        for alternate in self.alternates:
-            for unpacked in alternate.iter_unpacked_subset(todo, include_comp=include_comp, allow_missing=True, convert_ofs_delta=convert_ofs_delta):
+            for unpacked in alternate.iter_unpacked_subset(
+                todo,
+                include_comp=include_comp,
+                allow_missing=True,
+                convert_ofs_delta=convert_ofs_delta,
+            ):
                yield unpacked
                hexsha = sha_to_hex(unpacked.sha())
                todo.remove(hexsha)

-    def iterobjects_subset(self, shas: Iterable[bytes], *, allow_missing: bool = False) -> Iterator[ShaFile]:
+    def iterobjects_subset(
+        self, shas: Iterable[bytes], *, allow_missing: bool = False
+    ) -> Iterator[ShaFile]:
        todo: Set[bytes] = set(shas)
        for p in self._iter_cached_packs():
            for o in p.iterobjects_subset(todo, allow_missing=True):
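
As the exploded keyword list above makes clearer, iterobjects_subset() drains a todo set across cached packs and raises KeyError for leftovers unless allow_missing is set. A sketch with assumed SHA values:

    wanted = {b"aa" * 20, b"bb" * 20}  # hypothetical 40-byte hex SHAs
    for obj in object_store.iterobjects_subset(wanted, allow_missing=True):
        print(obj.id, obj.type_name)  # absent SHAs are silently skipped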
@@ -638,7 +667,9 @@ class PackBasedObjectStore(BaseObjectStore):
            elif not allow_missing:
                raise KeyError(oid)

-    def get_unpacked_object(self, sha1: bytes, *, include_comp: bool = False) -> UnpackedObject:
+    def get_unpacked_object(
+        self, sha1: bytes, *, include_comp: bool = False
+    ) -> UnpackedObject:
        """Obtain the unpacked object.

        Args:
@@ -676,8 +707,10 @@ class PackBasedObjectStore(BaseObjectStore):
        raise KeyError(hexsha)

    def add_objects(
-            self, objects: Sequence[Tuple[ShaFile, Optional[str]]],
-            progress: Optional[Callable[[str], None]] = None) -> None:
+        self,
+        objects: Sequence[Tuple[ShaFile, Optional[str]]],
+        progress: Optional[Callable[[str], None]] = None,
+    ) -> None:
        """Add a set of objects to this object store.

        Args:
@@ -693,7 +726,9 @@ class PackBasedObjectStore(BaseObjectStore):
 class DiskObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

-    def __init__(self, path, loose_compression_level=-1, pack_compression_level=-1) -> None:
+    def __init__(
+        self, path, loose_compression_level=-1, pack_compression_level=-1
+    ) -> None:
        """Open an object store.

        Args:
@@ -701,9 +736,7 @@ class DiskObjectStore(PackBasedObjectStore):
          loose_compression_level: zlib compression level for loose objects
          pack_compression_level: zlib compression level for pack objects
        """
-        super().__init__(
-            pack_compression_level=pack_compression_level
-        )
+        super().__init__(pack_compression_level=pack_compression_level)
        self.path = path
        self.pack_dir = os.path.join(self.path, PACKDIR)
        self._alternates = None
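
The reformatted constructor keeps both zlib knobs; a sketch of opening a store over an assumed objects directory:

    store = DiskObjectStore(
        "/tmp/example/.git/objects",  # hypothetical path
        loose_compression_level=1,    # favour speed for loose objects
        pack_compression_level=9,     # favour size for packs
    )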
@@ -862,12 +895,18 @@ class DiskObjectStore(PackBasedObjectStore):
        entries = []
        for i, entry in enumerate(indexer):
            if progress is not None:
-                progress(("generating index: %d/%d\r" % (i, num_objects)).encode('ascii'))
+                progress(
+                    ("generating index: %d/%d\r" % (i, num_objects)).encode("ascii")
+                )
            entries.append(entry)

        pack_sha, extra_entries = extend_pack(
-            f, indexer.ext_refs(), get_raw=self.get_raw, compression_level=self.pack_compression_level,
-            progress=progress)
+            f,
+            indexer.ext_refs(),
+            get_raw=self.get_raw,
+            compression_level=self.pack_compression_level,
+            progress=progress,
+        )
        f.flush()
        try:
            fileno = f.fileno()
@@ -948,7 +987,9 @@ class DiskObjectStore(PackBasedObjectStore):
            if f.tell() > 0:
                f.seek(0)
                with PackData(path, f) as pd:
-                    indexer = PackIndexer.for_pack_data(pd, resolve_ext_ref=self.get_raw)
+                    indexer = PackIndexer.for_pack_data(
+                        pd, resolve_ext_ref=self.get_raw
+                    )
                    return self._complete_pack(f, path, len(pd), indexer)
            else:
                f.close()
@@ -1064,8 +1105,8 @@ class MemoryObjectStore(BaseObjectStore):
            call when the pack is finished.
        """
        from tempfile import SpooledTemporaryFile
-        f = SpooledTemporaryFile(
-            max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix='incoming-')
+
+        f = SpooledTemporaryFile(max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-")

        def commit():
            size = f.tell()
@@ -1083,7 +1124,9 @@ class MemoryObjectStore(BaseObjectStore):

        return f, commit, abort

-    def add_pack_data(self, count: int, unpacked_objects: Iterator[UnpackedObject], progress=None) -> None:
+    def add_pack_data(
+        self, count: int, unpacked_objects: Iterator[UnpackedObject], progress=None
+    ) -> None:
        """Add pack data to this object store.

        Args:
@@ -1139,7 +1182,9 @@ def tree_lookup_path(lookup_obj, root_sha, path):
    return tree.lookup_path(lookup_obj, path)


-def _collect_filetree_revs(obj_store: ObjectContainer, tree_sha: ObjectID, kset: Set[ObjectID]) -> None:
+def _collect_filetree_revs(
+    obj_store: ObjectContainer, tree_sha: ObjectID, kset: Set[ObjectID]
+) -> None:
    """Collect SHA1s of files and directories for specified tree.

    Args:
@@ -1156,7 +1201,9 @@ def _collect_filetree_revs(obj_store: ObjectContainer, tree_sha: ObjectID, kset:
                _collect_filetree_revs(obj_store, sha, kset)


-def _split_commits_and_tags(obj_store: ObjectContainer, lst, *, ignore_unknown=False) -> Tuple[Set[bytes], Set[bytes], Set[bytes]]:
+def _split_commits_and_tags(
+    obj_store: ObjectContainer, lst, *, ignore_unknown=False
+) -> Tuple[Set[bytes], Set[bytes], Set[bytes]]:
    """Split object id list into three lists with commit, tag, and other SHAs.

    Commits referenced by tags are included into commits
@@ -1241,8 +1288,7 @@ class MissingObjectFinder:
        # all_ancestors is a set of commits that shall not be sent
        # (complete repository up to 'haves')
        all_ancestors = _collect_ancestors(
-            object_store,
-            have_commits, shallow=shallow, get_parents=self._get_parents
+            object_store, have_commits, shallow=shallow, get_parents=self._get_parents
        )[0]
        # all_missing - complete set of commits between haves and wants
        # common - commits from all_ancestors we hit into while
@@ -1270,17 +1316,15 @@ class MissingObjectFinder:

        # in fact, what we 'want' is commits, tags, and others
        # we've found missing
-        self.objects_to_send: Set[Tuple[ObjectID, Optional[bytes], Optional[int], bool]] = {
-            (w, None, Commit.type_num, False)
-            for w in missing_commits}
+        self.objects_to_send: Set[
+            Tuple[ObjectID, Optional[bytes], Optional[int], bool]
+        ] = {(w, None, Commit.type_num, False) for w in missing_commits}
        missing_tags = want_tags.difference(have_tags)
        self.objects_to_send.update(
-            {(w, None, Tag.type_num, False)
-             for w in missing_tags})
+            {(w, None, Tag.type_num, False) for w in missing_tags}
+        )
        missing_others = want_others.difference(have_others)
-        self.objects_to_send.update(
-            {(w, None, None, False)
-             for w in missing_others})
+        self.objects_to_send.update({(w, None, None, False) for w in missing_others})

        if progress is None:
            self.progress = lambda x: None
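
MissingObjectFinder, as constructed in generate_pack_data() above, is itself an iterator of (sha, pack hint) pairs; a sketch with assumed haves/wants sets:

    finder = MissingObjectFinder(object_store, haves=haves, wants=wants)
    for sha, pack_hint in finder:  # progress callbacks fire every 1000 objects
        pass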
@@ -1291,13 +1335,19 @@ class MissingObjectFinder:
    def get_remote_has(self):
        return self.remote_has

-    def add_todo(self, entries: Iterable[Tuple[ObjectID, Optional[bytes], Optional[int], bool]]):
+    def add_todo(
+        self, entries: Iterable[Tuple[ObjectID, Optional[bytes], Optional[int], bool]]
+    ):
        self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done])

    def __next__(self) -> Tuple[bytes, Optional[PackHint]]:
        while True:
            if not self.objects_to_send:
-                self.progress(("counting objects: %d, done.\n" % len(self.sha_done)).encode("ascii"))
+                self.progress(
+                    ("counting objects: %d, done.\n" % len(self.sha_done)).encode(
+                        "ascii"
+                    )
+                )
                raise StopIteration
            (sha, name, type_num, leaf) = self.objects_to_send.pop()
            if sha not in self.sha_done:
@@ -1309,8 +1359,12 @@ class MissingObjectFinder:
            elif isinstance(o, Tree):
                self.add_todo(
                    [
-                        (s, n, (Blob.type_num if stat.S_ISREG(m) else Tree.type_num),
-                         not stat.S_ISDIR(m))
+                        (
+                            s,
+                            n,
+                            (Blob.type_num if stat.S_ISREG(m) else Tree.type_num),
+                            not stat.S_ISDIR(m),
+                        )
                        for n, m, s in o.iteritems()
                        if not S_ISGITLINK(m)
                    ]
@@ -1321,7 +1375,9 @@ class MissingObjectFinder:
            self.add_todo([(self._tagged[sha], None, None, True)])
        self.sha_done.add(sha)
        if len(self.sha_done) % 1000 == 0:
-            self.progress(("counting objects: %d\r" % len(self.sha_done)).encode("ascii"))
+            self.progress(
+                ("counting objects: %d\r" % len(self.sha_done)).encode("ascii")
+            )
        if type_num is None:
            pack_hint = None
        else:
@@ -1423,7 +1479,7 @@ def commit_tree_changes(object_store, tree, changes):
    # TODO(jelmer): Save up the objects and add them using .add_objects
    # rather than with individual calls to .add_object.
    nested_changes = {}
-    for (path, new_mode, new_sha) in changes:
+    for path, new_mode, new_sha in changes:
        try:
            (dirname, subpath) = path.split(b"/", 1)
        except ValueError:
@@ -1479,7 +1535,9 @@ class OverlayObjectStore(BaseObjectStore):
                    yield o_id
                    done.add(o_id)

-    def iterobjects_subset(self, shas: Iterable[bytes], *, allow_missing: bool = False) -> Iterator[ShaFile]:
+    def iterobjects_subset(
+        self, shas: Iterable[bytes], *, allow_missing: bool = False
+    ) -> Iterator[ShaFile]:
        todo = set(shas)
        for b in self.bases:
            for o in b.iterobjects_subset(todo, allow_missing=True):
@@ -1488,10 +1546,22 @@ class OverlayObjectStore(BaseObjectStore):
        if todo and not allow_missing:
            raise KeyError(o.id)

-    def iter_unpacked_subset(self, shas: Iterable[bytes], *, include_comp=False, allow_missing: bool = False, convert_ofs_delta=True) -> Iterator[ShaFile]:
+    def iter_unpacked_subset(
+        self,
+        shas: Iterable[bytes],
+        *,
+        include_comp=False,
+        allow_missing: bool = False,
+        convert_ofs_delta=True,
+    ) -> Iterator[ShaFile]:
        todo = set(shas)
        for b in self.bases:
-            for o in b.iter_unpacked_subset(todo, include_comp=include_comp, allow_missing=True, convert_ofs_delta=convert_ofs_delta):
+            for o in b.iter_unpacked_subset(
+                todo,
+                include_comp=include_comp,
+                allow_missing=True,
+                convert_ofs_delta=convert_ofs_delta,
+            ):
                yield o
                todo.remove(o.id)
        if todo and not allow_missing:
@@ -1580,7 +1650,8 @@ class BucketBasedObjectStore(PackBasedObjectStore):
        import tempfile

        pf = tempfile.SpooledTemporaryFile(
-            max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix='incoming-')
+            max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-"
+        )

        def commit():
            if pf.tell() == 0:
@@ -1590,13 +1661,14 @@ class BucketBasedObjectStore(PackBasedObjectStore):
            pf.seek(0)
            p = PackData(pf.name, pf)
            entries = p.sorted_entries()
-            basename = iter_sha1(entry[0] for entry in entries).decode('ascii')
+            basename = iter_sha1(entry[0] for entry in entries).decode("ascii")
            idxf = tempfile.SpooledTemporaryFile(
-                max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix='incoming-')
+                max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-"
+            )
            checksum = p.get_stored_checksum()
            write_pack_index(idxf, entries, checksum)
            idxf.seek(0)
-            idx = load_pack_index_file(basename + '.idx', idxf)
+            idx = load_pack_index_file(basename + ".idx", idxf)
            for pack in self.packs:
                if pack.get_stored_checksum() == p.get_stored_checksum():
                    p.close()
@@ -1649,7 +1721,8 @@ def _collect_ancestors(


 def iter_tree_contents(
-        store: ObjectContainer, tree_id: Optional[ObjectID], *, include_trees: bool = False):
+    store: ObjectContainer, tree_id: Optional[ObjectID], *, include_trees: bool = False
+):
    """Iterate the contents of a tree and all subtrees.

    Iteration is depth-first pre-order, as in e.g. os.walk.
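
The reflowed iter_tree_contents() signature keeps include_trees keyword-only; a usage sketch with an assumed store and tree SHA, yielding TreeEntry tuples:

    for entry in iter_tree_contents(object_store, tree_sha, include_trees=False):
        print(entry.path, oct(entry.mode), entry.sha)  # depth-first pre-order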

+ 72 - 60
dulwich/objects.py

@@ -216,16 +218,18 @@ def check_identity(identity: bytes, error_msg: str) -> None:
      identity: Identity string
      error_msg: Error message to use in exception
    """
-    email_start = identity.find(b'<')
-    email_end = identity.find(b'>')
-    if not all([
-        email_start >= 1,
-        identity[email_start - 1] == b' '[0],
-        identity.find(b'<', email_start + 1) == -1,
-        email_end == len(identity) - 1,
-        b'\0' not in identity,
-        b'\n' not in identity,
-    ]):
+    email_start = identity.find(b"<")
+    email_end = identity.find(b">")
+    if not all(
+        [
+            email_start >= 1,
+            identity[email_start - 1] == b" "[0],
+            identity.find(b"<", email_start + 1) == -1,
+            email_end == len(identity) - 1,
+            b"\0" not in identity,
+            b"\n" not in identity,
+        ]
+    ):
        raise ObjectFormatException(error_msg)


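The all([...]) block reformatted above encodes the same rules: exactly one '<' preceded by a space, '>' as the final byte, and no NUL or newline anywhere. Two illustrative calls (identity values assumed):

    check_identity(b"Jane Doe <jane@example.com>", "bad identity")  # passes
    check_identity(b"<jane@example.com>", "bad identity")  # raises: no name before '<'
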
@@ -300,11 +302,12 @@ class ShaFile:
        try:
            int(size)  # sanity check
        except ValueError as exc:
-            raise ObjectFormatException(
-                "Object size not an integer: %s" % exc) from exc
+            raise ObjectFormatException("Object size not an integer: %s" % exc) from exc
        obj_class = object_class(type_name)
        if not obj_class:
-            raise ObjectFormatException("Not a known type: %s" % type_name.decode('ascii'))
+            raise ObjectFormatException(
+                "Not a known type: %s" % type_name.decode("ascii")
+            )
        return obj_class()

    def _parse_legacy_object(self, map) -> None:
@@ -315,8 +318,7 @@ class ShaFile:
            raise ObjectFormatException("Invalid object header, no \\0")
        self.set_raw_string(text[header_end + 1 :])

-    def as_legacy_object_chunks(
-            self, compression_level: int = -1) -> Iterator[bytes]:
+    def as_legacy_object_chunks(self, compression_level: int = -1) -> Iterator[bytes]:
        """Return chunks representing the object in the experimental format.

        Returns: List of strings
@@ -363,16 +365,15 @@ class ShaFile:
        """Return a string representing this object, fit for display."""
        return self.as_raw_string()

-    def set_raw_string(
-            self, text: bytes, sha: Optional[ObjectID] = None) -> None:
+    def set_raw_string(self, text: bytes, sha: Optional[ObjectID] = None) -> None:
        """Set the contents of this object from a serialized string."""
        if not isinstance(text, bytes):
            raise TypeError("Expected bytes for text, got %r" % text)
        self.set_raw_chunks([text], sha)

    def set_raw_chunks(
-            self, chunks: List[bytes],
-            sha: Optional[ObjectID] = None) -> None:
+        self, chunks: List[bytes], sha: Optional[ObjectID] = None
+    ) -> None:
        """Set the contents of this object from a list of chunks."""
        self._chunked_text = chunks
        self._deserialize(chunks)
@@ -470,8 +471,8 @@ class ShaFile:

    @staticmethod
    def from_raw_chunks(
-            type_num: int, chunks: List[bytes],
-            sha: Optional[ObjectID] = None):
+        type_num: int, chunks: List[bytes], sha: Optional[ObjectID] = None
+    ):
        """Creates an object of the indicated type from the raw chunks given.

        Args:
@@ -550,7 +551,7 @@ class ShaFile:
        """Create a new copy of this SHA1 object from its raw string."""
        obj_class = object_class(self.type_num)
        if obj_class is None:
-            raise AssertionError('invalid type num %d' % self.type_num)
+            raise AssertionError("invalid type num %d" % self.type_num)
        return obj_class.from_raw_string(self.type_num, self.as_raw_string(), self.id)

    @property
@@ -668,7 +669,9 @@ class Blob(ShaFile):
        return ret


-def _parse_message(chunks: Iterable[bytes]) -> Iterator[Union[Tuple[None, None], Tuple[Optional[bytes], bytes]]]:
+def _parse_message(
+    chunks: Iterable[bytes]
+) -> Iterator[Union[Tuple[None, None], Tuple[Optional[bytes], bytes]]]:
    """Parse a message with a list of fields and a body.

    Args:
@@ -812,9 +815,16 @@ class Tag(ShaFile):
            if self._tag_time is None:
                headers.append((_TAGGER_HEADER, self._tagger))
            else:
-                headers.append((_TAGGER_HEADER, format_time_entry(
-                    self._tagger, self._tag_time,
-                    (self._tag_timezone, self._tag_timezone_neg_utc))))
+                headers.append(
+                    (
+                        _TAGGER_HEADER,
+                        format_time_entry(
+                            self._tagger,
+                            self._tag_time,
+                            (self._tag_timezone, self._tag_timezone_neg_utc),
+                        ),
+                    )
+                )

        if self.message is None and self._signature is None:
            body = None
@@ -892,6 +902,7 @@ class Tag(ShaFile):

    def sign(self, keyid: Optional[str] = None):
        import gpg
+
        with gpg.Context(armor=True) as c:
            if keyid is not None:
                key = c.get_key(keyid)
@@ -930,18 +941,13 @@ class Tag(ShaFile):
                signature=self._signature,
            )
            if keyids:
-                keys = [
-                    ctx.get_key(key)
-                    for key in keyids
-                ]
+                keys = [ctx.get_key(key) for key in keyids]
                for key in keys:
                    for subkey in keys:
                        for sig in result.signatures:
                            if subkey.can_sign and subkey.fpr == sig.fpr:
                                return
-                raise gpg.errors.MissingSignatures(
-                    result, keys, results=(data, result)
-                )
+                raise gpg.errors.MissingSignatures(result, keys, results=(data, result))


 class TreeEntry(namedtuple("TreeEntry", ["path", "mode", "sha"])):
@@ -974,8 +980,7 @@ def parse_tree(text, strict=False):
        try:
            mode = int(mode_text, 8)
        except ValueError as exc:
-            raise ObjectFormatException(
-                "Invalid mode '%s'" % mode_text) from exc
+            raise ObjectFormatException("Invalid mode '%s'" % mode_text) from exc
        name_end = text.index(b"\0", mode_end)
        name = text[mode_end + 1 : name_end]
        count = name_end + 21
@@ -1216,7 +1221,7 @@ class Tree(ShaFile):
            if not p:
                continue
            if mode is not None and S_ISGITLINK(mode):
-                raise SubmoduleEncountered(b'/'.join(parts[:i]), sha)
+                raise SubmoduleEncountered(b"/".join(parts[:i]), sha)
            obj = lookup_obj(sha)
            if not isinstance(obj, Tree):
                raise NotTreeError(sha)
@@ -1299,10 +1304,9 @@ def parse_time_entry(value):
 def format_time_entry(person, time, timezone_info):
    """Format an event."""
    (timezone, timezone_neg_utc) = timezone_info
-    return b" ".join([
-        person,
-        str(time).encode("ascii"),
-        format_timezone(timezone, timezone_neg_utc)])
+    return b" ".join(
+        [person, str(time).encode("ascii"), format_timezone(timezone, timezone_neg_utc)]
+    )


 def parse_commit(chunks):
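
format_time_entry() joins the three fields with single spaces; for example, and assuming format_timezone() renders a 3600-second east offset as +0100:

    format_time_entry(b"Jane Doe <jane@example.com>", 1700000000, (3600, False))
    # -> b"Jane Doe <jane@example.com> 1700000000 +0100"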
@@ -1313,7 +1317,7 @@ def parse_commit(chunks):
    Returns: Tuple of (tree, parents, author_info, commit_info,
        encoding, mergetag, gpgsig, message, extra)
    """
-    warnings.warn('parse_commit will be removed in 0.22', DeprecationWarning)
+    warnings.warn("parse_commit will be removed in 0.22", DeprecationWarning)
    parents = []
    extra = []
    tree = None
@@ -1491,6 +1495,7 @@ class Commit(ShaFile):

    def sign(self, keyid: Optional[str] = None):
        import gpg
+
        with gpg.Context(armor=True) as c:
            if keyid is not None:
                key = c.get_key(keyid)
@@ -1532,18 +1537,13 @@ class Commit(ShaFile):
                signature=self._gpgsig,
            )
            if keyids:
-                keys = [
-                    ctx.get_key(key)
-                    for key in keyids
-                ]
+                keys = [ctx.get_key(key) for key in keyids]
                for key in keys:
                    for subkey in keys:
                        for sig in result.signatures:
                            if subkey.can_sign and subkey.fpr == sig.fpr:
                                return
-                raise gpg.errors.MissingSignatures(
-                    result, keys, results=(data, result)
-                )
+                raise gpg.errors.MissingSignatures(result, keys, results=(data, result))

    def _serialize(self):
        headers = []
@@ -1551,16 +1551,26 @@ class Commit(ShaFile):
        headers.append((_TREE_HEADER, tree_bytes))
        for p in self._parents:
            headers.append((_PARENT_HEADER, p))
-        headers.append((
-            _AUTHOR_HEADER,
-            format_time_entry(
-                self._author, self._author_time,
-                (self._author_timezone, self._author_timezone_neg_utc))))
-        headers.append((
-            _COMMITTER_HEADER,
-            format_time_entry(
-                self._committer, self._commit_time,
-                (self._commit_timezone, self._commit_timezone_neg_utc))))
+        headers.append(
+            (
+                _AUTHOR_HEADER,
+                format_time_entry(
+                    self._author,
+                    self._author_time,
+                    (self._author_timezone, self._author_timezone_neg_utc),
+                ),
+            )
+        )
+        headers.append(
+            (
+                _COMMITTER_HEADER,
+                format_time_entry(
+                    self._committer,
+                    self._commit_time,
+                    (self._commit_timezone, self._commit_timezone_neg_utc),
+                ),
+            )
+        )
        if self.encoding:
            headers.append((_ENCODING_HEADER, self.encoding))
        for mergetag in self.mergetag:
@@ -1590,8 +1600,10 @@ class Commit(ShaFile):
    def _get_extra(self):
        """Return extra settings of this commit."""
        warnings.warn(
-            'Commit.extra is deprecated. Use Commit._extra instead.',
-            DeprecationWarning, stacklevel=2)
+            "Commit.extra is deprecated. Use Commit._extra instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
        return self._extra

    extra = property(

+ 16 - 9
dulwich/objectspec.py

@@ -69,7 +69,9 @@ def parse_tree(repo: "Repo", treeish: Union[bytes, str]) -> "Tree":
    return o


-def parse_ref(container: Union["Repo", "RefsContainer"], refspec: Union[str, bytes]) -> "Ref":
+def parse_ref(
+    container: Union["Repo", "RefsContainer"], refspec: Union[str, bytes]
+) -> "Ref":
    """Parse a string referring to a reference.

    Args:
@@ -95,9 +97,11 @@ def parse_ref(container: Union["Repo", "RefsContainer"], refspec: Union[str, byt


 def parse_reftuple(
-        lh_container: Union["Repo", "RefsContainer"],
-        rh_container: Union["Repo", "RefsContainer"], refspec: Union[str, bytes],
-        force: bool = False) -> Tuple[Optional["Ref"], Optional["Ref"], bool]:
+    lh_container: Union["Repo", "RefsContainer"],
+    rh_container: Union["Repo", "RefsContainer"],
+    refspec: Union[str, bytes],
+    force: bool = False,
+) -> Tuple[Optional["Ref"], Optional["Ref"], bool]:
    """Parse a reftuple spec.

    Args:
@@ -135,10 +139,11 @@ def parse_reftuple(


 def parse_reftuples(
-        lh_container: Union["Repo", "RefsContainer"],
-        rh_container: Union["Repo", "RefsContainer"],
-        refspecs: Union[bytes, List[bytes]],
-        force: bool = False):
+    lh_container: Union["Repo", "RefsContainer"],
+    rh_container: Union["Repo", "RefsContainer"],
+    refspecs: Union[bytes, List[bytes]],
+    force: bool = False,
+):
    """Parse a list of reftuple specs to a list of reftuples.

    Args:
@@ -178,7 +183,9 @@ def parse_refs(container, refspecs):
    return ret


-def parse_commit_range(repo: "Repo", committishs: Union[str, bytes]) -> Iterator["Commit"]:
+def parse_commit_range(
+    repo: "Repo", committishs: Union[str, bytes]
+) -> Iterator["Commit"]:
    """Parse a string referring to a range of commits.

    Args:
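
parse_ref() resolves short names by trying the usual ref prefixes (tags, heads, remotes); a sketch against an assumed open Repo:

    ref = parse_ref(repo, b"main")  # -> b"refs/heads/main" if that branch exists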

+ 311 - 149
dulwich/pack.py

@@ -109,20 +109,21 @@ PackHint = Tuple[int, Optional[bytes]]


 class UnresolvedDeltas(Exception):
-    """"Delta objects could not be resolved."""
+    """ "Delta objects could not be resolved."""

    def __init__(self, shas):
        self.shas = shas


 class ObjectContainer(Protocol):
-
    def add_object(self, obj: ShaFile) -> None:
        """Add a single object to this object store."""

    def add_objects(
-            self, objects: Sequence[Tuple[ShaFile, Optional[str]]],
-            progress: Optional[Callable[[str], None]] = None) -> None:
+        self,
+        objects: Sequence[Tuple[ShaFile, Optional[str]]],
+        progress: Optional[Callable[[str], None]] = None,
+    ) -> None:
        """Add a set of objects to this object store.

        Args:
@@ -137,22 +138,28 @@ class ObjectContainer(Protocol):


 class PackedObjectContainer(ObjectContainer):
-
-    def get_unpacked_object(self, sha1: bytes, *, include_comp: bool = False) -> "UnpackedObject":
+    def get_unpacked_object(
+        self, sha1: bytes, *, include_comp: bool = False
+    ) -> "UnpackedObject":
        """Get a raw unresolved object."""
        raise NotImplementedError(self.get_unpacked_object)

-    def iterobjects_subset(self, shas: Iterable[bytes], *, allow_missing: bool = False) -> Iterator[ShaFile]:
+    def iterobjects_subset(
+        self, shas: Iterable[bytes], *, allow_missing: bool = False
+    ) -> Iterator[ShaFile]:
        raise NotImplementedError(self.iterobjects_subset)

    def iter_unpacked_subset(
-            self, shas: Set[bytes], include_comp: bool = False, allow_missing: bool = False,
-            convert_ofs_delta: bool = True) -> Iterator["UnpackedObject"]:
+        self,
+        shas: Set[bytes],
+        include_comp: bool = False,
+        allow_missing: bool = False,
+        convert_ofs_delta: bool = True,
+    ) -> Iterator["UnpackedObject"]:
        raise NotImplementedError(self.iter_unpacked_subset)


 class UnpackedObjectStream:
-
    def __iter__(self) -> Iterator["UnpackedObject"]:
        raise NotImplementedError(self.__iter__)

@@ -160,7 +167,9 @@ class UnpackedObjectStream:
        raise NotImplementedError(self.__len__)


-def take_msb_bytes(read: Callable[[int], bytes], crc32: Optional[int] = None) -> Tuple[List[int], Optional[int]]:
+def take_msb_bytes(
+    read: Callable[[int], bytes], crc32: Optional[int] = None
+) -> Tuple[List[int], Optional[int]]:
    """Read bytes marked with most significant bit.

    Args:
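
take_msb_bytes() reads one byte at a time until it sees a byte without the 0x80 continuation bit, git's varint framing; a behavioural sketch with an assumed buffer:

    from io import BytesIO

    data = BytesIO(bytes([0x95, 0x0A]))    # 0x95 has the MSB set, 0x0A terminates
    nums, crc = take_msb_bytes(data.read)  # nums == [0x95, 0x0A]; crc stays None here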
@@ -212,7 +221,17 @@ class UnpackedObject:

    # TODO(dborowitz): read_zlib_chunks and unpack_object could very well be
    # methods of this object.
-    def __init__(self, pack_type_num, *, delta_base=None, decomp_len=None, crc32=None, sha=None, decomp_chunks=None, offset=None) -> None:
+    def __init__(
+        self,
+        pack_type_num,
+        *,
+        delta_base=None,
+        decomp_len=None,
+        crc32=None,
+        sha=None,
+        decomp_chunks=None,
+        offset=None,
+    ) -> None:
        self.offset = offset
        self._sha = sha
        self.pack_type_num = pack_type_num
@@ -274,9 +293,10 @@ _ZLIB_BUFSIZE = 4096


 def read_zlib_chunks(
-        read_some: Callable[[int], bytes],
-        unpacked: UnpackedObject, include_comp: bool = False,
-        buffer_size: int = _ZLIB_BUFSIZE
+    read_some: Callable[[int], bytes],
+    unpacked: UnpackedObject,
+    include_comp: bool = False,
+    buffer_size: int = _ZLIB_BUFSIZE,
 ) -> bytes:
    """Read zlib data from a buffer.

@@ -476,7 +496,9 @@ class PackIndex:
        raise NotImplementedError(self.get_pack_checksum)

    def object_index(self, sha: bytes) -> int:
-        warnings.warn('Please use object_offset instead', DeprecationWarning, stacklevel=2)
+        warnings.warn(
+            "Please use object_offset instead", DeprecationWarning, stacklevel=2
+        )
        return self.object_offset(sha)

    def object_offset(self, sha: bytes) -> int:
@@ -490,7 +512,7 @@ class PackIndex:

    def object_sha1(self, index: int) -> bytes:
        """Return the SHA1 corresponding to the index in the pack file."""
-        for (name, offset, crc32) in self.iterentries():
+        for name, offset, crc32 in self.iterentries():
            if offset == index:
                return name
        else:
@@ -787,8 +809,8 @@ class PackIndex2(FilePackIndex):
    def _unpack_offset(self, i):
        offset = self._pack_offset_table_offset + i * 4
        offset = unpack_from(">L", self._contents, offset)[0]
-        if offset & (2 ** 31):
-            offset = self._pack_offset_largetable_offset + (offset & (2 ** 31 - 1)) * 8
+        if offset & (2**31):
+            offset = self._pack_offset_largetable_offset + (offset & (2**31 - 1)) * 8
            offset = unpack_from(">Q", self._contents, offset)[0]
        return offset

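The 2**31 spelling ruff prefers here is the pack index v2 large-offset flag: when the MSB of a 4-byte offset entry is set, its low 31 bits index a table of 8-byte offsets used for packs larger than 2 GiB. The same test in isolation:

    entry = 0x80000003                    # hypothetical 4-byte offset entry
    if entry & (2**31):
        large_slot = entry & (2**31 - 1)  # -> 3, a slot in the 8-byte table
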
@@ -888,7 +910,9 @@ def unpack_object(
    else:
        delta_base = None

-    unpacked = UnpackedObject(type_num, delta_base=delta_base, decomp_len=size, crc32=crc32)
+    unpacked = UnpackedObject(
+        type_num, delta_base=delta_base, decomp_len=size, crc32=crc32
+    )
    unused = read_zlib_chunks(
        read_some,
        unpacked,
@@ -1086,9 +1110,11 @@ class PackStreamCopier(PackStreamReader):
            if self._delta_iter:
                self._delta_iter.record(unpacked)
            if progress is not None:
-                progress(("copying pack entries: %d/%d\r" % (i, len(self))).encode('ascii'))
+                progress(
+                    ("copying pack entries: %d/%d\r" % (i, len(self))).encode("ascii")
+                )
        if progress is not None:
-            progress(("copied %d pack entries\n" % i).encode('ascii'))
+            progress(("copied %d pack entries\n" % i).encode("ascii"))


 def obj_sha(type, chunks):
@@ -1241,13 +1267,17 @@ class PackData:

        for _ in range(self._num_objects):
            offset = self._file.tell()
-            unpacked, unused = unpack_object(self._file.read, compute_crc32=False, include_comp=include_comp)
+            unpacked, unused = unpack_object(
+                self._file.read, compute_crc32=False, include_comp=include_comp
+            )
            unpacked.offset = offset
            yield unpacked
            # Back up over unused data.
            self._file.seek(-len(unused), SEEK_CUR)

-    def iterentries(self, progress=None, resolve_ext_ref: Optional[ResolveExtRefFn] = None):
+    def iterentries(
+        self, progress=None, resolve_ext_ref: Optional[ResolveExtRefFn] = None
+    ):
        """Yield entries summarizing the contents of this pack.

        Args:
@@ -1262,7 +1292,11 @@ class PackData:
                progress(i, num_objects)
            yield result

-    def sorted_entries(self, progress: Optional[ProgressFn] = None, resolve_ext_ref: Optional[ResolveExtRefFn] = None):
+    def sorted_entries(
+        self,
+        progress: Optional[ProgressFn] = None,
+        resolve_ext_ref: Optional[ResolveExtRefFn] = None,
+    ):
        """Return entries in this pack, sorted by SHA.

        Args:
@@ -1270,8 +1304,9 @@ class PackData:
            object count
        Returns: Iterator of tuples with (sha, offset, crc32)
        """
-        return sorted(self.iterentries(
-            progress=progress, resolve_ext_ref=resolve_ext_ref))
+        return sorted(
+            self.iterentries(progress=progress, resolve_ext_ref=resolve_ext_ref)
+        )

    def create_index_v1(self, filename, progress=None, resolve_ext_ref=None):
        """Create a version 1 file for this data file.
@@ -1282,7 +1317,8 @@ class PackData:
        Returns: Checksum of index file
        """
        entries = self.sorted_entries(
-            progress=progress, resolve_ext_ref=resolve_ext_ref)
+            progress=progress, resolve_ext_ref=resolve_ext_ref
+        )
        with GitFile(filename, "wb") as f:
            return write_pack_index_v1(f, entries, self.calculate_checksum())

@@ -1295,7 +1331,8 @@ class PackData:
        Returns: Checksum of index file
        """
        entries = self.sorted_entries(
-            progress=progress, resolve_ext_ref=resolve_ext_ref)
+            progress=progress, resolve_ext_ref=resolve_ext_ref
+        )
        with GitFile(filename, "wb") as f:
            return write_pack_index_v2(f, entries, self.calculate_checksum())

@@ -1309,10 +1346,12 @@ class PackData:
        """
        if version == 1:
            return self.create_index_v1(
-                filename, progress, resolve_ext_ref=resolve_ext_ref)
+                filename, progress, resolve_ext_ref=resolve_ext_ref
+            )
        elif version == 2:
            return self.create_index_v2(
-                filename, progress, resolve_ext_ref=resolve_ext_ref)
+                filename, progress, resolve_ext_ref=resolve_ext_ref
+            )
        else:
            raise ValueError("unknown index format %d" % version)

@@ -1328,7 +1367,9 @@ class PackData:
        if actual != stored:
            raise ChecksumMismatch(stored, actual)

-    def get_unpacked_object_at(self, offset: int, *, include_comp: bool = False) -> UnpackedObject:
+    def get_unpacked_object_at(
+        self, offset: int, *, include_comp: bool = False
+    ) -> UnpackedObject:
        """Given offset in the packfile return a UnpackedObject."""
        assert offset >= self._header_size
        self._file.seek(offset)
@@ -1351,7 +1392,7 @@ class PackData:
        return (unpacked.pack_type_num, unpacked._obj())


-T = TypeVar('T')
+T = TypeVar("T")


 class DeltaChainIterator(Generic[T]):
@@ -1396,8 +1437,13 @@ class DeltaChainIterator(Generic[T]):

    @classmethod
    def for_pack_subset(
-            cls, pack: "Pack", shas: Iterable[bytes], *,
-            allow_missing: bool = False, resolve_ext_ref=None):
+        cls,
+        pack: "Pack",
+        shas: Iterable[bytes],
+        *,
+        allow_missing: bool = False,
+        resolve_ext_ref=None,
+    ):
        walker = cls(None, resolve_ext_ref=resolve_ext_ref)
        walker.set_pack_data(pack.data)
        todo = set()
@@ -1477,7 +1523,9 @@ class DeltaChainIterator(Generic[T]):
    def _result(self, unpacked: UnpackedObject) -> T:
        raise NotImplementedError

-    def _resolve_object(self, offset: int, obj_type_num: int, base_chunks: List[bytes]) -> UnpackedObject:
+    def _resolve_object(
+        self, offset: int, obj_type_num: int, base_chunks: List[bytes]
+    ) -> UnpackedObject:
        self._file.seek(offset)
        unpacked, _ = unpack_object(
            self._file.read,
@@ -1661,8 +1709,7 @@ def write_pack_object(write, type, object, sha=None, compression_level=-1):
    Returns: Tuple with offset at which the object was written, and crc32
    """
    crc32 = 0
-    for chunk in pack_object_chunks(
-            type, object, compression_level=compression_level):
+    for chunk in pack_object_chunks(type, object, compression_level=compression_level):
        write(chunk)
        if sha is not None:
            sha.update(chunk)
@@ -1671,12 +1718,13 @@ def write_pack_object(write, type, object, sha=None, compression_level=-1):


 def write_pack(
-        filename,
-        objects: Union[Sequence[ShaFile], Sequence[Tuple[ShaFile, Optional[bytes]]]],
-        *,
-        deltify: Optional[bool] = None,
-        delta_window_size: Optional[int] = None,
-        compression_level: int = -1):
+    filename,
+    objects: Union[Sequence[ShaFile], Sequence[Tuple[ShaFile, Optional[bytes]]]],
+    *,
+    deltify: Optional[bool] = None,
+    delta_window_size: Optional[int] = None,
+    compression_level: int = -1,
+):
    """Write a new pack data file.

    Args:
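
The rewrapped write_pack() keyword-only parameters read naturally at a call site; a sketch writing a one-blob pack (values assumed, and assuming filename is the basename to which .pack/.idx extensions are appended):

    blob = Blob.from_string(b"hello world")  # hypothetical object
    write_pack("/tmp/example", [blob], deltify=False, compression_level=6)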
@@ -1710,38 +1758,53 @@ def pack_header_chunks(num_objects):
 
 
 def write_pack_header(write, num_objects):
 def write_pack_header(write, num_objects):
     """Write a pack header for the given number of objects."""
     """Write a pack header for the given number of objects."""
-    if hasattr(write, 'write'):
+    if hasattr(write, "write"):
         write = write.write
         write = write.write
         warnings.warn(
         warnings.warn(
-            'write_pack_header() now takes a write rather than file argument',
-            DeprecationWarning, stacklevel=2)
+            "write_pack_header() now takes a write rather than file argument",
+            DeprecationWarning,
+            stacklevel=2,
+        )
     for chunk in pack_header_chunks(num_objects):
     for chunk in pack_header_chunks(num_objects):
         write(chunk)
         write(chunk)
 
 
 
 
 def find_reusable_deltas(
 def find_reusable_deltas(
-        container: PackedObjectContainer,
-        object_ids: Set[bytes],
-        *, other_haves: Optional[Set[bytes]] = None, progress=None) -> Iterator[UnpackedObject]:
+    container: PackedObjectContainer,
+    object_ids: Set[bytes],
+    *,
+    other_haves: Optional[Set[bytes]] = None,
+    progress=None,
+) -> Iterator[UnpackedObject]:
     if other_haves is None:
     if other_haves is None:
         other_haves = set()
         other_haves = set()
     reused = 0
     reused = 0
-    for i, unpacked in enumerate(container.iter_unpacked_subset(object_ids, allow_missing=True, convert_ofs_delta=True)):
+    for i, unpacked in enumerate(
+        container.iter_unpacked_subset(
+            object_ids, allow_missing=True, convert_ofs_delta=True
+        )
+    ):
         if progress is not None and i % 1000 == 0:
         if progress is not None and i % 1000 == 0:
-            progress(("checking for reusable deltas: %d/%d\r" % (i, len(object_ids))).encode('utf-8'))
+            progress(
+                ("checking for reusable deltas: %d/%d\r" % (i, len(object_ids))).encode(
+                    "utf-8"
+                )
+            )
         if unpacked.pack_type_num == REF_DELTA:
         if unpacked.pack_type_num == REF_DELTA:
             hexsha = sha_to_hex(unpacked.delta_base)
             hexsha = sha_to_hex(unpacked.delta_base)
             if hexsha in object_ids or hexsha in other_haves:
             if hexsha in object_ids or hexsha in other_haves:
                 yield unpacked
                 yield unpacked
                 reused += 1
                 reused += 1
     if progress is not None:
     if progress is not None:
-        progress(("found %d deltas to reuse\n" % (reused, )).encode('utf-8'))
+        progress(("found %d deltas to reuse\n" % (reused,)).encode("utf-8"))
 
 
 
 
 def deltify_pack_objects(
 def deltify_pack_objects(
-        objects: Union[Iterator[bytes], Iterator[Tuple[ShaFile, Optional[bytes]]]],
-        *, window_size: Optional[int] = None,
-        progress=None) -> Iterator[UnpackedObject]:
+    objects: Union[Iterator[bytes], Iterator[Tuple[ShaFile, Optional[bytes]]]],
+    *,
+    window_size: Optional[int] = None,
+    progress=None,
+) -> Iterator[UnpackedObject]:
     """Generate deltas for pack objects.
     """Generate deltas for pack objects.
 
 
     Args:
     Args:
@@ -1750,19 +1813,24 @@ def deltify_pack_objects(
     Returns: Iterator over type_num, object id, delta_base, content
     Returns: Iterator over type_num, object id, delta_base, content
         delta_base is None for full text entries
         delta_base is None for full text entries
     """
     """
+
     def objects_with_hints():
     def objects_with_hints():
         for e in objects:
         for e in objects:
             if isinstance(e, ShaFile):
             if isinstance(e, ShaFile):
                 yield (e, (e.type_num, None))
                 yield (e, (e.type_num, None))
             else:
             else:
                 yield (e[0], (e[0].type_num, e[1]))
                 yield (e[0], (e[0].type_num, e[1]))
+
     yield from deltas_from_sorted_objects(
     yield from deltas_from_sorted_objects(
         sort_objects_for_delta(objects_with_hints()),
         sort_objects_for_delta(objects_with_hints()),
         window_size=window_size,
         window_size=window_size,
-        progress=progress)
+        progress=progress,
+    )
 
 
 
 
-def sort_objects_for_delta(objects: Union[Iterator[ShaFile], Iterator[Tuple[ShaFile, Optional[PackHint]]]]) -> Iterator[ShaFile]:
+def sort_objects_for_delta(
+    objects: Union[Iterator[ShaFile], Iterator[Tuple[ShaFile, Optional[PackHint]]]]
+) -> Iterator[ShaFile]:
     magic = []
     magic = []
     for entry in objects:
     for entry in objects:
         if isinstance(entry, tuple):
         if isinstance(entry, tuple):
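
The progress callables threaded through find_reusable_deltas and deltify_pack_objects above receive pre-encoded byte strings (note the \r-terminated messages); a hypothetical callback just forwards them:

import sys

def progress(msg: bytes) -> None:
    # Messages arrive already encoded, e.g. b"generating deltas: 1000\r",
    # so write them to the raw stderr buffer without re-encoding.
    sys.stderr.buffer.write(msg)
    sys.stderr.buffer.flush()
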
@@ -1781,7 +1849,9 @@ def sort_objects_for_delta(objects: Union[Iterator[ShaFile], Iterator[Tuple[ShaF
     return (x[3] for x in magic)
     return (x[3] for x in magic)
 
 
 
 
-def deltas_from_sorted_objects(objects, window_size: Optional[int] = None, progress=None):
+def deltas_from_sorted_objects(
+    objects, window_size: Optional[int] = None, progress=None
+):
     # TODO(jelmer): Use threads
     # TODO(jelmer): Use threads
     if window_size is None:
     if window_size is None:
         window_size = DEFAULT_PACK_DELTA_WINDOW_SIZE
         window_size = DEFAULT_PACK_DELTA_WINDOW_SIZE
@@ -1789,7 +1859,7 @@ def deltas_from_sorted_objects(objects, window_size: Optional[int] = None, progr
     possible_bases: Deque[Tuple[bytes, int, List[bytes]]] = deque()
     possible_bases: Deque[Tuple[bytes, int, List[bytes]]] = deque()
     for i, o in enumerate(objects):
     for i, o in enumerate(objects):
         if progress is not None and i % 1000 == 0:
         if progress is not None and i % 1000 == 0:
-            progress(("generating deltas: %d\r" % (i, )).encode('utf-8'))
+            progress(("generating deltas: %d\r" % (i,)).encode("utf-8"))
         raw = o.as_raw_chunks()
         raw = o.as_raw_chunks()
         winner = raw
         winner = raw
         winner_len = sum(map(len, winner))
         winner_len = sum(map(len, winner))
@@ -1808,19 +1878,26 @@ def deltas_from_sorted_objects(objects, window_size: Optional[int] = None, progr
                 winner_base = base_id
                 winner_base = base_id
                 winner = delta
                 winner = delta
                 winner_len = sum(map(len, winner))
                 winner_len = sum(map(len, winner))
-        yield UnpackedObject(o.type_num, sha=o.sha().digest(), delta_base=winner_base, decomp_len=winner_len, decomp_chunks=winner)
+        yield UnpackedObject(
+            o.type_num,
+            sha=o.sha().digest(),
+            delta_base=winner_base,
+            decomp_len=winner_len,
+            decomp_chunks=winner,
+        )
         possible_bases.appendleft((o.sha().digest(), o.type_num, raw))
         possible_bases.appendleft((o.sha().digest(), o.type_num, raw))
         while len(possible_bases) > window_size:
         while len(possible_bases) > window_size:
             possible_bases.pop()
             possible_bases.pop()
 
 
 
 
 def pack_objects_to_data(
 def pack_objects_to_data(
-        objects: Union[Sequence[ShaFile], Sequence[Tuple[ShaFile, Optional[bytes]]]],
-        *,
-        deltify: Optional[bool] = None,
-        delta_window_size: Optional[int] = None,
-        ofs_delta: bool = True,
-        progress=None) -> Tuple[int, Iterator[UnpackedObject]]:
+    objects: Union[Sequence[ShaFile], Sequence[Tuple[ShaFile, Optional[bytes]]]],
+    *,
+    deltify: Optional[bool] = None,
+    delta_window_size: Optional[int] = None,
+    ofs_delta: bool = True,
+    progress=None,
+) -> Tuple[int, Iterator[UnpackedObject]]:
     """Create pack data from objects.
     """Create pack data from objects.
 
 
     Args:
     Args:
@@ -1836,29 +1913,32 @@ def pack_objects_to_data(
     if deltify:
     if deltify:
         return (
         return (
             count,
             count,
-            deltify_pack_objects(iter(objects), window_size=delta_window_size, progress=progress))  # type: ignore
+            deltify_pack_objects(
+                iter(objects), window_size=delta_window_size, progress=progress
+            ),
+        )  # type: ignore
     else:
     else:
+
         def iter_without_path():
         def iter_without_path():
             for o in objects:
             for o in objects:
                 if isinstance(o, tuple):
                 if isinstance(o, tuple):
                     yield full_unpacked_object(o[0])
                     yield full_unpacked_object(o[0])
                 else:
                 else:
                     yield full_unpacked_object(o)
                     yield full_unpacked_object(o)
-        return (
-            count,
-            iter_without_path()
-        )
+
+        return (count, iter_without_path())
 
 
 
 
 def generate_unpacked_objects(
 def generate_unpacked_objects(
-        container: PackedObjectContainer,
-        object_ids: Sequence[Tuple[ObjectID, Optional[PackHint]]],
-        delta_window_size: Optional[int] = None,
-        deltify: Optional[bool] = None,
-        reuse_deltas: bool = True,
-        ofs_delta: bool = True,
-        other_haves: Optional[Set[bytes]] = None,
-        progress=None) -> Iterator[UnpackedObject]:
+    container: PackedObjectContainer,
+    object_ids: Sequence[Tuple[ObjectID, Optional[PackHint]]],
+    delta_window_size: Optional[int] = None,
+    deltify: Optional[bool] = None,
+    reuse_deltas: bool = True,
+    ofs_delta: bool = True,
+    other_haves: Optional[Set[bytes]] = None,
+    progress=None,
+) -> Iterator[UnpackedObject]:
     """Create pack data from objects.
     """Create pack data from objects.
 
 
     Args:
     Args:
@@ -1867,7 +1947,9 @@ def generate_unpacked_objects(
     """
     """
     todo = dict(object_ids)
     todo = dict(object_ids)
     if reuse_deltas:
     if reuse_deltas:
-        for unpack in find_reusable_deltas(container, set(todo), other_haves=other_haves, progress=progress):
+        for unpack in find_reusable_deltas(
+            container, set(todo), other_haves=other_haves, progress=progress
+        ):
             del todo[sha_to_hex(unpack.sha())]
             del todo[sha_to_hex(unpack.sha())]
             yield unpack
             yield unpack
     if deltify is None:
     if deltify is None:
@@ -1875,13 +1957,14 @@ def generate_unpacked_objects(
         # slow at the moment.
         # slow at the moment.
         deltify = False
         deltify = False
     if deltify:
     if deltify:
-        objects_to_delta = container.iterobjects_subset(todo.keys(), allow_missing=False)
+        objects_to_delta = container.iterobjects_subset(
+            todo.keys(), allow_missing=False
+        )
         yield from deltas_from_sorted_objects(
         yield from deltas_from_sorted_objects(
-            sort_objects_for_delta(
-                (o, todo[o.id])
-                for o in objects_to_delta),
+            sort_objects_for_delta((o, todo[o.id]) for o in objects_to_delta),
             window_size=delta_window_size,
             window_size=delta_window_size,
-            progress=progress)
+            progress=progress,
+        )
     else:
     else:
         for oid in todo:
         for oid in todo:
             yield full_unpacked_object(container[oid])
             yield full_unpacked_object(container[oid])
@@ -1889,20 +1972,23 @@ def generate_unpacked_objects(
 
 
 def full_unpacked_object(o: ShaFile) -> UnpackedObject:
 def full_unpacked_object(o: ShaFile) -> UnpackedObject:
     return UnpackedObject(
     return UnpackedObject(
-        o.type_num, delta_base=None, crc32=None,
+        o.type_num,
+        delta_base=None,
+        crc32=None,
         decomp_chunks=o.as_raw_chunks(),
         decomp_chunks=o.as_raw_chunks(),
-        sha=o.sha().digest())
+        sha=o.sha().digest(),
+    )
 
 
 
 
 def write_pack_from_container(
 def write_pack_from_container(
-        write,
-        container: PackedObjectContainer,
-        object_ids: Sequence[Tuple[ObjectID, Optional[PackHint]]],
-        delta_window_size: Optional[int] = None,
-        deltify: Optional[bool] = None,
-        reuse_deltas: bool = True,
-        compression_level: int = -1,
-        other_haves: Optional[Set[bytes]] = None
+    write,
+    container: PackedObjectContainer,
+    object_ids: Sequence[Tuple[ObjectID, Optional[PackHint]]],
+    delta_window_size: Optional[int] = None,
+    deltify: Optional[bool] = None,
+    reuse_deltas: bool = True,
+    compression_level: int = -1,
+    other_haves: Optional[Set[bytes]] = None,
 ):
 ):
     """Write a new pack data file.
     """Write a new pack data file.
 
 
@@ -1918,10 +2004,13 @@ def write_pack_from_container(
     """
     """
     pack_contents_count = len(object_ids)
     pack_contents_count = len(object_ids)
     pack_contents = generate_unpacked_objects(
     pack_contents = generate_unpacked_objects(
-        container, object_ids, delta_window_size=delta_window_size,
+        container,
+        object_ids,
+        delta_window_size=delta_window_size,
         deltify=deltify,
         deltify=deltify,
         reuse_deltas=reuse_deltas,
         reuse_deltas=reuse_deltas,
-        other_haves=other_haves)
+        other_haves=other_haves,
+    )
 
 
     return write_pack_data(
     return write_pack_data(
         write,
         write,
@@ -1932,12 +2021,12 @@ def write_pack_from_container(
 
 
 
 
 def write_pack_objects(
 def write_pack_objects(
-        write,
-        objects: Union[Sequence[ShaFile], Sequence[Tuple[ShaFile, Optional[bytes]]]],
-        *,
-        delta_window_size: Optional[int] = None,
-        deltify: Optional[bool] = None,
-        compression_level: int = -1
+    write,
+    objects: Union[Sequence[ShaFile], Sequence[Tuple[ShaFile, Optional[bytes]]]],
+    *,
+    delta_window_size: Optional[int] = None,
+    deltify: Optional[bool] = None,
+    compression_level: int = -1,
 ):
 ):
     """Write a new pack data file.
     """Write a new pack data file.
 
 
@@ -1950,8 +2039,7 @@ def write_pack_objects(
       compression_level: the zlib compression level to use
       compression_level: the zlib compression level to use
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     """
-    pack_contents_count, pack_contents = pack_objects_to_data(
-        objects, deltify=deltify)
+    pack_contents_count, pack_contents = pack_objects_to_data(objects, deltify=deltify)
 
 
     return write_pack_data(
     return write_pack_data(
         write,
         write,
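
A sketch of driving write_pack_objects, as reformatted above, with an in-memory buffer (the blob is illustrative):

from io import BytesIO
from dulwich.objects import Blob
from dulwich.pack import write_pack_objects

buf = BytesIO()
blob = Blob.from_string(b"content")
entries, checksum = write_pack_objects(buf.write, [(blob, None)])
# entries maps object id -> (offset, crc32); checksum is the pack SHA-1.
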
@@ -1962,12 +2050,23 @@ def write_pack_objects(
 
 
 
 
 class PackChunkGenerator:
 class PackChunkGenerator:
-
-    def __init__(self, num_records=None, records=None, progress=None, compression_level=-1, reuse_compressed=True) -> None:
+    def __init__(
+        self,
+        num_records=None,
+        records=None,
+        progress=None,
+        compression_level=-1,
+        reuse_compressed=True,
+    ) -> None:
         self.cs = sha1(b"")
         self.cs = sha1(b"")
         self.entries: Dict[Union[int, bytes], Tuple[int, int]] = {}
         self.entries: Dict[Union[int, bytes], Tuple[int, int]] = {}
         self._it = self._pack_data_chunks(
         self._it = self._pack_data_chunks(
-            num_records=num_records, records=records, progress=progress, compression_level=compression_level, reuse_compressed=reuse_compressed)
+            num_records=num_records,
+            records=records,
+            progress=progress,
+            compression_level=compression_level,
+            reuse_compressed=reuse_compressed,
+        )
 
 
     def sha1digest(self):
     def sha1digest(self):
         return self.cs.digest()
         return self.cs.digest()
@@ -1975,7 +2074,15 @@ class PackChunkGenerator:
     def __iter__(self):
     def __iter__(self):
         return self._it
         return self._it
 
 
-    def _pack_data_chunks(self, records: Iterator[UnpackedObject], *, num_records=None, progress=None, compression_level: int = -1, reuse_compressed: bool = True) -> Iterator[bytes]:
+    def _pack_data_chunks(
+        self,
+        records: Iterator[UnpackedObject],
+        *,
+        num_records=None,
+        progress=None,
+        compression_level: int = -1,
+        reuse_compressed: bool = True,
+    ) -> Iterator[bytes]:
         """Iterate pack data file chunks.
         """Iterate pack data file chunks.
 
 
         Args:
         Args:
@@ -1987,7 +2094,7 @@ class PackChunkGenerator:
         """
         """
         # Write the pack
         # Write the pack
         if num_records is None:
         if num_records is None:
-            num_records = len(records)   # type: ignore
+            num_records = len(records)  # type: ignore
         offset = 0
         offset = 0
         for chunk in pack_header_chunks(num_records):
         for chunk in pack_header_chunks(num_records):
             yield chunk
             yield chunk
@@ -1997,7 +2104,9 @@ class PackChunkGenerator:
         for i, unpacked in enumerate(records):
         for i, unpacked in enumerate(records):
             type_num = unpacked.pack_type_num
             type_num = unpacked.pack_type_num
             if progress is not None and i % 1000 == 0:
             if progress is not None and i % 1000 == 0:
-                progress(("writing pack data: %d/%d\r" % (i, num_records)).encode("ascii"))
+                progress(
+                    ("writing pack data: %d/%d\r" % (i, num_records)).encode("ascii")
+                )
             raw: Union[List[bytes], Tuple[int, List[bytes]], Tuple[bytes, List[bytes]]]
             raw: Union[List[bytes], Tuple[int, List[bytes]], Tuple[bytes, List[bytes]]]
             if unpacked.delta_base is not None:
             if unpacked.delta_base is not None:
                 try:
                 try:
@@ -2014,7 +2123,9 @@ class PackChunkGenerator:
             if unpacked.comp_chunks is not None and reuse_compressed:
             if unpacked.comp_chunks is not None and reuse_compressed:
                 chunks = unpacked.comp_chunks
                 chunks = unpacked.comp_chunks
             else:
             else:
-                chunks = pack_object_chunks(type_num, raw, compression_level=compression_level)
+                chunks = pack_object_chunks(
+                    type_num, raw, compression_level=compression_level
+                )
             crc32 = 0
             crc32 = 0
             object_size = 0
             object_size = 0
             for chunk in chunks:
             for chunk in chunks:
@@ -2027,13 +2138,21 @@ class PackChunkGenerator:
             offset += object_size
             offset += object_size
         if actual_num_records != num_records:
         if actual_num_records != num_records:
             raise AssertionError(
             raise AssertionError(
-                'actual records written differs: %d != %d' % (
-                    actual_num_records, num_records))
+                "actual records written differs: %d != %d"
+                % (actual_num_records, num_records)
+            )
 
 
         yield self.cs.digest()
         yield self.cs.digest()
 
 
 
 
-def write_pack_data(write, records: Iterator[UnpackedObject], *, num_records=None, progress=None, compression_level=-1):
+def write_pack_data(
+    write,
+    records: Iterator[UnpackedObject],
+    *,
+    num_records=None,
+    progress=None,
+    compression_level=-1,
+):
     """Write a new pack data file.
     """Write a new pack data file.
 
 
     Args:
     Args:
@@ -2045,8 +2164,11 @@ def write_pack_data(write, records: Iterator[UnpackedObject], *, num_records=Non
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     """
     chunk_generator = PackChunkGenerator(
     chunk_generator = PackChunkGenerator(
-        num_records=num_records, records=records, progress=progress,
-        compression_level=compression_level)
+        num_records=num_records,
+        records=records,
+        progress=progress,
+        compression_level=compression_level,
+    )
     for chunk in chunk_generator:
     for chunk in chunk_generator:
         write(chunk)
         write(chunk)
     return chunk_generator.entries, chunk_generator.sha1digest()
     return chunk_generator.entries, chunk_generator.sha1digest()
@@ -2064,13 +2186,13 @@ def write_pack_index_v1(f, entries, pack_checksum):
     """
     """
     f = SHA1Writer(f)
     f = SHA1Writer(f)
     fan_out_table = defaultdict(lambda: 0)
     fan_out_table = defaultdict(lambda: 0)
-    for (name, offset, entry_checksum) in entries:
+    for name, offset, entry_checksum in entries:
         fan_out_table[ord(name[:1])] += 1
         fan_out_table[ord(name[:1])] += 1
     # Fan-out table
     # Fan-out table
     for i in range(0x100):
     for i in range(0x100):
         f.write(struct.pack(">L", fan_out_table[i]))
         f.write(struct.pack(">L", fan_out_table[i]))
         fan_out_table[i + 1] += fan_out_table[i]
         fan_out_table[i + 1] += fan_out_table[i]
-    for (name, offset, entry_checksum) in entries:
+    for name, offset, entry_checksum in entries:
         if not (offset <= 0xFFFFFFFF):
         if not (offset <= 0xFFFFFFFF):
             raise TypeError("pack format 1 only supports offsets < 2Gb")
             raise TypeError("pack format 1 only supports offsets < 2Gb")
         f.write(struct.pack(">L20s", offset, name))
         f.write(struct.pack(">L20s", offset, name))
@@ -2118,9 +2240,9 @@ def create_delta(base_buf, target_buf):
       target_buf: Target buffer
       target_buf: Target buffer
     """
     """
     if isinstance(base_buf, list):
     if isinstance(base_buf, list):
-        base_buf = b''.join(base_buf)
+        base_buf = b"".join(base_buf)
     if isinstance(target_buf, list):
     if isinstance(target_buf, list):
-        target_buf = b''.join(target_buf)
+        target_buf = b"".join(target_buf)
     assert isinstance(base_buf, bytes)
     assert isinstance(base_buf, bytes)
     assert isinstance(target_buf, bytes)
     assert isinstance(target_buf, bytes)
     # write delta header
     # write delta header
@@ -2149,11 +2271,11 @@ def create_delta(base_buf, target_buf):
             o = j1
             o = j1
             while s > 127:
             while s > 127:
                 yield bytes([127])
                 yield bytes([127])
-                yield memoryview(target_buf)[o:o + 127]
+                yield memoryview(target_buf)[o : o + 127]
                 s -= 127
                 s -= 127
                 o += 127
                 o += 127
             yield bytes([s])
             yield bytes([s])
-            yield memoryview(target_buf)[o:o + s]
+            yield memoryview(target_buf)[o : o + s]
 
 
 
 
 def apply_delta(src_buf, delta):
 def apply_delta(src_buf, delta):
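
create_delta yields the delta as chunks and apply_delta returns the reconstructed target as a list of chunks, so a round trip joins both sides (a small self-check, not from the commit):

from dulwich.pack import apply_delta, create_delta

base = b"the quick brown fox"
target = b"the quick brown fox jumps"
delta = b"".join(create_delta(base, target))
assert b"".join(apply_delta(base, delta)) == target
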
@@ -2228,7 +2350,8 @@ def apply_delta(src_buf, delta):
 
 
 
 
 def write_pack_index_v2(
 def write_pack_index_v2(
-        f, entries: Iterable[PackIndexEntry], pack_checksum: bytes) -> bytes:
+    f, entries: Iterable[PackIndexEntry], pack_checksum: bytes
+) -> bytes:
     """Write a new pack index file.
     """Write a new pack index file.
 
 
     Args:
     Args:
@@ -2242,22 +2365,22 @@ def write_pack_index_v2(
     f.write(b"\377tOc")  # Magic!
     f.write(b"\377tOc")  # Magic!
     f.write(struct.pack(">L", 2))
     f.write(struct.pack(">L", 2))
     fan_out_table: Dict[int, int] = defaultdict(lambda: 0)
     fan_out_table: Dict[int, int] = defaultdict(lambda: 0)
-    for (name, offset, entry_checksum) in entries:
+    for name, offset, entry_checksum in entries:
         fan_out_table[ord(name[:1])] += 1
         fan_out_table[ord(name[:1])] += 1
     # Fan-out table
     # Fan-out table
     largetable: List[int] = []
     largetable: List[int] = []
     for i in range(0x100):
     for i in range(0x100):
         f.write(struct.pack(b">L", fan_out_table[i]))
         f.write(struct.pack(b">L", fan_out_table[i]))
         fan_out_table[i + 1] += fan_out_table[i]
         fan_out_table[i + 1] += fan_out_table[i]
-    for (name, offset, entry_checksum) in entries:
+    for name, offset, entry_checksum in entries:
         f.write(name)
         f.write(name)
-    for (name, offset, entry_checksum) in entries:
+    for name, offset, entry_checksum in entries:
         f.write(struct.pack(b">L", entry_checksum))
         f.write(struct.pack(b">L", entry_checksum))
-    for (name, offset, entry_checksum) in entries:
-        if offset < 2 ** 31:
+    for name, offset, entry_checksum in entries:
+        if offset < 2**31:
             f.write(struct.pack(b">L", offset))
             f.write(struct.pack(b">L", offset))
         else:
         else:
-            f.write(struct.pack(b">L", 2 ** 31 + len(largetable)))
+            f.write(struct.pack(b">L", 2**31 + len(largetable)))
             largetable.append(offset)
             largetable.append(offset)
     for offset in largetable:
     for offset in largetable:
         f.write(struct.pack(b">Q", offset))
         f.write(struct.pack(b">Q", offset))
@@ -2278,7 +2401,9 @@ class Pack:
     _data: Optional[PackData]
     _data: Optional[PackData]
     _idx: Optional[PackIndex]
     _idx: Optional[PackIndex]
 
 
-    def __init__(self, basename, resolve_ext_ref: Optional[ResolveExtRefFn] = None) -> None:
+    def __init__(
+        self, basename, resolve_ext_ref: Optional[ResolveExtRefFn] = None
+    ) -> None:
         self._basename = basename
         self._basename = basename
         self._data = None
         self._data = None
         self._idx = None
         self._idx = None
@@ -2361,7 +2486,9 @@ class Pack:
 
 
     def check_length_and_checksum(self) -> None:
     def check_length_and_checksum(self) -> None:
         """Sanity check the length and checksum of the pack index and data."""
         """Sanity check the length and checksum of the pack index and data."""
-        assert len(self.index) == len(self.data), f"Length mismatch: {len(self.index)} (index) != {len(self.data)} (data)"
+        assert len(self.index) == len(
+            self.data
+        ), f"Length mismatch: {len(self.index)} (index) != {len(self.data)} (data)"
         idx_stored_checksum = self.index.get_pack_checksum()
         idx_stored_checksum = self.index.get_pack_checksum()
         data_stored_checksum = self.data.get_stored_checksum()
         data_stored_checksum = self.data.get_stored_checksum()
         if idx_stored_checksum != data_stored_checksum:
         if idx_stored_checksum != data_stored_checksum:
@@ -2413,16 +2540,28 @@ class Pack:
             PackInflater.for_pack_data(self.data, resolve_ext_ref=self.resolve_ext_ref)
             PackInflater.for_pack_data(self.data, resolve_ext_ref=self.resolve_ext_ref)
         )
         )
 
 
-    def iterobjects_subset(self, shas: Iterable[ObjectID], *, allow_missing: bool = False) -> Iterator[ShaFile]:
+    def iterobjects_subset(
+        self, shas: Iterable[ObjectID], *, allow_missing: bool = False
+    ) -> Iterator[ShaFile]:
         return (
         return (
             uo
             uo
-            for uo in
-            PackInflater.for_pack_subset(
-                self, shas, allow_missing=allow_missing,
-                resolve_ext_ref=self.resolve_ext_ref)
-            if uo.id in shas)
+            for uo in PackInflater.for_pack_subset(
+                self,
+                shas,
+                allow_missing=allow_missing,
+                resolve_ext_ref=self.resolve_ext_ref,
+            )
+            if uo.id in shas
+        )
 
 
-    def iter_unpacked_subset(self, shas: Iterable[ObjectID], *, include_comp: bool = False, allow_missing: bool = False, convert_ofs_delta: bool = False) -> Iterator[UnpackedObject]:
+    def iter_unpacked_subset(
+        self,
+        shas: Iterable[ObjectID],
+        *,
+        include_comp: bool = False,
+        allow_missing: bool = False,
+        convert_ofs_delta: bool = False,
+    ) -> Iterator[UnpackedObject]:
         ofs_pending: Dict[int, List[UnpackedObject]] = defaultdict(list)
         ofs_pending: Dict[int, List[UnpackedObject]] = defaultdict(list)
         ofs: Dict[bytes, int] = {}
         ofs: Dict[bytes, int] = {}
         todo = set(shas)
         todo = set(shas)
@@ -2452,7 +2591,9 @@ class Pack:
             raise UnresolvedDeltas(todo)
             raise UnresolvedDeltas(todo)
 
 
     def iter_unpacked(self, include_comp=False):
     def iter_unpacked(self, include_comp=False):
-        ofs_to_entries = {ofs: (sha, crc32) for (sha, ofs, crc32) in self.index.iterentries()}
+        ofs_to_entries = {
+            ofs: (sha, crc32) for (sha, ofs, crc32) in self.index.iterentries()
+        }
         for unpacked in self.data.iter_unpacked(include_comp=include_comp):
         for unpacked in self.data.iter_unpacked(include_comp=include_comp):
             (sha, crc32) = ofs_to_entries[unpacked.offset]
             (sha, crc32) = ofs_to_entries[unpacked.offset]
             unpacked._sha = sha
             unpacked._sha = sha
@@ -2489,7 +2630,9 @@ class Pack:
             raise KeyError(sha)
             raise KeyError(sha)
         return offset, type, obj
         return offset, type, obj
 
 
-    def resolve_object(self, offset: int, type: int, obj, get_ref=None) -> Tuple[int, Iterable[bytes]]:
+    def resolve_object(
+        self, offset: int, type: int, obj, get_ref=None
+    ) -> Tuple[int, Iterable[bytes]]:
         """Resolve an object, possibly resolving deltas when necessary.
         """Resolve an object, possibly resolving deltas when necessary.
 
 
         Returns: Tuple with object type and contents.
         Returns: Tuple with object type and contents.
@@ -2531,7 +2674,9 @@ class Pack:
                 self.data._offset_cache[prev_offset] = base_type, chunks
                 self.data._offset_cache[prev_offset] = base_type, chunks
         return base_type, chunks
         return base_type, chunks
 
 
-    def entries(self, progress: Optional[ProgressFn] = None) -> Iterator[PackIndexEntry]:
+    def entries(
+        self, progress: Optional[ProgressFn] = None
+    ) -> Iterator[PackIndexEntry]:
         """Yield entries summarizing the contents of this pack.
         """Yield entries summarizing the contents of this pack.
 
 
         Args:
         Args:
@@ -2540,9 +2685,12 @@ class Pack:
         Returns: iterator of tuples with (sha, offset, crc32)
         Returns: iterator of tuples with (sha, offset, crc32)
         """
         """
         return self.data.iterentries(
         return self.data.iterentries(
-            progress=progress, resolve_ext_ref=self.resolve_ext_ref)
+            progress=progress, resolve_ext_ref=self.resolve_ext_ref
+        )
 
 
-    def sorted_entries(self, progress: Optional[ProgressFn] = None) -> Iterator[PackIndexEntry]:
+    def sorted_entries(
+        self, progress: Optional[ProgressFn] = None
+    ) -> Iterator[PackIndexEntry]:
         """Return entries in this pack, sorted by SHA.
         """Return entries in this pack, sorted by SHA.
 
 
         Args:
         Args:
@@ -2551,9 +2699,12 @@ class Pack:
         Returns: Iterator of tuples with (sha, offset, crc32)
         Returns: Iterator of tuples with (sha, offset, crc32)
         """
         """
         return self.data.sorted_entries(
         return self.data.sorted_entries(
-            progress=progress, resolve_ext_ref=self.resolve_ext_ref)
+            progress=progress, resolve_ext_ref=self.resolve_ext_ref
+        )
 
 
-    def get_unpacked_object(self, sha: bytes, *, include_comp: bool = False, convert_ofs_delta: bool = True) -> UnpackedObject:
+    def get_unpacked_object(
+        self, sha: bytes, *, include_comp: bool = False, convert_ofs_delta: bool = True
+    ) -> UnpackedObject:
         """Get the unpacked object for a sha.
         """Get the unpacked object for a sha.
 
 
         Args:
         Args:
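
Tying together the Pack methods touched above: open an existing pack by basename, verify it, and iterate its objects (path illustrative):

from dulwich.pack import Pack

p = Pack("/tmp/demo")  # expects /tmp/demo.pack and /tmp/demo.idx
p.check_length_and_checksum()
for obj in p.iterobjects():
    print(obj.id)
p.close()
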
@@ -2569,7 +2720,14 @@ class Pack:
         return unpacked
         return unpacked
 
 
 
 
-def extend_pack(f: BinaryIO, object_ids: Set[ObjectID], get_raw, *, compression_level=-1, progress=None) -> Tuple[bytes, List]:
+def extend_pack(
+    f: BinaryIO,
+    object_ids: Set[ObjectID],
+    get_raw,
+    *,
+    compression_level=-1,
+    progress=None,
+) -> Tuple[bytes, List]:
     """Extend a pack file with more objects.
     """Extend a pack file with more objects.
 
 
     The caller should make sure that object_ids does not contain any objects
     The caller should make sure that object_ids does not contain any objects
@@ -2597,7 +2755,11 @@ def extend_pack(f: BinaryIO, object_ids: Set[ObjectID], get_raw, *, compression_
     # Complete the pack.
     # Complete the pack.
     for i, object_id in enumerate(object_ids):
     for i, object_id in enumerate(object_ids):
         if progress is not None:
         if progress is not None:
-            progress(("writing extra base objects: %d/%d\r" % (i, len(object_ids))).encode("ascii"))
+            progress(
+                ("writing extra base objects: %d/%d\r" % (i, len(object_ids))).encode(
+                    "ascii"
+                )
+            )
         assert len(object_id) == 20
         assert len(object_id) == 20
         type_num, data = get_raw(object_id)
         type_num, data = get_raw(object_id)
         offset = f.tell()
         offset = f.tell()

+ 7 - 5
dulwich/patch.py

@@ -136,15 +136,17 @@ def unified_diff(
             started = True
             started = True
             fromdate = f"\t{fromfiledate}" if fromfiledate else ""
             fromdate = f"\t{fromfiledate}" if fromfiledate else ""
             todate = f"\t{tofiledate}" if tofiledate else ""
             todate = f"\t{tofiledate}" if tofiledate else ""
-            yield f"--- {fromfile.decode(tree_encoding)}{fromdate}{lineterm}".encode(output_encoding)
-            yield f"+++ {tofile.decode(tree_encoding)}{todate}{lineterm}".encode(output_encoding)
+            yield f"--- {fromfile.decode(tree_encoding)}{fromdate}{lineterm}".encode(
+                output_encoding
+            )
+            yield f"+++ {tofile.decode(tree_encoding)}{todate}{lineterm}".encode(
+                output_encoding
+            )
 
 
         first, last = group[0], group[-1]
         first, last = group[0], group[-1]
         file1_range = _format_range_unified(first[1], last[2])
         file1_range = _format_range_unified(first[1], last[2])
         file2_range = _format_range_unified(first[3], last[4])
         file2_range = _format_range_unified(first[3], last[4])
-        yield f"@@ -{file1_range} +{file2_range} @@{lineterm}".encode(
-            output_encoding
-        )
+        yield f"@@ -{file1_range} +{file2_range} @@{lineterm}".encode(output_encoding)
 
 
         for tag, i1, i2, j1, j2 in group:
         for tag, i1, i2, j1, j2 in group:
             if tag == "equal":
             if tag == "equal":

+ 47 - 28
dulwich/porcelain.py

@@ -205,6 +205,7 @@ def parse_timezone_format(tz_str):
 
 
     # RFC 2822
     # RFC 2822
     import email.utils
     import email.utils
+
     rfc_2822 = email.utils.parsedate_tz(tz_str)
     rfc_2822 = email.utils.parsedate_tz(tz_str)
     if rfc_2822:
     if rfc_2822:
         return rfc_2822[9]
         return rfc_2822[9]
@@ -213,7 +214,9 @@ def parse_timezone_format(tz_str):
 
 
     # Supported offsets:
     # Supported offsets:
     # sHHMM, sHH:MM, sHH
     # sHHMM, sHH:MM, sHH
-    iso_8601_pattern = re.compile("[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$")
+    iso_8601_pattern = re.compile(
+        "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
+    )
     match = re.search(iso_8601_pattern, tz_str)
     match = re.search(iso_8601_pattern, tz_str)
     total_secs = 0
     total_secs = 0
     if match:
     if match:
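
parse_timezone_format, whose ISO 8601 pattern is wrapped above, returns the UTC offset in seconds; two sketched inputs, with expected values derived from the rules shown in the code:

from dulwich.porcelain import parse_timezone_format

# RFC 2822 date: handled by email.utils.parsedate_tz
assert parse_timezone_format("Mon, 20 Nov 1995 19:12:08 -0500") == -18000
# ISO 8601 style offset; the pattern requires a digit before the sign
parse_timezone_format("1995-11-20T19:12:08+0100")  # -> 3600
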
@@ -492,7 +495,7 @@ def clone(
     depth: Optional[int] = None,
     depth: Optional[int] = None,
     branch: Optional[Union[str, bytes]] = None,
     branch: Optional[Union[str, bytes]] = None,
     config: Optional[Config] = None,
     config: Optional[Config] = None,
-    **kwargs
+    **kwargs,
 ):
 ):
     """Clone a local or remote git repository.
     """Clone a local or remote git repository.
 
 
@@ -536,8 +539,7 @@ def clone(
 
 
     mkdir = not os.path.exists(target)
     mkdir = not os.path.exists(target)
 
 
-    (client, path) = get_transport_and_path(
-        source, config=config, **kwargs)
+    (client, path) = get_transport_and_path(source, config=config, **kwargs)
 
 
     return client.clone(
     return client.clone(
         path,
         path,
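
A minimal invocation of the clone wrapper above; the URL, target directory, and depth are placeholders:

from dulwich import porcelain

# Extra keyword arguments are forwarded to get_transport_and_path().
repo = porcelain.clone("https://example.com/some/repo.git", "target-dir", depth=1)
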
@@ -958,8 +960,8 @@ def rev_list(repo, commits, outstream=sys.stdout):
 
 
 
 
 def _canonical_part(url: str) -> str:
 def _canonical_part(url: str) -> str:
-    name = url.rsplit('/', 1)[-1]
-    if name.endswith('.git'):
+    name = url.rsplit("/", 1)[-1]
+    if name.endswith(".git"):
         name = name[:-4]
         name = name[:-4]
     return name
     return name
 
 
@@ -998,10 +1000,10 @@ def submodule_init(repo):
     """
     """
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         config = r.get_config()
         config = r.get_config()
-        gitmodules_path = os.path.join(r.path, '.gitmodules')
+        gitmodules_path = os.path.join(r.path, ".gitmodules")
         for path, url, name in read_submodules(gitmodules_path):
         for path, url, name in read_submodules(gitmodules_path):
-            config.set((b'submodule', name), b'active', True)
-            config.set((b'submodule', name), b'url', url)
+            config.set((b"submodule", name), b"active", True)
+            config.set((b"submodule", name), b"url", url)
         config.write_to_path()
         config.write_to_path()
 
 
 
 
@@ -1012,6 +1014,7 @@ def submodule_list(repo):
       repo: Path to repository
       repo: Path to repository
     """
     """
     from .submodule import iter_cached_submodules
     from .submodule import iter_cached_submodules
+
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
         for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
         for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
             yield path, sha.decode(DEFAULT_ENCODING)
             yield path, sha.decode(DEFAULT_ENCODING)
@@ -1027,7 +1030,7 @@ def tag_create(
     tag_time=None,
     tag_time=None,
     tag_timezone=None,
     tag_timezone=None,
     sign=False,
     sign=False,
-    encoding=DEFAULT_ENCODING
+    encoding=DEFAULT_ENCODING,
 ):
 ):
     """Creates a tag in git via dulwich calls.
     """Creates a tag in git via dulwich calls.
 
 
@@ -1153,7 +1156,7 @@ def push(
     outstream=default_bytes_out_stream,
     outstream=default_bytes_out_stream,
     errstream=default_bytes_err_stream,
     errstream=default_bytes_err_stream,
     force=False,
     force=False,
-    **kwargs
+    **kwargs,
 ):
 ):
     """Remote push with dulwich via dulwich.client.
     """Remote push with dulwich via dulwich.client.
 
 
@@ -1183,7 +1186,7 @@ def push(
             selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
             selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
             new_refs = {}
             new_refs = {}
             # TODO: Handle selected_refs == {None: None}
             # TODO: Handle selected_refs == {None: None}
-            for (lh, rh, force_ref) in selected_refs:
+            for lh, rh, force_ref in selected_refs:
                 if lh is None:
                 if lh is None:
                     new_refs[rh] = ZERO_SHA
                     new_refs[rh] = ZERO_SHA
                     remote_changed_refs[rh] = None
                     remote_changed_refs[rh] = None
@@ -1191,9 +1194,7 @@ def push(
                     try:
                     try:
                         localsha = r.refs[lh]
                         localsha = r.refs[lh]
                     except KeyError as exc:
                     except KeyError as exc:
-                        raise Error(
-                            "No valid ref %s in local repository" % lh
-                        ) from exc
+                        raise Error("No valid ref %s in local repository" % lh) from exc
                     if not force_ref and rh in refs:
                     if not force_ref and rh in refs:
                         check_diverged(r, refs[rh], localsha)
                         check_diverged(r, refs[rh], localsha)
                     new_refs[rh] = localsha
                     new_refs[rh] = localsha
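
The caller side of the refspec loop above, sketched with an illustrative remote and branch:

from dulwich import porcelain

# force=False (the default) means a diverged remote ref raises an error
# via check_diverged() rather than being overwritten.
porcelain.push(".", "origin", "refs/heads/master:refs/heads/master")
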
@@ -1238,7 +1239,7 @@ def pull(
     errstream=default_bytes_err_stream,
     errstream=default_bytes_err_stream,
     fast_forward=True,
     fast_forward=True,
     force=False,
     force=False,
-    **kwargs
+    **kwargs,
 ):
 ):
     """Pull from remote via dulwich.client.
     """Pull from remote via dulwich.client.
 
 
@@ -1273,7 +1274,7 @@ def pull(
         fetch_result = client.fetch(
         fetch_result = client.fetch(
             path, r, progress=errstream.write, determine_wants=determine_wants
             path, r, progress=errstream.write, determine_wants=determine_wants
         )
         )
-        for (lh, rh, force_ref) in selected_refs:
+        for lh, rh, force_ref in selected_refs:
             if not force_ref and rh in r.refs:
             if not force_ref and rh in r.refs:
                 try:
                 try:
                     check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                     check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
@@ -1281,8 +1282,7 @@ def pull(
                     if fast_forward:
                     if fast_forward:
                         raise
                         raise
                     else:
                     else:
-                        raise NotImplementedError(
-                            "merge is not yet supported") from exc
+                        raise NotImplementedError("merge is not yet supported") from exc
             r.refs[rh] = fetch_result.refs[lh]
             r.refs[rh] = fetch_result.refs[lh]
         if selected_refs:
         if selected_refs:
             r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]
             r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]
@@ -1666,7 +1666,7 @@ def fetch(
     prune=False,
     prune=False,
     prune_tags=False,
     prune_tags=False,
     force=False,
     force=False,
-    **kwargs
+    **kwargs,
 ):
 ):
     """Fetch objects from a remote server.
     """Fetch objects from a remote server.
 
 
@@ -1729,7 +1729,15 @@ def repack(repo):
         r.object_store.pack_loose_objects()
         r.object_store.pack_loose_objects()
 
 
 
 
-def pack_objects(repo, object_ids, packf, idxf, delta_window_size=None, deltify=None, reuse_deltas=True):
+def pack_objects(
+    repo,
+    object_ids,
+    packf,
+    idxf,
+    delta_window_size=None,
+    deltify=None,
+    reuse_deltas=True,
+):
     """Pack objects into a file.
     """Pack objects into a file.
 
 
     Args:
     Args:
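
A sketch of pack_objects with its new keyword layout, packing every object in a repository (paths illustrative):

from dulwich import porcelain
from dulwich.repo import Repo

r = Repo(".")
object_ids = list(r.object_store)  # hex ids; everything, for illustration
with open("/tmp/out.pack", "wb") as packf, open("/tmp/out.idx", "wb") as idxf:
    porcelain.pack_objects(r, object_ids, packf, idxf, deltify=False)
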
@@ -1774,7 +1782,7 @@ def ls_tree(
     """
     """
 
 
     def list_tree(store, treeid, base):
     def list_tree(store, treeid, base):
-        for (name, mode, sha) in store[treeid].iteritems():
+        for name, mode, sha in store[treeid].iteritems():
             if base:
             if base:
                 name = posixpath.join(base, name)
                 name = posixpath.join(base, name)
             if name_only:
             if name_only:
@@ -1875,8 +1883,7 @@ def update_head(repo, target, detached=False, new_branch=None):
             r.refs.set_symbolic_ref(b"HEAD", to_set)
             r.refs.set_symbolic_ref(b"HEAD", to_set)
 
 
 
 
-def reset_file(repo, file_path: str, target: bytes = b'HEAD',
-               symlink_fn=None):
+def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None):
     """Reset the file to specific commit or branch.
     """Reset the file to specific commit or branch.
 
 
     Args:
     Args:
@@ -1896,7 +1903,7 @@ def reset_file(repo, file_path: str, target: bytes = b'HEAD',
 
 
 def _update_head_during_checkout_branch(repo, target):
 def _update_head_during_checkout_branch(repo, target):
     checkout_target = None
     checkout_target = None
-    if target == b'HEAD':  # Do not update head while trying to checkout to HEAD.
+    if target == b"HEAD":  # Do not update head while trying to checkout to HEAD.
         pass
         pass
     elif target in repo.refs.keys(base=LOCAL_BRANCH_PREFIX):
     elif target in repo.refs.keys(base=LOCAL_BRANCH_PREFIX):
         update_head(repo, target)
         update_head(repo, target)
@@ -1908,7 +1915,9 @@ def _update_head_during_checkout_branch(repo, target):
         if config.has_section(section):
         if config.has_section(section):
             checkout_target = target.replace(name + b"/", b"")
             checkout_target = target.replace(name + b"/", b"")
             try:
             try:
-                branch_create(repo, checkout_target, (LOCAL_REMOTE_PREFIX + target).decode())
+                branch_create(
+                    repo, checkout_target, (LOCAL_REMOTE_PREFIX + target).decode()
+                )
             except Error:
             except Error:
                 pass
                 pass
             update_head(repo, LOCAL_BRANCH_PREFIX + checkout_target)
             update_head(repo, LOCAL_BRANCH_PREFIX + checkout_target)
@@ -1941,7 +1950,14 @@ def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
         _update_head_during_checkout_branch(repo, target)
         _update_head_during_checkout_branch(repo, target)
     else:
     else:
         status_report = status(repo)
         status_report = status(repo)
-        changes = list(set(status_report[0]['add'] + status_report[0]['delete'] + status_report[0]['modify'] + status_report[1]))
+        changes = list(
+            set(
+                status_report[0]["add"]
+                + status_report[0]["delete"]
+                + status_report[0]["modify"]
+                + status_report[1]
+            )
+        )
         index = 0
         index = 0
         while index < len(changes):
         while index < len(changes):
             change = changes[index]
             change = changes[index]
@@ -1951,7 +1967,10 @@ def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
                     target_tree.lookup_path(repo.object_store.__getitem__, change)
                     target_tree.lookup_path(repo.object_store.__getitem__, change)
                     index += 1
                     index += 1
                 except KeyError:
                 except KeyError:
-                    raise CheckoutError('Your local changes to the following files would be overwritten by checkout: ' + change.decode())
+                    raise CheckoutError(
+                        "Your local changes to the following files would be overwritten by checkout: "
+                        + change.decode()
+                    )
             except KeyError:
             except KeyError:
                 changes.pop(index)
                 changes.pop(index)
 
 

+ 2 - 7
dulwich/protocol.py

@@ -343,9 +343,7 @@ class ReceivableProtocol(Protocol):
     def __init__(
     def __init__(
         self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE
         self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE
     ) -> None:
     ) -> None:
-        super().__init__(
-            self.read, write, close=close, report_activity=report_activity
-        )
+        super().__init__(self.read, write, close=close, report_activity=report_activity)
         self._recv = recv
         self._recv = recv
         self._rbuf = BytesIO()
         self._rbuf = BytesIO()
         self._rbufsize = rbufsize
         self._rbufsize = rbufsize
@@ -558,10 +556,7 @@ def format_ref_line(ref, sha, capabilities=None):
     if capabilities is None:
     if capabilities is None:
         return sha + b" " + ref + b"\n"
         return sha + b" " + ref + b"\n"
     else:
     else:
-        return (
-            sha + b" " + ref + b"\0"
-            + format_capability_line(capabilities)
-            + b"\n")
+        return sha + b" " + ref + b"\0" + format_capability_line(capabilities) + b"\n"
 
 
 
 
 def format_shallow_line(sha):
 def format_shallow_line(sha):
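
format_ref_line, collapsed to a single return above, builds one line of a ref advertisement; the sha below is a placeholder:

from dulwich.protocol import format_ref_line

sha = b"a" * 40
assert format_ref_line(b"refs/heads/master", sha) == sha + b" refs/heads/master\n"
# With capabilities, a NUL byte separates them from the ref name.
format_ref_line(b"refs/heads/master", sha, [b"side-band-64k"])
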

+ 24 - 23
dulwich/refs.py

@@ -351,8 +351,9 @@ class RefsContainer:
         """
         """
         raise NotImplementedError(self.set_if_equals)
         raise NotImplementedError(self.set_if_equals)
 
 
-    def add_if_new(self, name, ref, committer=None, timestamp=None,
-                   timezone=None, message=None):
+    def add_if_new(
+        self, name, ref, committer=None, timestamp=None, timezone=None, message=None
+    ):
         """Add a new reference only if it does not already exist.
         """Add a new reference only if it does not already exist.
 
 
         Args:
         Args:
@@ -1040,7 +1041,7 @@ class DiskRefsContainer(RefsContainer):
             except ValueError:
             except ValueError:
                 break
                 break
 
 
-            if parent == b'refs':
+            if parent == b"refs":
                 break
                 break
             parent_filename = self.refpath(parent)
             parent_filename = self.refpath(parent)
             try:
             try:
@@ -1146,6 +1147,7 @@ def write_info_refs(refs, store: ObjectContainer):
     """Generate info refs."""
     """Generate info refs."""
     # TODO: Avoid recursive import :(
     # TODO: Avoid recursive import :(
     from .object_store import peel_sha
     from .object_store import peel_sha
+
     for name, sha in sorted(refs.items()):
     for name, sha in sorted(refs.items()):
         # get_refs() includes HEAD as a special case, but we don't want to
         # get_refs() includes HEAD as a special case, but we don't want to
         # advertise it
         # advertise it
@@ -1168,9 +1170,7 @@ def is_local_branch(x):
 def strip_peeled_refs(refs):
 def strip_peeled_refs(refs):
     """Remove all peeled refs."""
     """Remove all peeled refs."""
     return {
     return {
-        ref: sha
-        for (ref, sha) in refs.items()
-        if not ref.endswith(PEELED_TAG_SUFFIX)
+        ref: sha for (ref, sha) in refs.items() if not ref.endswith(PEELED_TAG_SUFFIX)
     }
     }
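
strip_peeled_refs, now a one-line comprehension, drops the "^{}" peeled-tag entries that appear in advertised refs; a small self-contained check:

from dulwich.refs import strip_peeled_refs

refs = {
    b"refs/tags/v1.0": b"1" * 40,
    b"refs/tags/v1.0^{}": b"2" * 40,  # peeled entry, removed
}
assert strip_peeled_refs(refs) == {b"refs/tags/v1.0": b"1" * 40}
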
 
 
 
 
@@ -1185,24 +1185,24 @@ def _set_origin_head(refs, origin, origin_head):
 
 
 
 
 def _set_default_branch(
 def _set_default_branch(
-        refs: RefsContainer, origin: bytes, origin_head: bytes, branch: bytes,
-        ref_message: Optional[bytes]) -> bytes:
+    refs: RefsContainer,
+    origin: bytes,
+    origin_head: bytes,
+    branch: bytes,
+    ref_message: Optional[bytes],
+) -> bytes:
     """Set the default branch."""
     """Set the default branch."""
     origin_base = b"refs/remotes/" + origin + b"/"
     origin_base = b"refs/remotes/" + origin + b"/"
     if branch:
     if branch:
         origin_ref = origin_base + branch
         origin_ref = origin_base + branch
         if origin_ref in refs:
         if origin_ref in refs:
             local_ref = LOCAL_BRANCH_PREFIX + branch
             local_ref = LOCAL_BRANCH_PREFIX + branch
-            refs.add_if_new(
-                local_ref, refs[origin_ref], ref_message
-            )
+            refs.add_if_new(local_ref, refs[origin_ref], ref_message)
             head_ref = local_ref
             head_ref = local_ref
         elif LOCAL_TAG_PREFIX + branch in refs:
         elif LOCAL_TAG_PREFIX + branch in refs:
             head_ref = LOCAL_TAG_PREFIX + branch
             head_ref = LOCAL_TAG_PREFIX + branch
         else:
         else:
-            raise ValueError(
-                "%r is not a valid branch or tag" % os.fsencode(branch)
-            )
+            raise ValueError("%r is not a valid branch or tag" % os.fsencode(branch))
     elif origin_head:
     elif origin_head:
         head_ref = origin_head
         head_ref = origin_head
         if origin_head.startswith(LOCAL_BRANCH_PREFIX):
         if origin_head.startswith(LOCAL_BRANCH_PREFIX):
@@ -1210,13 +1210,11 @@ def _set_default_branch(
         else:
         else:
             origin_ref = origin_head
             origin_ref = origin_head
         try:
         try:
-            refs.add_if_new(
-                head_ref, refs[origin_ref], ref_message
-            )
+            refs.add_if_new(head_ref, refs[origin_ref], ref_message)
         except KeyError:
         except KeyError:
             pass
             pass
     else:
     else:
-        raise ValueError('neither origin_head nor branch are provided')
+        raise ValueError("neither origin_head nor branch are provided")
     return head_ref
     return head_ref
 
 
 
 
@@ -1228,9 +1226,7 @@ def _set_head(refs, head_ref, ref_message):
             _cls, obj = head.object
             _cls, obj = head.object
             head = obj.get_object(obj).id
             head = obj.get_object(obj).id
         del refs[HEADREF]
         del refs[HEADREF]
-        refs.set_if_equals(
-            HEADREF, None, head, message=ref_message
-        )
+        refs.set_if_equals(HEADREF, None, head, message=ref_message)
     else:
     else:
         # set HEAD to specific branch
         # set HEAD to specific branch
         try:
         try:
@@ -1267,19 +1263,24 @@ def _import_remote_refs(
         for (n, v) in stripped_refs.items()
         for (n, v) in stripped_refs.items()
         if n.startswith(LOCAL_TAG_PREFIX) and not n.endswith(PEELED_TAG_SUFFIX)
         if n.startswith(LOCAL_TAG_PREFIX) and not n.endswith(PEELED_TAG_SUFFIX)
     }
     }
-    refs_container.import_refs(LOCAL_TAG_PREFIX, tags, message=message, prune=prune_tags)
+    refs_container.import_refs(
+        LOCAL_TAG_PREFIX, tags, message=message, prune=prune_tags
+    )
 
 
 
 
 def serialize_refs(store, refs):
 def serialize_refs(store, refs):
     # TODO: Avoid recursive import :(
     # TODO: Avoid recursive import :(
     from .object_store import peel_sha
     from .object_store import peel_sha
+
     ret = {}
     ret = {}
     for ref, sha in refs.items():
     for ref, sha in refs.items():
         try:
         try:
             unpeeled, peeled = peel_sha(store, sha)
             unpeeled, peeled = peel_sha(store, sha)
         except KeyError:
         except KeyError:
             warnings.warn(
             warnings.warn(
-                "ref {} points at non-present sha {}".format(ref.decode("utf-8", "replace"), sha.decode("ascii")),
+                "ref {} points at non-present sha {}".format(
+                    ref.decode("utf-8", "replace"), sha.decode("ascii")
+                ),
                 UserWarning,
                 UserWarning,
             )
             )
             continue
             continue

+ 93 - 53
dulwich/repo.py

@@ -152,7 +152,7 @@ class DefaultIdentityNotFound(Exception):
 def _get_default_identity() -> Tuple[str, str]:
 def _get_default_identity() -> Tuple[str, str]:
     import socket
     import socket
 
 
-    for name in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
+    for name in ("LOGNAME", "USER", "LNAME", "USERNAME"):
         username = os.environ.get(name)
         username = os.environ.get(name)
         if username:
         if username:
             break
             break
@@ -169,7 +169,7 @@ def _get_default_identity() -> Tuple[str, str]:
         except KeyError:
         except KeyError:
             fullname = None
             fullname = None
         else:
         else:
-            if getattr(entry, 'gecos', None):
+            if getattr(entry, "gecos", None):
                 fullname = entry.pw_gecos.split(",")[0]
                 fullname = entry.pw_gecos.split(",")[0]
             else:
             else:
                 fullname = None
                 fullname = None
@@ -251,7 +251,7 @@ def check_user_identity(identity):
         raise InvalidUserIdentity(identity) from exc
         raise InvalidUserIdentity(identity) from exc
     if b">" not in snd:
     if b">" not in snd:
         raise InvalidUserIdentity(identity)
         raise InvalidUserIdentity(identity)
-    if b'\0' in identity or b'\n' in identity:
+    if b"\0" in identity or b"\n" in identity:
         raise InvalidUserIdentity(identity)
         raise InvalidUserIdentity(identity)
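
check_user_identity, whose byte literals were normalized above, validates "Name <email>" identities and rejects NUL or newline bytes; for instance:

from dulwich.repo import InvalidUserIdentity, check_user_identity

check_user_identity(b"Jane Doe <jane@example.com>")  # passes silently
try:
    check_user_identity(b"Jane Doe jane@example.com")  # no angle brackets
except InvalidUserIdentity:
    pass
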
 
 
 
 
@@ -505,8 +505,8 @@ class BaseRepo:
         remote_has = missing_objects.get_remote_has()
         remote_has = missing_objects.get_remote_has()
         object_ids = list(missing_objects)
         object_ids = list(missing_objects)
         return len(object_ids), generate_unpacked_objects(
         return len(object_ids), generate_unpacked_objects(
-            self.object_store, object_ids, progress=progress,
-            other_haves=remote_has)
+            self.object_store, object_ids, progress=progress, other_haves=remote_has
+        )
 
 
     def find_missing_objects(
     def find_missing_objects(
         self,
         self,
@@ -541,7 +541,9 @@ class BaseRepo:
             raise TypeError("determine_wants() did not return a list")
             raise TypeError("determine_wants() did not return a list")
 
 
         shallows: FrozenSet[ObjectID] = getattr(graph_walker, "shallow", frozenset())
         shallows: FrozenSet[ObjectID] = getattr(graph_walker, "shallow", frozenset())
-        unshallows: FrozenSet[ObjectID] = getattr(graph_walker, "unshallow", frozenset())
+        unshallows: FrozenSet[ObjectID] = getattr(
+            graph_walker, "unshallow", frozenset()
+        )
 
 
         if wants == []:
         if wants == []:
             # TODO(dborowitz): find a way to short-circuit that doesn't change
             # TODO(dborowitz): find a way to short-circuit that doesn't change
@@ -552,7 +554,6 @@ class BaseRepo:
                 return None
                 return None
 
 
             class DummyMissingObjectFinder:
             class DummyMissingObjectFinder:
-
                 def get_remote_has(self):
                 def get_remote_has(self):
                     return None
                     return None
 
 
@@ -588,11 +589,16 @@ class BaseRepo:
             shallow=self.get_shallow(),
             shallow=self.get_shallow(),
             progress=progress,
             progress=progress,
             get_tagged=get_tagged,
             get_tagged=get_tagged,
-            get_parents=get_parents)
+            get_parents=get_parents,
+        )
 
 
-    def generate_pack_data(self, have: List[ObjectID], want: List[ObjectID],
-                           progress: Optional[Callable[[str], None]] = None,
-                           ofs_delta: Optional[bool] = None):
+    def generate_pack_data(
+        self,
+        have: List[ObjectID],
+        want: List[ObjectID],
+        progress: Optional[Callable[[str], None]] = None,
+        ofs_delta: Optional[bool] = None,
+    ):
         """Generate pack data objects for a set of wants/haves.
         """Generate pack data objects for a set of wants/haves.
 
 
         Args:
         Args:
@@ -610,8 +616,8 @@ class BaseRepo:
         )

     def get_graph_walker(
-            self,
-            heads: Optional[List[ObjectID]] = None) -> ObjectStoreGraphWalker:
+        self, heads: Optional[List[ObjectID]] = None
+    ) -> ObjectStoreGraphWalker:
         """Retrieve a graph walker.

         A graph walker is used by a remote repository (or proxy)
@@ -656,9 +662,7 @@ class BaseRepo:
             elif cls is Tag:
                 raise NotTagError(ret)
             else:
-                raise Exception(
-                    f"Type invalid: {ret.type_name!r} != {cls.type_name!r}"
-                )
+                raise Exception(f"Type invalid: {ret.type_name!r} != {cls.type_name!r}")
         return ret

     def get_object(self, sha: bytes) -> ShaFile:
@@ -679,8 +683,7 @@ class BaseRepo:
             shallows=self.get_shallow(),
         )

-    def get_parents(self, sha: bytes,
-                    commit: Optional[Commit] = None) -> List[bytes]:
+    def get_parents(self, sha: bytes, commit: Optional[Commit] = None) -> List[bytes]:
         """Retrieve the parents of a specific commit.

         If the specific commit is a graftpoint, the graft parents
@@ -733,7 +736,7 @@ class BaseRepo:

         local_config = self.get_config()
         backends: List[ConfigFile] = [local_config]
-        if local_config.get_boolean((b"extensions", ), b"worktreeconfig", False):
+        if local_config.get_boolean((b"extensions",), b"worktreeconfig", False):
             backends.append(self.get_worktree_config())

         backends += StackedConfig.default_backends()
@@ -763,9 +766,7 @@ class BaseRepo:
         if new_unshallow:
             shallow.difference_update(new_unshallow)
         if shallow:
-            self._put_named_file(
-                "shallow", b"".join([sha + b"\n" for sha in shallow])
-            )
+            self._put_named_file("shallow", b"".join([sha + b"\n" for sha in shallow]))
         else:
             self._del_named_file("shallow")

@@ -783,8 +784,7 @@ class BaseRepo:
             return cached
         return peel_sha(self.object_store, self.refs[ref])[1].id

-    def get_walker(self, include: Optional[List[bytes]] = None,
-                   *args, **kwargs):
+    def get_walker(self, include: Optional[List[bytes]] = None, *args, **kwargs):
         """Obtain a walker for this repository.

         Args:
@@ -881,12 +881,14 @@ class BaseRepo:
         else:
             raise ValueError(name)

-    def _get_user_identity(self, config: "StackedConfig",
-                           kind: Optional[str] = None) -> bytes:
+    def _get_user_identity(
+        self, config: "StackedConfig", kind: Optional[str] = None
+    ) -> bytes:
         """Determine the identity to use for new commits."""
         warnings.warn(
             "use get_user_identity() rather than Repo._get_user_identity",
-            DeprecationWarning)
+            DeprecationWarning,
+        )
         return get_user_identity(config)

     def _add_graftpoints(self, updated_graftpoints: Dict[bytes, List[bytes]]):
@@ -1137,15 +1139,17 @@ class Repo(BaseRepo):
         self,
         root: str,
         object_store: Optional[PackBasedObjectStore] = None,
-        bare: Optional[bool] = None
+        bare: Optional[bool] = None,
     ) -> None:
         hidden_path = os.path.join(root, CONTROLDIR)
         if bare is None:
-            if (os.path.isfile(hidden_path)
-                    or os.path.isdir(os.path.join(hidden_path, OBJECTDIR))):
+            if os.path.isfile(hidden_path) or os.path.isdir(
+                os.path.join(hidden_path, OBJECTDIR)
+            ):
                 bare = False
-            elif (os.path.isdir(os.path.join(root, OBJECTDIR))
-                    and os.path.isdir(os.path.join(root, REFSDIR))):
+            elif os.path.isdir(os.path.join(root, OBJECTDIR)) and os.path.isdir(
+                os.path.join(root, REFSDIR)
+            ):
                 bare = True
             else:
                 raise NotGitRepository(
@@ -1174,10 +1178,7 @@ class Repo(BaseRepo):
         self.path = root
         config = self.get_config()
         try:
-            repository_format_version = config.get(
-                "core",
-                "repositoryformatversion"
-            )
+            repository_format_version = config.get("core", "repositoryformatversion")
             format_version = (
                 0
                 if repository_format_version is None
@@ -1189,8 +1190,8 @@ class Repo(BaseRepo):
         if format_version not in (0, 1):
             raise UnsupportedVersion(format_version)

-        for extension, _value in config.items((b"extensions", )):
-            if extension not in (b'worktreeconfig', ):
+        for extension, _value in config.items((b"extensions",)):
+            if extension not in (b"worktreeconfig",):
                 raise UnsupportedExtension(extension)

         if object_store is None:
@@ -1374,7 +1375,12 @@ class Repo(BaseRepo):
         # missing index file, which is treated as empty.
         return not self.bare

-    def stage(self, fs_paths: Union[str, bytes, os.PathLike, Iterable[Union[str, bytes, os.PathLike]]]) -> None:
+    def stage(
+        self,
+        fs_paths: Union[
+            str, bytes, os.PathLike, Iterable[Union[str, bytes, os.PathLike]]
+        ],
+    ) -> None:
         """Stage a set of paths.

         Args:
@@ -1445,7 +1451,7 @@ class Repo(BaseRepo):

         index = self.open_index()
         try:
-            tree_id = self[b'HEAD'].tree
+            tree_id = self[b"HEAD"].tree
         except KeyError:
             # no head mean no commit in the repo
             for fs_path in fs_paths:
@@ -1459,8 +1465,7 @@ class Repo(BaseRepo):
             try:
                 tree = self.object_store[tree_id]
                 assert isinstance(tree, Tree)
-                tree_entry = tree.lookup_path(
-                    self.object_store.__getitem__, tree_path)
+                tree_entry = tree.lookup_path(self.object_store.__getitem__, tree_path)
             except KeyError:
                 # if tree_entry didn't exist, this file was being added, so
                 # remove index entry
@@ -1479,8 +1484,8 @@ class Repo(BaseRepo):
                 pass

             index_entry = IndexEntry(
-                ctime=(self[b'HEAD'].commit_time, 0),
-                mtime=(self[b'HEAD'].commit_time, 0),
+                ctime=(self[b"HEAD"].commit_time, 0),
+                mtime=(self[b"HEAD"].commit_time, 0),
                 dev=st.st_dev if st else 0,
                 ino=st.st_ino if st else 0,
                 mode=tree_entry[0],
@@ -1583,6 +1588,7 @@ class Repo(BaseRepo):
         except BaseException:
             if mkdir:
                 import shutil
+
                 shutil.rmtree(target_path)
             raise
         return target
@@ -1615,9 +1621,13 @@ class Repo(BaseRepo):
         if config.get_boolean(b"core", b"symlinks", True):
             symlink_fn = symlink
         else:
+
             def symlink_fn(source, target):  # type: ignore
-                with open(target, 'w' + ('b' if isinstance(source, bytes) else '')) as f:
+                with open(
+                    target, "w" + ("b" if isinstance(source, bytes) else "")
+                ) as f:
                     f.write(source)
+
         return build_index_from_tree(
             self.path,
             self.index_path(),
@@ -1625,11 +1635,12 @@ class Repo(BaseRepo):
             tree,
             honor_filemode=honor_filemode,
             validate_path_element=validate_path_element,
-            symlink_fn=symlink_fn
+            symlink_fn=symlink_fn,
         )

     def get_worktree_config(self) -> "ConfigFile":
         from .config import ConfigFile
+
         path = os.path.join(self.commondir(), "config.worktree")
         try:
             return ConfigFile.from_path(path)
@@ -1678,8 +1689,15 @@ class Repo(BaseRepo):

     @classmethod
     def _init_maybe_bare(
-            cls, path, controldir, bare, object_store=None, config=None,
-            default_branch=None, symlinks: Optional[bool] = None):
+        cls,
+        path,
+        controldir,
+        bare,
+        object_store=None,
+        config=None,
+        default_branch=None,
+        symlinks: Optional[bool] = None,
+    ):
         for d in BASE_DIRECTORIES:
             os.mkdir(os.path.join(controldir, *d))
         if object_store is None:
@@ -1688,6 +1706,7 @@ class Repo(BaseRepo):
         if default_branch is None:
             if config is None:
                 from .config import StackedConfig
+
                 config = StackedConfig.default()
             try:
                 default_branch = config.get("init", "defaultBranch")
@@ -1698,7 +1717,15 @@ class Repo(BaseRepo):
         return ret

     @classmethod
-    def init(cls, path: str, *, mkdir: bool = False, config=None, default_branch=None, symlinks: Optional[bool] = None) -> "Repo":
+    def init(
+        cls,
+        path: str,
+        *,
+        mkdir: bool = False,
+        config=None,
+        default_branch=None,
+        symlinks: Optional[bool] = None,
+    ) -> "Repo":
         """Create a new repository.

         Args:
@@ -1712,9 +1739,13 @@ class Repo(BaseRepo):
         os.mkdir(controldir)
         _set_filesystem_hidden(controldir)
         return cls._init_maybe_bare(
-            path, controldir, False, config=config,
+            path,
+            controldir,
+            False,
+            config=config,
             default_branch=default_branch,
-            symlinks=symlinks)
+            symlinks=symlinks,
+        )

     @classmethod
     def _init_new_working_directory(cls, path, main_repo, identifier=None, mkdir=False):
@@ -1755,7 +1786,9 @@ class Repo(BaseRepo):
         return r

     @classmethod
-    def init_bare(cls, path, *, mkdir=False, object_store=None, config=None, default_branch=None):
+    def init_bare(
+        cls, path, *, mkdir=False, object_store=None, config=None, default_branch=None
+    ):
         """Create a new bare repository.

         ``path`` should already exist and be an empty directory.
@@ -1766,7 +1799,14 @@ class Repo(BaseRepo):
         """
         if mkdir:
             os.mkdir(path)
-        return cls._init_maybe_bare(path, path, True, object_store=object_store, config=config, default_branch=default_branch)
+        return cls._init_maybe_bare(
+            path,
+            path,
+            True,
+            object_store=object_store,
+            config=config,
+            default_branch=default_branch,
+        )

     create = init_bare


+ 37 - 24
dulwich/server.py

@@ -166,7 +166,9 @@ class BackendRepo(TypingProtocol):
         """
         return None

-    def find_missing_objects(self, determine_wants, graph_walker, progress, get_tagged=None):
+    def find_missing_objects(
+        self, determine_wants, graph_walker, progress, get_tagged=None
+    ):
         """Yield the objects required for a list of commits.

         Args:
@@ -282,10 +284,10 @@ class PackHandler(Handler):
 class UploadPackHandler(PackHandler):
     """Protocol handler for uploading a pack to the client."""

-    def __init__(self, backend, args, proto, stateless_rpc=False, advertise_refs=False) -> None:
-        super().__init__(
-            backend, proto, stateless_rpc=stateless_rpc
-        )
+    def __init__(
+        self, backend, args, proto, stateless_rpc=False, advertise_refs=False
+    ) -> None:
+        super().__init__(backend, proto, stateless_rpc=stateless_rpc)
         self.repo = backend.open_repository(args[0])
         self._graph_walker = None
         self.advertise_refs = advertise_refs
@@ -324,9 +326,13 @@ class UploadPackHandler(PackHandler):
             # The provided haves are processed, and it is safe to send side-
             # band data now.
             if not self.has_capability(CAPABILITY_NO_PROGRESS):
-                self.progress = partial(self.proto.write_sideband, SIDE_BAND_CHANNEL_PROGRESS)
+                self.progress = partial(
+                    self.proto.write_sideband, SIDE_BAND_CHANNEL_PROGRESS
+                )

-            self.write_pack_data = partial(self.proto.write_sideband, SIDE_BAND_CHANNEL_DATA)
+            self.write_pack_data = partial(
+                self.proto.write_sideband, SIDE_BAND_CHANNEL_DATA
+            )
         else:
             self.write_pack_data = self.proto.write

@@ -408,7 +414,9 @@ class UploadPackHandler(PackHandler):
             ("counting objects: %d, done.\n" % len(object_ids)).encode("ascii")
         )

-        write_pack_from_container(self.write_pack_data, self.repo.object_store, object_ids)
+        write_pack_from_container(
+            self.write_pack_data, self.repo.object_store, object_ids
+        )
         # we are done
         self.proto.write_pkt_line(None)

@@ -557,7 +565,9 @@ class _ProtocolGraphWalker:
     any calls to next() or ack() are made.
     """

-    def __init__(self, handler, object_store: ObjectContainer, get_peeled, get_symrefs) -> None:
+    def __init__(
+        self, handler, object_store: ObjectContainer, get_peeled, get_symrefs
+    ) -> None:
         self.handler = handler
         self.store: ObjectContainer = object_store
         self.get_peeled = get_peeled
@@ -604,18 +614,20 @@ class _ProtocolGraphWalker:
                     # logic.
                     continue
                 if i == 0:
-                    logger.info(
-                        "Sending capabilities: %s", self.handler.capabilities())
+                    logger.info("Sending capabilities: %s", self.handler.capabilities())
                     line = format_ref_line(
-                        ref, sha,
+                        ref,
+                        sha,
                         self.handler.capabilities()
-                        + symref_capabilities(symrefs.items()))
+                        + symref_capabilities(symrefs.items()),
+                    )
                 else:
                     line = format_ref_line(ref, sha)
                 self.proto.write_pkt_line(line)
                 if peeled_sha != sha:
                     self.proto.write_pkt_line(
-                        format_ref_line(ref + PEELED_TAG_SUFFIX, peeled_sha))
+                        format_ref_line(ref + PEELED_TAG_SUFFIX, peeled_sha)
+                    )

             # i'm done..
             self.proto.write_pkt_line(None)
@@ -929,10 +941,10 @@ class MultiAckDetailedGraphWalkerImpl:
 class ReceivePackHandler(PackHandler):
     """Protocol handler for downloading a pack from the client."""

-    def __init__(self, backend, args, proto, stateless_rpc=False, advertise_refs=False) -> None:
-        super().__init__(
-            backend, proto, stateless_rpc=stateless_rpc
-        )
+    def __init__(
+        self, backend, args, proto, stateless_rpc=False, advertise_refs=False
+    ) -> None:
+        super().__init__(backend, proto, stateless_rpc=stateless_rpc)
         self.repo = backend.open_repository(args[0])
         self.advertise_refs = advertise_refs

@@ -1043,7 +1055,7 @@ class ReceivePackHandler(PackHandler):
             if output:
                 self.proto.write_sideband(SIDE_BAND_CHANNEL_PROGRESS, output)
         except HookError as err:
-            self.proto.write_sideband(SIDE_BAND_CHANNEL_FATAL, str(err).encode('utf-8'))
+            self.proto.write_sideband(SIDE_BAND_CHANNEL_FATAL, str(err).encode("utf-8"))

     def handle(self) -> None:
         if self.advertise_refs or not self.stateless_rpc:
@@ -1052,12 +1064,14 @@

             if not refs:
                 refs = [(CAPABILITIES_REF, ZERO_SHA)]
-            logger.info(
-                "Sending capabilities: %s", self.capabilities())
+            logger.info("Sending capabilities: %s", self.capabilities())
             self.proto.write_pkt_line(
                 format_ref_line(
-                    refs[0][0], refs[0][1],
-                    self.capabilities() + symref_capabilities(symrefs)))
+                    refs[0][0],
+                    refs[0][1],
+                    self.capabilities() + symref_capabilities(symrefs),
+                )
+            )
             for i in range(1, len(refs)):
                 ref = refs[i]
                 self.proto.write_pkt_line(format_ref_line(ref[0], ref[1]))
@@ -1158,7 +1172,6 @@ class TCPGitRequestHandler(socketserver.StreamRequestHandler):


 class TCPGitServer(socketserver.TCPServer):
-
     allow_reuse_address = True
     serve = socketserver.TCPServer.serve_forever


+ 3 - 5
dulwich/stash.py

@@ -42,9 +42,7 @@ class Stash:

     @property
     def _reflog_path(self):
-        return os.path.join(
-            self._repo.commondir(), "logs", os.fsdecode(self._ref)
-        )
+        return os.path.join(self._repo.commondir(), "logs", os.fsdecode(self._ref))

     def stashes(self):
         try:
@@ -95,7 +93,7 @@ class Stash:
             message=b"Index stash",
             merge_heads=[self._repo.head()],
             no_verify=True,
-            **commit_kwargs
+            **commit_kwargs,
         )

         # Then, the working tree one.
@@ -120,7 +118,7 @@ class Stash:
             message=message,
             merge_heads=[index_commit_id],
             no_verify=True,
-            **commit_kwargs
+            **commit_kwargs,
         )

         return cid

+ 6 - 6
dulwich/tests/__init__.py

@@ -21,11 +21,11 @@
 """Tests for Dulwich."""

 __all__ = [
-    'SkipTest',
-    'TestCase',
-    'BlackboxTestCase',
-    'skipIf',
-    'expectedFailure',
+    "SkipTest",
+    "TestCase",
+    "BlackboxTestCase",
+    "skipIf",
+    "expectedFailure",
 ]

 import doctest
@@ -203,7 +203,7 @@ def tutorial_test_suite():
         package="dulwich.tests",
         setUp=setup,
         tearDown=teardown,
-        *tutorial_files
+        *tutorial_files,
     )



+ 3 - 5
dulwich/tests/compat/test_client.py

@@ -417,7 +417,7 @@ class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
     def test_send_remove_branch(self):
         # This test fails intermittently on my machine, probably due to some sort
         # of race condition. Probably also related to #1015
-        self.skipTest('skip flaky test; see #1015')
+        self.skipTest("skip flaky test; see #1015")


 class TestSSHVendor:
@@ -596,7 +596,7 @@ class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
         if self.command.lower() == "post":
             if nbytes > 0:
                 data = self.rfile.read(nbytes)
-            elif self.headers.get('transfer-encoding') == 'chunked':
+            elif self.headers.get("transfer-encoding") == "chunked":
                 chunks = []
                 while True:
                     line = self.rfile.readline()
@@ -605,7 +605,7 @@ class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
                     chunks.append(chunk[:-2])
                     if length == 0:
                         break
-                data = b''.join(chunks)
+                data = b"".join(chunks)
                 env["CONTENT_LENGTH"] = str(len(data))
             else:
                 raise AssertionError
@@ -624,7 +624,6 @@ class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):


 class HTTPGitServer(http.server.HTTPServer):
-
     allow_reuse_address = True

     def __init__(self, server_address, root_path) -> None:
@@ -637,7 +636,6 @@ class HTTPGitServer(http.server.HTTPServer):


 class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase):
-
     min_git_version = (1, 7, 0, 2)

     def setUp(self):

+ 5 - 3
dulwich/tests/compat/test_pack.py

@@ -72,7 +72,9 @@ class TestPack(PackTests):
             orig_blob = orig_pack[a_sha]
             new_blob = Blob()
             new_blob.data = orig_blob.data + b"x"
-            all_to_pack = [(o, None) for o in orig_pack.iterobjects()] + [(new_blob, None)]
+            all_to_pack = [(o, None) for o in orig_pack.iterobjects()] + [
+                (new_blob, None)
+            ]
         pack_path = os.path.join(self._tempdir, "pack_with_deltas")
         write_pack(pack_path, all_to_pack, deltify=True)
         output = run_git_or_fail(["verify-pack", "-v", pack_path])
@@ -96,7 +98,7 @@ class TestPack(PackTests):
         with self.get_pack(pack1_sha) as orig_pack:
             orig_blob = orig_pack[a_sha]
             new_blob = Blob()
-            new_blob.data = orig_blob.data + (b"x" * 2 ** 20)
+            new_blob.data = orig_blob.data + (b"x" * 2**20)
             new_blob_2 = Blob()
             new_blob_2.data = new_blob.data + b"y"
             all_to_pack = list(orig_pack.pack_tuples()) + [
@@ -135,7 +137,7 @@ class TestPack(PackTests):
         raise SkipTest("skipping slow, large test")
         with self.get_pack(pack1_sha) as orig_pack:
             new_blob = Blob()
-            new_blob.data = "big blob" + ("x" * 2 ** 25)
+            new_blob.data = "big blob" + ("x" * 2**25)
             new_blob_2 = Blob()
             new_blob_2.data = new_blob.data + "y"
             all_to_pack = list(orig_pack.pack_tuples()) + [

+ 9 - 11
dulwich/tests/compat/test_porcelain.py

@@ -32,7 +32,10 @@ from ..utils import build_commit_graph
 from .utils import CompatTestCase, run_git_or_fail


-@skipIf(platform.python_implementation() == "PyPy" or sys.platform == "win32", "gpgme not easily available or supported on Windows and PyPy")
+@skipIf(
+    platform.python_implementation() == "PyPy" or sys.platform == "win32",
+    "gpgme not easily available or supported on Windows and PyPy",
+)
 class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
     def setUp(self):
         super().setUp()
@@ -57,13 +60,8 @@ class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
         )

         run_git_or_fail(
-            [
-                f"--git-dir={self.repo.controldir()}",
-                "tag",
-                "-v",
-                "tryme"
-            ],
-            env={'GNUPGHOME': os.environ['GNUPGHOME']},
+            [f"--git-dir={self.repo.controldir()}", "tag", "-v", "tryme"],
+            env={"GNUPGHOME": os.environ["GNUPGHOME"]},
         )

     def test_verify(self):
@@ -85,9 +83,9 @@ class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase):
                 "verifyme",
             ],
             env={
-                'GNUPGHOME': os.environ['GNUPGHOME'],
-                'GIT_COMMITTER_NAME': 'Joe Example',
-                'GIT_COMMITTER_EMAIL': 'joe@example.com',
+                "GNUPGHOME": os.environ["GNUPGHOME"],
+                "GIT_COMMITTER_NAME": "Joe Example",
+                "GIT_COMMITTER_EMAIL": "joe@example.com",
             },
         )
         tag = self.repo[b"refs/tags/verifyme"]

+ 10 - 2
dulwich/tests/compat/test_repository.py

@@ -203,8 +203,16 @@ class WorkingTreeTestCase(ObjectStoreTestCase):

         # Read the config values in the worktree with the git cli and assert they match
         # the dulwich-parsed configs
-        output_name = run_git_or_fail(["config", "user.name"], cwd=self._mainworktree_repo.path).decode().rstrip("\n")
-        output_email = run_git_or_fail(["config", "user.email"], cwd=self._mainworktree_repo.path).decode().rstrip("\n")
+        output_name = (
+            run_git_or_fail(["config", "user.name"], cwd=self._mainworktree_repo.path)
+            .decode()
+            .rstrip("\n")
+        )
+        output_email = (
+            run_git_or_fail(["config", "user.email"], cwd=self._mainworktree_repo.path)
+            .decode()
+            .rstrip("\n")
+        )
         self.assertEqual(test_name, output_name)
         self.assertEqual(test_email, output_email)


+ 1 - 1
dulwich/tests/compat/test_utils.py

@@ -32,7 +32,7 @@ class GitVersionTests(TestCase):

         def run_git(args, **unused_kwargs):
             self.assertEqual(["--version"], args)
-            return 0, self._version_str, ''
+            return 0, self._version_str, ""

         utils.run_git = run_git


+ 17 - 10
dulwich/tests/compat/utils.py

@@ -41,8 +41,8 @@ _DEFAULT_GIT = "git"
 _VERSION_LEN = 4
 _REPOS_DATA_DIR = os.path.abspath(
     os.path.join(
-        os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
-        "testdata", "repos")
+        os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, "testdata", "repos"
+    )
 )


@@ -91,9 +91,7 @@ def require_git_version(required_version, git_path=_DEFAULT_GIT):
     """
     found_version = git_version(git_path=git_path)
     if found_version is None:
-        raise SkipTest(
-            f"Test requires git >= {required_version}, but c git not found"
-        )
+        raise SkipTest(f"Test requires git >= {required_version}, but c git not found")

     if len(required_version) > _VERSION_LEN:
         raise ValueError(
@@ -115,8 +113,12 @@ def require_git_version(required_version, git_path=_DEFAULT_GIT):


 def run_git(
-    args, git_path=_DEFAULT_GIT, input=None, capture_stdout=False,
-    capture_stderr=False, **popen_kwargs
+    args,
+    git_path=_DEFAULT_GIT,
+    input=None,
+    capture_stdout=False,
+    capture_stderr=False,
+    **popen_kwargs,
 ):
     """Run a git command.

@@ -161,12 +163,17 @@ def run_git_or_fail(args, git_path=_DEFAULT_GIT, input=None, **popen_kwargs):
     if "stderr" not in popen_kwargs:
         popen_kwargs["stderr"] = subprocess.STDOUT
     returncode, stdout, stderr = run_git(
-        args, git_path=git_path, input=input, capture_stdout=True,
-        capture_stderr=True, **popen_kwargs
+        args,
+        git_path=git_path,
+        input=input,
+        capture_stdout=True,
+        capture_stderr=True,
+        **popen_kwargs,
     )
     if returncode != 0:
         raise AssertionError(
-            "git with args %r failed with %d: stdout=%r stderr=%r" % (args, returncode, stdout, stderr)
+            "git with args %r failed with %d: stdout=%r stderr=%r"
+            % (args, returncode, stdout, stderr)
         )
     return stdout


+ 30 - 8
dulwich/tests/test_client.py

@@ -170,7 +170,9 @@ class GitClientTests(TestCase):
             b"0000"
         )
         self.rin.seek(0)
-        ret = self.client.fetch_pack(b"bla", lambda heads, **kwargs: [], None, None, None)
+        ret = self.client.fetch_pack(
+            b"bla", lambda heads, **kwargs: [], None, None, None
+        )
         self.assertEqual(
             {b"HEAD": b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7"}, ret.refs
         )
@@ -856,8 +858,8 @@ class LocalGitClientTests(TestCase):
         result_repo = c.clone(s.path, target, mkdir=False)
         self.addCleanup(result_repo.close)
         expected = dict(s.get_refs())
-        expected[b'refs/remotes/origin/HEAD'] = expected[b'HEAD']
-        expected[b'refs/remotes/origin/master'] = expected[b'refs/heads/master']
+        expected[b"refs/remotes/origin/HEAD"] = expected[b"HEAD"]
+        expected[b"refs/remotes/origin/master"] = expected[b"refs/heads/master"]
         self.assertEqual(expected, result_repo.get_refs())

     def test_fetch_empty(self):
@@ -1025,7 +1027,7 @@ class HttpGitClientTests(TestCase):
         self.assertEqual(c._password, None)

         basic_auth = c.pool_manager.headers["authorization"]
-        auth_string = username.encode('ascii') + b":"
+        auth_string = username.encode("ascii") + b":"
         b64_credentials = base64.b64encode(auth_string)
         expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}"
         self.assertEqual(basic_auth, expected_basic_auth)
@@ -1091,7 +1093,15 @@ class HttpGitClientTests(TestCase):
             def __init__(self) -> None:
                 self.headers: Dict[str, str] = {}

-            def request(self, method, url, fields=None, headers=None, redirect=True, preload_content=True):
+            def request(
+                self,
+                method,
+                url,
+                fields=None,
+                headers=None,
+                redirect=True,
+                preload_content=True,
+            ):
                 base_url = url[: -len(tail)]
                 redirect_base_url = test_data[base_url]["location"]
                 redirect_url = redirect_base_url + tail
@@ -1152,7 +1162,15 @@ class HttpGitClientTests(TestCase):
             def __init__(self) -> None:
                 self.headers: Dict[str, str] = {}

-            def request(self, method, url, fields=None, headers=None, redirect=True, preload_content=True):
+            def request(
+                self,
+                method,
+                url,
+                fields=None,
+                headers=None,
+                redirect=True,
+                preload_content=True,
+            ):
                 return HTTPResponse(
                     headers={
                         "Content-Type": "application/x-git-upload-pack-result; charset=utf-8"
@@ -1355,7 +1373,9 @@ class DefaultUrllib3ManagerTest(TestCase):

         config = ConfigDict()
         self.overrideEnv("http_proxy", "http://myproxy:8080")
-        self.overrideEnv("no_proxy", "xyz,abc.def.gh,ff80:1::/64,192.168.0.0/24,ample.com")
+        self.overrideEnv(
+            "no_proxy", "xyz,abc.def.gh,ff80:1::/64,192.168.0.0/24,ample.com"
+        )
         base_url = "http://192.168.0.10/path/port"
         manager = default_urllib3_manager(config=config, base_url=base_url)
         self.assertNotIsInstance(manager, urllib3.ProxyManager)
@@ -1388,7 +1408,9 @@ class DefaultUrllib3ManagerTest(TestCase):

         config = ConfigDict()
         self.overrideEnv("http_proxy", "http://myproxy:8080")
-        self.overrideEnv("no_proxy", "xyz,abc.def.gh,192.168.0.0/24,ff80:1::/64,ample.com")
+        self.overrideEnv(
+            "no_proxy", "xyz,abc.def.gh,192.168.0.0/24,ff80:1::/64,ample.com"
+        )
         base_url = "http://[ff80:1::affe]/path/port"
         manager = default_urllib3_manager(config=config, base_url=base_url)
         self.assertNotIsInstance(manager, urllib3.ProxyManager)

+ 21 - 21
dulwich/tests/test_config.py

@@ -115,7 +115,7 @@ class ConfigFileTests(TestCase):
     def test_from_file_multiple(self):
         cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n")
         self.assertEqual([b"bar", b"blah"], list(cf.get_multivar((b"core",), b"foo")))
-        self.assertEqual([], list(cf.get_multivar((b"core", ), b"blah")))
+        self.assertEqual([], list(cf.get_multivar((b"core",), b"blah")))

     def test_from_file_utf8_bom(self):
         text = "[core]\nfoo = b\u00e4r\n".encode("utf-8-sig")
@@ -199,14 +199,15 @@ class ConfigFileTests(TestCase):
         cf = self.from_file(
             b"[alias]\r\n"
             b"c = '!f() { \\\r\n"
-            b" printf '[git commit -m \\\"%s\\\"]\\n' \\\"$*\\\" && \\\r\n"
-            b" git commit -m \\\"$*\\\"; \\\r\n"
-            b" }; f'\r\n")
-        self.assertEqual(list(cf.sections()), [(b'alias', )])
+            b' printf \'[git commit -m \\"%s\\"]\\n\' \\"$*\\" && \\\r\n'
+            b' git commit -m \\"$*\\"; \\\r\n'
+            b" }; f'\r\n"
+        )
+        self.assertEqual(list(cf.sections()), [(b"alias",)])
         self.assertEqual(
-            b'\'!f() { printf \'[git commit -m "%s"]\n\' '
-            b'"$*" && git commit -m "$*"',
-            cf.get((b"alias", ), b"c"))
+            b"'!f() { printf '[git commit -m \"%s\"]\n' " b'"$*" && git commit -m "$*"',
+            cf.get((b"alias",), b"c"),
+        )

     def test_quoted(self):
         cf = self.from_file(
@@ -443,25 +444,24 @@ class ApplyInsteadOfTests(TestCase):
     def test_none(self):
         config = ConfigDict()
         self.assertEqual(
-            'https://example.com/', apply_instead_of(config, 'https://example.com/'))
+            "https://example.com/", apply_instead_of(config, "https://example.com/")
+        )

     def test_apply(self):
         config = ConfigDict()
-        config.set(
-            ('url', 'https://samba.org/'), 'insteadOf', 'https://example.com/')
+        config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
         self.assertEqual(
-            'https://samba.org/',
-            apply_instead_of(config, 'https://example.com/'))
+            "https://samba.org/", apply_instead_of(config, "https://example.com/")
+        )

     def test_apply_multiple(self):
         config = ConfigDict()
-        config.set(
-            ('url', 'https://samba.org/'), 'insteadOf', 'https://blah.com/')
-        config.set(
-            ('url', 'https://samba.org/'), 'insteadOf', 'https://example.com/')
+        config.set(("url", "https://samba.org/"), "insteadOf", "https://blah.com/")
+        config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/")
         self.assertEqual(
-            [b'https://blah.com/', b'https://example.com/'],
-            list(config.get_multivar(('url', 'https://samba.org/'), 'insteadOf')))
+            [b"https://blah.com/", b"https://example.com/"],
+            list(config.get_multivar(("url", "https://samba.org/"), "insteadOf")),
+        )
         self.assertEqual(
-            'https://samba.org/',
-            apply_instead_of(config, 'https://example.com/'))
+            "https://samba.org/", apply_instead_of(config, "https://example.com/")
+        )

+ 11 - 7
dulwich/tests/test_credentials.py

@@ -28,7 +28,6 @@ from ..credentials import match_partial_url, match_urls, urlmatch_credential_sections


 class TestCredentialHelpersUtils(TestCase):
-
     def test_match_urls(self):
         url = urlparse("https://github.com/jelmer/dulwich/")
         url_1 = urlparse("https://github.com/jelmer/dulwich")
@@ -56,17 +55,22 @@ class TestCredentialHelpersUtils(TestCase):
         config.set(b"credential", b"helper", "bar")

         self.assertEqual(
-            list(urlmatch_credential_sections(config, "https://github.com")), [
+            list(urlmatch_credential_sections(config, "https://github.com")),
+            [
                 (b"credential", b"https://github.com"),
                 (b"credential",),
-            ])
+            ],
+        )

         self.assertEqual(
-            list(urlmatch_credential_sections(config, "https://git.sr.ht")), [
+            list(urlmatch_credential_sections(config, "https://git.sr.ht")),
+            [
                 (b"credential", b"git.sr.ht"),
                 (b"credential",),
-            ])
+            ],
+        )

         self.assertEqual(
-            list(urlmatch_credential_sections(config, "missing_url")), [
-                (b"credential",)])
+            list(urlmatch_credential_sections(config, "missing_url")),
+            [(b"credential",)],
+        )

+ 0 - 2
dulwich/tests/test_hooks.py

@@ -90,7 +90,6 @@ exit 0
         hook.execute()

     def test_hook_commit_msg(self):
-
         repo_dir = os.path.join(tempfile.mkdtemp())
         os.mkdir(os.path.join(repo_dir, "hooks"))
         self.addCleanup(shutil.rmtree, repo_dir)
@@ -135,7 +134,6 @@ if [ "$(pwd)" = '"""
         hook.execute(b"empty commit")

     def test_hook_post_commit(self):
-
         (fd, path) = tempfile.mkstemp()
         os.close(fd)


+ 12 - 12
dulwich/tests/test_ignore.py

@@ -90,7 +90,7 @@ TRANSLATE_TESTS = [

 class TranslateTests(TestCase):
     def test_translate(self):
-        for (pattern, regex) in TRANSLATE_TESTS:
+        for pattern, regex in TRANSLATE_TESTS:
             if re.escape(b"/") == b"/":
                 # Slash is no longer escaped in Python3.7, so undo the escaping
                 # in the expected return value..
@@ -129,14 +129,14 @@ with escaped trailing whitespace\\

 class MatchPatternTests(TestCase):
     def test_matches(self):
-        for (path, pattern) in POSITIVE_MATCH_TESTS:
+        for path, pattern in POSITIVE_MATCH_TESTS:
             self.assertTrue(
                 match_pattern(path, pattern),
                 f"path: {path!r}, pattern: {pattern!r}",
             )

     def test_no_matches(self):
-        for (path, pattern) in NEGATIVE_MATCH_TESTS:
+        for path, pattern in NEGATIVE_MATCH_TESTS:
             self.assertFalse(
                 match_pattern(path, pattern),
                 f"path: {path!r}, pattern: {pattern!r}",
@@ -237,19 +237,19 @@ class IgnoreFilterManagerTests(TestCase):
         self.addCleanup(shutil.rmtree, tmp_dir)
         repo = Repo.init(tmp_dir)

-        with open(os.path.join(repo.path, '.gitignore'), 'wb') as f:
-            f.write(b'/*\n')
-            f.write(b'!/foo\n')
+        with open(os.path.join(repo.path, ".gitignore"), "wb") as f:
+            f.write(b"/*\n")
+            f.write(b"!/foo\n")

-        os.mkdir(os.path.join(repo.path, 'foo'))
-        with open(os.path.join(repo.path, 'foo', '.gitignore'), 'wb') as f:
-            f.write(b'/bar\n')
+        os.mkdir(os.path.join(repo.path, "foo"))
+        with open(os.path.join(repo.path, "foo", ".gitignore"), "wb") as f:
+            f.write(b"/bar\n")

-        with open(os.path.join(repo.path, 'foo', 'bar'), 'wb') as f:
-            f.write(b'IGNORED')
+        with open(os.path.join(repo.path, "foo", "bar"), "wb") as f:
+            f.write(b"IGNORED")

         m = IgnoreFilterManager.from_repo(repo)
-        self.assertTrue(m.is_ignored('foo/bar'))
+        self.assertTrue(m.is_ignored("foo/bar"))

     def test_load_ignore_ignorecase(self):
         tmp_dir = tempfile.mkdtemp()

+ 2 - 11
dulwich/tests/test_index.py

@@ -71,7 +71,6 @@ def can_symlink():


 class IndexTestCase(TestCase):
-
     datadir = os.path.join(os.path.dirname(__file__), "../../testdata/indexes")

     def get_simple_index(self, name):
@@ -145,7 +144,8 @@ class SimpleIndexWriterTestCase(IndexTestCase):
                     0,
                     b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
                     0,
-                    0)
+                    0,
+                )
             )
         ]
         filename = os.path.join(self.tempdir, "test-simple-write-index")
@@ -157,7 +157,6 @@ class SimpleIndexWriterTestCase(IndexTestCase):


 class ReadIndexDictTests(IndexTestCase):
-
     def setUp(self):
         IndexTestCase.setUp(self)
         self.tempdir = tempfile.mkdtemp()
@@ -358,7 +357,6 @@ class BuildIndexTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
-
             # Populate repo
             filea = Blob.from_string(b"file a")
             filee = Blob.from_string(b"d")
@@ -393,7 +391,6 @@ class BuildIndexTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
-
             # Populate repo
             filea = Blob.from_string(b"file a")
             fileb = Blob.from_string(b"file b")
@@ -491,7 +488,6 @@ class BuildIndexTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
-
             # Populate repo
             filed = Blob.from_string(b"file d")
             filee = Blob.from_string(b"d")
@@ -525,7 +521,6 @@ class BuildIndexTests(TestCase):
         repo_dir_bytes = os.fsencode(repo_dir)
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
-
             # Populate repo
             file = Blob.from_string(b"foo")

@@ -651,7 +646,6 @@ class GetUnstagedChangesTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
-
             # Commit a dummy file then modify it
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             with open(foo1_fullpath, "wb") as f:
@@ -683,7 +677,6 @@ class GetUnstagedChangesTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
-
             # Commit a dummy file then remove it
             # Commit a dummy file then remove it
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             with open(foo1_fullpath, "wb") as f:
             with open(foo1_fullpath, "wb") as f:
@@ -707,7 +700,6 @@ class GetUnstagedChangesTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
         with Repo.init(repo_dir) as repo:
-
             # Commit a dummy file then modify it
             # Commit a dummy file then modify it
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             with open(foo1_fullpath, "wb") as f:
             with open(foo1_fullpath, "wb") as f:
@@ -733,7 +725,6 @@ class GetUnstagedChangesTests(TestCase):
         repo_dir = tempfile.mkdtemp()
         repo_dir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, repo_dir)
         self.addCleanup(shutil.rmtree, repo_dir)
         with Repo.init(repo_dir) as repo:
         with Repo.init(repo_dir) as repo:
-
             # Commit a dummy file then modify it
             # Commit a dummy file then modify it
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             foo1_fullpath = os.path.join(repo_dir, "foo1")
             with open(foo1_fullpath, "wb") as f:
             with open(foo1_fullpath, "wb") as f:

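Note: the test_index.py hunks above only drop stray blank lines inside class bodies and with-blocks; no behaviour changes. For orientation, the pattern these tests exercise, creating a repository and storing blobs through its object store, looks roughly like this (an illustrative sketch, not part of the commit):

    import tempfile

    from dulwich.objects import Blob
    from dulwich.repo import Repo

    repo_dir = tempfile.mkdtemp()
    with Repo.init(repo_dir) as repo:
        # Populate the repo with a blob, as BuildIndexTests does.
        filea = Blob.from_string(b"file a")
        repo.object_store.add_object(filea)
        print(filea.id)  # hex SHA-1 of the stored blob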
+ 3 - 4
dulwich/tests/test_missing_obj_finder.py

@@ -37,9 +37,7 @@ class MissingObjectFinderTest(TestCase):
     def assertMissingMatch(self, haves, wants, expected):
         for sha, path in MissingObjectFinder(self.store, haves, wants, shallow=set()):
             self.assertIn(
-                sha,
-                expected,
-                f"({sha},{path}) erroneously reported as missing"
+                sha, expected, f"({sha},{path}) erroneously reported as missing"
             )
             expected.remove(sha)
 
@@ -108,7 +106,8 @@ class MOFLinearRepoTest(MissingObjectFinderTest):
         haves = [self.cmt(1).id]
         wants = [self.cmt(3).id, bogus_sha]
         self.assertRaises(
-            KeyError, MissingObjectFinder, self.store, haves, wants, shallow=set())
+            KeyError, MissingObjectFinder, self.store, haves, wants, shallow=set()
+        )
 
     def test_no_changes(self):
         self.assertMissingMatch([self.cmt(3).id], [self.cmt(3).id], [])

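Note: the reflowed assertions above also document the MissingObjectFinder contract: it yields (sha, path) pairs for objects the wants side still needs, and raises KeyError for a bogus want. A minimal sketch of that usage, assuming MissingObjectFinder is importable from dulwich.object_store and reusing the commit-graph helper these tests already depend on:

    from dulwich.object_store import MissingObjectFinder
    from dulwich.repo import MemoryRepo
    from dulwich.tests.utils import build_commit_graph

    r = MemoryRepo()
    c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
    # The peer already has c1 and wants c3: the finder reports what is missing.
    for sha, path in MissingObjectFinder(
        r.object_store, [c1.id], [c3.id], shallow=set()
    ):
        print(sha, path)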
+ 9 - 11
dulwich/tests/test_object_store.py

@@ -84,27 +84,21 @@ class ObjectStoreTests:
         self.store.add_object(testobject)
         refs = {b"refs/heads/foo": testobject.id}
         with patch.object(self.store, "_get_depth", return_value=1) as m:
-            self.assertEqual(
-                [], self.store.determine_wants_all(refs, depth=0)
-            )
+            self.assertEqual([], self.store.determine_wants_all(refs, depth=0))
             self.assertEqual(
                 [testobject.id],
                 self.store.determine_wants_all(refs, depth=DEPTH_INFINITE),
             )
             m.assert_not_called()
 
-            self.assertEqual(
-                [], self.store.determine_wants_all(refs, depth=1)
-            )
+            self.assertEqual([], self.store.determine_wants_all(refs, depth=1))
             m.assert_called_with(testobject.id)
             self.assertEqual(
                 [testobject.id], self.store.determine_wants_all(refs, depth=2)
             )
 
     def test_get_depth(self):
-        self.assertEqual(
-            0, self.store._get_depth(testobject.id)
-        )
+        self.assertEqual(0, self.store._get_depth(testobject.id))
 
         self.store.add_object(testobject)
         self.assertEqual(
@@ -598,8 +592,12 @@ class TreeLookupPathTests(TestCase):
     def test_lookup_submodule(self):
         tree_lookup_path(self.get_object, self.tree_id, b"d")[1]
         self.assertRaises(
-            SubmoduleEncountered, tree_lookup_path, self.get_object,
-            self.tree_id, b"d/a")
+            SubmoduleEncountered,
+            tree_lookup_path,
+            self.get_object,
+            self.tree_id,
+            b"d/a",
+        )
 
     def test_lookup_nonexistent(self):
         self.assertRaises(

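Note: besides the mechanical reflow, the TreeLookupPathTests hunk shows the call shape of tree_lookup_path, which resolves a path inside a tree and raises SubmoduleEncountered when the path crosses a submodule boundary. A sketch of the successful case (illustrative only, assuming tree_lookup_path lives in dulwich.object_store, as these tests use it):

    from dulwich.object_store import MemoryObjectStore, tree_lookup_path
    from dulwich.objects import Blob, Tree

    store = MemoryObjectStore()
    blob = Blob.from_string(b"content")
    tree = Tree()
    tree.add(b"a", 0o100644, blob.id)
    store.add_object(blob)
    store.add_object(tree)

    # Returns the entry's (mode, sha) for the path inside the tree.
    mode, sha = tree_lookup_path(store.__getitem__, tree.id, b"a")
    assert sha == blob.id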
+ 12 - 9
dulwich/tests/test_objects.py

@@ -1195,7 +1195,10 @@ class CheckTests(TestCase):
         )
         check_identity(b" <dborowitz@google.com>", "failed to check good identity")
         self.assertRaises(
-            ObjectFormatException, check_identity, b'<dborowitz@google.com>', 'no space before email'
+            ObjectFormatException,
+            check_identity,
+            b"<dborowitz@google.com>",
+            "no space before email",
         )
         self.assertRaises(
             ObjectFormatException, check_identity, b"Dave Borowitz", "no email"
@@ -1239,26 +1242,26 @@ class CheckTests(TestCase):
         self.assertRaises(
             ObjectFormatException,
             check_identity,
-            b'Dave<Borowitz <dborowitz@google.com>',
-            'reserved byte in name',
+            b"Dave<Borowitz <dborowitz@google.com>",
+            "reserved byte in name",
         )
         self.assertRaises(
             ObjectFormatException,
             check_identity,
-            b'Dave>Borowitz <dborowitz@google.com>',
-            'reserved byte in name',
+            b"Dave>Borowitz <dborowitz@google.com>",
+            "reserved byte in name",
         )
         self.assertRaises(
             ObjectFormatException,
             check_identity,
-            b'Dave\0Borowitz <dborowitz@google.com>',
-            'null byte',
+            b"Dave\0Borowitz <dborowitz@google.com>",
+            "null byte",
         )
         self.assertRaises(
             ObjectFormatException,
             check_identity,
-            b'Dave\nBorowitz <dborowitz@google.com>',
-            'newline byte',
+            b"Dave\nBorowitz <dborowitz@google.com>",
+            "newline byte",
         )
 
 

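Note: the retyped literals above leave the checked inputs unchanged; check_identity still rejects identities with a missing email, reserved bytes, null bytes, or newlines. A hedged sketch of the call shape (ObjectFormatException is assumed to come from dulwich.errors):

    from dulwich.errors import ObjectFormatException
    from dulwich.objects import check_identity

    # A well-formed identity passes silently.
    check_identity(b"Dave Borowitz <dborowitz@google.com>", "good identity rejected")

    # A malformed identity raises ObjectFormatException with the given message.
    try:
        check_identity(b"Dave Borowitz", "no email")
    except ObjectFormatException as exc:
        print(exc)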
+ 2 - 2
dulwich/tests/test_objectspec.py

@@ -257,5 +257,5 @@ class ParseTreeTests(TestCase):
     def test_from_ref(self):
         r = MemoryRepo()
         c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
-        r.refs[b'refs/heads/foo'] = c1.id
-        self.assertEqual(r[c1.tree], parse_tree(r, b'foo'))
+        r.refs[b"refs/heads/foo"] = c1.id
+        self.assertEqual(r[c1.tree], parse_tree(r, b"foo"))

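Note: parse_tree accepts a short ref name and resolves it through the repository to the commit's tree, which is exactly what the reformatted test asserts. Restated as a standalone sketch:

    from dulwich.objectspec import parse_tree
    from dulwich.repo import MemoryRepo
    from dulwich.tests.utils import build_commit_graph

    r = MemoryRepo()
    c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]])
    r.refs[b"refs/heads/foo"] = c1.id
    # b"foo" resolves via refs/heads/foo to c1, whose tree is returned.
    assert parse_tree(r, b"foo") == r[c1.tree]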
+ 78 - 37
dulwich/tests/test_pack.py

@@ -80,7 +80,9 @@ class PackTests(TestCase):
         self.tempdir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, self.tempdir)
 
-    datadir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../testdata/packs"))
+    datadir = os.path.abspath(
+        os.path.join(os.path.dirname(__file__), "../../testdata/packs")
+    )
 
     def get_pack_index(self, sha):
         """Returns a PackIndex from the datadir with the given sha."""
@@ -160,7 +162,6 @@ class PackIndexTests(PackTests):
 
 
 class TestPackDeltas(TestCase):
-
     test_string1 = b"The answer was flailing in the wind"
     test_string2 = b"The answer was falling down the pipe"
     test_string3 = b"zzzzz"
@@ -171,8 +172,7 @@ class TestPackDeltas(TestCase):
 
     def _test_roundtrip(self, base, target):
         self.assertEqual(
-            target,
-            b"".join(apply_delta(base, list(create_delta(base, target))))
+            target, b"".join(apply_delta(base, list(create_delta(base, target))))
         )
 
     def test_nochange(self):
@@ -285,9 +285,24 @@ class TestPackData(PackTests):
             actual = list(p.iter_unpacked())
             self.assertEqual(
                 [
-                    UnpackedObject(offset=12, pack_type_num=1, decomp_chunks=[commit_data], crc32=None),
-                    UnpackedObject(offset=138, pack_type_num=2, decomp_chunks=[tree_data], crc32=None),
-                    UnpackedObject(offset=178, pack_type_num=3, decomp_chunks=[b"test 1\n"], crc32=None),
+                    UnpackedObject(
+                        offset=12,
+                        pack_type_num=1,
+                        decomp_chunks=[commit_data],
+                        crc32=None,
+                    ),
+                    UnpackedObject(
+                        offset=138,
+                        pack_type_num=2,
+                        decomp_chunks=[tree_data],
+                        crc32=None,
+                    ),
+                    UnpackedObject(
+                        offset=178,
+                        pack_type_num=3,
+                        decomp_chunks=[b"test 1\n"],
+                        crc32=None,
+                    ),
                 ],
                 actual,
             )
@@ -486,9 +501,7 @@ class TestPack(PackTests):
             bad_data = PackData("", file=bad_file)
             bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
             self.assertRaises(AssertionError, lambda: bad_pack.data)
-            self.assertRaises(
-                AssertionError, bad_pack.check_length_and_checksum
-            )
+            self.assertRaises(AssertionError, bad_pack.check_length_and_checksum)
 
     def test_checksum_mismatch(self):
         with self.get_pack_data(pack1_sha) as data:
@@ -500,9 +513,7 @@ class TestPack(PackTests):
             bad_data = PackData("", file=bad_file)
             bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
             self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
-            self.assertRaises(
-                ChecksumMismatch, bad_pack.check_length_and_checksum
-            )
+            self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum)
 
     def test_iterobjects_2(self):
         with self.get_pack(pack1_sha) as p:
@@ -551,7 +562,8 @@ class TestThinPack(PackTests):
         with self.make_pack(True) as pack:
             with PackData(pack._data_path) as data:
                 data.create_index(
-                    self.pack_prefix + ".idx", resolve_ext_ref=pack.resolve_ext_ref)
+                    self.pack_prefix + ".idx", resolve_ext_ref=pack.resolve_ext_ref
+                )
 
         del self.store[self.blobs[b"bar"].id]
 
@@ -573,7 +585,7 @@ class TestThinPack(PackTests):
             expected = UnpackedObject(
                 7,
                 delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
-                decomp_chunks=[b'\x03\x07\x90\x03\x041234'],
+                decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
             )
             expected.offset = 12
             got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
@@ -582,7 +594,7 @@ class TestThinPack(PackTests):
             expected = UnpackedObject(
                 7,
                 delta_base=b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c",
-                decomp_chunks=[b'\x03\x07\x90\x03\x041234'],
+                decomp_chunks=[b"\x03\x07\x90\x03\x041234"],
             )
             expected.offset = 12
             got = p.get_unpacked_object(self.blobs[b"foo1234"].id)
@@ -646,7 +658,9 @@ class WritePackTests(TestCase):
         offset = f.tell()
         sha_a = sha1(b"foo")
         sha_b = sha_a.copy()
-        write_pack_object(f.write, Blob.type_num, b"blob", sha=sha_a, compression_level=6)
+        write_pack_object(
+            f.write, Blob.type_num, b"blob", sha=sha_a, compression_level=6
+        )
         self.assertNotEqual(sha_a.digest(), sha_b.digest())
         sha_b.update(f.getvalue()[offset:])
         self.assertEqual(sha_a.digest(), sha_b.digest())
@@ -675,7 +689,7 @@ class BaseTestPackIndexWriting:
         entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2")
         entries = [
             (entry1_sha, 0xF2972D0830529B87, 24),
-            (entry2_sha, (~0xF2972D0830529B87) & (2 ** 64 - 1), 92),
+            (entry2_sha, (~0xF2972D0830529B87) & (2**64 - 1), 92),
         ]
         if not self._supports_large:
             self.assertRaises(
@@ -779,7 +793,6 @@ class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting):
 
 
 class ReadZlibTests(TestCase):
-
     decomp = (
         b"tree 4ada885c9196b6b6fa08744b5862bf92896fc002\n"
         b"parent None\n"
@@ -794,7 +807,9 @@ class ReadZlibTests(TestCase):
     def setUp(self):
         super().setUp()
         self.read = BytesIO(self.comp + self.extra).read
-        self.unpacked = UnpackedObject(Tree.type_num, decomp_len=len(self.decomp), crc32=0)
+        self.unpacked = UnpackedObject(
+            Tree.type_num, decomp_len=len(self.decomp), crc32=0
+        )
 
     def test_decompress_size(self):
         good_decomp_len = len(self.decomp)
@@ -865,7 +880,14 @@ class DeltifyTests(TestCase):
     def test_single(self):
         b = Blob.from_string(b"foo")
         self.assertEqual(
-            [UnpackedObject(b.type_num, sha=b.sha().digest(), delta_base=None, decomp_chunks=b.as_raw_chunks())],
+            [
+                UnpackedObject(
+                    b.type_num,
+                    sha=b.sha().digest(),
+                    delta_base=None,
+                    decomp_chunks=b.as_raw_chunks(),
+                )
+            ],
             list(deltify_pack_objects([(b, b"")])),
         )
 
@@ -875,8 +897,18 @@ class DeltifyTests(TestCase):
         delta = list(create_delta(b1.as_raw_chunks(), b2.as_raw_chunks()))
         self.assertEqual(
             [
-                UnpackedObject(b1.type_num, sha=b1.sha().digest(), delta_base=None, decomp_chunks=b1.as_raw_chunks()),
-                UnpackedObject(b2.type_num, sha=b2.sha().digest(), delta_base=b1.sha().digest(), decomp_chunks=delta),
+                UnpackedObject(
+                    b1.type_num,
+                    sha=b1.sha().digest(),
+                    delta_base=None,
+                    decomp_chunks=b1.as_raw_chunks(),
+                ),
+                UnpackedObject(
+                    b2.type_num,
+                    sha=b2.sha().digest(),
+                    delta_base=b1.sha().digest(),
+                    decomp_chunks=delta,
+                ),
             ],
             list(deltify_pack_objects([(b1, b""), (b2, b"")])),
         )
@@ -919,7 +951,7 @@ class TestPackStreamReader(TestCase):
             unpacked_delta.delta_base,
         )
         delta = create_delta(b"blob", b"blob1")
-        self.assertEqual(b''.join(delta), b"".join(unpacked_delta.decomp_chunks))
+        self.assertEqual(b"".join(delta), b"".join(unpacked_delta.decomp_chunks))
         self.assertEqual(entries[1][4], unpacked_delta.crc32)
 
     def test_read_objects_buffered(self):
@@ -940,7 +972,6 @@ class TestPackStreamReader(TestCase):
 
 
 class TestPackIterator(DeltaChainIterator):
-
     _compute_crc32 = True
 
     def __init__(self, *args, **kwargs) -> None:
@@ -962,9 +993,7 @@ class TestPackIterator(DeltaChainIterator):
             "Attempted to re-inflate offset %i" % offset
         )
         self._unpacked_offsets.add(offset)
-        return super()._resolve_object(
-            offset, pack_type_num, base_chunks
-        )
+        return super()._resolve_object(offset, pack_type_num, base_chunks)
 
 
 class DeltaChainIteratorTests(TestCase):
@@ -985,9 +1014,7 @@ class DeltaChainIteratorTests(TestCase):
         """Wrapper around store.get_raw that doesn't allow repeat lookups."""
         hex_sha = sha_to_hex(bin_sha)
         self.assertNotIn(
-            hex_sha,
-            self.fetched,
-            "Attempted to re-fetch object %s" % hex_sha
+            hex_sha, self.fetched, "Attempted to re-fetch object %s" % hex_sha
         )
         self.fetched.add(hex_sha)
         return self.store.get_raw(hex_sha)
@@ -1007,7 +1034,9 @@ class DeltaChainIteratorTests(TestCase):
         assert data
         index = MemoryPackIndex.for_pack(data)
         pack = Pack.from_objects(data, index)
-        return TestPackIterator.for_pack_subset(pack, subset, resolve_ext_ref=resolve_ext_ref)
+        return TestPackIterator.for_pack_subset(
+            pack, subset, resolve_ext_ref=resolve_ext_ref
+        )
@@ -1027,10 +1056,19 @@ class DeltaChainIteratorTests(TestCase):
         f.seek(0)
         self.assertEntriesMatch([], entries, self.make_pack_iter_subset(f, []))
         f.seek(0)
-        self.assertEntriesMatch([1, 0], entries, self.make_pack_iter_subset(f, [entries[0][3], entries[1][3]]))
+        self.assertEntriesMatch(
+            [1, 0],
+            entries,
+            self.make_pack_iter_subset(f, [entries[0][3], entries[1][3]]),
+        )
         f.seek(0)
         self.assertEntriesMatch(
-            [1, 0], entries, self.make_pack_iter_subset(f, [sha_to_hex(entries[0][3]), sha_to_hex(entries[1][3])]))
+            [1, 0],
+            entries,
+            self.make_pack_iter_subset(
+                f, [sha_to_hex(entries[0][3]), sha_to_hex(entries[1][3])]
+            ),
+        )
 
     def test_ofs_deltas(self):
         f = BytesIO()
@@ -1046,8 +1084,10 @@ class DeltaChainIteratorTests(TestCase):
         self.assertEntriesMatch([0, 2, 1], entries, self.make_pack_iter(f))
         f.seek(0)
         self.assertEntriesMatch(
-            [0, 2, 1], entries,
-            self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]))
+            [0, 2, 1],
+            entries,
+            self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]),
+        )
 
     def test_ofs_deltas_chain(self):
         f = BytesIO()
@@ -1112,7 +1152,8 @@ class DeltaChainIteratorTests(TestCase):
                 (OFS_DELTA, (0, b"blob1")),
                 (OFS_DELTA, (1, b"blob3")),
                 (OFS_DELTA, (0, b"bob")),
-            ])
+            ],
+        )
         # Delta resolution changed to DFS
         self.assertEntriesMatch([0, 4, 2, 1, 3], entries, self.make_pack_iter(f))
 

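Note: the largest mechanical change is in test_pack.py above, but the assertions themselves are untouched; for instance the delta round-trip in TestPackDeltas still states that applying a freshly created delta to the base reproduces the target. As a standalone sketch:

    from dulwich.pack import apply_delta, create_delta

    base = b"The answer was flailing in the wind"
    target = b"The answer was falling down the pipe"

    # create_delta yields delta chunks; apply_delta replays them on the base.
    delta = list(create_delta(base, target))
    assert b"".join(apply_delta(base, delta)) == target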
+ 170 - 134
dulwich/tests/test_porcelain.py

@@ -262,7 +262,7 @@ ya6JVZCRbMXfdCy8lVPgtNQ6VlHaj8Wvnn2FLbWWO2n2r3s=
         # (e.g. the gpg-agent socket having been deleted). See
         # https://github.com/jelmer/dulwich/issues/1000
         self.addCleanup(shutil.rmtree, self.gpg_dir, ignore_errors=True)
-        self.overrideEnv('GNUPGHOME', self.gpg_dir)
+        self.overrideEnv("GNUPGHOME", self.gpg_dir)
 
     def import_default_key(self):
         subprocess.run(
@@ -452,9 +452,11 @@ class CommitTests(PorcelainTestCase):
         self.assertEqual(commit._commit_timezone, local_timezone)
 
 
-@skipIf(platform.python_implementation() == "PyPy" or sys.platform == "win32", "gpgme not easily available or supported on Windows and PyPy")
+@skipIf(
+    platform.python_implementation() == "PyPy" or sys.platform == "win32",
+    "gpgme not easily available or supported on Windows and PyPy",
+)
 class CommitSignTests(PorcelainGpgTestCase):
-
     def test_default_key(self):
         c1, c2, c3 = build_commit_graph(
             self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
@@ -517,7 +519,6 @@ class CommitSignTests(PorcelainGpgTestCase):
 
 
 class TimezoneTests(PorcelainTestCase):
-
     def put_envs(self, value):
         self.overrideEnv("GIT_AUTHOR_DATE", value)
         self.overrideEnv("GIT_COMMITTER_DATE", value)
@@ -587,7 +588,9 @@ class TimezoneTests(PorcelainTestCase):
         self.put_envs("0 +0500")
         self.overrideEnv("GIT_AUTHOR_DATE", None)
         self.overrideEnv("GIT_COMMITTER_DATE", None)
-        self.assertTupleEqual((local_timezone, local_timezone), porcelain.get_user_timezones())
+        self.assertTupleEqual(
+            (local_timezone, local_timezone), porcelain.get_user_timezones()
+        )
 
 
 class CleanTests(PorcelainTestCase):
@@ -847,7 +850,10 @@ class CloneTests(PorcelainTestCase):
         self.assertEqual(c1.id, target_repo.refs[b"refs/heads/else"])
         self.assertEqual(c1.id, target_repo.refs[b"HEAD"])
         self.assertEqual(
-            {b"HEAD": b"refs/heads/else", b"refs/remotes/origin/HEAD": b"refs/remotes/origin/else"},
+            {
+                b"HEAD": b"refs/heads/else",
+                b"refs/remotes/origin/HEAD": b"refs/remotes/origin/else",
+            },
             target_repo.refs.get_symrefs(),
         )
 
@@ -1322,9 +1328,11 @@ class RevListTests(PorcelainTestCase):
         )
 
 
-@skipIf(platform.python_implementation() == "PyPy" or sys.platform == "win32", "gpgme not easily available or supported on Windows and PyPy")
+@skipIf(
+    platform.python_implementation() == "PyPy" or sys.platform == "win32",
+    "gpgme not easily available or supported on Windows and PyPy",
+)
 class TagCreateSignTests(PorcelainGpgTestCase):
-
     def test_default_key(self):
         c1, c2, c3 = build_commit_graph(
             self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
@@ -1350,7 +1358,7 @@ class TagCreateSignTests(PorcelainGpgTestCase):
         self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
         self.assertEqual(b"bar\n", tag.message)
         self.assertRecentTimestamp(tag.tag_time)
-        tag = self.repo[b'refs/tags/tryme']
+        tag = self.repo[b"refs/tags/tryme"]
         # GPG Signatures aren't deterministic, so we can't do a static assertion.
         tag.verify()
         tag.verify(keyids=[PorcelainGpgTestCase.DEFAULT_KEY_ID])
@@ -1393,13 +1401,12 @@ class TagCreateSignTests(PorcelainGpgTestCase):
         self.assertEqual(b"foo <foo@bar.com>", tag.tagger)
         self.assertEqual(b"bar\n", tag.message)
         self.assertRecentTimestamp(tag.tag_time)
-        tag = self.repo[b'refs/tags/tryme']
+        tag = self.repo[b"refs/tags/tryme"]
         # GPG Signatures aren't deterministic, so we can't do a static assertion.
         tag.verify()
 
 
 class TagCreateTests(PorcelainTestCase):
-
     def test_annotated(self):
         c1, c2, c3 = build_commit_graph(
             self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
@@ -1538,13 +1545,12 @@ class ResetTests(PorcelainTestCase):
 
 
 class ResetFileTests(PorcelainTestCase):
-
     def test_reset_modify_file_to_commit(self):
-        file = 'foo'
+        file = "foo"
         full_path = os.path.join(self.repo.path, file)
 
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self.repo, paths=[full_path])
         sha = porcelain.commit(
             self.repo,
@@ -1552,19 +1558,19 @@ class ResetFileTests(PorcelainTestCase):
             committer=b"Jane <jane@example.com>",
             author=b"John <john@example.com>",
         )
-        with open(full_path, 'a') as f:
-            f.write('something new')
+        with open(full_path, "a") as f:
+            f.write("something new")
         porcelain.reset_file(self.repo, file, target=sha)
 
         with open(full_path) as f:
-            self.assertEqual('hello', f.read())
+            self.assertEqual("hello", f.read())
 
     def test_reset_remove_file_to_commit(self):
-        file = 'foo'
+        file = "foo"
         full_path = os.path.join(self.repo.path, file)
 
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self.repo, paths=[full_path])
         sha = porcelain.commit(
             self.repo,
@@ -1576,14 +1582,14 @@ class ResetFileTests(PorcelainTestCase):
         porcelain.reset_file(self.repo, file, target=sha)
 
         with open(full_path) as f:
-            self.assertEqual('hello', f.read())
+            self.assertEqual("hello", f.read())
 
     def test_resetfile_with_dir(self):
-        os.mkdir(os.path.join(self.repo.path, 'new_dir'))
-        full_path = os.path.join(self.repo.path, 'new_dir', 'foo')
+        os.mkdir(os.path.join(self.repo.path, "new_dir"))
+        full_path = os.path.join(self.repo.path, "new_dir", "foo")
 
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self.repo, paths=[full_path])
         sha = porcelain.commit(
             self.repo,
@@ -1591,24 +1597,24 @@ class ResetFileTests(PorcelainTestCase):
             committer=b"Jane <jane@example.com>",
             author=b"John <john@example.com>",
         )
-        with open(full_path, 'a') as f:
-            f.write('something new')
+        with open(full_path, "a") as f:
+            f.write("something new")
         porcelain.commit(
             self.repo,
             message=b"unitest 2",
             committer=b"Jane <jane@example.com>",
             author=b"John <john@example.com>",
         )
-        porcelain.reset_file(self.repo, os.path.join('new_dir', 'foo'), target=sha)
+        porcelain.reset_file(self.repo, os.path.join("new_dir", "foo"), target=sha)
 
         with open(full_path) as f:
-            self.assertEqual('hello', f.read())
+            self.assertEqual("hello", f.read())
 
 
 def _commit_file_with_content(repo, filename, content):
     file_path = os.path.join(repo.path, filename)
 
-    with open(file_path, 'w') as f:
+    with open(file_path, "w") as f:
         f.write(content)
     porcelain.add(repo, paths=[file_path])
     sha = porcelain.commit(
@@ -1622,160 +1628,189 @@ def _commit_file_with_content(repo, filename, content):
 
 
 class CheckoutTests(PorcelainTestCase):
-
     def setUp(self):
         super().setUp()
-        self._sha, self._foo_path = _commit_file_with_content(self.repo, 'foo', 'hello\n')
-        porcelain.branch_create(self.repo, 'uni')
+        self._sha, self._foo_path = _commit_file_with_content(
+            self.repo, "foo", "hello\n"
+        )
+        porcelain.branch_create(self.repo, "uni")
 
     def test_checkout_to_existing_branch(self):
         self.assertEqual(b"master", porcelain.active_branch(self.repo))
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
         self.assertEqual(b"uni", porcelain.active_branch(self.repo))
 
     def test_checkout_to_non_existing_branch(self):
         self.assertEqual(b"master", porcelain.active_branch(self.repo))
 
         with self.assertRaises(KeyError):
-            porcelain.checkout_branch(self.repo, b'bob')
+            porcelain.checkout_branch(self.repo, b"bob")
 
         self.assertEqual(b"master", porcelain.active_branch(self.repo))
 
     def test_checkout_to_branch_with_modified_files(self):
-        with open(self._foo_path, 'a') as f:
-            f.write('new message\n')
+        with open(self._foo_path, "a") as f:
+            f.write("new message\n")
         porcelain.add(self.repo, paths=[self._foo_path])
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': [b"foo"]}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
+        )
 
         # Both branches have file 'foo' checkout should be fine.
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
         self.assertEqual(b"uni", porcelain.active_branch(self.repo))
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': [b"foo"]}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status
+        )
 
     def test_checkout_with_deleted_files(self):
-        porcelain.remove(self.repo.path, [os.path.join(self.repo.path, 'foo')])
+        porcelain.remove(self.repo.path, [os.path.join(self.repo.path, "foo")])
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [b'foo'], 'modify': []}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status
+        )
 
         # Both branches have file 'foo' checkout should be fine.
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
         self.assertEqual(b"uni", porcelain.active_branch(self.repo))
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [b"foo"], 'modify': []}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status
+        )
 
     def test_checkout_to_branch_with_added_files(self):
-        file_path = os.path.join(self.repo.path, 'bar')
+        file_path = os.path.join(self.repo.path, "bar")
 
-        with open(file_path, 'w') as f:
-            f.write('bar content\n')
+        with open(file_path, "w") as f:
+            f.write("bar content\n")
         porcelain.add(self.repo, paths=[file_path])
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [b'bar'], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual(
+            [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status
+        )
 
         # Both branches have file 'foo' checkout should be fine.
-        porcelain.checkout_branch(self.repo, b'uni')
-        self.assertEqual(b'uni', porcelain.active_branch(self.repo))
+        porcelain.checkout_branch(self.repo, b"uni")
+        self.assertEqual(b"uni", porcelain.active_branch(self.repo))
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [b'bar'], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual(
+            [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status
+        )
 
     def test_checkout_to_branch_with_modified_file_not_present(self):
         # Commit a new file that the other branch doesn't have.
-        _, nee_path = _commit_file_with_content(self.repo, 'nee', 'Good content\n')
+        _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n")
 
         # Modify the file the other branch doesn't have.
-        with open(nee_path, 'a') as f:
-            f.write('bar content\n')
+        with open(nee_path, "a") as f:
+            f.write("bar content\n")
         porcelain.add(self.repo, paths=[nee_path])
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': [b'nee']}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
+        )
 
         # 'uni' branch doesn't have 'nee' and it has been modified, should result in the checkout being aborted.
         with self.assertRaises(CheckoutError):
-            porcelain.checkout_branch(self.repo, b'uni')
+            porcelain.checkout_branch(self.repo, b"uni")
 
-        self.assertEqual(b'master', porcelain.active_branch(self.repo))
+        self.assertEqual(b"master", porcelain.active_branch(self.repo))
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': [b'nee']}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
+        )
 
     def test_checkout_to_branch_with_modified_file_not_present_forced(self):
         # Commit a new file that the other branch doesn't have.
-        _, nee_path = _commit_file_with_content(self.repo, 'nee', 'Good content\n')
+        _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n")
 
         # Modify the file the other branch doesn't have.
-        with open(nee_path, 'a') as f:
-            f.write('bar content\n')
+        with open(nee_path, "a") as f:
+            f.write("bar content\n")
         porcelain.add(self.repo, paths=[nee_path])
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': [b'nee']}, [], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status
+        )
 
         # 'uni' branch doesn't have 'nee' and it has been modified, but we force to reset the entire index.
-        porcelain.checkout_branch(self.repo, b'uni', force=True)
+        porcelain.checkout_branch(self.repo, b"uni", force=True)
 
-        self.assertEqual(b'uni', porcelain.active_branch(self.repo))
+        self.assertEqual(b"uni", porcelain.active_branch(self.repo))
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
     def test_checkout_to_branch_with_unstaged_files(self):
         # Edit `foo`.
-        with open(self._foo_path, 'a') as f:
-            f.write('new message')
+        with open(self._foo_path, "a") as f:
+            f.write("new message")
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [b'foo'], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+        )
 
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [b'foo'], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+        )
 
     def test_checkout_to_branch_with_untracked_files(self):
-        with open(os.path.join(self.repo.path, 'neu'), 'a') as f:
-            f.write('new message\n')
+        with open(os.path.join(self.repo.path, "neu"), "a") as f:
+            f.write("new message\n")
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], ['neu']], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status)
 
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], ['neu']], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status)
 
     def test_checkout_to_branch_with_new_files(self):
-        porcelain.checkout_branch(self.repo, b'uni')
-        sub_directory = os.path.join(self.repo.path, 'sub1')
+        porcelain.checkout_branch(self.repo, b"uni")
+        sub_directory = os.path.join(self.repo.path, "sub1")
         os.mkdir(sub_directory)
         for index in range(5):
-            _commit_file_with_content(self.repo, 'new_file_' + str(index + 1), "Some content\n")
-            _commit_file_with_content(self.repo, os.path.join('sub1', 'new_file_' + str(index + 10)), "Good content\n")
+            _commit_file_with_content(
+                self.repo, "new_file_" + str(index + 1), "Some content\n"
+            )
+            _commit_file_with_content(
+                self.repo,
+                os.path.join("sub1", "new_file_" + str(index + 10)),
+                "Good content\n",
+            )
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
-        porcelain.checkout_branch(self.repo, b'master')
+        porcelain.checkout_branch(self.repo, b"master")
         self.assertEqual(b"master", porcelain.active_branch(self.repo))
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
         self.assertEqual(b"uni", porcelain.active_branch(self.repo))
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
     def test_checkout_to_branch_with_file_in_sub_directory(self):
-        sub_directory = os.path.join(self.repo.path, 'sub1', 'sub2')
+        sub_directory = os.path.join(self.repo.path, "sub1", "sub2")
         os.makedirs(sub_directory)
 
-        sub_directory_file = os.path.join(sub_directory, 'neu')
-        with open(sub_directory_file, 'w') as f:
-            f.write('new message\n')
+        sub_directory_file = os.path.join(sub_directory, "neu")
+        with open(sub_directory_file, "w") as f:
+            f.write("new message\n")
 
         porcelain.add(self.repo, paths=[sub_directory_file])
         porcelain.commit(
@@ -1785,35 +1820,35 @@ class CheckoutTests(PorcelainTestCase):
             author=b"John <john@example.com>",
         )
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
         self.assertTrue(os.path.isdir(sub_directory))
         self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))
 
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
         self.assertFalse(os.path.isdir(sub_directory))
         self.assertFalse(os.path.isdir(os.path.dirname(sub_directory)))
 
-        porcelain.checkout_branch(self.repo, b'master')
+        porcelain.checkout_branch(self.repo, b"master")
 
         self.assertTrue(os.path.isdir(sub_directory))
         self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))
 
     def test_checkout_to_branch_with_multiple_files_in_sub_directory(self):
-        sub_directory = os.path.join(self.repo.path, 'sub1', 'sub2')
+        sub_directory = os.path.join(self.repo.path, "sub1", "sub2")
         os.makedirs(sub_directory)
 
-        sub_directory_file_1 = os.path.join(sub_directory, 'neu')
-        with open(sub_directory_file_1, 'w') as f:
-            f.write('new message\n')
+        sub_directory_file_1 = os.path.join(sub_directory, "neu")
+        with open(sub_directory_file_1, "w") as f:
+            f.write("new message\n")
 
-        sub_directory_file_2 = os.path.join(sub_directory, 'gus')
-        with open(sub_directory_file_2, 'w') as f:
-            f.write('alternative message\n')
+        sub_directory_file_2 = os.path.join(sub_directory, "gus")
+        with open(sub_directory_file_2, "w") as f:
+            f.write("alternative message\n")
 
         porcelain.add(self.repo, paths=[sub_directory_file_1, sub_directory_file_2])
         porcelain.commit(
@@ -1823,22 +1858,22 @@ class CheckoutTests(PorcelainTestCase):
             author=b"John <john@example.com>",
         )
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
         self.assertTrue(os.path.isdir(sub_directory))
         self.assertTrue(os.path.isdir(os.path.dirname(sub_directory)))
 
-        porcelain.checkout_branch(self.repo, b'uni')
+        porcelain.checkout_branch(self.repo, b"uni")
 
         status = list(porcelain.status(self.repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], []], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status)
 
         self.assertFalse(os.path.isdir(sub_directory))
         self.assertFalse(os.path.isdir(os.path.dirname(sub_directory)))
 
     def _commit_something_wrong(self):
-        with open(self._foo_path, 'a') as f:
-            f.write('something wrong')
+        with open(self._foo_path, "a") as f:
+            f.write("something wrong")
 
         porcelain.add(self.repo, paths=[self._foo_path])
         return porcelain.commit(
@@ -1937,24 +1972,23 @@ class CheckoutTests(PorcelainTestCase):
     def test_checkout_remote_branch_then_master_then_remote_branch_again(self):
     def test_checkout_remote_branch_then_master_then_remote_branch_again(self):
         target_repo = self._checkout_remote_branch()
         target_repo = self._checkout_remote_branch()
         self.assertEqual(b"foo", porcelain.active_branch(target_repo))
         self.assertEqual(b"foo", porcelain.active_branch(target_repo))
-        _commit_file_with_content(target_repo, 'bar', 'something\n')
-        self.assertTrue(os.path.isfile(os.path.join(target_repo.path, 'bar')))
+        _commit_file_with_content(target_repo, "bar", "something\n")
+        self.assertTrue(os.path.isfile(os.path.join(target_repo.path, "bar")))

         porcelain.checkout_branch(target_repo, b"master")

         self.assertEqual(b"master", porcelain.active_branch(target_repo))
-        self.assertFalse(os.path.isfile(os.path.join(target_repo.path, 'bar')))
+        self.assertFalse(os.path.isfile(os.path.join(target_repo.path, "bar")))

         porcelain.checkout_branch(target_repo, b"origin/foo")

         self.assertEqual(b"foo", porcelain.active_branch(target_repo))
-        self.assertTrue(os.path.isfile(os.path.join(target_repo.path, 'bar')))
+        self.assertTrue(os.path.isfile(os.path.join(target_repo.path, "bar")))

         target_repo.close()


 class SubmoduleTests(PorcelainTestCase):
-
     def test_empty(self):
         porcelain.commit(
             repo=self.repo.path,
@@ -1967,12 +2001,15 @@ class SubmoduleTests(PorcelainTestCase):

     def test_add(self):
         porcelain.submodule_add(self.repo, "../bar.git", "bar")
-        with open('%s/.gitmodules' % self.repo.path) as f:
-            self.assertEqual("""\
+        with open("%s/.gitmodules" % self.repo.path) as f:
+            self.assertEqual(
+                """\
 [submodule "bar"]
 \turl = ../bar.git
 \tpath = bar
-""", f.read())
+""",
+                f.read(),
+            )

     def test_init(self):
         porcelain.submodule_add(self.repo, "../bar.git", "bar")
@@ -2553,7 +2590,9 @@ class StatusTests(PorcelainTestCase):
         porcelain.add(repo=self.repo.path, paths=[file_path])

         results = porcelain.status(self.repo)
-        self.assertDictEqual({"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged)
+        self.assertDictEqual(
+            {"add": [b"crlf-new"], "delete": [], "modify": []}, results.staged
+        )
         self.assertListEqual(results.unstaged, [])
         self.assertListEqual(results.untracked, [])

@@ -2715,9 +2754,11 @@ class StatusTests(PorcelainTestCase):
             ),
         )
         self.assertEqual(
-            {os.path.join('nested', 'ignored'),
-                os.path.join('nested', 'with'),
-                os.path.join('nested', 'manager')},
+            {
+                os.path.join("nested", "ignored"),
+                os.path.join("nested", "with"),
+                os.path.join("nested", "manager"),
+            },
             set(
                 porcelain.get_untracked_paths(
                     self.repo.path,
@@ -2751,7 +2792,7 @@ class StatusTests(PorcelainTestCase):
                     self.repo.path,
                     self.repo.open_index(),
                 )
-            )
+            ),
         )
         self.assertEqual(
             {".gitignore", "notignored"},
@@ -2762,7 +2803,7 @@ class StatusTests(PorcelainTestCase):
                     self.repo.open_index(),
                     exclude_ignored=True,
                 )
-            )
+            ),
         )

     def test_get_untracked_paths_invalid_untracked_files(self):
@@ -2778,9 +2819,8 @@ class StatusTests(PorcelainTestCase):

     def test_get_untracked_paths_normal(self):
         with self.assertRaises(NotImplementedError):
-            _, _, _ = porcelain.status(
-                repo=self.repo.path, untracked_files="normal"
-            )
+            _, _, _ = porcelain.status(repo=self.repo.path, untracked_files="normal")
+

 # TODO(jelmer): Add test for dulwich.porcelain.daemon

@@ -2838,9 +2878,7 @@ class BranchListTests(PorcelainTestCase):
         [c1] = build_commit_graph(self.repo.object_store, [[1]])
         self.repo[b"HEAD"] = c1.id
         porcelain.branch_create(self.repo, b"foo")
-        self.assertEqual(
-            {b"master", b"foo"}, set(porcelain.branch_list(self.repo))
-        )
+        self.assertEqual({b"master", b"foo"}, set(porcelain.branch_list(self.repo)))


 class BranchCreateTests(PorcelainTestCase):
@@ -2855,9 +2893,7 @@ class BranchCreateTests(PorcelainTestCase):
         [c1] = build_commit_graph(self.repo.object_store, [[1]])
         self.repo[b"HEAD"] = c1.id
         porcelain.branch_create(self.repo, b"foo")
-        self.assertEqual(
-            {b"master", b"foo"}, set(porcelain.branch_list(self.repo))
-        )
+        self.assertEqual({b"master", b"foo"}, set(porcelain.branch_list(self.repo)))


 class BranchDeleteTests(PorcelainTestCase):
@@ -3473,15 +3509,15 @@ class ActiveBranchTests(PorcelainTestCase):


 class FindUniqueAbbrevTests(PorcelainTestCase):
-
     def test_simple(self):
         c1, c2, c3 = build_commit_graph(
             self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
         )
         self.repo.refs[b"HEAD"] = c3.id
         self.assertEqual(
-            c1.id.decode('ascii')[:7],
-            porcelain.find_unique_abbrev(self.repo.object_store, c1.id))
+            c1.id.decode("ascii")[:7],
+            porcelain.find_unique_abbrev(self.repo.object_store, c1.id),
+        )


 class PackRefsTests(PorcelainTestCase):
@@ -3524,7 +3560,7 @@ class PackRefsTests(PorcelainTestCase):
 class ServerTests(PorcelainTestCase):
     @contextlib.contextmanager
     def _serving(self):
-        with make_server('localhost', 0, self.app) as server:
+        with make_server("localhost", 0, self.app) as server:
             thread = threading.Thread(target=server.serve_forever, daemon=True)
             thread.start()

@@ -3546,14 +3582,14 @@ class ServerTests(PorcelainTestCase):
         self.app = make_wsgi_chain(backend)

     def test_pull(self):
-        c1, = build_commit_graph(self.served_repo.object_store, [[1]])
+        (c1,) = build_commit_graph(self.served_repo.object_store, [[1]])
         self.served_repo.refs[b"refs/heads/master"] = c1.id

         with self._serving() as url:
             porcelain.pull(self.repo, url, "master")

     def test_push(self):
-        c1, = build_commit_graph(self.repo.object_store, [[1]])
+        (c1,) = build_commit_graph(self.repo.object_store, [[1]])
         self.repo.refs[b"refs/heads/master"] = c1.id

         with self._serving() as url:

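Note: besides quote normalization, the ServerTests hunks above rewrite bare single-element unpacking (c1, = ...) as the parenthesized form. The two are semantically identical; a minimal illustrative sketch, not taken from the dulwich source:

    commits = ["abc123"]
    first, = commits  # bare trailing comma, easy to misread as plain assignment
    (first,) = commits  # parenthesized form the formatter prefers
    assert first == "abc123"
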
+ 6 - 7
dulwich/tests/test_refs.py

@@ -413,10 +413,10 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):

     def test_delete_refs_container(self):
         # We shouldn't delete the refs directory
-        self._refs[b'refs/heads/blah'] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
+        self._refs[b"refs/heads/blah"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
         for ref in self._refs.allkeys():
             del self._refs[ref]
-        self.assertTrue(os.path.exists(os.path.join(self._refs.path, b'refs')))
+        self.assertTrue(os.path.exists(os.path.join(self._refs.path, b"refs")))

     def test_setitem_packed(self):
         with open(os.path.join(self._refs.path, b"packed-refs"), "w") as f:
@@ -573,11 +573,11 @@ class DiskRefsContainerTests(RefsContainerTests, TestCase):

     def test_set_overwrite_loop(self):
         self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop")
-        self._refs[b'refs/heads/loop'] = (
-            b"42d06bd4b77fed026b154d16493e5deab78f02ec")
+        self._refs[b"refs/heads/loop"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec"
         self.assertEqual(
-            ([b'refs/heads/loop'], b'42d06bd4b77fed026b154d16493e5deab78f02ec'),
-            self._refs.follow(b"refs/heads/loop"))
+            ([b"refs/heads/loop"], b"42d06bd4b77fed026b154d16493e5deab78f02ec"),
+            self._refs.follow(b"refs/heads/loop"),
+        )

     def test_delitem(self):
         RefsContainerTests.test_delitem(self)
@@ -795,7 +795,6 @@ class ParseSymrefValueTests(TestCase):


 class StripPeeledRefsTests(TestCase):
-
     all_refs = {
         b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878",
         b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba",

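Note: the test_refs.py hunk shows the wrapping rule used throughout this commit: a call that exceeds the line limit is exploded one argument per line, with a trailing comma after the last argument and the closing parenthesis dedented. A rough sketch with a hypothetical helper and shortened placeholder values:

    def check(expected, actual):  # hypothetical helper, not from dulwich
        assert expected == actual

    check(
        ([b"refs/heads/loop"], b"42d06bd4"),
        ([b"refs/heads/loop"], b"42d06bd4"),
    )
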
+ 70 - 61
dulwich/tests/test_repository.py

@@ -73,15 +73,15 @@ class CreateRepositoryTests(TestCase):
             self.assertIn(barestr, config_text, "%r" % config_text)

         if isinstance(repo, Repo):
-            expected_mode = '0o100644' if expect_filemode else '0o100666'
+            expected_mode = "0o100644" if expect_filemode else "0o100666"
             expected = {
-                'HEAD': expected_mode,
-                'config': expected_mode,
-                'description': expected_mode,
+                "HEAD": expected_mode,
+                "config": expected_mode,
+                "description": expected_mode,
             }
             actual = {
-                f[len(repo._controldir) + 1:]: oct(os.stat(f).st_mode)
-                for f in glob.glob(os.path.join(repo._controldir, '*'))
+                f[len(repo._controldir) + 1 :]: oct(os.stat(f).st_mode)
+                for f in glob.glob(os.path.join(repo._controldir, "*"))
                 if os.path.isfile(f)
             }

@@ -405,7 +405,9 @@ class RepositoryRootTests(TestCase):
     def test_clone_no_head(self):
         temp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, temp_dir)
-        repo_dir = os.path.join(os.path.dirname(__file__), "..", "..", "testdata", "repos")
+        repo_dir = os.path.join(
+            os.path.dirname(__file__), "..", "..", "testdata", "repos"
+        )
         dest_dir = os.path.join(temp_dir, "a.git")
         shutil.copytree(os.path.join(repo_dir, "a.git"), dest_dir, symlinks=True)
         r = Repo(dest_dir)
@@ -436,7 +438,7 @@ class RepositoryRootTests(TestCase):
         r.clone(tmp_dir, mkdir=False, bare=True)

     def test_reset_index_symlink_enabled(self):
-        if sys.platform == 'win32':
+        if sys.platform == "win32":
             self.skipTest("symlinks are not supported on Windows")
         tmp_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, tmp_dir)
@@ -448,12 +450,12 @@ class RepositoryRootTests(TestCase):

         t = o.clone(os.path.join(tmp_dir, "t"), symlinks=True)
         o.close()
-        bar_path = os.path.join(tmp_dir, 't', 'bar')
-        if sys.platform == 'win32':
+        bar_path = os.path.join(tmp_dir, "t", "bar")
+        if sys.platform == "win32":
             with open(bar_path) as f:
-                self.assertEqual('foo', f.read())
+                self.assertEqual("foo", f.read())
         else:
-            self.assertEqual('foo', os.readlink(bar_path))
+            self.assertEqual("foo", os.readlink(bar_path))
         t.close()

     def test_reset_index_symlink_disabled(self):
@@ -467,8 +469,8 @@ class RepositoryRootTests(TestCase):
         o.do_commit(b"add symlink")

         t = o.clone(os.path.join(tmp_dir, "t"), symlinks=False)
-        with open(os.path.join(tmp_dir, "t", 'bar')) as f:
-            self.assertEqual('foo', f.read())
+        with open(os.path.join(tmp_dir, "t", "bar")) as f:
+            self.assertEqual("foo", f.read())

         t.close()

@@ -734,17 +736,18 @@ r = Repo('.')
 r.stage(['foo'])
 """.format(
             executable=sys.executable,
-            path=[os.path.join(os.path.dirname(__file__), '..', '..')] + sys.path)
+            path=[os.path.join(os.path.dirname(__file__), "..", "..")] + sys.path,
+        )

         repo_dir = os.path.join(self.mkdtemp())
         self.addCleanup(shutil.rmtree, repo_dir)
         r = Repo.init(repo_dir)
         self.addCleanup(r.close)

-        with open(os.path.join(repo_dir, 'blah'), 'w') as f:
-            f.write('blah')
+        with open(os.path.join(repo_dir, "blah"), "w") as f:
+            f.write("blah")

-        r.stage(['blah'])
+        r.stage(["blah"])

         pre_commit = os.path.join(r.controldir(), "hooks", "pre-commit")

@@ -764,7 +767,7 @@ r.stage(['foo'])
         self.assertEqual([], r[commit_sha].parents)

         tree = r[r[commit_sha].tree]
-        self.assertEqual({b'blah', b'foo'}, set(tree))
+        self.assertEqual({b"blah", b"foo"}, set(tree))

     def test_shell_hook_post_commit(self):
         if os.name != "posix":
@@ -968,9 +971,7 @@ class BuildRepoRootTests(TestCase):
             {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"},
             self._repo.get_shallow(),
         )
-        self._repo.update_shallow(
-            None, [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"]
-        )
+        self._repo.update_shallow(None, [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"])
         self.assertEqual(set(), self._repo.get_shallow())
         self.assertEqual(
             False,
@@ -1132,20 +1133,20 @@ class BuildRepoRootTests(TestCase):
         r = self._repo
         c = r.get_config()
         c.set(("core",), "repositoryformatversion", "1")
-        c.set(("extensions", ), "worktreeconfig", True)
+        c.set(("extensions",), "worktreeconfig", True)
         c.write_to_path()
         c = r.get_worktree_config()
         c.set(("user",), "repositoryformatversion", "1")
         c.set((b"user",), b"name", b"Jelmer")
         c.write_to_path()
         cs = r.get_config_stack()
-        self.assertEqual(cs.get(("user", ), "name"), b"Jelmer")
+        self.assertEqual(cs.get(("user",), "name"), b"Jelmer")

     def test_repositoryformatversion_1_extension(self):
         r = self._repo
         c = r.get_config()
         c.set(("core",), "repositoryformatversion", "1")
-        c.set(("extensions", ), "unknownextension", True)
+        c.set(("extensions",), "unknownextension", True)
         c.write_to_path()
         self.assertRaises(UnsupportedExtension, Repo, self._repo_dir)

@@ -1372,16 +1373,16 @@ class BuildRepoRootTests(TestCase):
     def test_stage_submodule(self):
         r = self._repo
         s = Repo.init(os.path.join(r.path, "sub"), mkdir=True)
-        s.do_commit(b'message')
+        s.do_commit(b"message")
         r.stage(["sub"])
         self.assertEqual([b"a", b"sub"], list(r.open_index()))

     def test_unstage_midify_file_with_dir(self):
-        os.mkdir(os.path.join(self._repo.path, 'new_dir'))
-        full_path = os.path.join(self._repo.path, 'new_dir', 'foo')
+        os.mkdir(os.path.join(self._repo.path, "new_dir"))
+        full_path = os.path.join(self._repo.path, "new_dir", "foo")

-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self._repo, paths=[full_path])
         porcelain.commit(
             self._repo,
@@ -1389,24 +1390,26 @@ class BuildRepoRootTests(TestCase):
             committer=b"Jane <jane@example.com>",
             author=b"John <john@example.com>",
         )
-        with open(full_path, 'a') as f:
-            f.write('something new')
-        self._repo.unstage(['new_dir/foo'])
+        with open(full_path, "a") as f:
+            f.write("something new")
+        self._repo.unstage(["new_dir/foo"])
         status = list(porcelain.status(self._repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [b'new_dir/foo'], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"new_dir/foo"], []], status
+        )

     def test_unstage_while_no_commit(self):
-        file = 'foo'
+        file = "foo"
         full_path = os.path.join(self._repo.path, file)
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self._repo, paths=[full_path])
         self._repo.unstage([file])
         status = list(porcelain.status(self._repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], ['foo']], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)

     def test_unstage_add_file(self):
-        file = 'foo'
+        file = "foo"
         full_path = os.path.join(self._repo.path, file)
         porcelain.commit(
             self._repo,
@@ -1414,18 +1417,18 @@ class BuildRepoRootTests(TestCase):
             committer=b"Jane <jane@example.com>",
             author=b"John <john@example.com>",
         )
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self._repo, paths=[full_path])
         self._repo.unstage([file])
         status = list(porcelain.status(self._repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], ['foo']], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status)

     def test_unstage_modify_file(self):
-        file = 'foo'
+        file = "foo"
         full_path = os.path.join(self._repo.path, file)
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self._repo, paths=[full_path])
         porcelain.commit(
             self._repo,
@@ -1433,19 +1436,21 @@ class BuildRepoRootTests(TestCase):
             committer=b"Jane <jane@example.com>",
             author=b"John <john@example.com>",
         )
-        with open(full_path, 'a') as f:
-            f.write('broken')
+        with open(full_path, "a") as f:
+            f.write("broken")
         porcelain.add(self._repo, paths=[full_path])
         self._repo.unstage([file])
         status = list(porcelain.status(self._repo))

-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [b'foo'], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+        )

     def test_unstage_remove_file(self):
-        file = 'foo'
+        file = "foo"
         full_path = os.path.join(self._repo.path, file)
-        with open(full_path, 'w') as f:
-            f.write('hello')
+        with open(full_path, "w") as f:
+            f.write("hello")
         porcelain.add(self._repo, paths=[full_path])
         porcelain.commit(
             self._repo,
@@ -1456,20 +1461,24 @@ class BuildRepoRootTests(TestCase):
         os.remove(full_path)
         self._repo.unstage([file])
         status = list(porcelain.status(self._repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [b'foo'], []], status)
+        self.assertEqual(
+            [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status
+        )

     def test_reset_index(self):
         r = self._repo
-        with open(os.path.join(r.path, 'a'), 'wb') as f:
-            f.write(b'changed')
-        with open(os.path.join(r.path, 'b'), 'wb') as f:
-            f.write(b'added')
-        r.stage(['a', 'b'])
+        with open(os.path.join(r.path, "a"), "wb") as f:
+            f.write(b"changed")
+        with open(os.path.join(r.path, "b"), "wb") as f:
+            f.write(b"added")
+        r.stage(["a", "b"])
         status = list(porcelain.status(self._repo))
-        self.assertEqual([{'add': [b'b'], 'delete': [], 'modify': [b'a']}, [], []], status)
+        self.assertEqual(
+            [{"add": [b"b"], "delete": [], "modify": [b"a"]}, [], []], status
+        )
         r.reset_index()
         status = list(porcelain.status(self._repo))
-        self.assertEqual([{'add': [], 'delete': [], 'modify': []}, [], ['b']], status)
+        self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["b"]], status)

     @skipIf(
         sys.platform in ("win32", "darwin"),
@@ -1536,8 +1545,8 @@ class CheckUserIdentityTests(TestCase):
             InvalidUserIdentity, check_user_identity, b"Fullname >order<>"
         )
         self.assertRaises(
-            InvalidUserIdentity, check_user_identity, b'Contains\0null byte <>'
+            InvalidUserIdentity, check_user_identity, b"Contains\0null byte <>"
         )
         self.assertRaises(
-            InvalidUserIdentity, check_user_identity, b'Contains\nnewline byte <>'
+            InvalidUserIdentity, check_user_identity, b"Contains\nnewline byte <>"
         )

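Note: the slice rewrite in test_repository.py (f[len(repo._controldir) + 1:] becoming f[len(repo._controldir) + 1 :]) follows the PEP 8 rule that a slice colon is treated like a binary operator and gets symmetric spacing once a bound is an expression. An illustrative sketch with hypothetical paths:

    prefix = "/tmp/repo"
    path = "/tmp/repo/config"
    tail = path[1:]  # simple bound: no spaces around the colon
    name = path[len(prefix) + 1 :]  # expression bound: spaced colon
    assert name == "config"
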
+ 1 - 6
dulwich/tests/test_server.py

@@ -271,9 +271,7 @@ class FindShallowTests(TestCase):
     def test_linear(self):
         c1, c2, c3 = self.make_linear_commits(3)

-        self.assertEqual(
-            ({c3.id}, set()), _find_shallow(self._store, [c3.id], 1)
-        )
+        self.assertEqual(({c3.id}, set()), _find_shallow(self._store, [c3.id], 1))
         self.assertEqual(
             ({c2.id}, {c3.id}),
             _find_shallow(self._store, [c3.id], 2),
@@ -676,7 +674,6 @@ class AckGraphWalkerImplTestCase(TestCase):


 class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
-
     impl_cls = SingleAckGraphWalkerImpl

     def test_single_ack(self):
@@ -744,7 +741,6 @@ class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):


 class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
-
     impl_cls = MultiAckGraphWalkerImpl

     def test_multi_ack(self):
@@ -819,7 +815,6 @@ class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):


 class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase):
-
     impl_cls = MultiAckDetailedGraphWalkerImpl

     def test_multi_ack(self):

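Note: several hunks above only drop the blank line between a class statement and its first member; the formatter starts the body directly under the header. Illustrative shape with hypothetical names:

    class SingleAckExample:
        impl_cls = int  # first member sits directly under the header

        def make(self):
            return self.impl_cls()
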
+ 0 - 1
dulwich/tests/test_web.py

@@ -431,7 +431,6 @@ class LengthLimitedFileTestCase(TestCase):


 class HTTPGitRequestTestCase(WebTestCase):
-
     # This class tests the contents of the actual cache headers
     _req_class = HTTPGitRequest


+ 3 - 1
dulwich/tests/utils.py

@@ -65,7 +65,9 @@ def open_repo(name, temp_dir=None):
     """
     if temp_dir is None:
         temp_dir = tempfile.mkdtemp()
-    repo_dir = os.path.join(os.path.dirname(__file__), "..", "..", "testdata", "repos", name)
+    repo_dir = os.path.join(
+        os.path.dirname(__file__), "..", "..", "testdata", "repos", name
+    )
     temp_repo_dir = os.path.join(temp_dir, name)
     shutil.copytree(repo_dir, temp_repo_dir, symlinks=True)
     return Repo(temp_repo_dir)

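Note: in tests/utils.py the overlong os.path.join call is broken after the opening parenthesis, its arguments indented one level and the closer dedented to the statement's indentation. A sketch in which "sample" is a hypothetical repository name:

    import os

    repo_dir = os.path.join(
        os.path.dirname(__file__), "..", "..", "testdata", "repos", "sample"
    )
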
+ 1 - 1
dulwich/walk.py

@@ -236,7 +236,7 @@ class Walker:
         store,
         include: List[bytes],
         exclude: Optional[List[bytes]] = None,
-        order: str = 'date',
+        order: str = "date",
         reverse: bool = False,
         max_entries: Optional[int] = None,
         paths: Optional[List[bytes]] = None,

+ 9 - 5
dulwich/web.py

@@ -255,7 +255,6 @@ def _chunk_iter(f):


 class ChunkReader:
-
     def __init__(self, f) -> None:
         self._iter = _chunk_iter(f)
         self._buffer: List[bytes] = []
@@ -266,7 +265,7 @@ class ChunkReader:
                 self._buffer.append(next(self._iter))
             except StopIteration:
                 break
-        f = b''.join(self._buffer)
+        f = b"".join(self._buffer)
         ret = f[:n]
         self._buffer = [f[n:]]
         return ret
@@ -309,7 +308,7 @@ def handle_service_request(req, backend, mat):
         return
     req.nocache()
     write = req.respond(HTTP_OK, "application/x-%s-result" % service)
-    if req.environ.get('HTTP_TRANSFER_ENCODING') == 'chunked':
+    if req.environ.get("HTTP_TRANSFER_ENCODING") == "chunked":
         read = ChunkReader(req.environ["wsgi.input"]).read
     else:
         read = req.environ["wsgi.input"].read
@@ -327,7 +326,9 @@ class HTTPGitRequest:
       environ: the WSGI environment for the request.
     """

-    def __init__(self, environ, start_response, dumb: bool = False, handlers=None) -> None:
+    def __init__(
+        self, environ, start_response, dumb: bool = False, handlers=None
+    ) -> None:
         self.environ = environ
         self.dumb = dumb
         self.handlers = handlers
@@ -413,7 +414,9 @@ class HTTPGitApplication:
         ("POST", re.compile("/git-receive-pack$")): handle_service_request,
     }

-    def __init__(self, backend, dumb: bool = False, handlers=None, fallback_app=None) -> None:
+    def __init__(
+        self, backend, dumb: bool = False, handlers=None, fallback_app=None
+    ) -> None:
         self.backend = backend
         self.dumb = dumb
         self.handlers = dict(DEFAULT_HANDLERS)
@@ -456,6 +459,7 @@ class GunzipFilter:

     def __call__(self, environ, start_response):
         import gzip
+
         if environ.get("HTTP_CONTENT_ENCODING", "") == "gzip":
             environ["wsgi.input"] = gzip.GzipFile(
                 filename=None, fileobj=environ["wsgi.input"], mode="rb"

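Note: in web.py the same exploding applies to def signatures that overflow the limit, and a blank line is inserted after the function-level import. The __init__ change above, restated outside the diff as a standalone sketch (the class name is a hypothetical stand-in):

    class ExampleRequest:
        def __init__(
            self, environ, start_response, dumb: bool = False, handlers=None
        ) -> None:
            self.environ = environ
            self.dumb = dumb
            self.handlers = handlers
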
+ 1 - 1
examples/diff.py

@@ -17,5 +17,5 @@ r = Repo(repo_path)

 commit = r[commit_id]
 parent_commit = r[commit.parents[0]]
-outstream = getattr(sys.stdout, 'buffer', sys.stdout)
+outstream = getattr(sys.stdout, "buffer", sys.stdout)
 write_tree_diff(outstream, r.object_store, parent_commit.tree, commit.tree)

+ 2 - 2
examples/gcs.py

@@ -8,7 +8,7 @@ from dulwich.cloud.gcs import GcsObjectStore
 from dulwich.repo import Repo

 client = storage.Client()
-bucket = client.get_bucket('mybucket')
+bucket = client.get_bucket("mybucket")

-gcs_object_store = GcsObjectStore(bucket, 'path')
+gcs_object_store = GcsObjectStore(bucket, "path")
 r = Repo.init_bare(tempfile.mkdtemp(), object_store=gcs_object_store)

+ 6 - 3
examples/latest_change.py

@@ -12,7 +12,7 @@ if len(sys.argv) < 2:

 r = Repo(".")

-path = sys.argv[1].encode('utf-8')
+path = sys.argv[1].encode("utf-8")

 w = r.get_walker(paths=[path], max_entries=1)
 try:
@@ -20,5 +20,8 @@ try:
 except StopIteration:
     print("No file %s anywhere in history." % sys.argv[1])
 else:
-    print("{} was last changed by {} at {} (commit {})".format(
-        sys.argv[1], c.author, time.ctime(c.author_time), c.id))
+    print(
+        "{} was last changed by {} at {} (commit {})".format(
+            sys.argv[1], c.author, time.ctime(c.author_time), c.id
+        )
+    )

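Note: examples/latest_change.py shows how nested calls wrap: the outer print( is exploded first, then the inner .format( call, each closer dedented to its opener's level. A sketch with placeholder values shortened for illustration:

    import time

    print(
        "{} was last changed at {} (commit {})".format(
            "README", time.ctime(0), "0" * 40
        )
    )
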
+ 8 - 9
examples/memoryrepo.py

@@ -13,21 +13,20 @@ from dulwich.objects import Blob
 from dulwich.repo import MemoryRepo

 local_repo = MemoryRepo()
-local_repo.refs.set_symbolic_ref(b'HEAD', b'refs/heads/master')
+local_repo.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")

 fetch_result = porcelain.fetch(local_repo, sys.argv[1])
-local_repo.refs[b'refs/heads/master'] = fetch_result.refs[b'refs/heads/master']
+local_repo.refs[b"refs/heads/master"] = fetch_result.refs[b"refs/heads/master"]
 print(local_repo.refs.as_dict())

-last_tree = local_repo[local_repo[b'HEAD'].tree]
-new_blob = Blob.from_string(b'Some contents')
+last_tree = local_repo[local_repo[b"HEAD"].tree]
+new_blob = Blob.from_string(b"Some contents")
 local_repo.object_store.add_object(new_blob)
-last_tree.add(b'test', stat.S_IFREG, new_blob.id)
+last_tree.add(b"test", stat.S_IFREG, new_blob.id)
 local_repo.object_store.add_object(last_tree)

 local_repo.do_commit(
-    message=b'Add a file called \'test\'',
-    ref=b'refs/heads/master',
-    tree=last_tree.id)
+    message=b"Add a file called 'test'", ref=b"refs/heads/master", tree=last_tree.id
+)

-porcelain.push(local_repo, sys.argv[1], 'master')
+porcelain.push(local_repo, sys.argv[1], "master")

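Note: the memoryrepo.py hunk also shows quote-style selection: the formatter picks the delimiter that avoids backslash escapes, so a byte string holding single quotes becomes double-quoted. Both literals in this sketch compare equal:

    before = b'Add a file called \'test\''
    after = b"Add a file called 'test'"
    assert before == after
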
+ 5 - 7
examples/rename-branch.py

@@ -7,9 +7,9 @@ from dulwich.objects import ZERO_SHA
 from dulwich.pack import pack_objects_to_data

 parser = argparse.ArgumentParser()
-parser.add_argument('url', type=str)
-parser.add_argument('old_ref', type=str)
-parser.add_argument('new_ref', type=str)
+parser.add_argument("url", type=str)
+parser.add_argument("old_ref", type=str)
+parser.add_argument("new_ref", type=str)
 args = parser.parse_args()

 client, path = get_transport_and_path_from_url(args.url)
@@ -20,10 +20,8 @@ def generate_pack_data(*args, **kwargs):


 def update_refs(refs):
-    sha = refs[args.old_ref.encode('utf-8')]
-    return {
-        args.old_ref.encode('utf-8'): ZERO_SHA,
-        args.new_ref.encode('utf-8'): sha}
+    sha = refs[args.old_ref.encode("utf-8")]
+    return {args.old_ref.encode("utf-8"): ZERO_SHA, args.new_ref.encode("utf-8"): sha}


 client.send_pack(path, update_refs, generate_pack_data)

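Note: rename-branch.py shows the inverse rule: without a magic trailing comma, an exploded literal that fits within the line limit is collapsed back onto a single line. A sketch with hypothetical ref names:

    old, new = "refs/heads/old", "refs/heads/new"
    refs = {old.encode("utf-8"): b"0" * 40, new.encode("utf-8"): b"a" * 40}
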
+ 23 - 22
setup.py

@@ -7,46 +7,47 @@ import sys

 from setuptools import Extension, setup

-if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
+if sys.platform == "darwin" and os.path.exists("/usr/bin/xcodebuild"):
     # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
     # distutils.sysconfig
     import subprocess
+
     p = subprocess.Popen(
-        ['/usr/bin/xcodebuild', '-version'], stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE, env={})
+        ["/usr/bin/xcodebuild", "-version"],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        env={},
+    )
     out, err = p.communicate()
     for line in out.splitlines():
         line = line.decode("utf8")
         # Also parse only first digit, because 3.2.1 can't be parsed nicely
-        if (line.startswith('Xcode')
-                and int(line.split()[1].split('.')[0]) >= 4):
-            os.environ['ARCHFLAGS'] = ''
+        if line.startswith("Xcode") and int(line.split()[1].split(".")[0]) >= 4:
+            os.environ["ARCHFLAGS"] = ""

-tests_require = ['fastimport']
+tests_require = ["fastimport"]


-if '__pypy__' not in sys.modules and sys.platform != 'win32':
-    tests_require.extend([
-        'gevent', 'geventhttpclient', 'setuptools>=17.1'])
+if "__pypy__" not in sys.modules and sys.platform != "win32":
+    tests_require.extend(["gevent", "geventhttpclient", "setuptools>=17.1"])


-optional = os.environ.get('CIBUILDWHEEL', '0') != '1'
+optional = os.environ.get("CIBUILDWHEEL", "0") != "1"

 ext_modules = [
-    Extension('dulwich._objects', ['dulwich/_objects.c'],
-              optional=optional),
-    Extension('dulwich._pack', ['dulwich/_pack.c'],
-              optional=optional),
-    Extension('dulwich._diff_tree', ['dulwich/_diff_tree.c'],
-              optional=optional),
+    Extension("dulwich._objects", ["dulwich/_objects.c"], optional=optional),
+    Extension("dulwich._pack", ["dulwich/_pack.c"], optional=optional),
+    Extension("dulwich._diff_tree", ["dulwich/_diff_tree.c"], optional=optional),
 ]

 # Ideally, setuptools would just provide a way to do this
-if '--pure' in sys.argv:
-    sys.argv.remove('--pure')
+if "--pure" in sys.argv:
+    sys.argv.remove("--pure")
     ext_modules = []


-setup(package_data={'': ['../docs/tutorial/*.txt', 'py.typed']},
-      ext_modules=ext_modules,
-      tests_require=tests_require)
+setup(
+    package_data={"": ["../docs/tutorial/*.txt", "py.typed"]},
+    ext_modules=ext_modules,
+    tests_require=tests_require,
+)
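
Note: the final setup() hunk sums up the pattern: once any argument forces a wrap, every keyword argument gets its own line and a trailing comma precedes the closer. A runnable sketch with a hypothetical stand-in for setuptools.setup():

    def configure(**kwargs):
        return kwargs

    configure(
        package_data={"": ["../docs/tutorial/*.txt", "py.typed"]},
        ext_modules=[],
        tests_require=["fastimport"],
    )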