瀏覽代碼

Some style fixes.

Jelmer Vernooij 8 年之前
父節點
當前提交
e5f6eb1ebd

+ 7 - 5
docs/conf.py

@@ -3,7 +3,8 @@
 # dulwich documentation build configuration file, created by
 # sphinx-quickstart on Thu Feb 18 23:18:28 2010.
 #
-# This file is execfile()d with the current directory set to its containing dir.
+# This file is execfile()d with the current directory set to its containing
+# dir.
 #
 # Note that not all possible configuration values are present in this
 # autogenerated file.
@@ -11,7 +12,8 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os
+import os
+import sys
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
@@ -20,10 +22,10 @@ sys.path.insert(0, os.path.abspath('..'))
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__))))
 dulwich = __import__('dulwich')
 
-# -- General configuration -----------------------------------------------------
+# -- General configuration ----------------------------------------------------
 
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = ['sphinx.ext.autodoc']
 try:
     import rst2pdf

+ 61 - 40
dulwich/client.py

@@ -266,10 +266,12 @@ class GitClient(object):
             # TODO(jelmer): Avoid reading entire file into memory and
             # only processing it after the whole file has been fetched.
             f = BytesIO()
+
             def commit():
                 if f.tell():
                     f.seek(0)
                     target.object_store.add_thin_pack(f.read, None)
+
             def abort():
                 pass
         else:
@@ -337,10 +339,9 @@ class GitClient(object):
                 else:
                     ok.add(ref)
                 ref_status[ref] = status
-            raise UpdateRefsError(', '.join([ref for ref in ref_status
-                                             if ref not in ok]) +
-                                             b' failed to update',
-                                  ref_status=ref_status)
+            raise UpdateRefsError(', '.join([
+                ref for ref in ref_status if ref not in ok]) +
+                b' failed to update', ref_status=ref_status)
 
     def _read_side_band64k_data(self, proto, channel_callbacks):
         """Read per-channel data.
@@ -382,15 +383,16 @@ class GitClient(object):
             old_sha1 = old_refs.get(refname, ZERO_SHA)
             if not isinstance(old_sha1, bytes):
                 raise TypeError('old sha1 for %s is not a bytestring: %r' %
-                        (refname, old_sha1))
+                                (refname, old_sha1))
             new_sha1 = new_refs.get(refname, ZERO_SHA)
             if not isinstance(new_sha1, bytes):
                 raise TypeError('old sha1 for %s is not a bytestring %r' %
-                        (refname, new_sha1))
+                                (refname, new_sha1))
 
             if old_sha1 != new_sha1:
                 if sent_capabilities:
-                    proto.write_pkt_line(old_sha1 + b' ' + new_sha1 + b' ' + refname)
+                    proto.write_pkt_line(old_sha1 + b' ' + new_sha1 + b' ' +
+                                         refname)
                 else:
                     proto.write_pkt_line(
                         old_sha1 + b' ' + new_sha1 + b' ' + refname + b'\0' +
@@ -410,7 +412,8 @@ class GitClient(object):
         """
         if b"side-band-64k" in capabilities:
             if progress is None:
-                progress = lambda x: None
+                def progress(x):
+                    pass
             channel_callbacks = {2: progress}
             if CAPABILITY_REPORT_STATUS in capabilities:
                 channel_callbacks[1] = PktLineParser(
@@ -435,7 +438,8 @@ class GitClient(object):
             whether there is extra graph data to read on proto
         """
         assert isinstance(wants, list) and isinstance(wants[0], bytes)
-        proto.write_pkt_line(COMMAND_WANT + b' ' + wants[0] + b' ' + b' '.join(capabilities) + b'\n')
+        proto.write_pkt_line(COMMAND_WANT + b' ' + wants[0] + b' ' +
+                             b' '.join(capabilities) + b'\n')
         for want in wants[1:]:
             proto.write_pkt_line(COMMAND_WANT + b' ' + want + b'\n')
         proto.write_pkt_line(None)
@@ -481,7 +485,9 @@ class GitClient(object):
         if CAPABILITY_SIDE_BAND_64K in capabilities:
             if progress is None:
                 # Just ignore progress data
-                progress = lambda x: None
+
+                def progress(x):
+                    pass
             self._read_side_band64k_data(proto, {
                 SIDE_BAND_CHANNEL_DATA: pack_data,
                 SIDE_BAND_CHANNEL_PROGRESS: progress}
@@ -537,7 +543,8 @@ class TraditionalGitClient(GitClient):
         proto, unused_can_read = self._connect(b'receive-pack', path)
         with proto:
             old_refs, server_capabilities = read_pkt_refs(proto)
-            negotiated_capabilities = self._send_capabilities & server_capabilities
+            negotiated_capabilities = (
+                self._send_capabilities & server_capabilities)
 
             if CAPABILITY_REPORT_STATUS in negotiated_capabilities:
                 self._report_status_parser = ReportStatusParser()
@@ -549,14 +556,15 @@ class TraditionalGitClient(GitClient):
                 proto.write_pkt_line(None)
                 raise
 
-            if not CAPABILITY_DELETE_REFS in server_capabilities:
+            if CAPABILITY_DELETE_REFS not in server_capabilities:
                 # Server does not support deletions. Fail later.
                 new_refs = dict(orig_new_refs)
                 for ref, sha in orig_new_refs.items():
                     if sha == ZERO_SHA:
                         if CAPABILITY_REPORT_STATUS in negotiated_capabilities:
                             report_status_parser._ref_statuses.append(
-                                b'ng ' + sha + b' remote does not support deleting refs')
+                                b'ng ' + sha +
+                                b' remote does not support deleting refs')
                             report_status_parser._ref_status_ok = False
                         del new_refs[ref]
 
@@ -573,7 +581,8 @@ class TraditionalGitClient(GitClient):
 
             (have, want) = self._handle_receive_pack_head(
                 proto, negotiated_capabilities, old_refs, new_refs)
-            if not want and set(new_refs.items()).issubset(set(old_refs.items())):
+            if (not want and
+                    set(new_refs.items()).issubset(set(old_refs.items()))):
                 return new_refs
             objects = generate_pack_contents(have, want)
 
@@ -621,7 +630,8 @@ class TraditionalGitClient(GitClient):
             self._handle_upload_pack_head(
                 proto, negotiated_capabilities, graph_walker, wants, can_read)
             self._handle_upload_pack_tail(
-                proto, negotiated_capabilities, graph_walker, pack_data, progress)
+                proto, negotiated_capabilities, graph_walker, pack_data,
+                progress)
             return refs
 
     def get_refs(self, path):
@@ -702,6 +712,7 @@ class TCPGitClient(TraditionalGitClient):
         rfile = s.makefile('rb', -1)
         # 0 means unbuffered
         wfile = s.makefile('wb', 0)
+
        def close():
             rfile.close()
             wfile.close()
@@ -712,7 +723,8 @@ class TCPGitClient(TraditionalGitClient):
         if path.startswith(b"/~"):
             path = path[1:]
         # TODO(jelmer): Alternative to ascii?
-        proto.send_cmd(b'git-' + cmd, path, b'host=' + self._host.encode('ascii'))
+        proto.send_cmd(
+            b'git-' + cmd, path, b'host=' + self._host.encode('ascii'))
         return proto, lambda: _fileno_can_read(s)
 
 
@@ -748,10 +760,10 @@ class SubprocessWrapper(object):
 def find_git_command():
     """Find command to run for system Git (usually C Git).
     """
-    if sys.platform == 'win32': # support .exe, .bat and .cmd
-        try: # to avoid overhead
+    if sys.platform == 'win32':  # support .exe, .bat and .cmd
+        try:  # to avoid overhead
             import win32api
-        except ImportError: # run through cmd.exe with some overhead
+        except ImportError:  # run through cmd.exe with some overhead
             return ['cmd', '/c', 'git']
         else:
             status, git = win32api.FindExecutable('git')
@@ -838,19 +850,23 @@ class LocalGitClient(GitClient):
             {refname: new_ref}, including deleted refs.
         """
         if not progress:
-            progress = lambda x: None
+            def progress(x):
+                pass
 
-        with self._open_repo(path)  as target:
+        with self._open_repo(path) as target:
             old_refs = target.get_refs()
             new_refs = determine_wants(dict(old_refs))
 
             have = [sha1 for sha1 in old_refs.values() if sha1 != ZERO_SHA]
             want = []
             for refname, new_sha1 in new_refs.items():
-                if new_sha1 not in have and not new_sha1 in want and new_sha1 != ZERO_SHA:
+                if (new_sha1 not in have and
+                        new_sha1 not in want and
+                        new_sha1 != ZERO_SHA):
                     want.append(new_sha1)
 
-            if not want and set(new_refs.items()).issubset(set(old_refs.items())):
+            if (not want and
+                    set(new_refs.items()).issubset(set(old_refs.items()))):
                 return new_refs
 
             target.object_store.add_objects(generate_pack_contents(have, want))
@@ -858,8 +874,10 @@ class LocalGitClient(GitClient):
             for refname, new_sha1 in new_refs.items():
                 old_sha1 = old_refs.get(refname, ZERO_SHA)
                 if new_sha1 != ZERO_SHA:
-                    if not target.refs.set_if_equals(refname, old_sha1, new_sha1):
-                        progress('unable to set %s to %s' % (refname, new_sha1))
+                    if not target.refs.set_if_equals(
+                            refname, old_sha1, new_sha1):
+                        progress('unable to set %s to %s' %
+                                 (refname, new_sha1))
                 else:
                     if not target.refs.remove_if_equals(refname, old_sha1):
                         progress('unable to remove %s' % refname)
@@ -891,10 +909,11 @@ class LocalGitClient(GitClient):
         :return: Dictionary with all remote refs (not just those fetched)
         """
         with self._open_repo(path) as r:
-            objects_iter = r.fetch_objects(determine_wants, graph_walker, progress)
+            objects_iter = r.fetch_objects(
+                determine_wants, graph_walker, progress)
 
-            # Did the process short-circuit (e.g. in a stateless RPC call)? Note
-            # that the client still expects a 0-object pack in most cases.
+            # Did the process short-circuit (e.g. in a stateless RPC call)?
+            # Note that the client still expects a 0-object pack in most cases.
             if objects_iter is None:
                 return
             write_pack_objects(ProtocolFile(None, pack_data), objects_iter)
@@ -943,7 +962,7 @@ class SubprocessSSHVendor(SSHVendor):
         if not isinstance(command, bytes):
             raise TypeError(command)
 
-        #FIXME: This has no way to deal with passwords..
+        # FIXME: This has no way to deal with passwords..
         args = ['ssh', '-x']
         if port is not None:
             args.extend(['-p', str(port)])
@@ -1076,7 +1095,8 @@ class HttpGitClient(GitClient):
                    password=password, username=username, **kwargs)
 
     def __repr__(self):
-        return "%s(%r, dumb=%r)" % (type(self).__name__, self._base_url, self.dumb)
+        return "%s(%r, dumb=%r)" % (
+            type(self).__name__, self._base_url, self.dumb)
 
     def _get_url(self, path):
         return urlparse.urljoin(self._base_url, path).rstrip("/") + "/"
@@ -1138,7 +1158,7 @@ class HttpGitClient(GitClient):
         if content_type != (
                 "application/x-%s-result" % service):
             raise GitProtocolError("Invalid content-type from server: %s"
-                % content_type)
+                                   % content_type)
         return resp
 
     def send_pack(self, path, determine_wants, generate_pack_contents,
@@ -1185,13 +1205,12 @@ class HttpGitClient(GitClient):
                                    data=req_data.getvalue())
         try:
             resp_proto = Protocol(resp.read, None)
-            self._handle_receive_pack_tail(resp_proto, negotiated_capabilities,
-                progress)
+            self._handle_receive_pack_tail(
+                resp_proto, negotiated_capabilities, progress)
             return new_refs
         finally:
             resp.close()
 
-
     def fetch_pack(self, path, determine_wants, graph_walker, pack_data,
                    progress=None):
         """Retrieve a pack from a git smart server.
@@ -1205,7 +1224,8 @@ class HttpGitClient(GitClient):
         url = self._get_url(path)
         refs, server_capabilities = self._discover_references(
             b"git-upload-pack", url)
-        negotiated_capabilities = self._fetch_capabilities & server_capabilities
+        negotiated_capabilities = (
+            self._fetch_capabilities & server_capabilities)
         wants = determine_wants(refs)
         if wants is not None:
             wants = [cid for cid in wants if cid != ZERO_SHA]
@@ -1216,14 +1236,15 @@ class HttpGitClient(GitClient):
         req_data = BytesIO()
         req_proto = Protocol(None, req_data.write)
         self._handle_upload_pack_head(
-            req_proto, negotiated_capabilities, graph_walker, wants,
-            lambda: False)
+                req_proto, negotiated_capabilities, graph_walker, wants,
+                lambda: False)
         resp = self._smart_request(
             "git-upload-pack", url, data=req_data.getvalue())
         try:
             resp_proto = Protocol(resp.read, None)
-            self._handle_upload_pack_tail(resp_proto, negotiated_capabilities,
-                graph_walker, pack_data, progress)
+            self._handle_upload_pack_tail(
+                resp_proto, negotiated_capabilities, graph_walker, pack_data,
+                progress)
             return refs
         finally:
             resp.close()
@@ -1283,7 +1304,7 @@ def get_transport_and_path(location, **kwargs):
         # Windows local path
         return default_local_git_client_cls(**kwargs), location
 
-    if ':' in location and not '@' in location:
+    if ':' in location and '@' not in location:
         # SSH with no user@, zero or one leading slash.
         (hostname, path) = location.split(':', 1)
         return SSHGitClient(hostname, **kwargs), path

+ 18 - 11
dulwich/config.py

@@ -172,12 +172,13 @@ class ConfigDict(Config, MutableMapping):
 
 def _format_string(value):
     if (value.startswith(b" ") or
-        value.startswith(b"\t") or
-        value.endswith(b" ") or
-        b'#' in value or
-        value.endswith(b"\t")):
+            value.startswith(b"\t") or
+            value.endswith(b" ") or
+            b'#' in value or
+            value.endswith(b"\t")):
         return b'"' + _escape_value(value) + b'"'
-    return _escape_value(value)
+    else:
+        return _escape_value(value)
 
 
 _ESCAPE_TABLE = {
@@ -190,6 +191,7 @@ _ESCAPE_TABLE = {
 _COMMENT_CHARS = [ord(b"#"), ord(b";")]
 _WHITESPACE_CHARS = [ord(b"\t"), ord(b" ")]
 
+
 def _parse_string(value):
     value = bytearray(value.strip())
     ret = bytearray()
@@ -208,8 +210,8 @@ def _parse_string(value):
                     (value, i))
             except KeyError:
                 raise ValueError(
-                    "escape character followed by unknown character %s at %d in %r" %
-                    (value[i], i, value))
+                    "escape character followed by unknown character "
+                    "%s at %d in %r" % (value[i], i, value))
             if whitespace:
                 ret.extend(whitespace)
                 whitespace = bytearray()
@@ -236,7 +238,11 @@ def _parse_string(value):
 
 def _escape_value(value):
     """Escape a value."""
-    return value.replace(b"\\", b"\\\\").replace(b"\n", b"\\n").replace(b"\t", b"\\t").replace(b"\"", b"\\\"")
+    value = value.replace(b"\\", b"\\\\")
+    value = value.replace(b"\n", b"\\n")
+    value = value.replace(b"\t", b"\\t")
+    value = value.replace(b"\"", b"\\\"")
+    return value
 
 
 def _check_variable_name(name):
@@ -295,8 +301,8 @@ class ConfigFile(ConfigDict):
                         section = (pts[0], pts[1])
                     else:
                         if not _check_section_name(pts[0]):
-                            raise ValueError("invalid section name %r" %
-                                    pts[0])
+                            raise ValueError(
+                                "invalid section name %r" % pts[0])
                         pts = pts[0].split(b".", 1)
                         if len(pts) == 2:
                             section = (pts[0], pts[1])
@@ -359,7 +365,8 @@ class ConfigFile(ConfigDict):
             if subsection_name is None:
                 f.write(b"[" + section_name + b"]\n")
             else:
-                f.write(b"[" + section_name + b" \"" + subsection_name + b"\"]\n")
+                f.write(b"[" + section_name +
+                        b" \"" + subsection_name + b"\"]\n")
             for key, value in values.items():
                 if value is True:
                     value = b"true"

+ 15 - 11
dulwich/diff_tree.py

@@ -173,10 +173,10 @@ def tree_changes(store, tree1_id, tree2_id, want_unchanged=False,
         source and target tree.
     """
     if (rename_detector is not None and tree1_id is not None and
-        tree2_id is not None):
+            tree2_id is not None):
         for change in rename_detector.changes_with_renames(
-            tree1_id, tree2_id, want_unchanged=want_unchanged):
-                yield change
+                tree1_id, tree2_id, want_unchanged=want_unchanged):
+            yield change
         return
 
     entries = walk_trees(store, tree1_id, tree2_id,
@@ -255,8 +255,11 @@ def tree_changes_for_merge(store, parent_tree_ids, tree_id,
                 path = change.new.path
             changes_by_path[path][i] = change
 
-    old_sha = lambda c: c.old.sha
-    change_type = lambda c: c.type
+    def old_sha(c):
+        return c.old.sha
+
+    def change_type(c):
+        return c.type
 
     # Yield only conflicting changes.
     for _, changes in sorted(changes_by_path.items()):
@@ -381,9 +384,9 @@ class RenameDetector(object):
             an add/delete pair to be a rename/copy; see _similarity_score.
         :param max_files: The maximum number of adds and deletes to consider,
             or None for no limit. The detector is guaranteed to compare no more
-            than max_files ** 2 add/delete pairs. This limit is provided because
-            rename detection can be quadratic in the project size. If the limit
-            is exceeded, no content rename detection is attempted.
+            than max_files ** 2 add/delete pairs. This limit is provided
+            because rename detection can be quadratic in the project size. If
+            the limit is exceeded, no content rename detection is attempted.
         :param rewrite_threshold: The threshold similarity score below which a
             modify should be considered a delete/add, or None to not break
             modifies; see _similarity_score.
@@ -404,7 +407,7 @@ class RenameDetector(object):
 
     def _should_split(self, change):
         if (self._rewrite_threshold is None or change.type != CHANGE_MODIFY or
-            change.old.sha == change.new.sha):
+                change.old.sha == change.new.sha):
             return False
         old_obj = self._store[change.old.sha]
         new_obj = self._store[change.new.sha]
@@ -551,7 +554,7 @@ class RenameDetector(object):
             path = add.new.path
             delete = delete_map.get(path)
             if (delete is not None and
-                stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
+                    stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
                 modifies[path] = TreeChange(CHANGE_MODIFY, delete.old, add.new)
 
         self._adds = [a for a in self._adds if a.new.path not in modifies]
@@ -570,7 +573,8 @@ class RenameDetector(object):
     def _prune_unchanged(self):
         if self._want_unchanged:
             return
-        self._deletes = [d for d in self._deletes if d.type != CHANGE_UNCHANGED]
+        self._deletes = [
+            d for d in self._deletes if d.type != CHANGE_UNCHANGED]
 
     def changes_with_renames(self, tree1_id, tree2_id, want_unchanged=False):
         """Iterate TreeChanges between two tree SHAs, with rename detection."""

+ 11 - 8
dulwich/file.py

@@ -105,10 +105,12 @@ class _GitFile(object):
     PROXY_METHODS = ('__iter__', 'flush', 'fileno', 'isatty', 'read',
     PROXY_METHODS = ('__iter__', 'flush', 'fileno', 'isatty', 'read',
                      'readline', 'readlines', 'seek', 'tell',
                      'readline', 'readlines', 'seek', 'tell',
                      'truncate', 'write', 'writelines')
                      'truncate', 'write', 'writelines')
+
     def __init__(self, filename, mode, bufsize):
     def __init__(self, filename, mode, bufsize):
         self._filename = filename
         self._filename = filename
         self._lockfilename = '%s.lock' % self._filename
         self._lockfilename = '%s.lock' % self._filename
-        fd = os.open(self._lockfilename,
+        fd = os.open(
+            self._lockfilename,
             os.O_RDWR | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0))
             os.O_RDWR | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0))
         self._file = os.fdopen(fd, mode, bufsize)
         self._file = os.fdopen(fd, mode, bufsize)
         self._closed = False
         self._closed = False
@@ -137,12 +139,12 @@ class _GitFile(object):
         """Close this file, saving the lockfile over the original.
         """Close this file, saving the lockfile over the original.
 
 
         :note: If this method fails, it will attempt to delete the lockfile.
         :note: If this method fails, it will attempt to delete the lockfile.
-            However, it is not guaranteed to do so (e.g. if a filesystem becomes
-            suddenly read-only), which will prevent future writes to this file
-            until the lockfile is removed manually.
-        :raises OSError: if the original file could not be overwritten. The lock
-            file is still closed, so further attempts to write to the same file
-            object will raise ValueError.
+            However, it is not guaranteed to do so (e.g. if a filesystem
+            becomes suddenly read-only), which will prevent future writes to
+            this file until the lockfile is removed manually.
+        :raises OSError: if the original file could not be overwritten. The
+            lock file is still closed, so further attempts to write to the same
+            file object will raise ValueError.
         """
         """
         if self._closed:
         if self._closed:
             return
             return
@@ -152,7 +154,8 @@ class _GitFile(object):
                 os.rename(self._lockfilename, self._filename)
                 os.rename(self._lockfilename, self._filename)
             except OSError as e:
             except OSError as e:
                 if sys.platform == 'win32' and e.errno == errno.EEXIST:
                 if sys.platform == 'win32' and e.errno == errno.EEXIST:
-                    # Windows versions prior to Vista don't support atomic renames
+                    # Windows versions prior to Vista don't support atomic
+                    # renames
                     _fancy_rename(self._lockfilename, self._filename)
                     _fancy_rename(self._lockfilename, self._filename)
                 else:
                 else:
                     raise
                     raise

+ 2 - 1
dulwich/log_utils.py

@@ -31,7 +31,8 @@ http://docs.python.org/library/logging.html#configuring-logging-for-a-library
 
 
 For many modules, the only function from the logging module they need is
 For many modules, the only function from the logging module they need is
 getLogger; this module exports that function for convenience. If a calling
 getLogger; this module exports that function for convenience. If a calling
-module needs something else, it can import the standard logging module directly.
+module needs something else, it can import the standard logging module
+directly.
 """
 """
 
 
 import logging
 import logging

+ 40 - 28
dulwich/object_store.py

@@ -74,8 +74,8 @@ class BaseObjectStore(object):
 
 
     def determine_wants_all(self, refs):
     def determine_wants_all(self, refs):
         return [sha for (ref, sha) in refs.items()
         return [sha for (ref, sha) in refs.items()
-                if not sha in self and not ref.endswith(b"^{}") and
-                   not sha == ZERO_SHA]
+                if sha not in self and not ref.endswith(b"^{}") and
+                not sha == ZERO_SHA]
 
 
     def iter_shas(self, shas):
     def iter_shas(self, shas):
         """Iterate over the objects for the specified shas.
         """Iterate over the objects for the specified shas.
@@ -173,12 +173,14 @@ class BaseObjectStore(object):
         :param wants: Iterable over SHAs of objects to fetch.
         :param wants: Iterable over SHAs of objects to fetch.
         :param progress: Simple progress function that will be called with
         :param progress: Simple progress function that will be called with
             updated progress strings.
             updated progress strings.
-        :param get_tagged: Function that returns a dict of pointed-to sha -> tag
-            sha for including tags.
-        :param get_parents: Optional function for getting the parents of a commit.
+        :param get_tagged: Function that returns a dict of pointed-to sha ->
+            tag sha for including tags.
+        :param get_parents: Optional function for getting the parents of a
+            commit.
         :return: Iterator over (sha, path) pairs.
         :return: Iterator over (sha, path) pairs.
         """
         """
-        finder = MissingObjectFinder(self, haves, wants, progress, get_tagged, get_parents=get_parents)
+        finder = MissingObjectFinder(self, haves, wants, progress, get_tagged,
+                                     get_parents=get_parents)
         return iter(finder.next, None)
         return iter(finder.next, None)
 
 
     def find_common_revisions(self, graphwalker):
     def find_common_revisions(self, graphwalker):
@@ -210,8 +212,8 @@ class BaseObjectStore(object):
 
 
         :param sha: The object SHA to peel.
         :param sha: The object SHA to peel.
         :return: The fully-peeled SHA1 of a tag object, after peeling all
         :return: The fully-peeled SHA1 of a tag object, after peeling all
-            intermediate tags; if the original ref does not point to a tag, this
-            will equal the original SHA1.
+            intermediate tags; if the original ref does not point to a tag,
+            this will equal the original SHA1.
         """
         """
         obj = self[sha]
         obj = self[sha]
         obj_class = object_class(obj.type_name)
         obj_class = object_class(obj.type_name)
@@ -227,7 +229,8 @@ class BaseObjectStore(object):
         :param heads: commits to start from
         :param heads: commits to start from
         :param common: commits to end at, or empty set to walk repository
         :param common: commits to end at, or empty set to walk repository
             completely
             completely
-        :param get_parents: Optional function for getting the parents of a commit.
+        :param get_parents: Optional function for getting the parents of a
+            commit.
         :return: a tuple (A, B) where A - all commits reachable
         :return: a tuple (A, B) where A - all commits reachable
             from heads but not present in common, B - common (shared) elements
             from heads but not present in common, B - common (shared) elements
             that are directly reachable from heads
             that are directly reachable from heads
@@ -338,7 +341,8 @@ class PackBasedObjectStore(BaseObjectStore):
 
 
     def __iter__(self):
     def __iter__(self):
         """Iterate over the SHAs that are present in this store."""
         """Iterate over the SHAs that are present in this store."""
-        iterables = list(self.packs) + [self._iter_loose_objects()] + [self._iter_alternate_objects()]
+        iterables = (list(self.packs) + [self._iter_loose_objects()] +
+                     [self._iter_alternate_objects()])
         return chain(*iterables)
         return chain(*iterables)
 
 
     def contains_loose(self, sha):
     def contains_loose(self, sha):
@@ -428,8 +432,7 @@ class DiskObjectStore(PackBasedObjectStore):
 
 
     def _read_alternate_paths(self):
     def _read_alternate_paths(self):
         try:
         try:
-            f = GitFile(os.path.join(self.path, INFODIR, "alternates"),
-                    'rb')
+            f = GitFile(os.path.join(self.path, INFODIR, "alternates"), 'rb')
         except (OSError, IOError) as e:
         except (OSError, IOError) as e:
             if e.errno == errno.ENOENT:
             if e.errno == errno.ENOENT:
                 return
                 return
@@ -442,7 +445,8 @@ class DiskObjectStore(PackBasedObjectStore):
                 if os.path.isabs(l):
                 if os.path.isabs(l):
                     yield l.decode(sys.getfilesystemencoding())
                     yield l.decode(sys.getfilesystemencoding())
                 else:
                 else:
-                    yield os.path.join(self.path, l).decode(sys.getfilesystemencoding())
+                    yield os.path.join(self.path, l).decode(
+                        sys.getfilesystemencoding())
 
 
     def add_alternate_path(self, path):
     def add_alternate_path(self, path):
         """Add an alternate path to this object store.
         """Add an alternate path to this object store.
@@ -482,7 +486,8 @@ class DiskObjectStore(PackBasedObjectStore):
         for name in pack_dir_contents:
         for name in pack_dir_contents:
             assert isinstance(name, basestring if sys.version_info[0] == 2 else str)
             assert isinstance(name, basestring if sys.version_info[0] == 2 else str)
             if name.startswith("pack-") and name.endswith(".pack"):
             if name.startswith("pack-") and name.endswith(".pack"):
-                # verify that idx exists first (otherwise the pack was not yet fully written)
+                # verify that idx exists first (otherwise the pack was not yet
+                # fully written)
                 idx_name = os.path.splitext(name)[0] + ".idx"
                 idx_name = os.path.splitext(name)[0] + ".idx"
                 if idx_name in pack_dir_contents:
                 if idx_name in pack_dir_contents:
                     pack_name = name[:-len(".pack")]
                     pack_name = name[:-len(".pack")]
@@ -599,12 +604,12 @@ class DiskObjectStore(PackBasedObjectStore):
     def add_thin_pack(self, read_all, read_some):
     def add_thin_pack(self, read_all, read_some):
         """Add a new thin pack to this object store.
         """Add a new thin pack to this object store.
 
 
-        Thin packs are packs that contain deltas with parents that exist outside
-        the pack. They should never be placed in the object store directly, and
-        always indexed and completed as they are copied.
+        Thin packs are packs that contain deltas with parents that exist
+        outside the pack. They should never be placed in the object store
+        directly, and always indexed and completed as they are copied.
 
 
-        :param read_all: Read function that blocks until the number of requested
-            bytes are read.
+        :param read_all: Read function that blocks until the number of
+            requested bytes are read.
         :param read_some: Read function that returns at least one byte, but may
         :param read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
             not return the number of bytes requested.
         :return: A Pack object pointing at the now-completed thin pack in the
         :return: A Pack object pointing at the now-completed thin pack in the
@@ -645,6 +650,7 @@ class DiskObjectStore(PackBasedObjectStore):
         """
         """
         fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
         fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")
         f = os.fdopen(fd, 'wb')
         f = os.fdopen(fd, 'wb')
+
         def commit():
         def commit():
             os.fsync(fd)
             os.fsync(fd)
             f.close()
             f.close()
@@ -653,6 +659,7 @@ class DiskObjectStore(PackBasedObjectStore):
             else:
             else:
                 os.remove(path)
                 os.remove(path)
                 return None
                 return None
+
         def abort():
         def abort():
             f.close()
             f.close()
             os.remove(path)
             os.remove(path)
@@ -671,7 +678,7 @@ class DiskObjectStore(PackBasedObjectStore):
             if e.errno != errno.EEXIST:
             if e.errno != errno.EEXIST:
                 raise
                 raise
         if os.path.exists(path):
         if os.path.exists(path):
-            return # Already there, no need to write again
+            return  # Already there, no need to write again
         with GitFile(path, 'wb') as f:
         with GitFile(path, 'wb') as f:
             f.write(obj.as_legacy_object())
             f.write(obj.as_legacy_object())
 
 
@@ -759,11 +766,13 @@ class MemoryObjectStore(BaseObjectStore):
             call when the pack is finished.
             call when the pack is finished.
         """
         """
         f = BytesIO()
         f = BytesIO()
+
         def commit():
         def commit():
             p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
             p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
             f.close()
             f.close()
             for obj in PackInflater.for_pack_data(p, self.get_raw):
             for obj in PackInflater.for_pack_data(p, self.get_raw):
                 self.add_object(obj)
                 self.add_object(obj)
+
         def abort():
         def abort():
             pass
             pass
         return f, commit, abort
         return f, commit, abort
@@ -794,19 +803,20 @@ class MemoryObjectStore(BaseObjectStore):
     def add_thin_pack(self, read_all, read_some):
     def add_thin_pack(self, read_all, read_some):
         """Add a new thin pack to this object store.
         """Add a new thin pack to this object store.
 
 
-        Thin packs are packs that contain deltas with parents that exist outside
-        the pack. Because this object store doesn't support packs, we extract
-        and add the individual objects.
+        Thin packs are packs that contain deltas with parents that exist
+        outside the pack. Because this object store doesn't support packs, we
+        extract and add the individual objects.
 
 
-        :param read_all: Read function that blocks until the number of requested
-            bytes are read.
+        :param read_all: Read function that blocks until the number of
+            requested bytes are read.
         :param read_some: Read function that returns at least one byte, but may
         :param read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
             not return the number of bytes requested.
         """
         """
         f, commit, abort = self.add_pack()
         f, commit, abort = self.add_pack()
         try:
         try:
             indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
             indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
-            copier = PackStreamCopier(read_all, read_some, f, delta_iter=indexer)
+            copier = PackStreamCopier(read_all, read_some, f,
+                                      delta_iter=indexer)
             copier.verify()
             copier.verify()
             self._complete_thin_pack(f, indexer)
             self._complete_thin_pack(f, indexer)
         except:
         except:
@@ -1054,7 +1064,8 @@ class MissingObjectFinder(object):
         if sha in self._tagged:
         if sha in self._tagged:
             self.add_todo([(self._tagged[sha], None, True)])
             self.add_todo([(self._tagged[sha], None, True)])
         self.sha_done.add(sha)
         self.sha_done.add(sha)
-        self.progress(("counting objects: %d\r" % len(self.sha_done)).encode('ascii'))
+        self.progress(("counting objects: %d\r" %
+                       len(self.sha_done)).encode('ascii'))
         return (sha, name)
         return (sha, name)
 
 
     __next__ = next
     __next__ = next
@@ -1109,7 +1120,8 @@ class ObjectStoreGraphWalker(object):
             ret = self.heads.pop()
             ret = self.heads.pop()
             ps = self.get_parents(ret)
             ps = self.get_parents(ret)
             self.parents[ret] = ps
             self.parents[ret] = ps
-            self.heads.update([p for p in ps if not p in self.parents])
+            self.heads.update(
+                [p for p in ps if p not in self.parents])
             return ret
             return ret
         return None
         return None
 
 

+ 10 - 6
dulwich/objects.py

@@ -136,7 +136,8 @@ def filename_to_hex(filename):
 
 
 def object_header(num_type, length):
 def object_header(num_type, length):
     """Return an object header for the given numeric type and text length."""
     """Return an object header for the given numeric type and text length."""
-    return object_class(num_type).type_name + b' ' + str(length).encode('ascii') + b'\0'
+    return (object_class(num_type).type_name +
+            b' ' + str(length).encode('ascii') + b'\0')
 
 
 
 
 def serializable_property(name, docstring=None):
 def serializable_property(name, docstring=None):
@@ -145,6 +146,7 @@ def serializable_property(name, docstring=None):
     def set(obj, value):
     def set(obj, value):
         setattr(obj, "_"+name, value)
         setattr(obj, "_"+name, value)
         obj._needs_serialization = True
         obj._needs_serialization = True
+
     def get(obj):
     def get(obj):
         return getattr(obj, "_"+name)
         return getattr(obj, "_"+name)
     return property(get, set, doc=docstring)
     return property(get, set, doc=docstring)
@@ -182,9 +184,9 @@ def check_identity(identity, error_msg):
     email_start = identity.find(b'<')
     email_start = identity.find(b'<')
     email_end = identity.find(b'>')
     email_end = identity.find(b'>')
     if (email_start < 0 or email_end < 0 or email_end <= email_start
     if (email_start < 0 or email_end < 0 or email_end <= email_start
-        or identity.find(b'<', email_start + 1) >= 0
-        or identity.find(b'>', email_end + 1) >= 0
-        or not identity.endswith(b'>')):
+            or identity.find(b'<', email_start + 1) >= 0
+            or identity.find(b'>', email_end + 1) >= 0
+            or not identity.endswith(b'>')):
         raise ObjectFormatException(error_msg)
         raise ObjectFormatException(error_msg)
 
 
 
 
@@ -551,7 +553,8 @@ class Blob(ShaFile):
     def _deserialize(self, chunks):
     def _deserialize(self, chunks):
         self._chunked_text = chunks
         self._chunked_text = chunks
 
 
-    chunked = property(_get_chunked, _set_chunked,
+    chunked = property(
+        _get_chunked, _set_chunked,
         "The text within the blob object, as chunks (not necessarily lines).")
         "The text within the blob object, as chunks (not necessarily lines).")
 
 
     @classmethod
     @classmethod
@@ -713,7 +716,8 @@ class Tag(ShaFile):
                 chunks.append(git_line(_TAGGER_HEADER, self._tagger))
                 chunks.append(git_line(_TAGGER_HEADER, self._tagger))
             else:
             else:
                 chunks.append(git_line(
                 chunks.append(git_line(
-                    _TAGGER_HEADER, self._tagger, str(self._tag_time).encode('ascii'),
+                    _TAGGER_HEADER, self._tagger,
+                    str(self._tag_time).encode('ascii'),
                     format_timezone(self._tag_timezone, self._tag_timezone_neg_utc)))
                     format_timezone(self._tag_timezone, self._tag_timezone_neg_utc)))
         if self._message is not None:
         if self._message is not None:
             chunks.append(b'\n') # To close headers
             chunks.append(b'\n') # To close headers

+ 39 - 31
dulwich/pack.py

@@ -309,8 +309,8 @@ def load_pack_index_file(path, f):
     if contents[:4] == b'\377tOc':
     if contents[:4] == b'\377tOc':
         version = struct.unpack(b'>L', contents[4:8])[0]
         version = struct.unpack(b'>L', contents[4:8])[0]
         if version == 2:
         if version == 2:
-            return PackIndex2(path, file=f, contents=contents,
-                size=size)
+            return PackIndex2(
+                path, file=f, contents=contents, size=size)
         else:
         else:
             raise KeyError('Unknown pack index format %d' % version)
             raise KeyError('Unknown pack index format %d' % version)
     else:
     else:
@@ -451,7 +451,8 @@ class FilePackIndex(PackIndex):
     is the end of the group that shares the same starting byte. Subtract one
     is the end of the group that shares the same starting byte. Subtract one
     from the starting byte and index again to find the start of the group.
     from the starting byte and index again to find the start of the group.
     The values are sorted by sha id within the group, so do the math to find
     The values are sorted by sha id within the group, so do the math to find
-    the start and end offset and then bisect in to find if the value is present.
+    the start and end offset and then bisect in to find if the value is
+    present.
     """
     """
 
 
     def __init__(self, filename, file=None, contents=None, size=None):
     def __init__(self, filename, file=None, contents=None, size=None):
@@ -475,7 +476,7 @@ class FilePackIndex(PackIndex):
     def __eq__(self, other):
     def __eq__(self, other):
         # Quick optimization:
         # Quick optimization:
         if (isinstance(other, FilePackIndex) and
         if (isinstance(other, FilePackIndex) and
-            self._fan_out_table != other._fan_out_table):
+                self._fan_out_table != other._fan_out_table):
             return False
             return False
 
 
         return super(FilePackIndex, self).__eq__(other)
         return super(FilePackIndex, self).__eq__(other)
@@ -506,7 +507,8 @@ class FilePackIndex(PackIndex):
         raise NotImplementedError(self._unpack_offset)
         raise NotImplementedError(self._unpack_offset)
 
 
     def _unpack_crc32_checksum(self, i):
     def _unpack_crc32_checksum(self, i):
-        """Unpack the crc32 checksum for the i-th object from the index file."""
+        """Unpack the crc32 checksum for the ith object from the index file.
+        """
         raise NotImplementedError(self._unpack_crc32_checksum)
         raise NotImplementedError(self._unpack_crc32_checksum)
 
 
     def _itersha(self):
     def _itersha(self):
@@ -525,7 +527,8 @@ class FilePackIndex(PackIndex):
     def _read_fan_out_table(self, start_offset):
     def _read_fan_out_table(self, start_offset):
         ret = []
         ret = []
         for i in range(0x100):
         for i in range(0x100):
-            fanout_entry = self._contents[start_offset+i*4:start_offset+(i+1)*4]
+            fanout_entry = self._contents[
+                start_offset+i*4:start_offset+(i+1)*4]
             ret.append(struct.unpack('>L', fanout_entry)[0])
             ret.append(struct.unpack('>L', fanout_entry)[0])
         return ret
         return ret
 
 
@@ -616,8 +619,8 @@ class PackIndex2(FilePackIndex):
         self._crc32_table_offset = self._name_table_offset + 20 * len(self)
         self._crc32_table_offset = self._name_table_offset + 20 * len(self)
         self._pack_offset_table_offset = (self._crc32_table_offset +
         self._pack_offset_table_offset = (self._crc32_table_offset +
                                           4 * len(self))
                                           4 * len(self))
-        self._pack_offset_largetable_offset = (self._pack_offset_table_offset +
-                                          4 * len(self))
+        self._pack_offset_largetable_offset = (
+            self._pack_offset_table_offset + 4 * len(self))
 
 
     def _unpack_entry(self, i):
     def _unpack_entry(self, i):
         return (self._unpack_name(i), self._unpack_offset(i),
         return (self._unpack_name(i), self._unpack_offset(i),
@@ -637,15 +640,15 @@ class PackIndex2(FilePackIndex):
 
 
     def _unpack_crc32_checksum(self, i):
     def _unpack_crc32_checksum(self, i):
         return unpack_from('>L', self._contents,
         return unpack_from('>L', self._contents,
-                          self._crc32_table_offset + i * 4)[0]
+                           self._crc32_table_offset + i * 4)[0]
 
 
 
 
 def read_pack_header(read):
 def read_pack_header(read):
     """Read the header of a pack file.
     """Read the header of a pack file.
 
 
     :param read: Read function
     :param read: Read function
-    :return: Tuple of (pack version, number of objects). If no data is available
-        to read, returns (None, None).
+    :return: Tuple of (pack version, number of objects). If no data is
+        available to read, returns (None, None).
     """
     """
     header = read(12)
     header = read(12)
     if not header:
     if not header:
@@ -779,7 +782,8 @@ class PackStreamReader(object):
         else:
         else:
             to_pop = max(n + tn - 20, 0)
             to_pop = max(n + tn - 20, 0)
             to_add = n
             to_add = n
-        self.sha.update(bytes(bytearray([self._trailer.popleft() for _ in range(to_pop)])))
+        self.sha.update(
+            bytes(bytearray([self._trailer.popleft() for _ in range(to_pop)])))
         self._trailer.extend(data[-to_add:])
         self._trailer.extend(data[-to_add:])
 
 
         # hash everything but the trailer
         # hash everything but the trailer
@@ -880,8 +884,8 @@ class PackStreamCopier(PackStreamReader):
     def __init__(self, read_all, read_some, outfile, delta_iter=None):
     def __init__(self, read_all, read_some, outfile, delta_iter=None):
         """Initialize the copier.
         """Initialize the copier.
 
 
-        :param read_all: Read function that blocks until the number of requested
-            bytes are read.
+        :param read_all: Read function that blocks until the number of
+            requested bytes are read.
         :param read_some: Read function that returns at least one byte, but may
         :param read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
             not return the number of bytes requested.
         :param outfile: File-like object to write output through.
         :param outfile: File-like object to write output through.
@@ -924,7 +928,7 @@ def obj_sha(type, chunks):
     return sha.digest()
     return sha.digest()
 
 
 
 
-def compute_file_sha(f, start_ofs=0, end_ofs=0, buffer_size=1<<16):
+def compute_file_sha(f, start_ofs=0, end_ofs=0, buffer_size=1 << 16):
     """Hash a portion of a file into a new SHA.
     """Hash a portion of a file into a new SHA.
 
 
     :param f: A file-like object to read from that supports seek().
     :param f: A file-like object to read from that supports seek().
@@ -981,8 +985,8 @@ class PackData(object):
     def __init__(self, filename, file=None, size=None):
     def __init__(self, filename, file=None, size=None):
         """Create a PackData object representing the pack in the given filename.
         """Create a PackData object representing the pack in the given filename.
 
 
-        The file must exist and stay readable until the object is disposed of. It
-        must also stay the same size. It will be mapped whenever needed.
+        The file must exist and stay readable until the object is disposed of.
+        It must also stay the same size. It will be mapped whenever needed.
 
 
         Currently there is a restriction on the size of the pack as the python
         Currently there is a restriction on the size of the pack as the python
         mmap implementation is flawed.
         mmap implementation is flawed.
@@ -995,8 +999,8 @@ class PackData(object):
         else:
         else:
             self._file = file
             self._file = file
         (version, self._num_objects) = read_pack_header(self._file.read)
         (version, self._num_objects) = read_pack_header(self._file.read)
-        self._offset_cache = LRUSizeCache(1024*1024*20,
-            compute_size=_compute_object_size)
+        self._offset_cache = LRUSizeCache(
+            1024*1024*20, compute_size=_compute_object_size)
         self.pack = None
         self.pack = None
 
 
     @property
     @property
@@ -1116,7 +1120,8 @@ class PackData(object):
                 progress(i, self._num_objects)
                 progress(i, self._num_objects)
             yield (offset, unpacked.pack_type_num, unpacked._obj(),
             yield (offset, unpacked.pack_type_num, unpacked._obj(),
                    unpacked.crc32)
                    unpacked.crc32)
-            self._file.seek(-len(unused), SEEK_CUR)  # Back up over unused data.
+            # Back up over unused data.
+            self._file.seek(-len(unused), SEEK_CUR)
 
 
     def _iter_unpacked(self):
     def _iter_unpacked(self):
         # TODO(dborowitz): Merge this with iterobjects, if we can change its
         # TODO(dborowitz): Merge this with iterobjects, if we can change its
@@ -1132,7 +1137,8 @@ class PackData(object):
               self._file.read, compute_crc32=False)
               self._file.read, compute_crc32=False)
             unpacked.offset = offset
             unpacked.offset = offset
             yield unpacked
             yield unpacked
-            self._file.seek(-len(unused), SEEK_CUR)  # Back up over unused data.
+            # Back up over unused data.
+            self._file.seek(-len(unused), SEEK_CUR)
 
 
     def iterentries(self, progress=None):
     def iterentries(self, progress=None):
         """Yield entries summarizing the contents of this pack.
         """Yield entries summarizing the contents of this pack.
@@ -1305,9 +1311,9 @@ class DeltaChainIterator(object):
             try:
             try:
                 type_num, chunks = self._resolve_ext_ref(base_sha)
                 type_num, chunks = self._resolve_ext_ref(base_sha)
             except KeyError:
             except KeyError:
-                # Not an external ref, but may depend on one. Either it will get
-                # popped via a _follow_chain call, or we will raise an error
-                # below.
+                # Not an external ref, but may depend on one. Either it will
+                # get popped via a _follow_chain call, or we will raise an
+                # error below.
                 continue
                 continue
             self._ext_refs.append(base_sha)
             self._ext_refs.append(base_sha)
             self._pending_ref.pop(base_sha)
             self._pending_ref.pop(base_sha)
@@ -1492,8 +1498,8 @@ def write_pack(filename, objects, deltify=None, delta_window_size=None):
     :return: Tuple with checksum of pack file and index file
     :return: Tuple with checksum of pack file and index file
     """
     """
     with GitFile(filename + '.pack', 'wb') as f:
     with GitFile(filename + '.pack', 'wb') as f:
-        entries, data_sum = write_pack_objects(f, objects,
-            delta_window_size=delta_window_size, deltify=deltify)
+        entries, data_sum = write_pack_objects(
+            f, objects, delta_window_size=delta_window_size, deltify=deltify)
     entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
     entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
     with GitFile(filename + '.idx', 'wb') as f:
     with GitFile(filename + '.idx', 'wb') as f:
         return data_sum, write_pack_index_v2(f, entries, data_sum)
         return data_sum, write_pack_index_v2(f, entries, data_sum)
@@ -1634,6 +1640,7 @@ def _delta_encode_size(size):
 # 24-bit lengths in copy operations, but we always make version 2 packs.
 # 24-bit lengths in copy operations, but we always make version 2 packs.
 _MAX_COPY_LEN = 0xffff
 _MAX_COPY_LEN = 0xffff
 
 
+
 def _encode_copy_operation(start, length):
 def _encode_copy_operation(start, length):
     scratch = []
     scratch = []
     op = 0x80
     op = 0x80
@@ -1664,7 +1671,7 @@ def create_delta(base_buf, target_buf):
     seq = difflib.SequenceMatcher(a=base_buf, b=target_buf)
     seq = difflib.SequenceMatcher(a=base_buf, b=target_buf)
     for opcode, i1, i2, j1, j2 in seq.get_opcodes():
     for opcode, i1, i2, j1, j2 in seq.get_opcodes():
         # Git patch opcodes don't care about deletes!
         # Git patch opcodes don't care about deletes!
-        #if opcode == 'replace' or opcode == 'delete':
+        # if opcode == 'replace' or opcode == 'delete':
         #    pass
         #    pass
         if opcode == 'equal':
         if opcode == 'equal':
             # If they are equal, unpacker will use data from base_buf
             # If they are equal, unpacker will use data from base_buf
@@ -1704,6 +1711,7 @@ def apply_delta(src_buf, delta):
     out = []
     out = []
     index = 0
     index = 0
     delta_length = len(delta)
     delta_length = len(delta)
+
     def get_delta_header_size(delta, index):
     def get_delta_header_size(delta, index):
         size = 0
         size = 0
         i = 0
         i = 0
@@ -1738,8 +1746,8 @@ def apply_delta(src_buf, delta):
             if cp_size == 0:
             if cp_size == 0:
                 cp_size = 0x10000
                 cp_size = 0x10000
             if (cp_off + cp_size < cp_size or
             if (cp_off + cp_size < cp_size or
-                cp_off + cp_size > src_size or
-                cp_size > dest_size):
+                    cp_off + cp_size > src_size or
+                    cp_size > dest_size):
                 break
                 break
             out.append(src_buf[cp_off:cp_off+cp_size])
             out.append(src_buf[cp_off:cp_off+cp_size])
         elif cmd != 0:
         elif cmd != 0:
@@ -1945,8 +1953,8 @@ class Pack(object):
     def keep(self, msg=None):
     def keep(self, msg=None):
         """Add a .keep file for the pack, preventing git from garbage collecting it.
         """Add a .keep file for the pack, preventing git from garbage collecting it.
 
 
-        :param msg: A message written inside the .keep file; can be used later to
-                    determine whether or not a .keep file is obsolete.
+        :param msg: A message written inside the .keep file; can be used later
+            to determine whether or not a .keep file is obsolete.
         :return: The path of the .keep file, as a string.
         :return: The path of the .keep file, as a string.
         """
         """
         keepfile_name = '%s.keep' % self._basename
         keepfile_name = '%s.keep' % self._basename

+ 49 - 38
dulwich/porcelain.py

@@ -163,9 +163,9 @@ def archive(repo, committish=None, outstream=default_bytes_out_stream,
         committish = "HEAD"
         committish = "HEAD"
     with open_repo_closing(repo) as repo_obj:
     with open_repo_closing(repo) as repo_obj:
         c = repo_obj[committish]
         c = repo_obj[committish]
-        tree = c.tree
-        for chunk in tar_stream(repo_obj.object_store,
-                repo_obj.object_store[c.tree], c.commit_time):
+        for chunk in tar_stream(
+                repo_obj.object_store, repo_obj.object_store[c.tree],
+                c.commit_time):
             outstream.write(chunk)
             outstream.write(chunk)
 
 
 
 
@@ -204,8 +204,7 @@ def commit(repo=".", message=None, author=None, committer=None):
     # FIXME: Support --all argument
     # FIXME: Support --all argument
     # FIXME: Support --signoff argument
     # FIXME: Support --signoff argument
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
-        return r.do_commit(message=message, author=author,
-            committer=committer)
+        return r.do_commit(message=message, author=author, committer=committer)
 
 
 
 
 def commit_tree(repo, tree, message=None, author=None, committer=None):
 def commit_tree(repo, tree, message=None, author=None, committer=None):
@@ -217,8 +216,8 @@ def commit_tree(repo, tree, message=None, author=None, committer=None):
     :param committer: Optional committer name and email
     :param committer: Optional committer name and email
     """
     """
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
-        return r.do_commit(message=message, tree=tree, committer=committer,
-                author=author)
+        return r.do_commit(
+            message=message, tree=tree, committer=committer, author=author)
 
 
 
 
 def init(path=".", bare=False):
 def init(path=".", bare=False):
@@ -252,8 +251,9 @@ def clone(source, target=None, bare=False, checkout=None,
     """
     """
     if outstream is not None:
     if outstream is not None:
         import warnings
         import warnings
-        warnings.warn("outstream= has been deprecated in favour of errstream=.", DeprecationWarning,
-                stacklevel=3)
+        warnings.warn(
+            "outstream= has been deprecated in favour of errstream=.",
+            DeprecationWarning, stacklevel=3)
         errstream = outstream
         errstream = outstream
 
 
     if checkout is None:
     if checkout is None:
@@ -273,8 +273,8 @@ def clone(source, target=None, bare=False, checkout=None,
     else:
     else:
         r = Repo.init(target)
         r = Repo.init(target)
     try:
     try:
-        remote_refs = client.fetch(host_path, r,
-            determine_wants=r.object_store.determine_wants_all,
+        remote_refs = client.fetch(
+            host_path, r, determine_wants=r.object_store.determine_wants_all,
             progress=errstream.write)
             progress=errstream.write)
         r.refs.import_refs(
         r.refs.import_refs(
             b'refs/remotes/' + origin,
             b'refs/remotes/' + origin,
@@ -293,7 +293,8 @@ def clone(source, target=None, bare=False, checkout=None,
         if not isinstance(source, bytes):
         if not isinstance(source, bytes):
             source = source.encode(DEFAULT_ENCODING)
             source = source.encode(DEFAULT_ENCODING)
         target_config.set((b'remote', b'origin'), b'url', source)
         target_config.set((b'remote', b'origin'), b'url', source)
-        target_config.set((b'remote', b'origin'), b'fetch',
+        target_config.set(
+            (b'remote', b'origin'), b'fetch',
             b'+refs/heads/*:refs/remotes/origin/*')
             b'+refs/heads/*:refs/remotes/origin/*')
         target_config.write_to_path()
         target_config.write_to_path()
         if checkout and b"HEAD" in r.refs:
         if checkout and b"HEAD" in r.refs:
@@ -321,7 +322,8 @@ def add(repo=".", paths=None):
                 if '.git' in dirnames:
                 if '.git' in dirnames:
                     dirnames.remove('.git')
                     dirnames.remove('.git')
                 for filename in filenames:
                 for filename in filenames:
-                    paths.append(os.path.join(dirpath[len(r.path)+1:], filename))
+                    paths.append(
+                        os.path.join(dirpath[len(r.path)+1:], filename))
         # TODO(jelmer): Possibly allow passing in absolute paths?
         # TODO(jelmer): Possibly allow passing in absolute paths?
         relpaths = []
         relpaths = []
         if not isinstance(paths, list):
         if not isinstance(paths, list):
@@ -364,7 +366,8 @@ def print_commit(commit, decode, outstream=sys.stdout):
     outstream.write("-" * 50 + "\n")
     outstream.write("-" * 50 + "\n")
     outstream.write("commit: " + commit.id.decode('ascii') + "\n")
     outstream.write("commit: " + commit.id.decode('ascii') + "\n")
     if len(commit.parents) > 1:
     if len(commit.parents) > 1:
-        outstream.write("merge: " +
+        outstream.write(
+            "merge: " +
             "...".join([c.decode('ascii') for c in commit.parents[1:]]) + "\n")
             "...".join([c.decode('ascii') for c in commit.parents[1:]]) + "\n")
     outstream.write("Author: " + decode(commit.author) + "\n")
     outstream.write("Author: " + decode(commit.author) + "\n")
     if commit.author != commit.committer:
     if commit.author != commit.committer:
@@ -414,7 +417,8 @@ def show_commit(repo, commit, decode, outstream=sys.stdout):
     """
     """
     print_commit(commit, decode=decode, outstream=outstream)
     print_commit(commit, decode=decode, outstream=outstream)
     parent_commit = repo[commit.parents[0]]
     parent_commit = repo[commit.parents[0]]
-    write_tree_diff(outstream, repo.object_store, parent_commit.tree, commit.tree)
+    write_tree_diff(
+        outstream, repo.object_store, parent_commit.tree, commit.tree)
 
 
 
 
 def show_tree(repo, tree, decode, outstream=sys.stdout):
 def show_tree(repo, tree, decode, outstream=sys.stdout):
@@ -495,7 +499,8 @@ def log(repo=".", paths=None, outstream=sys.stdout, max_entries=None,
         walker = r.get_walker(
         walker = r.get_walker(
             max_entries=max_entries, paths=paths, reverse=reverse)
             max_entries=max_entries, paths=paths, reverse=reverse)
         for entry in walker:
         for entry in walker:
-            decode = lambda x: commit_decode(entry.commit, x)
+            def decode(x):
+                return commit_decode(entry.commit, x)
             print_commit(entry.commit, decode, outstream)
             print_commit(entry.commit, decode, outstream)
             if name_status:
             if name_status:
                 outstream.writelines(
                 outstream.writelines(
@@ -510,7 +515,8 @@ def show(repo=".", objects=None, outstream=sys.stdout,
     :param repo: Path to repository
     :param repo: Path to repository
     :param objects: Objects to show (defaults to [HEAD])
     :param objects: Objects to show (defaults to [HEAD])
     :param outstream: Stream to write to
     :param outstream: Stream to write to
-    :param default_encoding: Default encoding to use if none is set in the commit
+    :param default_encoding: Default encoding to use if none is set in the
+        commit
     """
     """
     if objects is None:
     if objects is None:
         objects = ["HEAD"]
         objects = ["HEAD"]
@@ -520,9 +526,11 @@ def show(repo=".", objects=None, outstream=sys.stdout,
         for objectish in objects:
         for objectish in objects:
             o = parse_object(r, objectish)
             o = parse_object(r, objectish)
             if isinstance(o, Commit):
             if isinstance(o, Commit):
-                decode = lambda x: commit_decode(o, x, default_encoding)
+                def decode(x):
+                    return commit_decode(o, x, default_encoding)
             else:
             else:
-                decode = lambda x: x.decode(default_encoding)
+                def decode(x):
+                    return x.decode(default_encoding)
             show_object(r, o, decode, outstream)
             show_object(r, o, decode, outstream)
 
 
 
 
@@ -552,11 +560,13 @@ def rev_list(repo, commits, outstream=sys.stdout):
 
 
 def tag(*args, **kwargs):
 def tag(*args, **kwargs):
     import warnings
     import warnings
-    warnings.warn("tag has been deprecated in favour of tag_create.", DeprecationWarning)
+    warnings.warn("tag has been deprecated in favour of tag_create.",
+                  DeprecationWarning)
     return tag_create(*args, **kwargs)
     return tag_create(*args, **kwargs)
 
 
 
 
-def tag_create(repo, tag, author=None, message=None, annotated=False,
+def tag_create(
+        repo, tag, author=None, message=None, annotated=False,
         objectish="HEAD", tag_time=None, tag_timezone=None):
         objectish="HEAD", tag_time=None, tag_timezone=None):
     """Creates a tag in git via dulwich calls:
     """Creates a tag in git via dulwich calls:
 
 
@@ -602,7 +612,8 @@ def tag_create(repo, tag, author=None, message=None, annotated=False,
 
 
 def list_tags(*args, **kwargs):
 def list_tags(*args, **kwargs):
     import warnings
     import warnings
-    warnings.warn("list_tags has been deprecated in favour of tag_list.", DeprecationWarning)
+    warnings.warn("list_tags has been deprecated in favour of tag_list.",
+                  DeprecationWarning)
     return tag_list(*args, **kwargs)
     return tag_list(*args, **kwargs)
 
 
 
 
@@ -683,10 +694,11 @@ def push(repo, remote_location, refspecs,
         err_encoding = getattr(errstream, 'encoding', None) or DEFAULT_ENCODING
         err_encoding = getattr(errstream, 'encoding', None) or DEFAULT_ENCODING
         remote_location_bytes = client.get_url(path).encode(err_encoding)
         remote_location_bytes = client.get_url(path).encode(err_encoding)
         try:
         try:
-            client.send_pack(path, update_refs,
-                r.object_store.generate_pack_contents, progress=errstream.write)
-            errstream.write(b"Push to " + remote_location_bytes +
-                            b" successful.\n")
+            client.send_pack(
+                path, update_refs, r.object_store.generate_pack_contents,
+                progress=errstream.write)
+            errstream.write(
+                b"Push to " + remote_location_bytes + b" successful.\n")
         except (UpdateRefsError, SendPackError) as e:
         except (UpdateRefsError, SendPackError) as e:
             errstream.write(b"Push to " + remote_location_bytes +
             errstream.write(b"Push to " + remote_location_bytes +
                             b" failed -> " + e.message.encode(err_encoding) +
                             b" failed -> " + e.message.encode(err_encoding) +
@@ -694,7 +706,8 @@ def push(repo, remote_location, refspecs,
 
 
 
 
 def pull(repo, remote_location=None, refspecs=None,
 def pull(repo, remote_location=None, refspecs=None,
-         outstream=default_bytes_out_stream, errstream=default_bytes_err_stream):
+         outstream=default_bytes_out_stream,
+         errstream=default_bytes_err_stream):
     """Pull from remote via dulwich.client
     """Pull from remote via dulwich.client
 
 
     :param repo: Path to repository
     :param repo: Path to repository
@@ -712,12 +725,14 @@ def pull(repo, remote_location=None, refspecs=None,
         if refspecs is None:
         if refspecs is None:
             refspecs = [b"HEAD"]
             refspecs = [b"HEAD"]
         selected_refs = []
         selected_refs = []
+
         def determine_wants(remote_refs):
         def determine_wants(remote_refs):
-            selected_refs.extend(parse_reftuples(remote_refs, r.refs, refspecs))
+            selected_refs.extend(
+                parse_reftuples(remote_refs, r.refs, refspecs))
             return [remote_refs[lh] for (lh, rh, force) in selected_refs]
             return [remote_refs[lh] for (lh, rh, force) in selected_refs]
         client, path = get_transport_and_path(remote_location)
         client, path = get_transport_and_path(remote_location)
-        remote_refs = client.fetch(path, r, progress=errstream.write,
-                determine_wants=determine_wants)
+        remote_refs = client.fetch(
+            path, r, progress=errstream.write, determine_wants=determine_wants)
         for (lh, rh, force) in selected_refs:
         for (lh, rh, force) in selected_refs:
             r.refs[rh] = remote_refs[lh]
             r.refs[rh] = remote_refs[lh]
         if selected_refs:
         if selected_refs:
@@ -828,6 +843,7 @@ def upload_pack(path=".", inf=None, outf=None):
         inf = getattr(sys.stdin, 'buffer', sys.stdin)
         inf = getattr(sys.stdin, 'buffer', sys.stdin)
     path = os.path.expanduser(path)
     path = os.path.expanduser(path)
     backend = FileSystemBackend(path)
     backend = FileSystemBackend(path)
+
     def send_fn(data):
     def send_fn(data):
         outf.write(data)
         outf.write(data)
         outf.flush()
         outf.flush()
@@ -851,6 +867,7 @@ def receive_pack(path=".", inf=None, outf=None):
         inf = getattr(sys.stdin, 'buffer', sys.stdin)
         inf = getattr(sys.stdin, 'buffer', sys.stdin)
     path = os.path.expanduser(path)
     path = os.path.expanduser(path)
     backend = FileSystemBackend(path)
     backend = FileSystemBackend(path)
+
     def send_fn(data):
     def send_fn(data):
         outf.write(data)
         outf.write(data)
         outf.flush()
         outf.flush()
@@ -887,12 +904,6 @@ def branch_create(repo, name, objectish=None, force=False):
     :param force: Force creation of branch, even if it already exists
     :param force: Force creation of branch, even if it already exists
     """
     """
     with open_repo_closing(repo) as r:
     with open_repo_closing(repo) as r:
-        if isinstance(name, bytes):
-            names = [name]
-        elif isinstance(name, list):
-            names = name
-        else:
-            raise TypeError("Unexpected branch name type %r" % name)
         if objectish is None:
         if objectish is None:
             objectish = "HEAD"
             objectish = "HEAD"
         object = parse_object(r, objectish)
         object = parse_object(r, objectish)
@@ -912,7 +923,7 @@ def branch_list(repo):
 
 
 
 
 def fetch(repo, remote_location, outstream=sys.stdout,
 def fetch(repo, remote_location, outstream=sys.stdout,
-        errstream=default_bytes_err_stream):
+          errstream=default_bytes_err_stream):
     """Fetch objects from a remote server.
     """Fetch objects from a remote server.
 
 
     :param repo: Path to the repository
     :param repo: Path to the repository
@@ -967,7 +978,7 @@ def pack_objects(repo, object_ids, packf, idxf, delta_window_size=None):
 
 
 
 
 def ls_tree(repo, tree_ish=None, outstream=sys.stdout, recursive=False,
 def ls_tree(repo, tree_ish=None, outstream=sys.stdout, recursive=False,
-        name_only=False):
+            name_only=False):
     """List contents of a tree.
     """List contents of a tree.
 
 
     :param repo: Path to the repository
     :param repo: Path to the repository

+ 5 - 3
dulwich/reflog.py

@@ -29,11 +29,13 @@ from dulwich.objects import (
     ZERO_SHA,
     ZERO_SHA,
     )
     )
 
 
-Entry = collections.namedtuple('Entry', ['old_sha', 'new_sha', 'committer',
-    'timestamp', 'timezone', 'message'])
+Entry = collections.namedtuple(
+    'Entry', ['old_sha', 'new_sha', 'committer', 'timestamp', 'timezone',
+              'message'])
 
 
 
 
-def format_reflog_line(old_sha, new_sha, committer, timestamp, timezone, message):
+def format_reflog_line(old_sha, new_sha, committer, timestamp, timezone,
+                       message):
     """Generate a single reflog line.
     """Generate a single reflog line.
 
 
     :param old_sha: Old Commit SHA
     :param old_sha: Old Commit SHA

+ 30 - 23
dulwich/repo.py

@@ -261,8 +261,8 @@ class BaseRepo(object):
             that a revision is present.
             that a revision is present.
         :param progress: Simple progress function that will be called with
         :param progress: Simple progress function that will be called with
             updated progress strings.
             updated progress strings.
-        :param get_tagged: Function that returns a dict of pointed-to sha -> tag
-            sha for including tags.
+        :param get_tagged: Function that returns a dict of pointed-to sha ->
+            tag sha for including tags.
         :return: iterator over objects, with __len__ implemented
         :return: iterator over objects, with __len__ implemented
         """
         """
         wants = determine_wants(self.get_refs())
         wants = determine_wants(self.get_refs())
@@ -290,8 +290,9 @@ class BaseRepo(object):
         # Deal with shallow requests separately because the haves do
         # Deal with shallow requests separately because the haves do
         # not reflect what objects are missing
         # not reflect what objects are missing
         if shallows or unshallows:
         if shallows or unshallows:
-            haves = []  # TODO: filter the haves commits from iter_shas.
-                        # the specific commits aren't missing.
+            # TODO: filter the haves commits from iter_shas. the specific
+            # commits aren't missing.
+            haves = []
 
 
         def get_parents(commit):
         def get_parents(commit):
             if commit.id in shallows:
             if commit.id in shallows:
@@ -412,8 +413,8 @@ class BaseRepo(object):
 
 
         :param ref: The refname to peel.
         :param ref: The refname to peel.
         :return: The fully-peeled SHA1 of a tag object, after peeling all
         :return: The fully-peeled SHA1 of a tag object, after peeling all
-            intermediate tags; if the original ref does not point to a tag, this
-            will equal the original SHA1.
+            intermediate tags; if the original ref does not point to a tag,
+            this will equal the original SHA1.
         """
         """
         cached = self.refs.get_peeled(ref)
         cached = self.refs.get_peeled(ref)
         if cached is not None:
         if cached is not None:
@@ -427,8 +428,8 @@ class BaseRepo(object):
             ancestors. Defaults to [HEAD]
             ancestors. Defaults to [HEAD]
         :param exclude: Iterable of SHAs of commits to exclude along with their
         :param exclude: Iterable of SHAs of commits to exclude along with their
             ancestors, overriding includes.
             ancestors, overriding includes.
-        :param order: ORDER_* constant specifying the order of results. Anything
-            other than ORDER_DATE may result in O(n) memory usage.
+        :param order: ORDER_* constant specifying the order of results.
+            Anything other than ORDER_DATE may result in O(n) memory usage.
         :param reverse: If True, reverse the order of output, requiring O(n)
         :param reverse: If True, reverse the order of output, requiring O(n)
             memory.
             memory.
         :param max_entries: The maximum number of entries to yield, or None for
         :param max_entries: The maximum number of entries to yield, or None for
@@ -451,7 +452,8 @@ class BaseRepo(object):
         if isinstance(include, str):
         if isinstance(include, str):
             include = [include]
             include = [include]
 
 
-        kwargs['get_parents'] = lambda commit: self.get_parents(commit.id, commit)
+        kwargs['get_parents'] = lambda commit: self.get_parents(
+            commit.id, commit)
 
 
         return Walker(self.object_store, include, *args, **kwargs)
         return Walker(self.object_store, include, *args, **kwargs)
 
 
@@ -464,7 +466,7 @@ class BaseRepo(object):
         """
         """
         if not isinstance(name, bytes):
         if not isinstance(name, bytes):
             raise TypeError("'name' must be bytestring, not %.80s" %
             raise TypeError("'name' must be bytestring, not %.80s" %
-                    type(name).__name__)
+                            type(name).__name__)
         if len(name) in (20, 40):
         if len(name) in (20, 40):
             try:
             try:
                 return self.object_store[name]
                 return self.object_store[name]
@@ -551,7 +553,8 @@ class BaseRepo(object):
         :param author: Author fullname (defaults to committer)
         :param author: Author fullname (defaults to committer)
         :param commit_timestamp: Commit timestamp (defaults to now)
         :param commit_timestamp: Commit timestamp (defaults to now)
         :param commit_timezone: Commit timestamp timezone (defaults to GMT)
         :param commit_timezone: Commit timestamp timezone (defaults to GMT)
-        :param author_timestamp: Author timestamp (defaults to commit timestamp)
+        :param author_timestamp: Author timestamp (defaults to commit
+            timestamp)
         :param author_timezone: Author timestamp timezone
         :param author_timezone: Author timestamp timezone
             (defaults to commit timestamp timezone)
             (defaults to commit timestamp timezone)
         :param tree: SHA1 of the tree root to use (if not specified the
         :param tree: SHA1 of the tree root to use (if not specified the
@@ -636,8 +639,8 @@ class BaseRepo(object):
                 self.object_store.add_object(c)
                 self.object_store.add_object(c)
                 ok = self.refs.add_if_new(ref, c.id)
                 ok = self.refs.add_if_new(ref, c.id)
             if not ok:
             if not ok:
-                # Fail if the atomic compare-and-swap failed, leaving the commit and
-                # all its objects as garbage.
+                # Fail if the atomic compare-and-swap failed, leaving the
+                # commit and all its objects as garbage.
                 raise CommitError("%s changed during commit" % (ref,))
                 raise CommitError("%s changed during commit" % (ref,))
 
 
         try:
         try:
@@ -650,7 +653,6 @@ class BaseRepo(object):
         return c.id
         return c.id
 
 
 
 
-
 def read_gitfile(f):
 def read_gitfile(f):
     """Read a ``.git`` file.
     """Read a ``.git`` file.
 
 
@@ -698,7 +700,8 @@ class Repo(BaseRepo):
             with commondir:
             with commondir:
                 self._commondir = os.path.join(
                 self._commondir = os.path.join(
                     self.controldir(),
                     self.controldir(),
-                    commondir.read().rstrip(b"\r\n").decode(sys.getfilesystemencoding()))
+                    commondir.read().rstrip(b"\r\n").decode(
+                        sys.getfilesystemencoding()))
         else:
         else:
             self._commondir = self._controldir
             self._commondir = self._controldir
         self.path = root
         self.path = root
@@ -795,7 +798,8 @@ class Repo(BaseRepo):
         pointing to a file in that location.
         pointing to a file in that location.
 
 
         :param path: The path to the file, relative to the control dir.
         :param path: The path to the file, relative to the control dir.
-        :param basedir: Optional argument that specifies an alternative to the control dir.
+        :param basedir: Optional argument that specifies an alternative to the
+            control dir.
         :return: An open file object, or None if the file does not exist.
         :return: An open file object, or None if the file does not exist.
         """
         """
         # TODO(dborowitz): sanitize filenames, since this is used directly by
         # TODO(dborowitz): sanitize filenames, since this is used directly by
@@ -871,7 +875,7 @@ class Repo(BaseRepo):
         index.write()
         index.write()
 
 
     def clone(self, target_path, mkdir=True, bare=False,
     def clone(self, target_path, mkdir=True, bare=False,
-            origin=b"origin"):
+              origin=b"origin"):
         """Clone this repository.
         """Clone this repository.
 
 
         :param target_path: Target path
         :param target_path: Target path
@@ -900,7 +904,7 @@ class Repo(BaseRepo):
             encoded_path = encoded_path.encode(sys.getfilesystemencoding())
             encoded_path = encoded_path.encode(sys.getfilesystemencoding())
         target_config.set((b'remote', b'origin'), b'url', encoded_path)
         target_config.set((b'remote', b'origin'), b'url', encoded_path)
         target_config.set((b'remote', b'origin'), b'fetch',
         target_config.set((b'remote', b'origin'), b'fetch',
-            b'+refs/heads/*:refs/remotes/origin/*')
+                          b'+refs/heads/*:refs/remotes/origin/*')
         target_config.write_to_path()
         target_config.write_to_path()
 
 
         # Update target head
         # Update target head
@@ -928,14 +932,16 @@ class Repo(BaseRepo):
         if tree is None:
         if tree is None:
             tree = self[b'HEAD'].tree
             tree = self[b'HEAD'].tree
         config = self.get_config()
         config = self.get_config()
-        honor_filemode = config.get_boolean('core', 'filemode', os.name != "nt")
+        honor_filemode = config.get_boolean(
+            'core', 'filemode', os.name != "nt")
         if config.get_boolean('core', 'core.protectNTFS', os.name == "nt"):
         if config.get_boolean('core', 'core.protectNTFS', os.name == "nt"):
             validate_path_element = validate_path_element_ntfs
             validate_path_element = validate_path_element_ntfs
         else:
         else:
             validate_path_element = validate_path_element_default
             validate_path_element = validate_path_element_default
-        return build_index_from_tree(self.path, self.index_path(),
-                self.object_store, tree, honor_filemode=honor_filemode,
-                validate_path_element=validate_path_element)
+        return build_index_from_tree(
+            self.path, self.index_path(), self.object_store, tree,
+            honor_filemode=honor_filemode,
+            validate_path_element=validate_path_element)
 
 
     def get_config(self):
     def get_config(self):
         """Retrieve the config object.
         """Retrieve the config object.
@@ -1004,7 +1010,8 @@ class Repo(BaseRepo):
         return cls(path)
         return cls(path)
 
 
     @classmethod
     @classmethod
-    def _init_new_working_directory(cls, path, main_repo, identifier=None, mkdir=False):
+    def _init_new_working_directory(cls, path, main_repo, identifier=None,
+                                    mkdir=False):
         """Create a new working directory linked to a repository.
         """Create a new working directory linked to a repository.
 
 
         :param path: Path in which to create the working tree.
         :param path: Path in which to create the working tree.

+ 1 - 0
dulwich/tests/compat/__init__.py

@@ -22,6 +22,7 @@
 
 
 import unittest
 import unittest
 
 
+
 def test_suite():
 def test_suite():
     names = [
     names = [
         'client',
         'client',

+ 32 - 22
dulwich/tests/compat/server_utils.py

@@ -169,8 +169,9 @@ class ServerTests(object):
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
 
 
         # Fetch at depth 1
         # Fetch at depth 1
-        run_git_or_fail(['clone', '--mirror', '--depth=1', '--no-single-branch',
-                        self.url(port), self._stub_repo.path])
+        run_git_or_fail(
+            ['clone', '--mirror', '--depth=1', '--no-single-branch',
+             self.url(port), self._stub_repo.path])
         clone = self._stub_repo = Repo(self._stub_repo.path)
         clone = self._stub_repo = Repo(self._stub_repo.path)
         expected_shallow = [b'35e0b59e187dd72a0af294aedffc213eaa4d03ff',
         expected_shallow = [b'35e0b59e187dd72a0af294aedffc213eaa4d03ff',
                             b'514dc6d3fbfe77361bcaef320c4d21b72bc10be9']
                             b'514dc6d3fbfe77361bcaef320c4d21b72bc10be9']
@@ -186,13 +187,14 @@ class ServerTests(object):
         self.addCleanup(tear_down_repo, self._stub_repo_dw)
         self.addCleanup(tear_down_repo, self._stub_repo_dw)
 
 
         # shallow clone using stock git, then using dulwich
         # shallow clone using stock git, then using dulwich
-        run_git_or_fail(['clone', '--mirror', '--depth=1', '--no-single-branch',
-                         'file://' + self._source_repo.path,
-                         self._stub_repo_git.path])
+        run_git_or_fail(
+            ['clone', '--mirror', '--depth=1', '--no-single-branch',
+             'file://' + self._source_repo.path, self._stub_repo_git.path])
 
 
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
-        run_git_or_fail(['clone', '--mirror', '--depth=1', '--no-single-branch',
-                        self.url(port), self._stub_repo_dw.path])
+        run_git_or_fail(
+            ['clone', '--mirror', '--depth=1', '--no-single-branch',
+             self.url(port), self._stub_repo_dw.path])
 
 
         # compare the two clones; they should be equal
         # compare the two clones; they should be equal
         self.assertReposEqual(Repo(self._stub_repo_git.path),
         self.assertReposEqual(Repo(self._stub_repo_git.path),
@@ -206,8 +208,9 @@ class ServerTests(object):
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
 
 
         # Fetch at depth 2
         # Fetch at depth 2
-        run_git_or_fail(['clone', '--mirror', '--depth=2', '--no-single-branch',
-                        self.url(port), self._stub_repo.path])
+        run_git_or_fail(
+            ['clone', '--mirror', '--depth=2', '--no-single-branch',
+             self.url(port), self._stub_repo.path])
         clone = self._stub_repo = Repo(self._stub_repo.path)
         clone = self._stub_repo = Repo(self._stub_repo.path)
 
 
         # Fetching at the same depth is a no-op.
         # Fetching at the same depth is a no-op.
@@ -227,8 +230,9 @@ class ServerTests(object):
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
 
 
         # Fetch at depth 2
         # Fetch at depth 2
-        run_git_or_fail(['clone', '--mirror', '--depth=2', '--no-single-branch',
-                        self.url(port), self._stub_repo.path])
+        run_git_or_fail(
+            ['clone', '--mirror', '--depth=2', '--no-single-branch',
+             self.url(port), self._stub_repo.path])
         clone = self._stub_repo = Repo(self._stub_repo.path)
         clone = self._stub_repo = Repo(self._stub_repo.path)
 
 
         # Fetching at the same depth is a no-op.
         # Fetching at the same depth is a no-op.
@@ -246,11 +250,13 @@ class ServerTests(object):
     def test_fetch_from_dulwich_issue_88_standard(self):
     def test_fetch_from_dulwich_issue_88_standard(self):
         # Basically an integration test to see that the ACK/NAK
         # Basically an integration test to see that the ACK/NAK
         # generation works on repos with common head.
         # generation works on repos with common head.
-        self._source_repo = self.import_repo('issue88_expect_ack_nak_server.export')
-        self._client_repo = self.import_repo('issue88_expect_ack_nak_client.export')
+        self._source_repo = self.import_repo(
+            'issue88_expect_ack_nak_server.export')
+        self._client_repo = self.import_repo(
+            'issue88_expect_ack_nak_client.export')
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
 
 
-        run_git_or_fail(['fetch', self.url(port), 'master',],
+        run_git_or_fail(['fetch', self.url(port), 'master'],
                         cwd=self._client_repo.path)
                         cwd=self._client_repo.path)
         self.assertObjectStoreEqual(
         self.assertObjectStoreEqual(
             self._source_repo.object_store,
             self._source_repo.object_store,
@@ -258,13 +264,16 @@ class ServerTests(object):
 
 
     def test_fetch_from_dulwich_issue_88_alternative(self):
     def test_fetch_from_dulwich_issue_88_alternative(self):
         # likewise, but the case where the two repos have no common parent
         # likewise, but the case where the two repos have no common parent
-        self._source_repo = self.import_repo('issue88_expect_ack_nak_other.export')
-        self._client_repo = self.import_repo('issue88_expect_ack_nak_client.export')
+        self._source_repo = self.import_repo(
+            'issue88_expect_ack_nak_other.export')
+        self._client_repo = self.import_repo(
+            'issue88_expect_ack_nak_client.export')
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
 
 
-        self.assertRaises(KeyError, self._client_repo.get_object,
+        self.assertRaises(
+            KeyError, self._client_repo.get_object,
             b'02a14da1fc1fc13389bbf32f0af7d8899f2b2323')
             b'02a14da1fc1fc13389bbf32f0af7d8899f2b2323')
-        run_git_or_fail(['fetch', self.url(port), 'master',],
+        run_git_or_fail(['fetch', self.url(port), 'master'],
                         cwd=self._client_repo.path)
                         cwd=self._client_repo.path)
         self.assertEqual(b'commit', self._client_repo.get_object(
         self.assertEqual(b'commit', self._client_repo.get_object(
             b'02a14da1fc1fc13389bbf32f0af7d8899f2b2323').type_name)
             b'02a14da1fc1fc13389bbf32f0af7d8899f2b2323').type_name)
@@ -272,11 +281,13 @@ class ServerTests(object):
     def test_push_to_dulwich_issue_88_standard(self):
     def test_push_to_dulwich_issue_88_standard(self):
         # Same thing, but we reverse the role of the server/client
         # Same thing, but we reverse the role of the server/client
         # and do a push instead.
         # and do a push instead.
-        self._source_repo = self.import_repo('issue88_expect_ack_nak_client.export')
-        self._client_repo = self.import_repo('issue88_expect_ack_nak_server.export')
+        self._source_repo = self.import_repo(
+            'issue88_expect_ack_nak_client.export')
+        self._client_repo = self.import_repo(
+            'issue88_expect_ack_nak_server.export')
         port = self._start_server(self._source_repo)
         port = self._start_server(self._source_repo)
 
 
-        run_git_or_fail(['push', self.url(port), 'master',],
+        run_git_or_fail(['push', self.url(port), 'master'],
                         cwd=self._client_repo.path)
                         cwd=self._client_repo.path)
         self.assertReposEqual(self._source_repo, self._client_repo)
         self.assertReposEqual(self._source_repo, self._client_repo)
 
 
@@ -298,4 +309,3 @@ def ignore_error(error):
     (e_type, e_value, e_tb) = error
     (e_type, e_value, e_tb) = error
     return (issubclass(e_type, socket.error) and
     return (issubclass(e_type, socket.error) and
             e_value[0] in (errno.ECONNRESET, errno.EPIPE))
             e_value[0] in (errno.ECONNRESET, errno.EPIPE))
-

+ 18 - 9
dulwich/tests/compat/test_client.py

@@ -160,7 +160,8 @@ class DulwichClientTestBase(object):
             sendrefs, gen_pack = self.compute_send(src)
             sendrefs, gen_pack = self.compute_send(src)
             c = self._client()
             c = self._client()
             try:
             try:
-                c.send_pack(self._build_path(b'/dest'), lambda _: sendrefs, gen_pack)
+                c.send_pack(self._build_path(b'/dest'),
+                            lambda _: sendrefs, gen_pack)
             except errors.UpdateRefsError as e:
             except errors.UpdateRefsError as e:
                 self.assertEqual('refs/heads/master failed to update',
                 self.assertEqual('refs/heads/master failed to update',
                                  e.args[0])
                                  e.args[0])
@@ -231,7 +232,8 @@ class DulwichClientTestBase(object):
         # be ignored
         # be ignored
         c = self._client()
         c = self._client()
         with repo.Repo(os.path.join(self.gitroot, 'dest')) as dest:
         with repo.Repo(os.path.join(self.gitroot, 'dest')) as dest:
-            refs = c.fetch(self._build_path(b'/server_new.export'), dest,
+            refs = c.fetch(
+                self._build_path(b'/server_new.export'), dest,
                 lambda refs: [protocol.ZERO_SHA])
                 lambda refs: [protocol.ZERO_SHA])
             for r in refs.items():
             for r in refs.items():
                 dest.refs.set_if_equals(r[0], None, r[1])
                 dest.refs.set_if_equals(r[0], None, r[1])
@@ -244,10 +246,13 @@ class DulwichClientTestBase(object):
             sendrefs = dict(dest.refs)
             sendrefs = dict(dest.refs)
             sendrefs[b'refs/heads/abranch'] = b"00" * 20
             sendrefs[b'refs/heads/abranch'] = b"00" * 20
             del sendrefs[b'HEAD']
             del sendrefs[b'HEAD']
-            gen_pack = lambda have, want: []
+
+            def gen_pack(have, want):
+                return []
             c = self._client()
             c = self._client()
             self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit)
             self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit)
-            c.send_pack(self._build_path(b'/dest'), lambda _: sendrefs, gen_pack)
+            c.send_pack(
+                self._build_path(b'/dest'), lambda _: sendrefs, gen_pack)
             self.assertFalse(b"refs/heads/abranch" in dest.refs)
             self.assertFalse(b"refs/heads/abranch" in dest.refs)
 
 
     def test_get_refs(self):
     def test_get_refs(self):
@@ -266,7 +271,7 @@ class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase):
         DulwichClientTestBase.setUp(self)
         DulwichClientTestBase.setUp(self)
         if check_for_daemon(limit=1):
         if check_for_daemon(limit=1):
             raise SkipTest('git-daemon was already running on port %s' %
             raise SkipTest('git-daemon was already running on port %s' %
-                              protocol.TCP_GIT_PORT)
+                           protocol.TCP_GIT_PORT)
         fd, self.pidfile = tempfile.mkstemp(prefix='dulwich-test-git-client',
         fd, self.pidfile = tempfile.mkstemp(prefix='dulwich-test-git-client',
                                             suffix=".pid")
                                             suffix=".pid")
         os.fdopen(fd).close()
         os.fdopen(fd).close()
@@ -387,7 +392,8 @@ class GitHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
     def run_backend(self):
     def run_backend(self):
         """Call out to git http-backend."""
         """Call out to git http-backend."""
         # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi:
         # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi:
-        # Copyright (c) 2001-2010 Python Software Foundation; All Rights Reserved
+        # Copyright (c) 2001-2010 Python Software Foundation;
+        # All Rights Reserved
         # Licensed under the Python Software Foundation License.
         # Licensed under the Python Software Foundation License.
         rest = self.path
         rest = self.path
         # find an explicit query string, if present.
         # find an explicit query string, if present.
@@ -419,7 +425,8 @@ class GitHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
         if authorization:
         if authorization:
             authorization = authorization.split()
             authorization = authorization.split()
             if len(authorization) == 2:
             if len(authorization) == 2:
-                import base64, binascii
+                import base64
+                import binascii
                 env['AUTH_TYPE'] = authorization[0]
                 env['AUTH_TYPE'] = authorization[0]
                 if authorization[0].lower() == "basic":
                 if authorization[0].lower() == "basic":
                     try:
                     try:
@@ -481,7 +488,8 @@ class GitHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
         args = ['http-backend']
         args = ['http-backend']
         if '=' not in decoded_query:
         if '=' not in decoded_query:
             args.append(decoded_query)
             args.append(decoded_query)
-        stdout = run_git_or_fail(args, input=data, env=env, stderr=subprocess.PIPE)
+        stdout = run_git_or_fail(
+            args, input=data, env=env, stderr=subprocess.PIPE)
         self.wfile.write(stdout)
         self.wfile.write(stdout)
 
 
 
 
@@ -490,7 +498,8 @@ class HTTPGitServer(BaseHTTPServer.HTTPServer):
     allow_reuse_address = True
     allow_reuse_address = True
 
 
     def __init__(self, server_address, root_path):
     def __init__(self, server_address, root_path):
-        BaseHTTPServer.HTTPServer.__init__(self, server_address, GitHTTPRequestHandler)
+        BaseHTTPServer.HTTPServer.__init__(
+            self, server_address, GitHTTPRequestHandler)
         self.root_path = root_path
         self.root_path = root_path
         self.server_name = "localhost"
         self.server_name = "localhost"
 
 

+ 1 - 0
dulwich/tests/compat/test_pack.py

@@ -48,6 +48,7 @@ from dulwich.tests.compat.utils import (
 
 
 _NON_DELTA_RE = re.compile(b'non delta: (?P<non_delta>\d+) objects')
 _NON_DELTA_RE = re.compile(b'non delta: (?P<non_delta>\d+) objects')
 
 
+
 def _git_verify_pack_object_list(output):
 def _git_verify_pack_object_list(output):
     pack_shas = set()
     pack_shas = set()
     for line in output.splitlines():
     for line in output.splitlines():

+ 9 - 8
dulwich/tests/compat/test_repository.py

@@ -24,9 +24,8 @@
 from io import BytesIO
 from io import BytesIO
 from itertools import chain
 from itertools import chain
 import os
 import os
-import tempfile
-
 import sys
 import sys
+import tempfile
 
 
 from dulwich.objects import (
 from dulwich.objects import (
     hex_to_sha,
     hex_to_sha,
@@ -94,7 +93,8 @@ class ObjectStoreTestCase(CompatTestCase):
     # TODO(dborowitz): peeled ref tests
     # TODO(dborowitz): peeled ref tests
 
 
     def _get_loose_shas(self):
     def _get_loose_shas(self):
-        output = self._run_git(['rev-list', '--all', '--objects', '--unpacked'])
+        output = self._run_git(
+            ['rev-list', '--all', '--objects', '--unpacked'])
         return self._parse_objects(output)
         return self._parse_objects(output)
 
 
     def _get_all_shas(self):
     def _get_all_shas(self):
@@ -110,8 +110,8 @@ class ObjectStoreTestCase(CompatTestCase):
         self.assertEqual(expected_shas, actual_shas)
         self.assertEqual(expected_shas, actual_shas)
 
 
     def test_loose_objects(self):
     def test_loose_objects(self):
-        # TODO(dborowitz): This is currently not very useful since fast-imported
-        # repos only contained packed objects.
+        # TODO(dborowitz): This is currently not very useful since
+        # fast-imported repos only contained packed objects.
         expected_shas = self._get_loose_shas()
         expected_shas = self._get_loose_shas()
         self.assertShasMatch(expected_shas,
         self.assertShasMatch(expected_shas,
                              self._repo.object_store._iter_loose_objects())
                              self._repo.object_store._iter_loose_objects())
@@ -147,7 +147,8 @@ class WorkingTreeTestCase(ObjectStoreTestCase):
 
 
     def setUp(self):
     def setUp(self):
         super(WorkingTreeTestCase, self).setUp()
         super(WorkingTreeTestCase, self).setUp()
-        self._worktree_path = self.create_new_worktree(self._repo.path, 'branch')
+        self._worktree_path = self.create_new_worktree(
+            self._repo.path, 'branch')
         self._worktree_repo = Repo(self._worktree_path)
         self._worktree_repo = Repo(self._worktree_path)
         self.addCleanup(self._worktree_repo.close)
         self.addCleanup(self._worktree_repo.close)
         self._mainworktree_repo = self._repo
         self._mainworktree_repo = self._repo
@@ -184,8 +185,8 @@ class WorkingTreeTestCase(ObjectStoreTestCase):
         self.assertEqual(os.path.normcase(worktrees[0][0]),
         self.assertEqual(os.path.normcase(worktrees[0][0]),
                          os.path.normcase(self._mainworktree_repo.path))
                          os.path.normcase(self._mainworktree_repo.path))
 
 
-        output = run_git_or_fail(['worktree', 'list'],
-            cwd=self._mainworktree_repo.path)
+        output = run_git_or_fail(
+            ['worktree', 'list'], cwd=self._mainworktree_repo.path)
         worktrees = self._parse_worktree_list(output)
         worktrees = self._parse_worktree_list(output)
         self.assertEqual(len(worktrees), self._number_of_working_tree)
         self.assertEqual(len(worktrees), self._number_of_working_tree)
         self.assertEqual(worktrees[0][1], '(bare)')
         self.assertEqual(worktrees[0][1], '(bare)')

+ 5 - 3
dulwich/tests/compat/test_server.py

@@ -43,7 +43,9 @@ from dulwich.tests.compat.utils import (
     require_git_version,
     require_git_version,
     )
     )
 
 
-@skipIf(sys.platform == 'win32', 'Broken on windows, with very long fail time.')
+
+@skipIf(sys.platform == 'win32',
+        'Broken on windows, with very long fail time.')
 class GitServerTestCase(ServerTests, CompatTestCase):
 class GitServerTestCase(ServerTests, CompatTestCase):
     """Tests for client/server compatibility.
     """Tests for client/server compatibility.
 
 
@@ -73,7 +75,8 @@ class GitServerTestCase(ServerTests, CompatTestCase):
         return port
         return port
 
 
 
 
-@skipIf(sys.platform == 'win32', 'Broken on windows, with very long fail time.')
+@skipIf(sys.platform == 'win32',
+        'Broken on windows, with very long fail time.')
 class GitServerSideBand64kTestCase(GitServerTestCase):
 class GitServerSideBand64kTestCase(GitServerTestCase):
     """Tests for client/server compatibility with side-band-64k support."""
     """Tests for client/server compatibility with side-band-64k support."""
 
 
@@ -88,7 +91,6 @@ class GitServerSideBand64kTestCase(GitServerTestCase):
         if os.name == 'nt':
         if os.name == 'nt':
             require_git_version((1, 9, 3))
             require_git_version((1, 9, 3))
 
 
-
     def _handlers(self):
     def _handlers(self):
         return None  # default handlers include side-band-64k
         return None  # default handlers include side-band-64k
 
 

+ 9 - 5
dulwich/tests/compat/test_web.py

@@ -54,7 +54,8 @@ from dulwich.tests.compat.utils import (
     )
     )
 
 
 
 
-@skipIf(sys.platform == 'win32', 'Broken on windows, with very long fail time.')
+@skipIf(sys.platform == 'win32',
+        'Broken on windows, with very long fail time.')
 class WebTests(ServerTests):
 class WebTests(ServerTests):
     """Base tests for web server tests.
     """Base tests for web server tests.
 
 
@@ -78,7 +79,8 @@ class WebTests(ServerTests):
         return port
         return port
 
 
 
 
-@skipIf(sys.platform == 'win32', 'Broken on windows, with very long fail time.')
+@skipIf(sys.platform == 'win32',
+        'Broken on windows, with very long fail time.')
 class SmartWebTestCase(WebTests, CompatTestCase):
 class SmartWebTestCase(WebTests, CompatTestCase):
     """Test cases for smart HTTP server.
     """Test cases for smart HTTP server.
 
 
@@ -111,13 +113,15 @@ def patch_capabilities(handler, caps_removed):
     original_capabilities = handler.capabilities
     original_capabilities = handler.capabilities
     filtered_capabilities = tuple(
     filtered_capabilities = tuple(
         i for i in original_capabilities() if i not in caps_removed)
         i for i in original_capabilities() if i not in caps_removed)
+
     def capabilities(cls):
     def capabilities(cls):
         return filtered_capabilities
         return filtered_capabilities
     handler.capabilities = classmethod(capabilities)
     handler.capabilities = classmethod(capabilities)
     return original_capabilities
     return original_capabilities
 
 
 
 
-@skipIf(sys.platform == 'win32', 'Broken on windows, with very long fail time.')
+@skipIf(sys.platform == 'win32',
+        'Broken on windows, with very long fail time.')
 class SmartWebSideBand64kTestCase(SmartWebTestCase):
 class SmartWebSideBand64kTestCase(SmartWebTestCase):
     """Test cases for smart HTTP server with side-band-64k support."""
     """Test cases for smart HTTP server with side-band-64k support."""
 
 
@@ -162,7 +166,8 @@ class SmartWebSideBand64kNoDoneTestCase(SmartWebTestCase):
         self.assertIn(b'no-done', caps)
         self.assertIn(b'no-done', caps)
 
 
 
 
-@skipIf(sys.platform == 'win32', 'Broken on windows, with very long fail time.')
+@skipIf(sys.platform == 'win32',
+        'Broken on windows, with very long fail time.')
 class DumbWebTestCase(WebTests, CompatTestCase):
 class DumbWebTestCase(WebTests, CompatTestCase):
     """Test cases for dumb HTTP server."""
     """Test cases for dumb HTTP server."""
 
 
@@ -199,4 +204,3 @@ class DumbWebTestCase(WebTests, CompatTestCase):
 
 
     def test_push_to_dulwich_issue_88_standard(self):
     def test_push_to_dulwich_issue_88_standard(self):
         raise SkipTest('Dumb web pushing not supported.')
         raise SkipTest('Dumb web pushing not supported.')
-

+ 3 - 1
dulwich/tests/compat/utils.py

@@ -233,10 +233,12 @@ class CompatTestCase(TestCase):
 
 
         :param name: The name of the repository export file, relative to
         :param name: The name of the repository export file, relative to
             dulwich/tests/data/repos.
             dulwich/tests/data/repos.
-        :returns: An initialized Repo object that lives in a temporary directory.
+        :returns: An initialized Repo object that lives in a temporary
+            directory.
         """
         """
         path = import_repo_to_dir(name)
         path = import_repo_to_dir(name)
         repo = Repo(path)
         repo = Repo(path)
+
         def cleanup():
         def cleanup():
             repo.close()
             repo.close()
             rmtree_ro(os.path.dirname(path.rstrip(os.sep)))
             rmtree_ro(os.path.dirname(path.rstrip(os.sep)))

+ 45 - 26
dulwich/tests/test_client.py

@@ -97,14 +97,15 @@ class GitClientTests(TestCase):
                                   self.rout.write)
                                   self.rout.write)
 
 
     def test_caps(self):
     def test_caps(self):
-        agent_cap = ('agent=dulwich/%d.%d.%d' % dulwich.__version__).encode('ascii')
+        agent_cap = (
+            'agent=dulwich/%d.%d.%d' % dulwich.__version__).encode('ascii')
         self.assertEqual(set([b'multi_ack', b'side-band-64k', b'ofs-delta',
         self.assertEqual(set([b'multi_ack', b'side-band-64k', b'ofs-delta',
-                               b'thin-pack', b'multi_ack_detailed',
-                               agent_cap]),
-                          set(self.client._fetch_capabilities))
+                              b'thin-pack', b'multi_ack_detailed',
+                              agent_cap]),
+                         set(self.client._fetch_capabilities))
         self.assertEqual(set([b'ofs-delta', b'report-status', b'side-band-64k',
         self.assertEqual(set([b'ofs-delta', b'report-status', b'side-band-64k',
                               agent_cap]),
                               agent_cap]),
-                          set(self.client._send_capabilities))
+                         set(self.client._send_capabilities))
 
 
     def test_archive_ack(self):
     def test_archive_ack(self):
         self.rin.write(
         self.rin.write(
@@ -117,6 +118,7 @@ class GitClientTests(TestCase):
     def test_fetch_empty(self):
     def test_fetch_empty(self):
         self.rin.write(b'0000')
         self.rin.write(b'0000')
         self.rin.seek(0)
         self.rin.seek(0)
+
         def check_heads(heads):
         def check_heads(heads):
             self.assertIs(heads, None)
             self.assertIs(heads, None)
             return []
             return []
@@ -130,6 +132,7 @@ class GitClientTests(TestCase):
             b'include-tag\n'
             b'include-tag\n'
             b'0000')
             b'0000')
         self.rin.seek(0)
         self.rin.seek(0)
+
         def check_heads(heads):
         def check_heads(heads):
             self.assertEquals({}, heads)
             self.assertEquals({}, heads)
             return []
             return []
@@ -193,7 +196,8 @@ class GitClientTests(TestCase):
 
 
         def determine_wants(refs):
         def determine_wants(refs):
             return {
             return {
-                b'refs/heads/master': b'310ca9477129b8586fa2afc779c1f57cf64bba6c'
+                b'refs/heads/master':
+                    b'310ca9477129b8586fa2afc779c1f57cf64bba6c'
             }
             }
 
 
         def generate_pack_contents(have, want):
         def generate_pack_contents(have, want):
@@ -266,7 +270,8 @@ class GitClientTests(TestCase):
             return {
             return {
                 b'refs/heads/blah12':
                 b'refs/heads/blah12':
                 b'310ca9477129b8586fa2afc779c1f57cf64bba6c',
                 b'310ca9477129b8586fa2afc779c1f57cf64bba6c',
-                b'refs/heads/master': b'310ca9477129b8586fa2afc779c1f57cf64bba6c'
+                b'refs/heads/master':
+                    b'310ca9477129b8586fa2afc779c1f57cf64bba6c'
             }
             }
 
 
         def generate_pack_contents(have, want):
         def generate_pack_contents(have, want):
@@ -308,7 +313,8 @@ class GitClientTests(TestCase):
         def determine_wants(refs):
         def determine_wants(refs):
             return {
             return {
                 b'refs/heads/blah12': commit.id,
                 b'refs/heads/blah12': commit.id,
-                b'refs/heads/master': b'310ca9477129b8586fa2afc779c1f57cf64bba6c'
+                b'refs/heads/master':
+                    b'310ca9477129b8586fa2afc779c1f57cf64bba6c'
             }
             }
 
 
         def generate_pack_contents(have, want):
         def generate_pack_contents(have, want):
@@ -320,9 +326,11 @@ class GitClientTests(TestCase):
         self.assertIn(
         self.assertIn(
             self.rout.getvalue(),
             self.rout.getvalue(),
             [b'007f0000000000000000000000000000000000000000 ' + commit.id +
             [b'007f0000000000000000000000000000000000000000 ' + commit.id +
-             b' refs/heads/blah12\x00report-status ofs-delta0000' + f.getvalue(),
+             b' refs/heads/blah12\x00report-status ofs-delta0000' +
+                 f.getvalue(),
              b'007f0000000000000000000000000000000000000000 ' + commit.id +
              b'007f0000000000000000000000000000000000000000 ' + commit.id +
-             b' refs/heads/blah12\x00ofs-delta report-status0000' + f.getvalue()])
+             b' refs/heads/blah12\x00ofs-delta report-status0000' +
+                 f.getvalue()])
 
 
     def test_send_pack_no_deleteref_delete_only(self):
     def test_send_pack_no_deleteref_delete_only(self):
         pkts = [b'310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/master'
         pkts = [b'310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/master'
@@ -545,7 +553,8 @@ class TestGetTransportAndPathFromUrl(TestCase):
         self.assertEqual('/bar/baz', path)
         self.assertEqual('/bar/baz', path)
 
 
     def test_ssh_homepath(self):
     def test_ssh_homepath(self):
-        c, path = get_transport_and_path_from_url('git+ssh://foo.com/~/bar/baz')
+        c, path = get_transport_and_path_from_url(
+            'git+ssh://foo.com/~/bar/baz')
         self.assertTrue(isinstance(c, SSHGitClient))
         self.assertTrue(isinstance(c, SSHGitClient))
         self.assertEqual('foo.com', c.host)
         self.assertEqual('foo.com', c.host)
         self.assertEqual(None, c.port)
         self.assertEqual(None, c.port)
@@ -561,21 +570,25 @@ class TestGetTransportAndPathFromUrl(TestCase):
         self.assertEqual('/~/bar/baz', path)
         self.assertEqual('/~/bar/baz', path)
 
 
     def test_ssh_host_relpath(self):
     def test_ssh_host_relpath(self):
-        self.assertRaises(ValueError, get_transport_and_path_from_url,
+        self.assertRaises(
+            ValueError, get_transport_and_path_from_url,
             'foo.com:bar/baz')
             'foo.com:bar/baz')
 
 
     def test_ssh_user_host_relpath(self):
     def test_ssh_user_host_relpath(self):
-        self.assertRaises(ValueError, get_transport_and_path_from_url,
+        self.assertRaises(
+            ValueError, get_transport_and_path_from_url,
             'user@foo.com:bar/baz')
             'user@foo.com:bar/baz')
 
 
     def test_local_path(self):
     def test_local_path(self):
-        self.assertRaises(ValueError, get_transport_and_path_from_url,
+        self.assertRaises(
+            ValueError, get_transport_and_path_from_url,
             'foo.bar/baz')
             'foo.bar/baz')
 
 
     def test_error(self):
     def test_error(self):
         # Need to use a known urlparse.uses_netloc URL scheme to get the
         # Need to use a known urlparse.uses_netloc URL scheme to get the
         # expected parsing of the URL on Python versions less than 2.6.5
         # expected parsing of the URL on Python versions less than 2.6.5
-        self.assertRaises(ValueError, get_transport_and_path_from_url,
+        self.assertRaises(
+            ValueError, get_transport_and_path_from_url,
             'prospero://bar/baz')
             'prospero://bar/baz')
 
 
     def test_http(self):
     def test_http(self):
@@ -607,7 +620,8 @@ class TestSSHVendor(object):
         self.username = username
         self.username = username
         self.port = port
         self.port = port
 
 
-        class Subprocess: pass
+        class Subprocess:
+            pass
         setattr(Subprocess, 'read', lambda: None)
         setattr(Subprocess, 'read', lambda: None)
         setattr(Subprocess, 'write', lambda: None)
         setattr(Subprocess, 'write', lambda: None)
         setattr(Subprocess, 'close', lambda: None)
         setattr(Subprocess, 'close', lambda: None)
@@ -645,13 +659,15 @@ class SSHGitClientTests(TestCase):
         self.assertEqual('ssh://user@git.samba.org:2222/tmp/repo.git', url)
         self.assertEqual('ssh://user@git.samba.org:2222/tmp/repo.git', url)
 
 
     def test_default_command(self):
     def test_default_command(self):
-        self.assertEqual(b'git-upload-pack',
-                self.client._get_cmd_path(b'upload-pack'))
+        self.assertEqual(
+            b'git-upload-pack',
+            self.client._get_cmd_path(b'upload-pack'))
 
 
     def test_alternative_command_path(self):
     def test_alternative_command_path(self):
         self.client.alternative_paths[b'upload-pack'] = (
         self.client.alternative_paths[b'upload-pack'] = (
             b'/usr/lib/git/git-upload-pack')
             b'/usr/lib/git/git-upload-pack')
-        self.assertEqual(b'/usr/lib/git/git-upload-pack',
+        self.assertEqual(
+            b'/usr/lib/git/git-upload-pack',
             self.client._get_cmd_path(b'upload-pack'))
             self.client._get_cmd_path(b'upload-pack'))
 
 
     def test_alternative_command_path_spaces(self):
     def test_alternative_command_path_spaces(self):
@@ -674,7 +690,7 @@ class SSHGitClientTests(TestCase):
 
 
         client._connect(b"relative-command", b"/~/path/to/repo")
         client._connect(b"relative-command", b"/~/path/to/repo")
         self.assertEqual(b"git-relative-command '~/path/to/repo'",
         self.assertEqual(b"git-relative-command '~/path/to/repo'",
-                          server.command)
+                         server.command)
 
 
 
 
 class ReportStatusParserTests(TestCase):
 class ReportStatusParserTests(TestCase):
@@ -723,15 +739,17 @@ class LocalGitClientTests(TestCase):
         self.addCleanup(tear_down_repo, s)
         self.addCleanup(tear_down_repo, s)
         out = BytesIO()
         out = BytesIO()
         walker = {}
         walker = {}
-        ret = c.fetch_pack(s.path, lambda heads: [], graph_walker=walker,
-            pack_data=out.write)
+        ret = c.fetch_pack(
+            s.path, lambda heads: [], graph_walker=walker, pack_data=out.write)
         self.assertEqual({
         self.assertEqual({
             b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
             b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
-            b'refs/tags/mytag-packed': b'b0931cadc54336e78a1d980420e3268903b57a50'
+            b'refs/tags/mytag-packed':
+                b'b0931cadc54336e78a1d980420e3268903b57a50'
             }, ret)
             }, ret)
-        self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08"
+        self.assertEqual(
+            b"PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08"
             b"\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e", out.getvalue())
             b"\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e", out.getvalue())
 
 
     def test_fetch_pack_none(self):
     def test_fetch_pack_none(self):
@@ -740,7 +758,8 @@ class LocalGitClientTests(TestCase):
         self.addCleanup(tear_down_repo, s)
         self.addCleanup(tear_down_repo, s)
         out = BytesIO()
         out = BytesIO()
         walker = MemoryRepo().get_graph_walker()
         walker = MemoryRepo().get_graph_walker()
-        c.fetch_pack(s.path,
+        c.fetch_pack(
+            s.path,
             lambda heads: [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"],
             lambda heads: [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"],
             graph_walker=walker, pack_data=out.write)
             graph_walker=walker, pack_data=out.write)
         # Hardcoding is not ideal, but we'll fix that some other day..
         # Hardcoding is not ideal, but we'll fix that some other day..
@@ -777,7 +796,7 @@ class LocalGitClientTests(TestCase):
         client = LocalGitClient()
         client = LocalGitClient()
         ref_name = b"refs/heads/" + branch
         ref_name = b"refs/heads/" + branch
         new_refs = client.send_pack(target.path,
         new_refs = client.send_pack(target.path,
-                                    lambda _: { ref_name: local.refs[ref_name] },
+                                    lambda _: {ref_name: local.refs[ref_name]},
                                     local.object_store.generate_pack_contents)
                                     local.object_store.generate_pack_contents)
 
 
         self.assertEqual(local.refs[ref_name], new_refs[ref_name])
         self.assertEqual(local.refs[ref_name], new_refs[ref_name])

+ 6 - 9
dulwich/tests/test_greenthreads.py

@@ -94,17 +94,15 @@ class TestGreenThreadsObjectStoreIterator(TestCase):
         self.assertEqual(len(iterator), self.cmt_amount * 3)
         self.assertEqual(len(iterator), self.cmt_amount * 3)
         haves = wants[0:self.cmt_amount-1]
         haves = wants[0:self.cmt_amount-1]
         finder = MissingObjectFinder(self.store, haves, wants)
         finder = MissingObjectFinder(self.store, haves, wants)
-        iterator = GreenThreadsObjectStoreIterator(self.store,
-                                               iter(finder.next, None),
-                                               finder)
+        iterator = GreenThreadsObjectStoreIterator(
+            self.store, iter(finder.next, None), finder)
         self.assertEqual(len(iterator), 3)
         self.assertEqual(len(iterator), 3)
 
 
     def test_iter(self):
     def test_iter(self):
         wants = [sha.id for sha in self.objs if isinstance(sha, Commit)]
         wants = [sha.id for sha in self.objs if isinstance(sha, Commit)]
         finder = MissingObjectFinder(self.store, (), wants)
         finder = MissingObjectFinder(self.store, (), wants)
-        iterator = GreenThreadsObjectStoreIterator(self.store,
-                                               iter(finder.next, None),
-                                               finder)
+        iterator = GreenThreadsObjectStoreIterator(
+            self.store, iter(finder.next, None), finder)
         objs = []
         objs = []
         for sha, path in iterator:
         for sha, path in iterator:
             self.assertIn(sha, self.objs)
             self.assertIn(sha, self.objs)
@@ -127,9 +125,8 @@ class TestGreenThreadsMissingObjectFinder(TestCase):
         self.assertEqual(len(finder.sha_done), 0)
         self.assertEqual(len(finder.sha_done), 0)
         self.assertEqual(len(finder.objects_to_send), self.cmt_amount)
         self.assertEqual(len(finder.objects_to_send), self.cmt_amount)
 
 
-        finder = GreenThreadsMissingObjectFinder(self.store,
-                                             wants[0:int(self.cmt_amount/2)],
-                                             wants)
+        finder = GreenThreadsMissingObjectFinder(
+            self.store, wants[0:int(self.cmt_amount/2)], wants)
         # sha_done will contains commit id and sha of blob refered in tree
         # sha_done will contains commit id and sha of blob refered in tree
         self.assertEqual(len(finder.sha_done), (self.cmt_amount/2)*2)
         self.assertEqual(len(finder.sha_done), (self.cmt_amount/2)*2)
         self.assertEqual(len(finder.objects_to_send), self.cmt_amount/2)
         self.assertEqual(len(finder.objects_to_send), self.cmt_amount/2)

+ 10 - 10
dulwich/tests/test_index.py

@@ -236,8 +236,8 @@ class IndexEntryFromStatTests(TestCase):
         st = os.stat_result((stat.S_IFREG + 0o644, 131078, 64769,
         st = os.stat_result((stat.S_IFREG + 0o644, 131078, 64769,
                 154, 1000, 1000, 12288,
                 154, 1000, 1000, 12288,
                 1323629595, 1324180496, 1324180496))
                 1323629595, 1324180496, 1324180496))
-        entry = index_entry_from_stat(st, "22" * 20, 0,
-                mode=stat.S_IFREG + 0o755)
+        entry = index_entry_from_stat(
+            st, "22" * 20, 0, mode=stat.S_IFREG + 0o755)
         self.assertEqual(entry, (
         self.assertEqual(entry, (
             1324180496,
             1324180496,
             1324180496,
             1324180496,
@@ -298,8 +298,8 @@ class BuildIndexTests(TestCase):
             repo.object_store.add_objects([(o, None)
             repo.object_store.add_objects([(o, None)
                 for o in [filea, filee, tree]])
                 for o in [filea, filee, tree]])
 
 
-            build_index_from_tree(repo.path, repo.index_path(),
-                    repo.object_store, tree.id)
+            build_index_from_tree(
+                repo.path, repo.index_path(), repo.object_store, tree.id)
 
 
             # Verify index entries
             # Verify index entries
             index = repo.open_index()
             index = repo.open_index()
@@ -312,8 +312,8 @@ class BuildIndexTests(TestCase):
             # filee
             # filee
             epath = os.path.join(repo.path, 'c', 'e')
             epath = os.path.join(repo.path, 'c', 'e')
             self.assertTrue(os.path.exists(epath))
             self.assertTrue(os.path.exists(epath))
-            self.assertReasonableIndexEntry(index[b'c/e'],
-                stat.S_IFREG | 0o644, 1, filee.id)
+            self.assertReasonableIndexEntry(
+                index[b'c/e'], stat.S_IFREG | 0o644, 1, filee.id)
             self.assertFileContents(epath, b'd')
             self.assertFileContents(epath, b'd')
 
 
     def test_nonempty(self):
     def test_nonempty(self):
@@ -331,11 +331,11 @@ class BuildIndexTests(TestCase):
             tree[b'b'] = (stat.S_IFREG | 0o644, fileb.id)
             tree[b'b'] = (stat.S_IFREG | 0o644, fileb.id)
             tree[b'c/d'] = (stat.S_IFREG | 0o644, filed.id)
             tree[b'c/d'] = (stat.S_IFREG | 0o644, filed.id)
 
 
-            repo.object_store.add_objects([(o, None)
-                for o in [filea, fileb, filed, tree]])
+            repo.object_store.add_objects(
+                [(o, None) for o in [filea, fileb, filed, tree]])
 
 
-            build_index_from_tree(repo.path, repo.index_path(),
-                    repo.object_store, tree.id)
+            build_index_from_tree(
+                repo.path, repo.index_path(), repo.object_store, tree.id)
 
 
             # Verify index entries
             # Verify index entries
             index = repo.open_index()
             index = repo.open_index()

+ 16 - 12
dulwich/tests/test_object_store.py

@@ -65,12 +65,13 @@ testobject = make_object(Blob, data=b"yummy data")
 class ObjectStoreTests(object):
 class ObjectStoreTests(object):
 
 
     def test_determine_wants_all(self):
     def test_determine_wants_all(self):
-        self.assertEqual([b"1" * 40],
+        self.assertEqual(
+            [b"1" * 40],
             self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}))
             self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}))
 
 
     def test_determine_wants_all_zero(self):
     def test_determine_wants_all_zero(self):
-        self.assertEqual([],
-            self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40}))
+        self.assertEqual(
+            [], self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40}))
 
 
     def test_iter(self):
     def test_iter(self):
         self.assertEqual([], list(self.store))
         self.assertEqual([], list(self.store))
@@ -131,11 +132,10 @@ class ObjectStoreTests(object):
         tree2_id = commit_tree(self.store, blobs_2)
         tree2_id = commit_tree(self.store, blobs_2)
         change_a = ((b'a', b'a'), (0o100644, 0o100644), (blob_a1.id, blob_a2.id))
         change_a = ((b'a', b'a'), (0o100644, 0o100644), (blob_a1.id, blob_a2.id))
         self.assertEqual([change_a],
         self.assertEqual([change_a],
-                          list(self.store.tree_changes(tree1_id, tree2_id)))
+                         list(self.store.tree_changes(tree1_id, tree2_id)))
         self.assertEqual(
         self.assertEqual(
             [change_a, ((b'b', b'b'), (0o100644, 0o100644), (blob_b.id, blob_b.id))],
             [change_a, ((b'b', b'b'), (0o100644, 0o100644), (blob_b.id, blob_b.id))],
-            list(self.store.tree_changes(tree1_id, tree2_id,
-                                         want_unchanged=True)))
+            list(self.store.tree_changes(tree1_id, tree2_id, want_unchanged=True)))
 
 
     def test_iter_tree_contents(self):
     def test_iter_tree_contents(self):
         blob_a = make_object(Blob, data=b'a')
         blob_a = make_object(Blob, data=b'a')
@@ -153,7 +153,7 @@ class ObjectStoreTests(object):
         ]
         ]
         tree_id = commit_tree(self.store, blobs)
         tree_id = commit_tree(self.store, blobs)
         self.assertEqual([TreeEntry(p, m, h) for (p, h, m) in blobs],
         self.assertEqual([TreeEntry(p, m, h) for (p, h, m) in blobs],
-                          list(self.store.iter_tree_contents(tree_id)))
+                         list(self.store.iter_tree_contents(tree_id)))
 
 
     def test_iter_tree_contents_include_trees(self):
     def test_iter_tree_contents_include_trees(self):
         blob_a = make_object(Blob, data=b'a')
         blob_a = make_object(Blob, data=b'a')
@@ -244,7 +244,6 @@ class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
         self.assertEqual((Blob.type_num, b'more yummy data'),
         self.assertEqual((Blob.type_num, b'more yummy data'),
                          o.get_raw(packed_blob_sha))
                          o.get_raw(packed_blob_sha))
 
 
-
     def test_add_thin_pack_empty(self):
     def test_add_thin_pack_empty(self):
         o = MemoryObjectStore()
         o = MemoryObjectStore()
 
 
@@ -316,7 +315,8 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         alternate_store.add_object(b2)
         alternate_store.add_object(b2)
         store = DiskObjectStore(self.store_dir)
         store = DiskObjectStore(self.store_dir)
         self.assertRaises(KeyError, store.__getitem__, b2.id)
         self.assertRaises(KeyError, store.__getitem__, b2.id)
-        store.add_alternate_path(os.path.relpath(alternate_dir, self.store_dir))
+        store.add_alternate_path(
+            os.path.relpath(alternate_dir, self.store_dir))
         self.assertEqual(list(alternate_store), list(store.alternates[0]))
         self.assertEqual(list(alternate_store), list(store.alternates[0]))
         self.assertIn(b2.id, store)
         self.assertIn(b2.id, store)
         self.assertEqual(b2, store[b2.id])
         self.assertEqual(b2, store[b2.id])
@@ -351,7 +351,8 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
             with o.add_thin_pack(f.read, None) as pack:
             with o.add_thin_pack(f.read, None) as pack:
                 packed_blob_sha = sha_to_hex(entries[0][3])
                 packed_blob_sha = sha_to_hex(entries[0][3])
                 pack.check_length_and_checksum()
                 pack.check_length_and_checksum()
-                self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
+                self.assertEqual(
+                    sorted([blob.id, packed_blob_sha]), list(pack))
                 self.assertTrue(o.contains_packed(packed_blob_sha))
                 self.assertTrue(o.contains_packed(packed_blob_sha))
                 self.assertTrue(o.contains_packed(blob.id))
                 self.assertTrue(o.contains_packed(blob.id))
                 self.assertEqual((Blob.type_num, b'more yummy data'),
                 self.assertEqual((Blob.type_num, b'more yummy data'),
@@ -403,10 +404,13 @@ class TreeLookupPathTests(TestCase):
         self.assertTrue(isinstance(self.store[o_id], Tree))
         self.assertTrue(isinstance(self.store[o_id], Tree))
 
 
     def test_lookup_nonexistent(self):
     def test_lookup_nonexistent(self):
-        self.assertRaises(KeyError, tree_lookup_path, self.get_object, self.tree_id, b'j')
+        self.assertRaises(
+            KeyError, tree_lookup_path, self.get_object, self.tree_id, b'j')
 
 
     def test_lookup_not_tree(self):
     def test_lookup_not_tree(self):
-        self.assertRaises(NotTreeError, tree_lookup_path, self.get_object, self.tree_id, b'ad/b/j')
+        self.assertRaises(
+            NotTreeError, tree_lookup_path, self.get_object, self.tree_id,
+            b'ad/b/j')
 
 
 
 
 class ObjectStoreGraphWalkerTests(TestCase):
 class ObjectStoreGraphWalkerTests(TestCase):

+ 25 - 22
dulwich/tests/test_objects.py

@@ -864,13 +864,14 @@ class TreeTests(ShaFileCheckTests):
 class TagSerializeTests(TestCase):
 class TagSerializeTests(TestCase):
 
 
     def test_serialize_simple(self):
     def test_serialize_simple(self):
-        x = make_object(Tag,
-                        tagger=b'Jelmer Vernooij <jelmer@samba.org>',
-                        name=b'0.1',
-                        message=b'Tag 0.1',
-                        object=(Blob, b'd80c186a03f423a81b39df39dc87fd269736ca86'),
-                        tag_time=423423423,
-                        tag_timezone=0)
+        x = make_object(
+            Tag,
+            tagger=b'Jelmer Vernooij <jelmer@samba.org>',
+            name=b'0.1',
+            message=b'Tag 0.1',
+            object=(Blob, b'd80c186a03f423a81b39df39dc87fd269736ca86'),
+            tag_time=423423423,
+            tag_timezone=0)
         self.assertEqual((b'object d80c186a03f423a81b39df39dc87fd269736ca86\n'
         self.assertEqual((b'object d80c186a03f423a81b39df39dc87fd269736ca86\n'
                           b'type blob\n'
                           b'type blob\n'
                           b'tag 0.1\n'
                           b'tag 0.1\n'
@@ -880,13 +881,14 @@ class TagSerializeTests(TestCase):
                           b'Tag 0.1'), x.as_raw_string())
                           b'Tag 0.1'), x.as_raw_string())
 
 
     def test_serialize_none_message(self):
     def test_serialize_none_message(self):
-        x = make_object(Tag,
-                        tagger=b'Jelmer Vernooij <jelmer@samba.org>',
-                        name=b'0.1',
-                        message=None,
-                        object=(Blob, b'd80c186a03f423a81b39df39dc87fd269736ca86'),
-                        tag_time=423423423,
-                        tag_timezone=0)
+        x = make_object(
+            Tag,
+            tagger=b'Jelmer Vernooij <jelmer@samba.org>',
+            name=b'0.1',
+            message=None,
+            object=(Blob, b'd80c186a03f423a81b39df39dc87fd269736ca86'),
+            tag_time=423423423,
+            tag_timezone=0)
         self.assertEqual((b'object d80c186a03f423a81b39df39dc87fd269736ca86\n'
         self.assertEqual((b'object d80c186a03f423a81b39df39dc87fd269736ca86\n'
                           b'type blob\n'
                           b'type blob\n'
                           b'tag 0.1\n'
                           b'tag 0.1\n'
@@ -943,7 +945,7 @@ class TagParseTests(ShaFileCheckTests):
                          object_sha)
                          object_sha)
         self.assertEqual(Commit, object_type)
         self.assertEqual(Commit, object_type)
         self.assertEqual(datetime.datetime.utcfromtimestamp(x.tag_time),
         self.assertEqual(datetime.datetime.utcfromtimestamp(x.tag_time),
-                          datetime.datetime(2007, 7, 1, 19, 54, 34))
+                         datetime.datetime(2007, 7, 1, 19, 54, 34))
         self.assertEqual(-25200, x.tag_timezone)
         self.assertEqual(-25200, x.tag_timezone)
 
 
     def test_parse_no_tagger(self):
     def test_parse_no_tagger(self):
@@ -960,7 +962,7 @@ class TagParseTests(ShaFileCheckTests):
         self.assertEqual(
         self.assertEqual(
             b'Linus Torvalds <torvalds@woody.linux-foundation.org>', x.tagger)
             b'Linus Torvalds <torvalds@woody.linux-foundation.org>', x.tagger)
         self.assertEqual(datetime.datetime.utcfromtimestamp(x.tag_time),
         self.assertEqual(datetime.datetime.utcfromtimestamp(x.tag_time),
-                          datetime.datetime(2007, 7, 1, 19, 54, 34))
+                         datetime.datetime(2007, 7, 1, 19, 54, 34))
         self.assertEqual(-25200, x.tag_timezone)
         self.assertEqual(-25200, x.tag_timezone)
         self.assertEqual(b'v2.6.22-rc7', x.name)
         self.assertEqual(b'v2.6.22-rc7', x.name)
 
 
@@ -1134,12 +1136,12 @@ class ShaFileCopyTests(TestCase):
 
 
 
 
 class ShaFileSerializeTests(TestCase):
 class ShaFileSerializeTests(TestCase):
-    """
-    Test that `ShaFile` objects only gets serialized once if they haven't changed.
+    """`ShaFile` objects only gets serialized once if they haven't changed.
     """
     """
 
 
     @contextmanager
     @contextmanager
-    def assert_serialization_on_change(self, obj, needs_serialization_after_change=True):
+    def assert_serialization_on_change(
+            self, obj, needs_serialization_after_change=True):
         old_id = obj.id
         old_id = obj.id
         self.assertFalse(obj._needs_serialization)
         self.assertFalse(obj._needs_serialization)
 
 
@@ -1172,7 +1174,8 @@ class ShaFileSerializeTests(TestCase):
     def test_blob_serialize(self):
     def test_blob_serialize(self):
         blob = make_object(Blob, data=b'i am a blob')
         blob = make_object(Blob, data=b'i am a blob')
 
 
-        with self.assert_serialization_on_change(blob, needs_serialization_after_change=False):
+        with self.assert_serialization_on_change(
+                blob, needs_serialization_after_change=False):
             blob.data = b'i am another blob'
             blob.data = b'i am another blob'
 
 
     def test_tree_serialize(self):
     def test_tree_serialize(self):
@@ -1199,6 +1202,6 @@ class PrettyFormatTreeEntryTests(TestCase):
     def test_format(self):
     def test_format(self):
         self.assertEqual(
         self.assertEqual(
                 '40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n',
                 '40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n',
-                pretty_format_tree_entry(b"foo", 0o40000,
+                pretty_format_tree_entry(
+                    b"foo", 0o40000,
                     b"40820c38cfb182ce6c8b261555410d8382a5918b"))
                     b"40820c38cfb182ce6c8b261555410d8382a5918b"))
-

+ 18 - 12
dulwich/tests/test_patch.py

@@ -234,8 +234,9 @@ class DiffTests(TestCase):
 
 
     def test_blob_diff(self):
     def test_blob_diff(self):
         f = BytesIO()
         f = BytesIO()
-        write_blob_diff(f, (b"foo.txt", 0o644, Blob.from_string(b"old\nsame\n")),
-                           (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")))
+        write_blob_diff(
+            f, (b"foo.txt", 0o644, Blob.from_string(b"old\nsame\n")),
+            (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")))
         self.assertEqual([
         self.assertEqual([
             b"diff --git a/foo.txt b/bar.txt",
             b"diff --git a/foo.txt b/bar.txt",
             b"index 3b0f961..a116b51 644",
             b"index 3b0f961..a116b51 644",
@@ -249,8 +250,9 @@ class DiffTests(TestCase):
 
 
     def test_blob_add(self):
     def test_blob_add(self):
         f = BytesIO()
         f = BytesIO()
-        write_blob_diff(f, (None, None, None),
-                           (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")))
+        write_blob_diff(
+            f, (None, None, None),
+            (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")))
         self.assertEqual([
         self.assertEqual([
              b'diff --git /dev/null b/bar.txt',
              b'diff --git /dev/null b/bar.txt',
              b'new mode 644',
              b'new mode 644',
@@ -264,8 +266,9 @@ class DiffTests(TestCase):
 
 
     def test_blob_remove(self):
     def test_blob_remove(self):
         f = BytesIO()
         f = BytesIO()
-        write_blob_diff(f, (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
-                           (None, None, None))
+        write_blob_diff(
+            f, (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")),
+            (None, None, None))
         self.assertEqual([
         self.assertEqual([
             b'diff --git a/bar.txt /dev/null',
             b'diff --git a/bar.txt /dev/null',
             b'deleted mode 644',
             b'deleted mode 644',
@@ -326,10 +329,10 @@ class DiffTests(TestCase):
         store = MemoryObjectStore()
         store = MemoryObjectStore()
         tree1 = Tree()
         tree1 = Tree()
         tree1.add(b"asubmodule", S_IFGITLINK,
         tree1.add(b"asubmodule", S_IFGITLINK,
-            b"06d0bdd9e2e20377b3180e4986b14c8549b393e4")
+                  b"06d0bdd9e2e20377b3180e4986b14c8549b393e4")
         tree2 = Tree()
         tree2 = Tree()
         tree2.add(b"asubmodule", S_IFGITLINK,
         tree2.add(b"asubmodule", S_IFGITLINK,
-            b"cc975646af69f279396d4d5e1379ac6af80ee637")
+                  b"cc975646af69f279396d4d5e1379ac6af80ee637")
         store.add_objects([(o, None) for o in [tree1, tree2]])
         store.add_objects([(o, None) for o in [tree1, tree2]])
         write_tree_diff(f, store, tree1.id, tree2.id)
         write_tree_diff(f, store, tree1.id, tree2.id)
         self.assertEqual([
         self.assertEqual([
@@ -408,8 +411,9 @@ class DiffTests(TestCase):
             b"\x00\x00\x01\xd5\x00\x00\x00\x9f\x08\x03\x00\x00\x00\x98\xd3\xb3")
             b"\x00\x00\x01\xd5\x00\x00\x00\x9f\x08\x03\x00\x00\x00\x98\xd3\xb3")
         store = MemoryObjectStore()
         store = MemoryObjectStore()
         store.add_objects([(b1, None), (b2, None)])
         store.add_objects([(b1, None), (b2, None)])
-        write_object_diff(f, store, (b'foo.png', 0o644, b1.id),
-                                    (b'bar.png', 0o644, b2.id), diff_binary=True)
+        write_object_diff(
+            f, store, (b'foo.png', 0o644, b1.id),
+            (b'bar.png', 0o644, b2.id), diff_binary=True)
         self.assertEqual([
         self.assertEqual([
             b'diff --git a/foo.png b/bar.png',
             b'diff --git a/foo.png b/bar.png',
             b'index f73e47d..06364b7 644',
             b'index f73e47d..06364b7 644',
@@ -481,8 +485,10 @@ class DiffTests(TestCase):
         b1 = Blob.from_string(b"new\nsame\n")
         b1 = Blob.from_string(b"new\nsame\n")
         store = MemoryObjectStore()
         store = MemoryObjectStore()
         store.add_object(b1)
         store.add_object(b1)
-        write_object_diff(f, store, (b"bar.txt", 0o644, b1.id),
-            (b"bar.txt", 0o160000, b"06d0bdd9e2e20377b3180e4986b14c8549b393e4"))
+        write_object_diff(
+            f, store, (b"bar.txt", 0o644, b1.id),
+            (b"bar.txt", 0o160000,
+                b"06d0bdd9e2e20377b3180e4986b14c8549b393e4"))
         self.assertEqual([
         self.assertEqual([
             b'diff --git a/bar.txt b/bar.txt',
             b'diff --git a/bar.txt b/bar.txt',
             b'old mode 644',
             b'old mode 644',

+ 121 - 93
dulwich/tests/test_repository.py

@@ -65,8 +65,10 @@ class CreateRepositoryTests(TestCase):
 
 
     def _check_repo_contents(self, repo, expect_bare):
     def _check_repo_contents(self, repo, expect_bare):
         self.assertEqual(expect_bare, repo.bare)
         self.assertEqual(expect_bare, repo.bare)
-        self.assertFileContentsEqual(b'Unnamed repository', repo, 'description')
-        self.assertFileContentsEqual(b'', repo, os.path.join('info', 'exclude'))
+        self.assertFileContentsEqual(
+            b'Unnamed repository', repo, 'description')
+        self.assertFileContentsEqual(
+            b'', repo, os.path.join('info', 'exclude'))
         self.assertFileContentsEqual(None, repo, 'nonexistent file')
         self.assertFileContentsEqual(None, repo, 'nonexistent file')
         barestr = b'bare = ' + str(expect_bare).lower().encode('ascii')
         barestr = b'bare = ' + str(expect_bare).lower().encode('ascii')
         with repo.get_named_file('config') as f:
         with repo.get_named_file('config') as f:
@@ -78,7 +80,6 @@ class CreateRepositoryTests(TestCase):
             config_text = f.read()
             config_text = f.read()
             self.assertTrue(barestr in config_text, "%r" % config_text)
             self.assertTrue(barestr in config_text, "%r" % config_text)
 
 
-
     def test_create_memory(self):
     def test_create_memory(self):
         repo = MemoryRepo.init_bare([], {})
         repo = MemoryRepo.init_bare([], {})
         self._check_repo_contents(repo, True)
         self._check_repo_contents(repo, True)
@@ -126,7 +127,7 @@ class RepositoryRootTests(TestCase):
         r = self.open_repo('a.git')
         r = self.open_repo('a.git')
         r[b"refs/tags/foo"] = b'a90fa2d900a17e99b433217e988c4eb4a2e9a097'
         r[b"refs/tags/foo"] = b'a90fa2d900a17e99b433217e988c4eb4a2e9a097'
         self.assertEqual(b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
         self.assertEqual(b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
-                          r[b"refs/tags/foo"].id)
+                         r[b"refs/tags/foo"].id)
 
 
     def test_getitem_unicode(self):
     def test_getitem_unicode(self):
         r = self.open_repo('a.git')
         r = self.open_repo('a.git')
@@ -168,7 +169,8 @@ class RepositoryRootTests(TestCase):
             b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
             b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
             b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
-            b'refs/tags/mytag-packed': b'b0931cadc54336e78a1d980420e3268903b57a50',
+            b'refs/tags/mytag-packed':
+                b'b0931cadc54336e78a1d980420e3268903b57a50',
             }, r.get_refs())
             }, r.get_refs())
 
 
     def test_head(self):
     def test_head(self):
@@ -234,13 +236,16 @@ class RepositoryRootTests(TestCase):
     def test_get_walker(self):
     def test_get_walker(self):
         r = self.open_repo('a.git')
         r = self.open_repo('a.git')
         # include defaults to [r.head()]
         # include defaults to [r.head()]
-        self.assertEqual([e.commit.id for e in r.get_walker()],
-                         [r.head(), b'2a72d929692c41d8554c07f6301757ba18a65d91'])
         self.assertEqual(
         self.assertEqual(
-            [e.commit.id for e in r.get_walker([b'2a72d929692c41d8554c07f6301757ba18a65d91'])],
+            [e.commit.id for e in r.get_walker()],
+            [r.head(), b'2a72d929692c41d8554c07f6301757ba18a65d91'])
+        self.assertEqual(
+            [e.commit.id for e in
+                r.get_walker([b'2a72d929692c41d8554c07f6301757ba18a65d91'])],
             [b'2a72d929692c41d8554c07f6301757ba18a65d91'])
             [b'2a72d929692c41d8554c07f6301757ba18a65d91'])
         self.assertEqual(
         self.assertEqual(
-            [e.commit.id for e in r.get_walker(b'2a72d929692c41d8554c07f6301757ba18a65d91')],
+            [e.commit.id for e in
+                r.get_walker(b'2a72d929692c41d8554c07f6301757ba18a65d91')],
             [b'2a72d929692c41d8554c07f6301757ba18a65d91'])
             [b'2a72d929692c41d8554c07f6301757ba18a65d91'])
 
 
     def test_clone(self):
     def test_clone(self):
@@ -252,8 +257,10 @@ class RepositoryRootTests(TestCase):
                 b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
                 b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
                 b'refs/remotes/origin/master':
                 b'refs/remotes/origin/master':
                     b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
                     b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
-                b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
-                b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
+                b'refs/heads/master':
+                    b'a90fa2d900a17e99b433217e988c4eb4a2e9a097',
+                b'refs/tags/mytag':
+                    b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
                 b'refs/tags/mytag-packed':
                 b'refs/tags/mytag-packed':
                     b'b0931cadc54336e78a1d980420e3268903b57a50',
                     b'b0931cadc54336e78a1d980420e3268903b57a50',
                 }, t.refs.as_dict())
                 }, t.refs.as_dict())
@@ -264,7 +271,8 @@ class RepositoryRootTests(TestCase):
             encoded_path = r.path
             encoded_path = r.path
             if not isinstance(encoded_path, bytes):
             if not isinstance(encoded_path, bytes):
                 encoded_path = encoded_path.encode(sys.getfilesystemencoding())
                 encoded_path = encoded_path.encode(sys.getfilesystemencoding())
-            self.assertEqual(encoded_path, c.get((b'remote', b'origin'), b'url'))
+            self.assertEqual(encoded_path,
+                             c.get((b'remote', b'origin'), b'url'))
             self.assertEqual(
             self.assertEqual(
                 b'+refs/heads/*:refs/remotes/origin/*',
                 b'+refs/heads/*:refs/remotes/origin/*',
                 c.get((b'remote', b'origin'), b'fetch'))
                 c.get((b'remote', b'origin'), b'fetch'))
@@ -339,12 +347,13 @@ class RepositoryRootTests(TestCase):
         rel = os.path.relpath(os.path.join(repo_dir, 'submodule'), temp_dir)
         rel = os.path.relpath(os.path.join(repo_dir, 'submodule'), temp_dir)
         os.symlink(os.path.join(rel, 'dotgit'), os.path.join(temp_dir, '.git'))
         os.symlink(os.path.join(rel, 'dotgit'), os.path.join(temp_dir, '.git'))
         with Repo(temp_dir) as r:
         with Repo(temp_dir) as r:
-            self.assertEqual(r.head(), b'a90fa2d900a17e99b433217e988c4eb4a2e9a097')
+            self.assertEqual(r.head(),
+                             b'a90fa2d900a17e99b433217e988c4eb4a2e9a097')
 
 
     def test_common_revisions(self):
     def test_common_revisions(self):
         """
         """
-        This test demonstrates that ``find_common_revisions()`` actually returns
-        common heads, not revisions; dulwich already uses
+        This test demonstrates that ``find_common_revisions()`` actually
+        returns common heads, not revisions; dulwich already uses
         ``find_common_revisions()`` in such a manner (see
         ``find_common_revisions()`` in such a manner (see
         ``Repo.fetch_objects()``).
         ``Repo.fetch_objects()``).
         """
         """
@@ -357,17 +366,17 @@ class RepositoryRootTests(TestCase):
         # Re-create each-side of the merge in simple_merge.git.
         # Re-create each-side of the merge in simple_merge.git.
         #
         #
         # Since the trees and blobs are missing, the repository created is
         # Since the trees and blobs are missing, the repository created is
-        # corrupted, but we're only checking for commits for the purpose of this
-        # test, so it's immaterial.
+        # corrupted, but we're only checking for commits for the purpose of
+        # this test, so it's immaterial.
         r1_dir = self.mkdtemp()
         r1_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, r1_dir)
         self.addCleanup(shutil.rmtree, r1_dir)
-        r1_commits = [b'ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd', # HEAD
+        r1_commits = [b'ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd',  # HEAD
                       b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e',
                       b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e',
                       b'0d89f20333fbb1d2f3a94da77f4981373d8f4310']
                       b'0d89f20333fbb1d2f3a94da77f4981373d8f4310']
 
 
         r2_dir = self.mkdtemp()
         r2_dir = self.mkdtemp()
         self.addCleanup(shutil.rmtree, r2_dir)
         self.addCleanup(shutil.rmtree, r2_dir)
-        r2_commits = [b'4cffe90e0a41ad3f5190079d7c8f036bde29cbe6', # HEAD
+        r2_commits = [b'4cffe90e0a41ad3f5190079d7c8f036bde29cbe6',  # HEAD
                       b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e',
                       b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e',
                       b'0d89f20333fbb1d2f3a94da77f4981373d8f4310']
                       b'0d89f20333fbb1d2f3a94da77f4981373d8f4310']
 
 
@@ -536,15 +545,22 @@ exit 1
                     w.args == expected_warning.args):
                     w.args == expected_warning.args):
                 break
                 break
         else:
         else:
-            raise AssertionError('Expected warning %r not in %r' %
-                    (expected_warning, warnings_list))
+            raise AssertionError(
+                'Expected warning %r not in %r' %
+                (expected_warning, warnings_list))
         self.assertEqual([commit_sha], r[commit_sha2].parents)
         self.assertEqual([commit_sha], r[commit_sha2].parents)
 
 
     def test_as_dict(self):
     def test_as_dict(self):
         def check(repo):
         def check(repo):
-            self.assertEqual(repo.refs.subkeys(b'refs/tags'), repo.refs.subkeys(b'refs/tags/'))
-            self.assertEqual(repo.refs.as_dict(b'refs/tags'), repo.refs.as_dict(b'refs/tags/'))
-            self.assertEqual(repo.refs.as_dict(b'refs/heads'), repo.refs.as_dict(b'refs/heads/'))
+            self.assertEqual(
+                repo.refs.subkeys(b'refs/tags'),
+                repo.refs.subkeys(b'refs/tags/'))
+            self.assertEqual(
+                repo.refs.as_dict(b'refs/tags'),
+                repo.refs.as_dict(b'refs/tags/'))
+            self.assertEqual(
+                repo.refs.as_dict(b'refs/heads'),
+                repo.refs.as_dict(b'refs/heads/'))
 
 
         bare = self.open_repo('a.git')
         bare = self.open_repo('a.git')
         tmp_dir = self.mkdtemp()
         tmp_dir = self.mkdtemp()
@@ -627,11 +643,12 @@ class BuildRepoRootTests(TestCase):
         with open(os.path.join(r.path, 'a'), 'wb') as f:
         with open(os.path.join(r.path, 'a'), 'wb') as f:
             f.write(b'new contents')
             f.write(b'new contents')
         r.stage(['a'])
         r.stage(['a'])
-        commit_sha = r.do_commit(b'modified a',
-                                 committer=b'Test Committer <test@nodomain.com>',
-                                 author=b'Test Author <test@nodomain.com>',
-                                 commit_timestamp=12395, commit_timezone=0,
-                                 author_timestamp=12395, author_timezone=0)
+        commit_sha = r.do_commit(
+            b'modified a',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0)
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b'a')
         a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b'a')
         self.assertEqual(stat.S_IFREG | 0o644, a_mode)
         self.assertEqual(stat.S_IFREG | 0o644, a_mode)
@@ -642,11 +659,12 @@ class BuildRepoRootTests(TestCase):
         r = self._repo
         r = self._repo
         os.symlink('a', os.path.join(r.path, 'b'))
         os.symlink('a', os.path.join(r.path, 'b'))
         r.stage(['a', 'b'])
         r.stage(['a', 'b'])
-        commit_sha = r.do_commit(b'Symlink b',
-                                 committer=b'Test Committer <test@nodomain.com>',
-                                 author=b'Test Author <test@nodomain.com>',
-                                 commit_timestamp=12395, commit_timezone=0,
-                                 author_timestamp=12395, author_timezone=0)
+        commit_sha = r.do_commit(
+            b'Symlink b',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0)
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b'b')
         b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b'b')
         self.assertTrue(stat.S_ISLNK(b_mode))
         self.assertTrue(stat.S_ISLNK(b_mode))
@@ -656,11 +674,12 @@ class BuildRepoRootTests(TestCase):
         r = self._repo
         r = self._repo
         os.remove(os.path.join(r.path, 'a'))
         os.remove(os.path.join(r.path, 'a'))
         r.stage(['a'])
         r.stage(['a'])
-        commit_sha = r.do_commit(b'deleted a',
-                                 committer=b'Test Committer <test@nodomain.com>',
-                                 author=b'Test Author <test@nodomain.com>',
-                                 commit_timestamp=12395, commit_timezone=0,
-                                 author_timestamp=12395, author_timezone=0)
+        commit_sha = r.do_commit(
+            b'deleted a',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0)
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         self.assertEqual([self._root_commit], r[commit_sha].parents)
         self.assertEqual([], list(r.open_index()))
         self.assertEqual([], list(r.open_index()))
         tree = r[r[commit_sha].tree]
         tree = r[r[commit_sha].tree]
@@ -669,22 +688,24 @@ class BuildRepoRootTests(TestCase):
     def test_commit_follows(self):
     def test_commit_follows(self):
         r = self._repo
         r = self._repo
         r.refs.set_symbolic_ref(b'HEAD', b'refs/heads/bla')
         r.refs.set_symbolic_ref(b'HEAD', b'refs/heads/bla')
-        commit_sha = r.do_commit(b'commit with strange character',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=b'HEAD')
+        commit_sha = r.do_commit(
+            b'commit with strange character',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=b'HEAD')
         self.assertEqual(commit_sha, r[b'refs/heads/bla'].id)
         self.assertEqual(commit_sha, r[b'refs/heads/bla'].id)
 
 
     def test_commit_encoding(self):
     def test_commit_encoding(self):
         r = self._repo
         r = self._repo
-        commit_sha = r.do_commit(b'commit with strange character \xee',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             encoding=b"iso8859-1")
+        commit_sha = r.do_commit(
+            b'commit with strange character \xee',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            encoding=b"iso8859-1")
         self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
         self.assertEqual(b"iso8859-1", r[commit_sha].encoding)
 
 
     def test_commit_config_identity(self):
     def test_commit_config_identity(self):
@@ -744,12 +765,13 @@ class BuildRepoRootTests(TestCase):
     def test_commit_branch(self):
     def test_commit_branch(self):
         r = self._repo
         r = self._repo
 
 
-        commit_sha = r.do_commit(b'commit to branch',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=b"refs/heads/new_branch")
+        commit_sha = r.do_commit(
+            b'commit to branch',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=b"refs/heads/new_branch")
         self.assertEqual(self._root_commit, r[b"HEAD"].id)
         self.assertEqual(self._root_commit, r[b"HEAD"].id)
         self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
         self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
         self.assertEqual([], r[commit_sha].parents)
         self.assertEqual([], r[commit_sha].parents)
@@ -757,30 +779,33 @@ class BuildRepoRootTests(TestCase):
 
 
         new_branch_head = commit_sha
         new_branch_head = commit_sha
 
 
-        commit_sha = r.do_commit(b'commit to branch 2',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=b"refs/heads/new_branch")
+        commit_sha = r.do_commit(
+            b'commit to branch 2',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=b"refs/heads/new_branch")
         self.assertEqual(self._root_commit, r[b"HEAD"].id)
         self.assertEqual(self._root_commit, r[b"HEAD"].id)
         self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
         self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id)
         self.assertEqual([new_branch_head], r[commit_sha].parents)
         self.assertEqual([new_branch_head], r[commit_sha].parents)
 
 
     def test_commit_merge_heads(self):
     def test_commit_merge_heads(self):
         r = self._repo
         r = self._repo
-        merge_1 = r.do_commit(b'commit to branch 2',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=b"refs/heads/new_branch")
-        commit_sha = r.do_commit(b'commit with merge',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             merge_heads=[merge_1])
+        merge_1 = r.do_commit(
+            b'commit to branch 2',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=b"refs/heads/new_branch")
+        commit_sha = r.do_commit(
+            b'commit with merge',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            merge_heads=[merge_1])
         self.assertEqual(
         self.assertEqual(
             [self._root_commit, merge_1],
             [self._root_commit, merge_1],
             r[commit_sha].parents)
             r[commit_sha].parents)
@@ -790,12 +815,13 @@ class BuildRepoRootTests(TestCase):
 
 
         old_shas = set(r.object_store)
         old_shas = set(r.object_store)
         old_refs = r.get_refs()
         old_refs = r.get_refs()
-        commit_sha = r.do_commit(b'commit with no ref',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=None)
+        commit_sha = r.do_commit(
+            b'commit with no ref',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=None)
         new_shas = set(r.object_store) - old_shas
         new_shas = set(r.object_store) - old_shas
 
 
         # New sha is added, but no new refs
         # New sha is added, but no new refs
@@ -810,12 +836,13 @@ class BuildRepoRootTests(TestCase):
 
 
         old_shas = set(r.object_store)
         old_shas = set(r.object_store)
         old_refs = r.get_refs()
         old_refs = r.get_refs()
-        commit_sha = r.do_commit(b'commit with no ref',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=None, merge_heads=[self._root_commit])
+        commit_sha = r.do_commit(
+            b'commit with no ref',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=None, merge_heads=[self._root_commit])
         new_shas = set(r.object_store) - old_shas
         new_shas = set(r.object_store) - old_shas
 
 
         # New sha is added, but no new refs
         # New sha is added, but no new refs
@@ -850,12 +877,13 @@ class BuildRepoRootTests(TestCase):
             self.addCleanup(os.remove, full_path)
             self.addCleanup(os.remove, full_path)
 
 
         r.stage(names)
         r.stage(names)
-        commit_sha = r.do_commit(b'Files with different encodings',
-             committer=b'Test Committer <test@nodomain.com>',
-             author=b'Test Author <test@nodomain.com>',
-             commit_timestamp=12395, commit_timezone=0,
-             author_timestamp=12395, author_timezone=0,
-             ref=None, merge_heads=[self._root_commit])
+        commit_sha = r.do_commit(
+            b'Files with different encodings',
+            committer=b'Test Committer <test@nodomain.com>',
+            author=b'Test Author <test@nodomain.com>',
+            commit_timestamp=12395, commit_timezone=0,
+            author_timestamp=12395, author_timezone=0,
+            ref=None, merge_heads=[self._root_commit])
 
 
         for name, encoding in zip(names, encodings):
         for name, encoding in zip(names, encodings):
             mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name)
             mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name)

+ 16 - 13
dulwich/tests/test_web.py

@@ -160,7 +160,7 @@ class DumbHandlersTestCase(WebTestCase):
         xs = b'x' * bufsize
         xs = b'x' * bufsize
         f = BytesIO(2 * xs)
         f = BytesIO(2 * xs)
         self.assertEqual([xs, xs],
         self.assertEqual([xs, xs],
-                          list(send_file(self._req, f, 'some/thing')))
+                         list(send_file(self._req, f, 'some/thing')))
         self.assertEqual(HTTP_OK, self._status)
         self.assertEqual(HTTP_OK, self._status)
         self.assertContentTypeEquals('some/thing')
         self.assertContentTypeEquals('some/thing')
         self.assertTrue(f.closed)
         self.assertTrue(f.closed)
@@ -229,7 +229,8 @@ class DumbHandlersTestCase(WebTestCase):
         self.assertEqual(HTTP_ERROR, self._status)
         self.assertEqual(HTTP_ERROR, self._status)
 
 
     def test_get_pack_file(self):
     def test_get_pack_file(self):
-        pack_name = os.path.join('objects', 'pack', 'pack-%s.pack' % ('1' * 40))
+        pack_name = os.path.join(
+            'objects', 'pack', 'pack-%s.pack' % ('1' * 40))
         backend = _test_backend([], named_files={pack_name: b'pack contents'})
         backend = _test_backend([], named_files={pack_name: b'pack contents'})
         mat = re.search('.*', pack_name)
         mat = re.search('.*', pack_name)
         output = b''.join(get_pack_file(self._req, backend, mat))
         output = b''.join(get_pack_file(self._req, backend, mat))
@@ -268,10 +269,10 @@ class DumbHandlersTestCase(WebTestCase):
 
 
         mat = re.search('.*', '//info/refs')
         mat = re.search('.*', '//info/refs')
         self.assertEqual([blob1.id + b'\trefs/heads/master\n',
         self.assertEqual([blob1.id + b'\trefs/heads/master\n',
-                           blob3.id + b'\trefs/tags/blob-tag\n',
-                           tag1.id + b'\trefs/tags/tag-tag\n',
-                           blob2.id + b'\trefs/tags/tag-tag^{}\n'],
-                          list(get_info_refs(self._req, backend, mat)))
+                          blob3.id + b'\trefs/tags/blob-tag\n',
+                          tag1.id + b'\trefs/tags/tag-tag\n',
+                          blob2.id + b'\trefs/tags/tag-tag^{}\n'],
+                         list(get_info_refs(self._req, backend, mat)))
         self.assertEqual(HTTP_OK, self._status)
         self.assertEqual(HTTP_OK, self._status)
         self.assertContentTypeEquals('text/plain')
         self.assertContentTypeEquals('text/plain')
         self.assertFalse(self._req.cached)
         self.assertFalse(self._req.cached)
@@ -300,7 +301,8 @@ class DumbHandlersTestCase(WebTestCase):
         mat = re.search('.*', '//info/packs')
         mat = re.search('.*', '//info/packs')
         output = b''.join(get_info_packs(self._req, backend, mat))
         output = b''.join(get_info_packs(self._req, backend, mat))
         expected = b''.join(
         expected = b''.join(
-            [(b'P pack-' + s + b'.pack\n') for s in [b'1' * 40, b'2' * 40, b'3' * 40]])
+            [(b'P pack-' + s + b'.pack\n')
+             for s in [b'1' * 40, b'2' * 40, b'3' * 40]])
         self.assertEqual(expected, output)
         self.assertEqual(expected, output)
         self.assertEqual(HTTP_OK, self._status)
         self.assertEqual(HTTP_OK, self._status)
         self.assertContentTypeEquals('text/plain')
         self.assertContentTypeEquals('text/plain')
@@ -374,9 +376,9 @@ class SmartHandlersTestCase(WebTestCase):
         handler_output = b''.join(get_info_refs(self._req, b'backend', mat))
         handler_output = b''.join(get_info_refs(self._req, b'backend', mat))
         write_output = self._output.getvalue()
         write_output = self._output.getvalue()
         self.assertEqual((b'001e# service=git-upload-pack\n'
         self.assertEqual((b'001e# service=git-upload-pack\n'
-                           b'0000'
-                           # input is ignored by the handler
-                           b'handled input: '), write_output)
+                          b'0000'
+                          # input is ignored by the handler
+                          b'handled input: '), write_output)
         # Ensure all output was written via the write callback.
         # Ensure all output was written via the write callback.
         self.assertEqual(b'', handler_output)
         self.assertEqual(b'', handler_output)
         self.assertTrue(self._handler.advertise_refs)
         self.assertTrue(self._handler.advertise_refs)
@@ -412,7 +414,7 @@ class HTTPGitRequestTestCase(WebTestCase):
         self.assertEqual(message.encode('ascii'), self._req.not_found(message))
         self.assertEqual(message.encode('ascii'), self._req.not_found(message))
         self.assertEqual(HTTP_NOT_FOUND, self._status)
         self.assertEqual(HTTP_NOT_FOUND, self._status)
         self.assertEqual(set([('Content-Type', 'text/plain')]),
         self.assertEqual(set([('Content-Type', 'text/plain')]),
-                          set(self._headers))
+                         set(self._headers))
 
 
     def test_forbidden(self):
     def test_forbidden(self):
         self._req.cache_forever()  # cache headers should be discarded
         self._req.cache_forever()  # cache headers should be discarded
@@ -420,7 +422,7 @@ class HTTPGitRequestTestCase(WebTestCase):
         self.assertEqual(message.encode('ascii'), self._req.forbidden(message))
         self.assertEqual(message.encode('ascii'), self._req.forbidden(message))
         self.assertEqual(HTTP_FORBIDDEN, self._status)
         self.assertEqual(HTTP_FORBIDDEN, self._status)
         self.assertEqual(set([('Content-Type', 'text/plain')]),
         self.assertEqual(set([('Content-Type', 'text/plain')]),
-                          set(self._headers))
+                         set(self._headers))
 
 
     def test_respond_ok(self):
     def test_respond_ok(self):
         self._req.respond()
         self._req.respond()
@@ -526,7 +528,8 @@ class GunzipTestCase(HTTPGitApplicationTestCase):
         require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.)
         require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.)
         """
         """
         zstream, zlength = self._get_zstream(self.example_text)
         zstream, zlength = self._get_zstream(self.example_text)
-        self._test_call(self.example_text,
+        self._test_call(
+            self.example_text,
             MinimalistWSGIInputStream(zstream.read()), zlength)
             MinimalistWSGIInputStream(zstream.read()), zlength)
 
 
     def test_call_no_working_seek(self):
     def test_call_no_working_seek(self):

+ 12 - 7
dulwich/web.py

@@ -90,8 +90,8 @@ def url_prefix(mat):
 
 
     :param mat: A regex match object.
     :param mat: A regex match object.
     :returns: The URL prefix, defined as the text before the match in the
     :returns: The URL prefix, defined as the text before the match in the
-        original string. Normalized to start with one leading slash and end with
-        zero.
+        original string. Normalized to start with one leading slash and end
+        with zero.
     """
     """
     return '/' + mat.string[:mat.start()].strip('/')
     return '/' + mat.string[:mat.start()].strip('/')
 
 
@@ -182,11 +182,13 @@ def get_info_refs(req, backend, mat):
             yield req.forbidden('Unsupported service')
             yield req.forbidden('Unsupported service')
             return
             return
         req.nocache()
         req.nocache()
-        write = req.respond(HTTP_OK, 'application/x-%s-advertisement' % service)
+        write = req.respond(
+            HTTP_OK, 'application/x-%s-advertisement' % service)
         proto = ReceivableProtocol(BytesIO().read, write)
         proto = ReceivableProtocol(BytesIO().read, write)
         handler = handler_cls(backend, [url_prefix(mat)], proto,
         handler = handler_cls(backend, [url_prefix(mat)], proto,
                               http_req=req, advertise_refs=True)
                               http_req=req, advertise_refs=True)
-        handler.proto.write_pkt_line(b'# service=' + service.encode('ascii') + b'\n')
+        handler.proto.write_pkt_line(
+            b'# service=' + service.encode('ascii') + b'\n')
         handler.proto.write_pkt_line(None)
         handler.proto.write_pkt_line(None)
         handler.handle()
         handler.handle()
     else:
     else:
@@ -385,7 +387,8 @@ class GunzipFilter(object):
                 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
                 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
                 wsgi_input.seek(0)
                 wsgi_input.seek(0)
 
 
-            environ['wsgi.input'] = gzip.GzipFile(filename=None, fileobj=wsgi_input, mode='r')
+            environ['wsgi.input'] = gzip.GzipFile(
+                filename=None, fileobj=wsgi_input, mode='r')
             del environ['HTTP_CONTENT_ENCODING']
             del environ['HTTP_CONTENT_ENCODING']
             if 'CONTENT_LENGTH' in environ:
             if 'CONTENT_LENGTH' in environ:
                 del environ['CONTENT_LENGTH']
                 del environ['CONTENT_LENGTH']
@@ -456,7 +459,7 @@ class WSGIRequestHandlerLogger(WSGIRequestHandler):
         """Handle a single HTTP request"""
         """Handle a single HTTP request"""
 
 
         self.raw_requestline = self.rfile.readline()
         self.raw_requestline = self.rfile.readline()
-        if not self.parse_request(): # An error code has been sent, just exit
+        if not self.parse_request():  # An error code has been sent, just exit
             return
             return
 
 
         handler = ServerHandlerLogger(
         handler = ServerHandlerLogger(
@@ -470,7 +473,9 @@ class WSGIServerLogger(WSGIServer):
 
 
     def handle_error(self, request, client_address):
     def handle_error(self, request, client_address):
         """Handle an error. """
         """Handle an error. """
-        logger.exception('Exception happened during processing of request from %s' % str(client_address))
+        logger.exception(
+            'Exception happened during processing of request from %s' %
+            str(client_address))
 
 
 
 
 def main(argv=sys.argv):
 def main(argv=sys.argv):

+ 11 - 6
setup.py

@@ -8,16 +8,17 @@ try:
 except ImportError:
 except ImportError:
     from distutils.core import setup, Extension
     from distutils.core import setup, Extension
 from distutils.core import Distribution
 from distutils.core import Distribution
+import os
+import sys
 
 
 dulwich_version_string = '0.17.4'
 dulwich_version_string = '0.17.4'
 
 
 include_dirs = []
 include_dirs = []
 # Windows MSVC support
 # Windows MSVC support
-import os
-import sys
 if sys.platform == 'win32':
 if sys.platform == 'win32':
     include_dirs.append('dulwich')
     include_dirs.append('dulwich')
 
 
+
 class DulwichDistribution(Distribution):
 class DulwichDistribution(Distribution):
 
 
     def is_pure(self):
     def is_pure(self):
@@ -33,6 +34,7 @@ class DulwichDistribution(Distribution):
 
 
     pure = False
     pure = False
 
 
+
 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
     # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
     # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
     # distutils.sysconfig
     # distutils.sysconfig
@@ -48,7 +50,9 @@ if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
             os.environ['ARCHFLAGS'] = ''
             os.environ['ARCHFLAGS'] = ''
 
 
 tests_require = ['fastimport']
 tests_require = ['fastimport']
-if not '__pypy__' in sys.modules and not sys.platform == 'win32':
+
+
+if '__pypy__' not in sys.modules and not sys.platform == 'win32':
     tests_require.extend([
     tests_require.extend([
         'gevent', 'geventhttpclient', 'mock', 'setuptools>=17.1'])
         'gevent', 'geventhttpclient', 'mock', 'setuptools>=17.1'])
 
 
@@ -81,10 +85,11 @@ setup(name='dulwich',
       All functionality is available in pure Python. Optional
       All functionality is available in pure Python. Optional
       C extensions can be built for improved performance.
       C extensions can be built for improved performance.
 
 
-      The project is named after the part of London that Mr. and Mrs. Git live in
-      in the particular Monty Python sketch.
+      The project is named after the part of London that Mr. and Mrs. Git live
+      in in the particular Monty Python sketch.
       """,
       """,
-      packages=['dulwich', 'dulwich.tests', 'dulwich.tests.compat', 'dulwich.contrib'],
+      packages=['dulwich', 'dulwich.tests', 'dulwich.tests.compat',
+                'dulwich.contrib'],
       package_data={'': ['../docs/tutorial/*.txt']},
       package_data={'': ['../docs/tutorial/*.txt']},
       scripts=['bin/dulwich', 'bin/dul-receive-pack', 'bin/dul-upload-pack'],
       scripts=['bin/dulwich', 'bin/dul-receive-pack', 'bin/dul-upload-pack'],
       classifiers=[
       classifiers=[