
Add pep8 make target and fix some pep8ness.

Jelmer Vernooij, 11 years ago
parent commit 5a4e20d14b
7 changed files with 125 additions and 106 deletions
  1. Makefile (+4, -0)
  2. dulwich/client.py (+52, -43)
  3. dulwich/config.py (+2, -2)
  4. dulwich/diff_tree.py (+22, -17)
  5. dulwich/object_store.py (+11, -14)
  6. dulwich/objects.py (+34, -27)
  7. dulwich/pack.py (+0, -3)

Makefile (+4, -0)

@@ -1,5 +1,6 @@
 PYTHON = python
 PYFLAKES = pyflakes
+PEP8 = pep8
 SETUP = $(PYTHON) setup.py
 PYDOCTOR ?= pydoctor
 ifeq ($(shell $(PYTHON) -c "import sys; print(sys.version_info >= (2, 7))"),True)
@@ -51,3 +52,6 @@ clean::
 
 flakes:
 	$(PYFLAKES) dulwich
+
+pep8:
+	$(PEP8) dulwich
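
The new target simply runs "pep8 dulwich" (with PEP8 overridable, like PYFLAKES), so "make pep8" now sits next to "make flakes" as a style check. For reference, the same check can also be driven from Python through the pep8 distribution's StyleGuide API; the snippet below is a rough sketch of that, not part of this commit:

    import pep8  # the pep8 package (the same API later shipped as pycodestyle)

    style = pep8.StyleGuide()                 # default configuration, prints violations
    report = style.check_files(['dulwich'])   # recurse into the package
    print('%d pep8 problems' % report.total_errors)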

dulwich/client.py (+52, -43)

@@ -74,12 +74,13 @@ def _fileno_can_read(fileno):
     return len(select.select([fileno], [], [], 0)[0]) > 0
 
 COMMON_CAPABILITIES = ['ofs-delta', 'side-band-64k']
-FETCH_CAPABILITIES = ['thin-pack', 'multi_ack', 'multi_ack_detailed'] + COMMON_CAPABILITIES
+FETCH_CAPABILITIES = (['thin-pack', 'multi_ack', 'multi_ack_detailed'] +
+                      COMMON_CAPABILITIES)
 SEND_CAPABILITIES = ['report-status'] + COMMON_CAPABILITIES
 
 
 class ReportStatusParser(object):
-    """Handle status as reported by servers with the 'report-status' capability.
+    """Handle status as reported by servers with 'report-status' capability.
     """
 
     def __init__(self):
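
Most of the client.py hunks below are pure line-length fixes (pep8's E501, 79-column limit): long expressions and calls are wrapped with implicit continuation inside parentheses rather than backslashes, the style PEP 8 recommends. A minimal sketch of the two patterns used throughout (the names here are illustrative, not taken from dulwich):

    # Long expression: wrap in parentheses and align the continuation.
    capabilities = (['thin-pack', 'multi_ack'] +
                    common_capabilities)

    # Long call: break after the opening parenthesis and hang-indent the arguments.
    result = fetch_pack(
        path, determine_wants, graph_walker, write_fn, progress)
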
@@ -180,8 +181,8 @@ class GitClient(object):
         """Upload a pack to a remote repository.
 
         :param path: Repository path
-        :param generate_pack_contents: Function that can return a sequence of the
-            shas of the objects to upload.
+        :param generate_pack_contents: Function that can return a sequence of
+            the shas of the objects to upload.
         :param progress: Optional progress function
 
         :raises SendPackError: if server rejects the pack data
@@ -204,8 +205,9 @@ class GitClient(object):
             determine_wants = target.object_store.determine_wants_all
         f, commit, abort = target.object_store.add_pack()
         try:
-            result = self.fetch_pack(path, determine_wants,
-                    target.get_graph_walker(), f.write, progress)
+            result = self.fetch_pack(
+                path, determine_wants, target.get_graph_walker(), f.write,
+                progress)
         except:
             abort()
             raise
@@ -282,7 +284,8 @@ class GitClient(object):
                 if cb is not None:
                     cb(pkt)
 
-    def _handle_receive_pack_head(self, proto, capabilities, old_refs, new_refs):
+    def _handle_receive_pack_head(self, proto, capabilities, old_refs,
+                                  new_refs):
         """Handle the head of a 'git-receive-pack' request.
 
         :param proto: Protocol object to read from
@@ -301,12 +304,12 @@ class GitClient(object):
 
             if old_sha1 != new_sha1:
                 if sent_capabilities:
-                    proto.write_pkt_line('%s %s %s' % (old_sha1, new_sha1,
-                                                            refname))
+                    proto.write_pkt_line('%s %s %s' % (
+                        old_sha1, new_sha1, refname))
                 else:
                     proto.write_pkt_line(
-                      '%s %s %s\0%s' % (old_sha1, new_sha1, refname,
-                                        ' '.join(capabilities)))
+                        '%s %s %s\0%s' % (old_sha1, new_sha1, refname,
+                                          ' '.join(capabilities)))
                     sent_capabilities = True
             if new_sha1 not in have and new_sha1 != ZERO_SHA:
                 want.append(new_sha1)
@@ -323,7 +326,7 @@ class GitClient(object):
         if "side-band-64k" in capabilities:
             if progress is None:
                 progress = lambda x: None
-            channel_callbacks = { 2: progress }
+            channel_callbacks = {2: progress}
             if 'report-status' in capabilities:
                 channel_callbacks[1] = PktLineParser(
                     self._report_status_parser.handle_packet).parse
@@ -426,8 +429,8 @@ class TraditionalGitClient(GitClient):
         """Upload a pack to a remote repository.
 
         :param path: Repository path
-        :param generate_pack_contents: Function that can return a sequence of the
-            shas of the objects to upload.
+        :param generate_pack_contents: Function that can return a sequence of
+            the shas of the objects to upload.
         :param progress: Optional callback called with progress updates
 
         :raises SendPackError: if server rejects the pack data
@@ -477,8 +480,8 @@ class TraditionalGitClient(GitClient):
                 self._report_status_parser.check()
             return old_refs
 
-        (have, want) = self._handle_receive_pack_head(proto,
-            negotiated_capabilities, old_refs, new_refs)
+        (have, want) = self._handle_receive_pack_head(
+            proto, negotiated_capabilities, old_refs, new_refs)
         if not want and old_refs == new_refs:
             return new_refs
         objects = generate_pack_contents(have, want)
@@ -493,8 +496,8 @@ class TraditionalGitClient(GitClient):
                     set(old_refs.iteritems())) > 0:
                 entries, sha = write_pack_objects(proto.write_file(), objects)
 
-        self._handle_receive_pack_tail(proto, negotiated_capabilities,
-            progress)
+        self._handle_receive_pack_tail(
+            proto, negotiated_capabilities, progress)
         return new_refs
 
     def fetch_pack(self, path, determine_wants, graph_walker, pack_data,
@@ -508,7 +511,8 @@ class TraditionalGitClient(GitClient):
         """
         proto, can_read = self._connect('upload-pack', path)
         refs, server_capabilities = read_pkt_refs(proto)
-        negotiated_capabilities = self._fetch_capabilities & server_capabilities
+        negotiated_capabilities = (
+            self._fetch_capabilities & server_capabilities)
 
         if refs is None:
             proto.write_pkt_line(None)
@@ -524,10 +528,10 @@ class TraditionalGitClient(GitClient):
         if not wants:
             proto.write_pkt_line(None)
             return refs
-        self._handle_upload_pack_head(proto, negotiated_capabilities,
-            graph_walker, wants, can_read)
-        self._handle_upload_pack_tail(proto, negotiated_capabilities,
-            graph_walker, pack_data, progress)
+        self._handle_upload_pack_head(
+            proto, negotiated_capabilities, graph_walker, wants, can_read)
+        self._handle_upload_pack_tail(
+            proto, negotiated_capabilities, graph_walker, pack_data, progress)
         return refs
 
     def archive(self, path, committish, write_data, progress=None):
@@ -560,8 +564,8 @@ class TCPGitClient(TraditionalGitClient):
         TraditionalGitClient.__init__(self, *args, **kwargs)
 
     def _connect(self, cmd, path):
-        sockaddrs = socket.getaddrinfo(self._host, self._port,
-            socket.AF_UNSPEC, socket.SOCK_STREAM)
+        sockaddrs = socket.getaddrinfo(
+            self._host, self._port, socket.AF_UNSPEC, socket.SOCK_STREAM)
         s = None
         err = socket.error("no address found for %s" % self._host)
         for (family, socktype, proto, canonname, sockaddr) in sockaddrs:
@@ -652,8 +656,8 @@ class LocalGitClient(GitClient):
         """Upload a pack to a remote repository.
 
         :param path: Repository path
-        :param generate_pack_contents: Function that can return a sequence of the
-            shas of the objects to upload.
+        :param generate_pack_contents: Function that can return a sequence of
+            the shas of the objects to upload.
         :param progress: Optional progress function
 
         :raises SendPackError: if server rejects the pack data
@@ -674,7 +678,8 @@ class LocalGitClient(GitClient):
         """
         from dulwich.repo import Repo
         r = Repo(path)
-        return r.fetch(target, determine_wants=determine_wants, progress=progress)
+        return r.fetch(target, determine_wants=determine_wants,
+                       progress=progress)
 
     def fetch_pack(self, path, determine_wants, graph_walker, pack_data,
                    progress=None):
@@ -699,6 +704,7 @@ class LocalGitClient(GitClient):
 # What Git client to use for local access
 default_local_git_client_cls = SubprocessGitClient
 
+
 class SSHVendor(object):
     """A client side SSH implementation."""
 
@@ -764,7 +770,8 @@ else:
 
             # Start
             if self.should_monitor:
-                self.monitor_thread = threading.Thread(target=self.monitor_stderr)
+                self.monitor_thread = threading.Thread(
+                    target=self.monitor_stderr)
                 self.monitor_thread.start()
 
         def monitor_stderr(self):
@@ -830,7 +837,7 @@ else:
             self.ssh_kwargs = {}
 
         def run_command(self, host, command, username=None, port=None,
-                progress_stderr=None):
+                        progress_stderr=None):
 
             # Paramiko needs an explicit port. None is not valid
             if port is None:
@@ -849,8 +856,8 @@ else:
             # Run commands
             channel.exec_command(*command)
 
-            return ParamikoWrapper(client, channel,
-                    progress_stderr=progress_stderr)
+            return ParamikoWrapper(
+                client, channel, progress_stderr=progress_stderr)
 
 
 # Can be overridden by users
@@ -875,8 +882,9 @@ class SSHGitClient(TraditionalGitClient):
         con = get_ssh_vendor().run_command(
             self.host, ["%s '%s'" % (self._get_cmd_path(cmd), path)],
             port=self.port, username=self.username)
-        return (Protocol(con.read, con.write, report_activity=self._report_activity),
-                con.can_read)
+        return (Protocol(
+            con.read, con.write, report_activity=self._report_activity),
+            con.can_read)
 
 
 def default_user_agent_string():
@@ -890,7 +898,7 @@ def default_urllib2_opener(config):
         proxy_server = None
     handlers = []
     if proxy_server is not None:
-        handlers.append(urllib2.ProxyHandler({"http" : proxy_server}))
+        handlers.append(urllib2.ProxyHandler({"http": proxy_server}))
     opener = urllib2.build_opener(*handlers)
     if config is not None:
         user_agent = config.get("http", "useragent")
@@ -904,7 +912,8 @@ def default_urllib2_opener(config):
 
 class HttpGitClient(GitClient):
 
-    def __init__(self, base_url, dumb=None, opener=None, config=None, *args, **kwargs):
+    def __init__(self, base_url, dumb=None, opener=None, config=None, *args,
+                 **kwargs):
         self.base_url = base_url.rstrip("/") + "/"
         self.dumb = dumb
         if opener is None:
@@ -931,7 +940,7 @@ class HttpGitClient(GitClient):
         assert url[-1] == "/"
         url = urlparse.urljoin(url, "info/refs")
         headers = {}
-        if self.dumb != False:
+        if self.dumb is not False:
             url += "?service=%s" % service
             headers["Content-Type"] = "application/x-%s-request" % service
         resp = self._http_request(url, headers)
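
One change above is semantic in style rather than layout: self.dumb != False becomes self.dumb is not False, which is what pep8 asks for when comparing against the False singleton (E712). Equality and identity only differ for values that compare equal to False; since dumb appears to be either None or a bool here, behaviour should be unchanged and the identity form is simply the idiomatic spelling. A quick sketch of the distinction:

    0 != False         # False: 0 compares equal to False
    0 is not False     # True:  0 is not the False object itself
    None is not False  # True:  matches the old None != False behaviour
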
@@ -962,8 +971,8 @@ class HttpGitClient(GitClient):
         """Upload a pack to a remote repository.
 
         :param path: Repository path
-        :param generate_pack_contents: Function that can return a sequence of the
-            shas of the objects to upload.
+        :param generate_pack_contents: Function that can return a sequence of
+            the shas of the objects to upload.
         :param progress: Optional progress function
 
         :raises SendPackError: if server rejects the pack data
@@ -1022,11 +1031,11 @@ class HttpGitClient(GitClient):
             raise NotImplementedError(self.send_pack)
         req_data = BytesIO()
         req_proto = Protocol(None, req_data.write)
-        self._handle_upload_pack_head(req_proto,
-            negotiated_capabilities, graph_walker, wants,
+        self._handle_upload_pack_head(
+            req_proto, negotiated_capabilities, graph_walker, wants,
             lambda: False)
-        resp = self._smart_request("git-upload-pack", url,
-            data=req_data.getvalue())
+        resp = self._smart_request(
+            "git-upload-pack", url, data=req_data.getvalue())
         resp_proto = Protocol(resp.read, None)
         self._handle_upload_pack_tail(resp_proto, negotiated_capabilities,
             graph_walker, pack_data, progress)

dulwich/config.py (+2, -2)

@@ -170,7 +170,7 @@ def _parse_string(value):
     value = value.strip()
     ret = []
     block = []
-    in_quotes  = False
+    in_quotes = False
     for c in value:
         if c == "\"":
             in_quotes = (not in_quotes)
@@ -290,7 +290,7 @@ class ConfigFile(ConfigDict):
                 ret._values[section][setting] = value
                 if not continuation:
                     setting = None
-            else: # continuation line
+            else:  # continuation line
                 if line.endswith("\\\n"):
                     line = line[:-2]
                     continuation = True
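
The config.py edits, like the dictionary-literal tweaks in client.py, are whitespace fixes from pep8's E2xx family: a single space around assignment, no stray spaces inside braces or before a colon, and at least two spaces before an inline comment. Collected in one illustrative (non-dulwich) snippet:

    in_quotes = False          # not 'in_quotes  = False'
    handlers = {"http": None}  # not '{ "http" : None }' or '{ 2: progress }'
    count = 0  # inline comments get at least two spaces before the '#'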

dulwich/diff_tree.py (+22, -17)

@@ -131,7 +131,8 @@ def walk_trees(store, tree1_id, tree2_id, prune_identical=False):
         to None. If neither entry's path is None, they are guaranteed to
         match.
     """
-    # This could be fairly easily generalized to >2 trees if we find a use case.
+    # This could be fairly easily generalized to >2 trees if we find a use
+    # case.
     mode1 = tree1_id and stat.S_IFDIR or None
     mode2 = tree2_id and stat.S_IFDIR or None
     todo = [(TreeEntry('', mode1, tree1_id), TreeEntry('', mode2, tree2_id))]
@@ -171,8 +172,8 @@ def tree_changes(store, tree1_id, tree2_id, want_unchanged=False,
     if (rename_detector is not None and tree1_id is not None and
         tree2_id is not None):
         for change in rename_detector.changes_with_renames(
-          tree1_id, tree2_id, want_unchanged=want_unchanged):
-            yield change
+            tree1_id, tree2_id, want_unchanged=want_unchanged):
+                yield change
         return
 
     entries = walk_trees(store, tree1_id, tree2_id,
@@ -229,8 +230,8 @@ def tree_changes_for_merge(store, parent_tree_ids, tree_id,
         in the merge.
 
         Each list contains one element per parent, with the TreeChange for that
-        path relative to that parent. An element may be None if it never existed
-        in one parent and was deleted in two others.
+        path relative to that parent. An element may be None if it never
+        existed in one parent and was deleted in two others.
 
         A path is only included in the output if it is a conflict, i.e. its SHA
         in the merge tree is not found in any of the parents, or in the case of
@@ -265,7 +266,8 @@ def tree_changes_for_merge(store, parent_tree_ids, tree_id,
             yield changes
         elif None not in changes:
             # If no change was found relative to one parent, that means the SHA
-            # must have matched the SHA in that parent, so it is not a conflict.
+            # must have matched the SHA in that parent, so it is not a
+            # conflict.
             yield changes
 
 
@@ -329,11 +331,11 @@ def _similarity_score(obj1, obj2, block_cache=None):
 
     :param obj1: The first object to score.
     :param obj2: The second object to score.
-    :param block_cache: An optional dict of SHA to block counts to cache results
-        between calls.
-    :return: The similarity score between the two objects, defined as the number
-        of bytes in common between the two objects divided by the maximum size,
-        scaled to the range 0-100.
+    :param block_cache: An optional dict of SHA to block counts to cache
+        results between calls.
+    :return: The similarity score between the two objects, defined as the
+        number of bytes in common between the two objects divided by the
+        maximum size, scaled to the range 0-100.
     """
     if block_cache is None:
         block_cache = {}
@@ -372,8 +374,8 @@ class RenameDetector(object):
         :param store: An ObjectStore for looking up objects.
         :param rename_threshold: The threshold similarity score for considering
             an add/delete pair to be a rename/copy; see _similarity_score.
-        :param max_files: The maximum number of adds and deletes to consider, or
-            None for no limit. The detector is guaranteed to compare no more
+        :param max_files: The maximum number of adds and deletes to consider,
+            or None for no limit. The detector is guaranteed to compare no more
             than max_files ** 2 add/delete pairs. This limit is provided because
             rename detection can be quadratic in the project size. If the limit
             is exceeded, no content rename detection is attempted.
@@ -475,7 +477,8 @@ class RenameDetector(object):
             return CHANGE_MODIFY
         elif delete.type != CHANGE_DELETE:
             # If it's in deletes but not marked as a delete, it must have been
-            # added due to find_copies_harder, and needs to be marked as a copy.
+            # added due to find_copies_harder, and needs to be marked as a
+            # copy.
             return CHANGE_COPY
         return CHANGE_RENAME
 
@@ -509,7 +512,8 @@ class RenameDetector(object):
                     candidates.append((-score, rename))
 
     def _choose_content_renames(self):
-        # Sort scores from highest to lowest, but keep names in ascending order.
+        # Sort scores from highest to lowest, but keep names in ascending
+        # order.
         self._candidates.sort()
 
         delete_paths = set()
@@ -541,11 +545,12 @@ class RenameDetector(object):
             path = add.new.path
             delete = delete_map.get(path)
             if (delete is not None and
-              stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
+                stat.S_IFMT(delete.old.mode) == stat.S_IFMT(add.new.mode)):
                 modifies[path] = TreeChange(CHANGE_MODIFY, delete.old, add.new)
 
         self._adds = [a for a in self._adds if a.new.path not in modifies]
-        self._deletes = [a for a in self._deletes if a.new.path not in modifies]
+        self._deletes = [a for a in self._deletes if a.new.path not in
+                         modifies]
         self._changes += modifies.values()
 
     def _sorted_changes(self):

dulwich/object_store.py (+11, -14)

@@ -924,10 +924,10 @@ def _collect_filetree_revs(obj_store, tree_sha, kset):
     """
     filetree = obj_store[tree_sha]
     for name, mode, sha in filetree.iteritems():
-       if not S_ISGITLINK(mode) and sha not in kset:
-           kset.add(sha)
-           if stat.S_ISDIR(mode):
-               _collect_filetree_revs(obj_store, sha, kset)
+        if not S_ISGITLINK(mode) and sha not in kset:
+            kset.add(sha)
+            if stat.S_ISDIR(mode):
+                _collect_filetree_revs(obj_store, sha, kset)
 
 
 def _split_commits_and_tags(obj_store, lst, ignore_unknown=False):
@@ -978,7 +978,7 @@ class MissingObjectFinder(object):
     """
 
     def __init__(self, object_store, haves, wants, progress=None,
-            get_tagged=None, get_parents=lambda commit: commit.parents):
+                 get_tagged=None, get_parents=lambda commit: commit.parents):
         self.object_store = object_store
         self._get_parents = get_parents
         # process Commits and Tags differently
@@ -986,22 +986,19 @@ class MissingObjectFinder(object):
         # and such SHAs would get filtered out by _split_commits_and_tags,
         # wants shall list only known SHAs, and otherwise
         # _split_commits_and_tags fails with KeyError
-        have_commits, have_tags = \
-                _split_commits_and_tags(object_store, haves, True)
-        want_commits, want_tags = \
-                _split_commits_and_tags(object_store, wants, False)
+        have_commits, have_tags = (
+            _split_commits_and_tags(object_store, haves, True))
+        want_commits, want_tags = (
+            _split_commits_and_tags(object_store, wants, False))
         # all_ancestors is a set of commits that shall not be sent
         # (complete repository up to 'haves')
         all_ancestors = object_store._collect_ancestors(
-                have_commits,
-                get_parents=self._get_parents)[0]
+            have_commits, get_parents=self._get_parents)[0]
         # all_missing - complete set of commits between haves and wants
         # common - commits from all_ancestors we hit into while
         # traversing parent hierarchy of wants
         missing_commits, common_commits = object_store._collect_ancestors(
-            want_commits,
-            all_ancestors,
-            get_parents=self._get_parents);
+            want_commits, all_ancestors, get_parents=self._get_parents)
         self.sha_done = set()
         # Now, fill sha_done with commits and revisions of
         # files and directories known to be both locally

dulwich/objects.py (+34, -27)

@@ -58,6 +58,7 @@ _TAGGER_HEADER = "tagger"
 
 S_IFGITLINK = 0o160000
 
+
 def S_ISGITLINK(m):
     """Check if a mode indicates a submodule.
 
@@ -727,10 +728,12 @@ class Tag(ShaFile):
     tagger = serializable_property("tagger",
         "Returns the name of the person who created this tag")
     tag_time = serializable_property("tag_time",
-        "The creation timestamp of the tag.  As the number of seconds since the epoch")
+        "The creation timestamp of the tag.  As the number of seconds "
+        "since the epoch")
     tag_timezone = serializable_property("tag_timezone",
         "The timezone that tag_time is in.")
-    message = serializable_property("message", "The message attached to this tag")
+    message = serializable_property(
+        "message", "The message attached to this tag")
 
 
 class TreeEntry(namedtuple('TreeEntry', ['path', 'mode', 'sha'])):
@@ -881,7 +884,8 @@ class Tree(ShaFile):
         """
         if isinstance(name, int) and isinstance(mode, str):
             (name, mode) = (mode, name)
-            warnings.warn("Please use Tree.add(name, mode, hexsha)",
+            warnings.warn(
+                "Please use Tree.add(name, mode, hexsha)",
                 category=DeprecationWarning, stacklevel=2)
         self._ensure_parsed()
         self._entries[name] = mode, hexsha
@@ -890,7 +894,8 @@ class Tree(ShaFile):
     def iteritems(self, name_order=False):
         """Iterate over entries.
 
-        :param name_order: If True, iterate in name order instead of tree order.
+        :param name_order: If True, iterate in name order instead of tree
+            order.
         :return: Iterator over (name, mode, sha) tuples
         """
         self._ensure_parsed()
@@ -909,8 +914,8 @@ class Tree(ShaFile):
             parsed_entries = parse_tree("".join(chunks))
         except ValueError as e:
             raise ObjectFormatException(e)
-        # TODO: list comprehension is for efficiency in the common (small) case;
-        # if memory efficiency in the large case is a concern, use a genexp.
+        # TODO: list comprehension is for efficiency in the common (small)
+        # case; if memory efficiency in the large case is a concern, use a genexp.
         self._entries = dict([(n, (m, s)) for n, m, s in parsed_entries])
 
     def check(self):
@@ -1088,12 +1093,12 @@ class Commit(ShaFile):
 
     def _deserialize(self, chunks):
         (self._tree, self._parents, author_info, commit_info, self._encoding,
-                self._mergetag, self._message, self._extra) = \
-                        parse_commit(chunks)
+                self._mergetag, self._message, self._extra) = (
+                        parse_commit(chunks))
         (self._author, self._author_time, (self._author_timezone,
-            self._author_timezone_neg_utc)) = author_info
+             self._author_timezone_neg_utc)) = author_info
         (self._committer, self._commit_time, (self._commit_timezone,
-            self._commit_timezone_neg_utc)) = commit_info
+             self._commit_timezone_neg_utc)) = commit_info
 
     def check(self):
         """Check this object for internal consistency.
@@ -1137,12 +1142,12 @@ class Commit(ShaFile):
         for p in self._parents:
             chunks.append("%s %s\n" % (_PARENT_HEADER, p))
         chunks.append("%s %s %s %s\n" % (
-          _AUTHOR_HEADER, self._author, str(self._author_time),
-          format_timezone(self._author_timezone,
+            _AUTHOR_HEADER, self._author, str(self._author_time),
+            format_timezone(self._author_timezone,
                           self._author_timezone_neg_utc)))
         chunks.append("%s %s %s %s\n" % (
-          _COMMITTER_HEADER, self._committer, str(self._commit_time),
-          format_timezone(self._commit_timezone,
+            _COMMITTER_HEADER, self._committer, str(self._commit_time),
+            format_timezone(self._commit_timezone,
                           self._commit_timezone_neg_utc)))
         if self.encoding:
             chunks.append("%s %s\n" % (_ENCODING_HEADER, self.encoding))
@@ -1158,13 +1163,15 @@ class Commit(ShaFile):
             chunks[-1] = chunks[-1].rstrip(" \n")
         for k, v in self.extra:
             if "\n" in k or "\n" in v:
-                raise AssertionError("newline in extra data: %r -> %r" % (k, v))
+                raise AssertionError(
+                    "newline in extra data: %r -> %r" % (k, v))
             chunks.append("%s %s\n" % (k, v))
-        chunks.append("\n") # There must be a new line after the headers
+        chunks.append("\n")  # There must be a new line after the headers
         chunks.append(self._message)
         return chunks
 
-    tree = serializable_property("tree", "Tree that is the state of this commit")
+    tree = serializable_property(
+        "tree", "Tree that is the state of this commit")
 
     def _get_parents(self):
         """Return a list of parents of this commit."""
@@ -1192,8 +1199,8 @@ class Commit(ShaFile):
     committer = serializable_property("committer",
         "The name of the committer of the commit")
 
-    message = serializable_property("message",
-        "The commit message")
+    message = serializable_property(
+        "message", "The commit message")
 
     commit_time = serializable_property("commit_time",
         "The timestamp of the commit. As the number of seconds since the epoch.")
@@ -1202,16 +1209,17 @@ class Commit(ShaFile):
         "The zone the commit time is in")
 
     author_time = serializable_property("author_time",
-        "The timestamp the commit was written. as the number of seconds since the epoch.")
+        "The timestamp the commit was written. As the number of "
+        "seconds since the epoch.")
 
-    author_timezone = serializable_property("author_timezone",
-        "Returns the zone the author time is in.")
+    author_timezone = serializable_property(
+        "author_timezone", "Returns the zone the author time is in.")
 
-    encoding = serializable_property("encoding",
-        "Encoding of the commit message.")
+    encoding = serializable_property(
+        "encoding", "Encoding of the commit message.")
 
-    mergetag = serializable_property("mergetag",
-        "Associated signed tag.")
+    mergetag = serializable_property(
+        "mergetag", "Associated signed tag.")
 
 
 OBJECT_CLASSES = (
@@ -1228,7 +1236,6 @@ for cls in OBJECT_CLASSES:
     _TYPE_MAP[cls.type_num] = cls
 
 
-
 # Hold on to the pure-python implementations for testing
 _parse_tree_py = parse_tree
 _sorted_tree_items_py = sorted_tree_items
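
The blank-line churn here and in client.py follows pep8's blank-line rules: two blank lines before top-level functions and classes (E302, hence the lines added before S_ISGITLINK and SSHVendor) and no more than two in a row (E303, hence the extra one removed above). A tiny illustrative layout with hypothetical names:

    CONSTANT = 1


    def helper():             # two blank lines before a top-level def
        return CONSTANT


    class Example(object):    # and before a top-level class
        pass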

dulwich/pack.py (+0, -3)

@@ -76,9 +76,6 @@ from dulwich.objects import (
     object_header,
     )
 
-supports_mmap_offset = (sys.version_info[0] >= 3 or
-        (sys.version_info[0] == 2 and sys.version_info[1] >= 6))
-
 
 OFS_DELTA = 6
 REF_DELTA = 7