
Convert more docstrings to Google style.

Jelmer Vernooij 5 years ago
parent commit d6d3c089a8
8 changed files with 549 additions and 403 deletions
  1. dulwich/index.py (+94 -71)
  2. dulwich/line_ending.py (+12 -8)
  3. dulwich/lru_cache.py (+13 -10)
  4. dulwich/mailmap.py (+6 -4)
  5. dulwich/object_store.py (+123 -92)
  6. dulwich/objects.py (+96 -65)
  7. dulwich/objectspec.py (+52 -36)
  8. dulwich/pack.py (+153 -117)

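The conversion pattern is uniform across all eight files: Sphinx-style :param:/:return:/:raise: fields become Google-style Args:/Returns:/Raises: sections. A minimal before/after sketch of the convention (the function here is made up purely for illustration):

    # Before: Sphinx/reST field lists
    def lookup(store, sha):
        """Fetch an object.

        :param store: Object store to read from
        :param sha: SHA1 of the object
        :return: The object
        :raise KeyError: If the object is missing
        """

    # After: Google-style sections, as applied in this commit
    def lookup(store, sha):
        """Fetch an object.

        Args:
          store: Object store to read from
          sha: SHA1 of the object
        Returns: The object
        Raises:
          KeyError: If the object is missing
        """
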
+ 94 - 71
dulwich/index.py

@@ -172,8 +172,9 @@ def read_index_dict(f):
 def write_index(f, entries):
     """Write an index file.
 
-    :param f: File-like object to write to
-    :param entries: Iterable over the entries to write
+    Args:
+      f: File-like object to write to
+      entries: Iterable over the entries to write
     """
     f.write(b'DIRC')
     f.write(struct.pack(b'>LL', 2, len(entries)))
@@ -196,7 +197,8 @@ def cleanup_mode(mode):
 
     This will return a mode that can be stored in a tree object.
 
-    :param mode: Mode to clean up.
+    Args:
+      mode: Mode to clean up.
     """
     if stat.S_ISLNK(mode):
         return stat.S_IFLNK
@@ -215,7 +217,8 @@ class Index(object):
     def __init__(self, filename):
         """Open an index file.
 
-        :param filename: Path to the index file
+        Args:
+          filename: Path to the index file
         """
         self._filename = filename
         self.clear()
@@ -259,7 +262,7 @@ class Index(object):
     def __getitem__(self, name):
         """Retrieve entry by relative path.
 
-        :return: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha,
+        Returns: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha,
             flags)
         """
         return self._byname[name]
@@ -314,10 +317,11 @@ class Index(object):
     def changes_from_tree(self, object_store, tree, want_unchanged=False):
         """Find the differences between the contents of this index and a tree.
 
-        :param object_store: Object store to use for retrieving tree contents
-        :param tree: SHA1 of the root tree
-        :param want_unchanged: Whether unchanged files should be reported
-        :return: Iterator over tuples with (oldpath, newpath), (oldmode,
+        Args:
+          object_store: Object store to use for retrieving tree contents
+          tree: SHA1 of the root tree
+          want_unchanged: Whether unchanged files should be reported
+        Returns: Iterator over tuples with (oldpath, newpath), (oldmode,
             newmode), (oldsha, newsha)
         """
         def lookup_entry(path):
@@ -331,8 +335,10 @@ class Index(object):
     def commit(self, object_store):
         """Create a new tree from an index.
 
-        :param object_store: Object store to save the tree in
-        :return: Root tree SHA
+        Args:
+          object_store: Object store to save the tree in
+        Returns:
+          Root tree SHA
         """
         return commit_tree(object_store, self.iterobjects())
 
@@ -340,9 +346,11 @@ class Index(object):
 def commit_tree(object_store, blobs):
     """Commit a new tree.
 
-    :param object_store: Object store to add trees to
-    :param blobs: Iterable over blob path, sha, mode entries
-    :return: SHA1 of the created tree.
+    Args:
+      object_store: Object store to add trees to
+      blobs: Iterable over blob path, sha, mode entries
+    Returns:
+      SHA1 of the created tree.
     """
 
     trees = {b'': {}}
@@ -380,10 +388,11 @@ def commit_tree(object_store, blobs):
 def commit_index(object_store, index):
     """Create a new tree from an index.
 
-    :param object_store: Object store to save the tree in
-    :param index: Index file
-    :note: This function is deprecated, use index.commit() instead.
-    :return: Root tree sha.
+    Args:
+      object_store: Object store to save the tree in
+      index: Index file
+    Note: This function is deprecated, use index.commit() instead.
+    Returns: Root tree sha.
     """
     return commit_tree(object_store, index.iterobjects())
 
@@ -393,12 +402,13 @@ def changes_from_tree(names, lookup_entry, object_store, tree,
     """Find the differences between the contents of a tree and
     a working copy.
 
-    :param names: Iterable of names in the working copy
-    :param lookup_entry: Function to lookup an entry in the working copy
-    :param object_store: Object store to use for retrieving tree contents
-    :param tree: SHA1 of the root tree, or None for an empty tree
-    :param want_unchanged: Whether unchanged files should be reported
-    :return: Iterator over tuples with (oldpath, newpath), (oldmode, newmode),
+    Args:
+      names: Iterable of names in the working copy
+      lookup_entry: Function to lookup an entry in the working copy
+      object_store: Object store to use for retrieving tree contents
+      tree: SHA1 of the root tree, or None for an empty tree
+      want_unchanged: Whether unchanged files should be reported
+    Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode),
         (oldsha, newsha)
     """
     # TODO(jelmer): Support a include_trees option
@@ -429,9 +439,10 @@ def changes_from_tree(names, lookup_entry, object_store, tree,
 def index_entry_from_stat(stat_val, hex_sha, flags, mode=None):
     """Create a new index entry from a stat value.
 
-    :param stat_val: POSIX stat_result instance
-    :param hex_sha: Hex sha of the object
-    :param flags: Index flags
+    Args:
+      stat_val: POSIX stat_result instance
+      hex_sha: Hex sha of the object
+      flags: Index flags
     """
     if mode is None:
         mode = cleanup_mode(stat_val.st_mode)
@@ -445,12 +456,13 @@ def index_entry_from_stat(stat_val, hex_sha, flags, mode=None):
 def build_file_from_blob(blob, mode, target_path, honor_filemode=True):
     """Build a file or symlink on disk based on a Git object.
 
-    :param obj: The git object
-    :param mode: File mode
-    :param target_path: Path to write to
-    :param honor_filemode: An optional flag to honor core.filemode setting in
+    Args:
+      blob: The git object
+      mode: File mode
+      target_path: Path to write to
+      honor_filemode: An optional flag to honor core.filemode setting in
         config file, default is core.filemode=True, change executable bit
-    :return: stat object for the file
+    Returns: stat object for the file
     """
     try:
         oldstat = os.lstat(target_path)
@@ -518,16 +530,17 @@ def build_index_from_tree(root_path, index_path, object_store, tree_id,
                           validate_path_element=validate_path_element_default):
     """Generate and materialize index from a tree
 
-    :param tree_id: Tree to materialize
-    :param root_path: Target dir for materialized index files
-    :param index_path: Target path for generated index
-    :param object_store: Non-empty object store holding tree contents
-    :param honor_filemode: An optional flag to honor core.filemode setting in
+    Args:
+      tree_id: Tree to materialize
+      root_path: Target dir for materialized index files
+      index_path: Target path for generated index
+      object_store: Non-empty object store holding tree contents
+      honor_filemode: An optional flag to honor core.filemode setting in
         config file, default is core.filemode=True, change executable bit
-    :param validate_path_element: Function to validate path elements to check
+      validate_path_element: Function to validate path elements to check
         out; default just refuses .git and .. directories.
 
-    :note:: existing index is wiped and contents are not merged
+    Note: existing index is wiped and contents are not merged
         in a working dir. Suitable only for fresh clones.
     """
 
@@ -570,9 +583,10 @@ def build_index_from_tree(root_path, index_path, object_store, tree_id,
 def blob_from_path_and_stat(fs_path, st):
     """Create a blob from a path and a stat object.
 
-    :param fs_path: Full file system path to file
-    :param st: A stat object
-    :return: A `Blob` object
+    Args:
+      fs_path: Full file system path to file
+      st: A stat object
+    Returns: A `Blob` object
     """
     assert isinstance(fs_path, bytes)
     blob = Blob()
@@ -594,8 +608,9 @@ def blob_from_path_and_stat(fs_path, st):
 def read_submodule_head(path):
     """Read the head commit of a submodule.
 
-    :param path: path to the submodule
-    :return: HEAD sha, None if not a valid head/repository
+    Args:
+      path: path to the submodule
+    Returns: HEAD sha, None if not a valid head/repository
     """
     from dulwich.errors import NotGitRepository
     from dulwich.repo import Repo
@@ -641,9 +656,10 @@ def _has_directory_changed(tree_path, entry):
 def get_unstaged_changes(index, root_path, filter_blob_callback=None):
     """Walk through an index and check for differences against working tree.
 
-    :param index: index to check
-    :param root_path: path in which to find files
-    :return: iterator over paths with unstaged changes
+    Args:
+      index: index to check
+      root_path: path in which to find files
+    Returns: iterator over paths with unstaged changes
     """
     # For each entry in the index check the sha1 & ensure not staged
     if not isinstance(root_path, bytes):
@@ -680,10 +696,11 @@ os_sep_bytes = os.sep.encode('ascii')
 def _tree_to_fs_path(root_path, tree_path):
     """Convert a git tree path to a file system path.
 
-    :param root_path: Root filesystem path
-    :param tree_path: Git tree path as bytes
+    Args:
+      root_path: Root filesystem path
+      tree_path: Git tree path as bytes
 
-    :return: File system path.
+    Returns: File system path.
     """
     assert isinstance(tree_path, bytes)
     if os_sep_bytes != b'/':
@@ -696,10 +713,11 @@ def _tree_to_fs_path(root_path, tree_path):
 def _fs_to_tree_path(fs_path, fs_encoding=None):
     """Convert a file system path to a git tree path.
 
-    :param fs_path: File system path.
-    :param fs_encoding: File system encoding
+    Args:
+      fs_path: File system path.
+      fs_encoding: File system encoding
 
-    :return:  Git tree path as bytes
+    Returns:  Git tree path as bytes
     """
     if fs_encoding is None:
         fs_encoding = sys.getfilesystemencoding()
@@ -721,10 +739,11 @@ def index_entry_from_path(path, object_store=None):
     and tree references. For directories and
     non-existent files it returns None
 
-    :param path: Path to create an index entry for
-    :param object_store: Optional object store to
+    Args:
+      path: Path to create an index entry for
+      object_store: Optional object store to
         save new blobs in
-    :return: An index entry; None for directories
+    Returns: An index entry; None for directories
     """
     assert isinstance(path, bytes)
     st = os.lstat(path)
@@ -746,10 +765,11 @@ def index_entry_from_path(path, object_store=None):
 def iter_fresh_entries(paths, root_path, object_store=None):
     """Iterate over current versions of index entries on disk.
 
-    :param paths: Paths to iterate over
-    :param root_path: Root path to access from
-    :param store: Optional store to save new blobs in
-    :return: Iterator over path, index_entry
+    Args:
+      paths: Paths to iterate over
+      root_path: Root path to access from
+      object_store: Optional object store to save new blobs in
+    Returns: Iterator over path, index_entry
     """
     for path in paths:
         p = _tree_to_fs_path(root_path, path)
@@ -768,11 +788,12 @@ def iter_fresh_blobs(index, root_path):
 
     Don't use this function; it removes missing entries from index.
 
-    :param index: Index file
-    :param root_path: Root path to access from
-    :param include_deleted: Include deleted entries with sha and
+    Args:
+      index: Index file
+      root_path: Root path to access from
+      include_deleted: Include deleted entries with sha and
         mode set to None
-    :return: Iterator over path, sha, mode
+    Returns: Iterator over path, sha, mode
     """
     import warnings
     warnings.warn(PendingDeprecationWarning,
@@ -789,12 +810,13 @@ def iter_fresh_objects(paths, root_path, include_deleted=False,
                        object_store=None):
     """Iterate over versions of objecs on disk referenced by index.
 
-    :param index: Index file
-    :param root_path: Root path to access from
-    :param include_deleted: Include deleted entries with sha and
+    Args:
+      paths: Paths to iterate over
+      root_path: Root path to access from
+      include_deleted: Include deleted entries with sha and
         mode set to None
-    :param object_store: Optional object store to report new items to
-    :return: Iterator over path, sha, mode
+      object_store: Optional object store to report new items to
+    Returns: Iterator over path, sha, mode
     """
     for path, entry in iter_fresh_entries(paths, root_path,
                                           object_store=object_store):
@@ -811,8 +833,9 @@ def refresh_index(index, root_path):
 
     This is the equivalent of running 'git commit -a'.
 
-    :param index: Index to update
-    :param root_path: Root filesystem path
+    Args:
+      index: Index to update
+      root_path: Root filesystem path
     """
     for path, entry in iter_fresh_entries(index, root_path):
         index[path] = entry

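To see the commit_tree API documented above in action, a minimal sketch against an in-memory store (the path and contents are illustrative only):

    from dulwich.index import commit_tree
    from dulwich.object_store import MemoryObjectStore
    from dulwich.objects import Blob

    store = MemoryObjectStore()
    blob = Blob.from_string(b"hello\n")
    store.add_object(blob)

    # Entries are (path, sha, mode) tuples; the root tree SHA is returned.
    tree_id = commit_tree(store, [(b"hello.txt", blob.id, 0o100644)])
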
+ 12 - 8
dulwich/line_ending.py

@@ -136,8 +136,9 @@ LF = b"\n"
 def convert_crlf_to_lf(text_hunk):
     """Convert CRLF in text hunk into LF
 
-    :param text_hunk: A bytes string representing a text hunk
-    :return: The text hunk with the same type, with CRLF replaced into LF
+    Args:
+      text_hunk: A bytes string representing a text hunk
+    Returns: The text hunk with the same type, with CRLF replaced by LF
     """
     return text_hunk.replace(CRLF, LF)
 
@@ -145,8 +146,9 @@ def convert_crlf_to_lf(text_hunk):
 def convert_lf_to_crlf(text_hunk):
     """Convert LF in text hunk into CRLF
 
-    :param text_hunk: A bytes string representing a text hunk
-    :return: The text hunk with the same type, with LF replaced into CRLF
+    Args:
+      text_hunk: A bytes string representing a text hunk
+    Returns: The text hunk with the same type, with LF replaced by CRLF
     """
     # TODO find a more efficient way of doing it
     intermediary = text_hunk.replace(CRLF, LF)
@@ -174,9 +176,10 @@ def get_checkin_filter(core_eol, core_autocrlf, git_attributes):
 def get_checkout_filter_autocrlf(core_autocrlf):
     """ Returns the correct checkout filter base on autocrlf value
 
-    :param core_autocrlf: The bytes configuration value of core.autocrlf.
+    Args:
+      core_autocrlf: The bytes configuration value of core.autocrlf.
         Valid values are: b'true', b'false' or b'input'.
-    :return: Either None if no filter has to be applied or a function
+    Returns: Either None if no filter has to be applied or a function
         accepting a single argument, a binary text hunk
     """
 
@@ -189,9 +192,10 @@ def get_checkout_filter_autocrlf(core_autocrlf):
 def get_checkin_filter_autocrlf(core_autocrlf):
     """ Returns the correct checkin filter base on autocrlf value
 
-    :param core_autocrlf: The bytes configuration value of core.autocrlf.
+    Args:
+      core_autocrlf: The bytes configuration value of core.autocrlf.
         Valid values are: b'true', b'false' or b'input'.
-    :return: Either None if no filter has to be applied or a function
+    Returns: Either None if no filter has to be applied or a function
         accepting a single argument, a binary text hunk
     """
 

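Both helpers operate on bytes, and their behavior follows directly from the replace() calls visible in the hunks above; a quick sanity check:

    from dulwich.line_ending import convert_crlf_to_lf, convert_lf_to_crlf

    assert convert_crlf_to_lf(b"a\r\nb\r\n") == b"a\nb\n"
    # convert_lf_to_crlf first normalizes to LF, so mixed input comes out
    # uniformly CRLF-terminated.
    assert convert_lf_to_crlf(b"a\nb\r\n") == b"a\r\nb\r\n"
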
+ 13 - 10
dulwich/lru_cache.py

@@ -140,9 +140,10 @@ class LRUCache(object):
         Also, if the entry is ever removed from the cache, call
         cleanup(key, value).
 
-        :param key: The key to store it under
-        :param value: The object to store
-        :param cleanup: None or a function taking (key, value) to indicate
+        Args:
+          key: The key to store it under
+          value: The object to store
+          cleanup: None or a function taking (key, value) to indicate
                         'value' should be cleaned up.
         """
         if key is _null_key:
@@ -179,7 +180,7 @@ class LRUCache(object):
         request them later. This is simply meant as a peek into the current
         state.
 
-        :return: An unordered list of keys that are currently cached.
+        Returns: An unordered list of keys that are currently cached.
         """
         return self._cache.keys()
 
@@ -288,11 +289,12 @@ class LRUSizeCache(LRUCache):
                  compute_size=None):
         """Create a new LRUSizeCache.
 
-        :param max_size: The max number of bytes to store before we start
+        Args:
+          max_size: The max number of bytes to store before we start
             clearing out entries.
-        :param after_cleanup_size: After cleaning up, shrink everything to this
+          after_cleanup_size: After cleaning up, shrink everything to this
             size.
-        :param compute_size: A function to compute the size of the values. We
+          compute_size: A function to compute the size of the values. We
             use a function here, so that you can pass 'len' if you are just
             using simple strings, or a more complex function if you are using
             something like a list of strings, or even a custom object.
@@ -312,9 +314,10 @@ class LRUSizeCache(LRUCache):
         Also, if the entry is ever removed from the cache, call
         cleanup(key, value).
 
-        :param key: The key to store it under
-        :param value: The object to store
-        :param cleanup: None or a function taking (key, value) to indicate
+        Args:
+          key: The key to store it under
+          value: The object to store
+          cleanup: None or a function taking (key, value) to indicate
                         'value' should be cleaned up.
         """
         if key is _null_key:

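A small sketch of the add/eviction behavior documented above, assuming the default after_cleanup_count derived from max_cache:

    from dulwich.lru_cache import LRUCache

    cache = LRUCache(max_cache=2)
    cache.add("a", 1)
    cache.add("b", 2)
    # Exceeding max_cache triggers a cleanup that drops the least
    # recently used entries.
    cache.add("c", 3)
    assert "c" in cache and "a" not in cache
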
+ 6 - 4
dulwich/mailmap.py

@@ -37,8 +37,9 @@ def parse_identity(text):
 def read_mailmap(f):
     """Read a mailmap.
 
-    :param f: File-like object to read from
-    :return: Iterator over
+    Args:
+      f: File-like object to read from
+    Returns: Iterator over
         ((canonical_name, canonical_email), (from_name, from_email)) tuples
     """
     for line in f:
@@ -72,8 +73,9 @@ class Mailmap(object):
         Any of the fields can be None, but at least one of them needs to be
         set.
 
-        :param canonical_identity: The canonical identity (tuple)
-        :param from_identity: The from identity (tuple)
+        Args:
+          canonical_identity: The canonical identity (tuple)
+          from_identity: The from identity (tuple)
         """
         if from_identity is None:
             from_name, from_email = None, None

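For reference, read_mailmap can be exercised against an in-memory file; the addresses below are made up:

    from io import BytesIO
    from dulwich.mailmap import read_mailmap

    f = BytesIO(b"Jane Doe <jane@example.com> <jane@old.example.com>\n")
    for canonical, from_identity in read_mailmap(f):
        # canonical is (name, email); from_identity is the identity to
        # be rewritten to the canonical one.
        print(canonical, from_identity)
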
+ 123 - 92
dulwich/object_store.py

@@ -82,8 +82,9 @@ class BaseObjectStore(object):
     def iter_shas(self, shas):
         """Iterate over the objects for the specified shas.
 
-        :param shas: Iterable object with SHAs
-        :return: Object iterator
+        Args:
+          shas: Iterable object with SHAs
+        Returns: Object iterator
         """
         return ObjectStoreIterator(self, shas)
 
@@ -110,8 +111,9 @@ class BaseObjectStore(object):
     def get_raw(self, name):
         """Obtain the raw text for an object.
 
-        :param name: sha for the object.
-        :return: tuple with numeric type and object contents.
+        Args:
+          name: sha for the object.
+        Returns: tuple with numeric type and object contents.
         """
         raise NotImplementedError(self.get_raw)
 
@@ -133,15 +135,17 @@ class BaseObjectStore(object):
     def add_objects(self, objects, progress=None):
         """Add a set of objects to this object store.
 
-        :param objects: Iterable over a list of (object, path) tuples
+        Args:
+          objects: Iterable over a list of (object, path) tuples
         """
         raise NotImplementedError(self.add_objects)
 
     def add_pack_data(self, count, pack_data, progress=None):
         """Add pack data to this object store.
 
-        :param num_items: Number of items to add
-        :param pack_data: Iterator over pack data tuples
+        Args:
+          count: Number of items to add
+          pack_data: Iterator over pack data tuples
         """
         if count == 0:
             # Don't bother writing an empty pack file
@@ -159,13 +163,14 @@ class BaseObjectStore(object):
                      include_trees=False, change_type_same=False):
         """Find the differences between the contents of two trees
 
-        :param source: SHA1 of the source tree
-        :param target: SHA1 of the target tree
-        :param want_unchanged: Whether unchanged files should be reported
-        :param include_trees: Whether to include trees
-        :param change_type_same: Whether to report files changing
+        Args:
+          source: SHA1 of the source tree
+          target: SHA1 of the target tree
+          want_unchanged: Whether unchanged files should be reported
+          include_trees: Whether to include trees
+          change_type_same: Whether to report files changing
             type in the same entry.
-        :return: Iterator over tuples with
+        Returns: Iterator over tuples with
             (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)
         """
         for change in tree_changes(self, source, target,
@@ -181,9 +186,10 @@ class BaseObjectStore(object):
 
         Iteration is depth-first pre-order, as in e.g. os.walk.
 
-        :param tree_id: SHA1 of the tree.
-        :param include_trees: If True, include tree objects in the iteration.
-        :return: Iterator over TreeEntry namedtuples for all the objects in a
+        Args:
+          tree_id: SHA1 of the tree.
+          include_trees: If True, include tree objects in the iteration.
+        Returns: Iterator over TreeEntry namedtuples for all the objects in a
             tree.
         """
         for entry, _ in walk_trees(self, tree_id, None):
@@ -197,15 +203,16 @@ class BaseObjectStore(object):
                              depth=None):
         """Find the missing objects required for a set of revisions.
 
-        :param haves: Iterable over SHAs already in common.
-        :param wants: Iterable over SHAs of objects to fetch.
-        :param progress: Simple progress function that will be called with
+        Args:
+          haves: Iterable over SHAs already in common.
+          wants: Iterable over SHAs of objects to fetch.
+          progress: Simple progress function that will be called with
             updated progress strings.
-        :param get_tagged: Function that returns a dict of pointed-to sha ->
+          get_tagged: Function that returns a dict of pointed-to sha ->
             tag sha for including tags.
-        :param get_parents: Optional function for getting the parents of a
+          get_parents: Optional function for getting the parents of a
             commit.
-        :return: Iterator over (sha, path) pairs.
+        Returns: Iterator over (sha, path) pairs.
         """
         finder = MissingObjectFinder(self, haves, wants, progress, get_tagged,
                                      get_parents=get_parents)
@@ -214,8 +221,9 @@ class BaseObjectStore(object):
     def find_common_revisions(self, graphwalker):
         """Find which revisions this store has in common using graphwalker.
 
-        :param graphwalker: A graphwalker object.
-        :return: List of SHAs that are in common
+        Args:
+          graphwalker: A graphwalker object.
+        Returns: List of SHAs that are in common
         """
         haves = []
         sha = next(graphwalker)
@@ -229,19 +237,21 @@ class BaseObjectStore(object):
     def generate_pack_contents(self, have, want, progress=None):
         """Iterate over the contents of a pack file.
 
-        :param have: List of SHA1s of objects that should not be sent
-        :param want: List of SHA1s of objects that should be sent
-        :param progress: Optional progress reporting method
+        Args:
+          have: List of SHA1s of objects that should not be sent
+          want: List of SHA1s of objects that should be sent
+          progress: Optional progress reporting method
         """
         return self.iter_shas(self.find_missing_objects(have, want, progress))
 
     def generate_pack_data(self, have, want, progress=None, ofs_delta=True):
         """Generate pack data objects for a set of wants/haves.
 
-        :param have: List of SHA1s of objects that should not be sent
-        :param want: List of SHA1s of objects that should be sent
-        :param ofs_delta: Whether OFS deltas can be included
-        :param progress: Optional progress reporting method
+        Args:
+          have: List of SHA1s of objects that should not be sent
+          want: List of SHA1s of objects that should be sent
+          ofs_delta: Whether OFS deltas can be included
+          progress: Optional progress reporting method
         """
         # TODO(jelmer): More efficient implementation
         return pack_objects_to_data(
@@ -250,8 +260,9 @@ class BaseObjectStore(object):
     def peel_sha(self, sha):
         """Peel all tags from a SHA.
 
-        :param sha: The object SHA to peel.
-        :return: The fully-peeled SHA1 of a tag object, after peeling all
+        Args:
+          sha: The object SHA to peel.
+        Returns: The fully-peeled SHA1 of a tag object, after peeling all
             intermediate tags; if the original ref does not point to a tag,
             this will equal the original SHA1.
         """
@@ -266,12 +277,13 @@ class BaseObjectStore(object):
                            get_parents=lambda commit: commit.parents):
         """Collect all ancestors of heads up to (excluding) those in common.
 
-        :param heads: commits to start from
-        :param common: commits to end at, or empty set to walk repository
+        Args:
+          heads: commits to start from
+          common: commits to end at, or empty set to walk repository
             completely
-        :param get_parents: Optional function for getting the parents of a
+          get_parents: Optional function for getting the parents of a
             commit.
-        :return: a tuple (A, B) where A - all commits reachable
+        Returns: a tuple (A, B) where A - all commits reachable
             from heads but not present in common, B - common (shared) elements
             that are directly reachable from heads
         """
@@ -382,7 +394,7 @@ class PackBasedObjectStore(BaseObjectStore):
     def pack_loose_objects(self):
         """Pack loose objects.
 
-        :return: Number of objects packed
+        Returns: Number of objects packed
         """
         objects = set()
         for sha in self._iter_loose_objects():
@@ -444,8 +456,9 @@ class PackBasedObjectStore(BaseObjectStore):
     def get_raw(self, name):
         """Obtain the raw fulltext for an object.
 
-        :param name: sha for the object.
-        :return: tuple with numeric type and object contents.
+        Args:
+          name: sha for the object.
+        Returns: tuple with numeric type and object contents.
         """
         if name == ZERO_SHA:
             raise KeyError(name)
@@ -484,9 +497,10 @@ class PackBasedObjectStore(BaseObjectStore):
     def add_objects(self, objects, progress=None):
         """Add a set of objects to this object store.
 
-        :param objects: Iterable over (object, path) tuples, should support
+        Args:
+          objects: Iterable over (object, path) tuples, should support
             __len__.
-        :return: Pack object of the objects written.
+        Returns: Pack object of the objects written.
         """
         return self.add_pack_data(
                 *pack_objects_to_data(objects),
@@ -499,7 +513,8 @@ class DiskObjectStore(PackBasedObjectStore):
     def __init__(self, path):
         """Open an object store.
 
-        :param path: Path of the object store.
+        Args:
+          path: Path of the object store.
         """
         super(DiskObjectStore, self).__init__()
         self.path = path
@@ -632,13 +647,14 @@ class DiskObjectStore(PackBasedObjectStore):
     def _complete_thin_pack(self, f, path, copier, indexer):
         """Move a specific file containing a pack into the pack directory.
 
-        :note: The file should be on the same file system as the
+        Note: The file should be on the same file system as the
             packs directory.
 
-        :param f: Open file object for the pack.
-        :param path: Path to the pack file.
-        :param copier: A PackStreamCopier to use for writing pack data.
-        :param indexer: A PackIndexer for indexing the pack.
+        Args:
+          f: Open file object for the pack.
+          path: Path to the pack file.
+          copier: A PackStreamCopier to use for writing pack data.
+          indexer: A PackIndexer for indexing the pack.
         """
         entries = list(indexer)
 
@@ -701,11 +717,12 @@ class DiskObjectStore(PackBasedObjectStore):
         outside the pack. They should never be placed in the object store
         directly, and always indexed and completed as they are copied.
 
-        :param read_all: Read function that blocks until the number of
+        Args:
+          read_all: Read function that blocks until the number of
             requested bytes are read.
-        :param read_some: Read function that returns at least one byte, but may
+          read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
-        :return: A Pack object pointing at the now-completed thin pack in the
+        Returns: A Pack object pointing at the now-completed thin pack in the
             objects/pack directory.
         """
         fd, path = tempfile.mkstemp(dir=self.path, prefix='tmp_pack_')
@@ -719,10 +736,11 @@ class DiskObjectStore(PackBasedObjectStore):
     def move_in_pack(self, path):
         """Move a specific file containing a pack into the pack directory.
 
-        :note: The file should be on the same file system as the
+        Note: The file should be on the same file system as the
             packs directory.
 
-        :param path: Path to the pack file.
+        Args:
+          path: Path to the pack file.
         """
         with PackData(path) as p:
             entries = p.sorted_entries()
@@ -751,7 +769,7 @@ class DiskObjectStore(PackBasedObjectStore):
     def add_pack(self):
         """Add a new pack to this object store.
 
-        :return: Fileobject to write to, a commit function to
+        Returns: Fileobject to write to, a commit function to
             call when the pack is finished and an abort
             function.
         """
@@ -776,7 +794,8 @@ class DiskObjectStore(PackBasedObjectStore):
     def add_object(self, obj):
         """Add a single object to this object store.
 
-        :param obj: Object to add
+        Args:
+          obj: Object to add
         """
         path = self._get_shafile_path(obj.id)
         dir = os.path.dirname(path)
@@ -837,8 +856,9 @@ class MemoryObjectStore(BaseObjectStore):
     def get_raw(self, name):
         """Obtain the raw text for an object.
 
-        :param name: sha for the object.
-        :return: tuple with numeric type and object contents.
+        Args:
+          name: sha for the object.
+        Returns: tuple with numeric type and object contents.
         """
         obj = self[self._to_hexsha(name)]
         return obj.type_num, obj.as_raw_string()
@@ -859,7 +879,8 @@ class MemoryObjectStore(BaseObjectStore):
     def add_objects(self, objects, progress=None):
         """Add a set of objects to this object store.
 
-        :param objects: Iterable over a list of (object, path) tuples
+        Args:
+          objects: Iterable over a list of (object, path) tuples
         """
         for obj, path in objects:
             self.add_object(obj)
@@ -870,7 +891,7 @@ class MemoryObjectStore(BaseObjectStore):
         Because this object store doesn't support packs, we extract and add the
         individual objects.
 
-        :return: Fileobject to write to and a commit function to
+        Returns: Fileobject to write to and a commit function to
             call when the pack is finished.
         """
         f = BytesIO()
@@ -888,8 +909,9 @@ class MemoryObjectStore(BaseObjectStore):
     def _complete_thin_pack(self, f, indexer):
         """Complete a thin pack by adding external references.
 
-        :param f: Open file object for the pack.
-        :param indexer: A PackIndexer for indexing the pack.
+        Args:
+          f: Open file object for the pack.
+          indexer: A PackIndexer for indexing the pack.
         """
         entries = list(indexer)
 
@@ -915,9 +937,10 @@ class MemoryObjectStore(BaseObjectStore):
         outside the pack. Because this object store doesn't support packs, we
         extract and add the individual objects.
 
-        :param read_all: Read function that blocks until the number of
+        Args:
+          read_all: Read function that blocks until the number of
             requested bytes are read.
-        :param read_some: Read function that returns at least one byte, but may
+          read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
         """
         f, commit, abort = self.add_pack()
@@ -947,8 +970,9 @@ class ObjectStoreIterator(ObjectIterator):
     def __init__(self, store, sha_iter):
         """Create a new ObjectIterator.
 
-        :param store: Object store to retrieve from
-        :param sha_iter: Iterator over (sha, path) tuples
+        Args:
+          store: Object store to retrieve from
+          sha_iter: Iterator over (sha, path) tuples
         """
         self.store = store
         self.sha_iter = sha_iter
@@ -975,11 +999,12 @@ class ObjectStoreIterator(ObjectIterator):
     def __contains__(self, needle):
         """Check if an object is present.
 
-        :note: This checks if the object is present in
+        Note: This checks if the object is present in
             the underlying object store, not if it would
             be yielded by the iterator.
 
-        :param needle: SHA1 of the object to check for
+        Args:
+          needle: SHA1 of the object to check for
         """
         if needle == ZERO_SHA:
             return False
@@ -988,7 +1013,7 @@ class ObjectStoreIterator(ObjectIterator):
     def __getitem__(self, key):
         """Find an object by SHA1.
 
-        :note: This retrieves the object from the underlying
+        Note: This retrieves the object from the underlying
             object store. It will also succeed if the object would
             not be returned by the iterator.
         """
@@ -1020,10 +1045,11 @@ class ObjectStoreIterator(ObjectIterator):
 def tree_lookup_path(lookup_obj, root_sha, path):
     """Look up an object in a Git tree.
 
-    :param lookup_obj: Callback for retrieving object by SHA1
-    :param root_sha: SHA1 of the root tree
-    :param path: Path to lookup
-    :return: A tuple of (mode, SHA) of the resulting path.
+    Args:
+      lookup_obj: Callback for retrieving object by SHA1
+      root_sha: SHA1 of the root tree
+      path: Path to lookup
+    Returns: A tuple of (mode, SHA) of the resulting path.
     """
     tree = lookup_obj(root_sha)
     if not isinstance(tree, Tree):
@@ -1034,9 +1060,10 @@ def tree_lookup_path(lookup_obj, root_sha, path):
 def _collect_filetree_revs(obj_store, tree_sha, kset):
     """Collect SHA1s of files and directories for specified tree.
 
-    :param obj_store: Object store to get objects by SHA from
-    :param tree_sha: tree reference to walk
-    :param kset: set to fill with references to files and directories
+    Args:
+      obj_store: Object store to get objects by SHA from
+      tree_sha: tree reference to walk
+      kset: set to fill with references to files and directories
     """
     filetree = obj_store[tree_sha]
     for name, mode, sha in filetree.iteritems():
@@ -1054,11 +1081,12 @@ def _split_commits_and_tags(obj_store, lst, ignore_unknown=False):
     through, and unless ignore_unknown argument is True, KeyError
     is thrown for SHA1 missing in the repository
 
-    :param obj_store: Object store to get objects by SHA1 from
-    :param lst: Collection of commit and tag SHAs
-    :param ignore_unknown: True to skip SHA1 missing in the repository
+    Args:
+      obj_store: Object store to get objects by SHA1 from
+      lst: Collection of commit and tag SHAs
+      ignore_unknown: True to skip SHA1 missing in the repository
         silently.
-    :return: A tuple of (commits, tags, others) SHA1s
+    Returns: A tuple of (commits, tags, others) SHA1s
     """
     commits = set()
     tags = set()
@@ -1088,15 +1116,16 @@ def _split_commits_and_tags(obj_store, lst, ignore_unknown=False):
 class MissingObjectFinder(object):
     """Find the objects missing from another object store.
 
-    :param object_store: Object store containing at least all objects to be
+    Args:
+      object_store: Object store containing at least all objects to be
         sent
-    :param haves: SHA1s of commits not to send (already present in target)
-    :param wants: SHA1s of commits to send
-    :param progress: Optional function to report progress to.
-    :param get_tagged: Function that returns a dict of pointed-to sha -> tag
+      haves: SHA1s of commits not to send (already present in target)
+      wants: SHA1s of commits to send
+      progress: Optional function to report progress to.
+      get_tagged: Function that returns a dict of pointed-to sha -> tag
         sha for including tags.
-    :param get_parents: Optional function for getting the parents of a commit.
-    :param tagged: dict of pointed-to sha -> tag sha for including tags
+      get_parents: Optional function for getting the parents of a commit.
+      tagged: dict of pointed-to sha -> tag sha for including tags
     """
 
     def __init__(self, object_store, haves, wants, progress=None,
@@ -1190,8 +1219,9 @@ class ObjectStoreGraphWalker(object):
     def __init__(self, local_heads, get_parents, shallow=None):
         """Create a new instance.
 
-        :param local_heads: Heads to start search with
-        :param get_parents: Function for finding the parents of a SHA1.
+        Args:
+          local_heads: Heads to start search with
+          get_parents: Function for finding the parents of a SHA1.
         """
         self.heads = set(local_heads)
         self.get_parents = get_parents
@@ -1254,11 +1284,12 @@ def commit_tree_changes(object_store, tree, changes):
     number of changes to a big tree. For a large number of changes
     to a large tree, use e.g. commit_tree.
 
-    :param object_store: Object store to store new objects in
+    Args:
+      object_store: Object store to store new objects in
         and retrieve old ones from.
-    :param tree: Original tree root
-    :param changes: changes to apply
-    :return: New tree root object
+      tree: Original tree root
+      changes: changes to apply
+    Returns: New tree root object
     """
     # TODO(jelmer): Save up the objects and add them using .add_objects
     # rather than with individual calls to .add_object.

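To make the tree_lookup_path contract concrete, a minimal sketch with an in-memory store (names and contents are illustrative):

    from dulwich.object_store import MemoryObjectStore, tree_lookup_path
    from dulwich.objects import Blob, Tree

    store = MemoryObjectStore()
    blob = Blob.from_string(b"data\n")
    tree = Tree()
    tree.add(b"file.txt", 0o100644, blob.id)
    store.add_objects([(blob, None), (tree, None)])

    # Returns the (mode, sha) of the entry reached via the given path.
    mode, sha = tree_lookup_path(store.__getitem__, tree.id, b"file.txt")
    assert sha == blob.id
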
+ 96 - 65
dulwich/objects.py

@@ -73,8 +73,9 @@ BEGIN_PGP_SIGNATURE = b"-----BEGIN PGP SIGNATURE-----"
 def S_ISGITLINK(m):
     """Check if a mode indicates a submodule.
 
-    :param m: Mode to check
-    :return: a ``boolean``
+    Args:
+      m: Mode to check
+    Returns: a ``boolean``
     """
     return (stat.S_IFMT(m) == S_IFGITLINK)
 
@@ -162,8 +163,9 @@ def serializable_property(name, docstring=None):
 def object_class(type):
     """Get the object class corresponding to the given type.
 
-    :param type: Either a type name string or a numeric type.
-    :return: The ShaFile subclass corresponding to the given type, or None if
+    Args:
+      type: Either a type name string or a numeric type.
+    Returns: The ShaFile subclass corresponding to the given type, or None if
         type is not a valid type name/number.
     """
     return _TYPE_MAP.get(type, None)
@@ -172,9 +174,11 @@ def object_class(type):
 def check_hexsha(hex, error_msg):
     """Check if a string is a valid hex sha string.
 
-    :param hex: Hex string to check
-    :param error_msg: Error message to use in exception
-    :raise ObjectFormatException: Raised when the string is not valid
+    Args:
+      hex: Hex string to check
+      error_msg: Error message to use in exception
+    Raises:
+      ObjectFormatException: Raised when the string is not valid
     """
     if not valid_hexsha(hex):
         raise ObjectFormatException("%s %s" % (error_msg, hex))
@@ -185,8 +189,9 @@ def check_identity(identity, error_msg):
 
     This will raise an exception if the identity is not valid.
 
-    :param identity: Identity string
-    :param error_msg: Error message to use in exception
+    Args:
+      identity: Identity string
+      error_msg: Error message to use in exception
     """
     email_start = identity.find(b'<')
     email_end = identity.find(b'>')
@@ -202,7 +207,8 @@ def check_time(time_seconds):
 
     This will raise an exception if the time is not valid.
 
-    :param time_info: author/committer/tagger info
+    Args:
+      time_seconds: author/committer/tagger info
 
     """
     # Prevent overflow error
@@ -279,7 +285,7 @@ class ShaFile(object):
     def as_legacy_object_chunks(self):
         """Return chunks representing the object in the experimental format.
 
-        :return: List of strings
+        Returns: List of strings
         """
         compobj = zlib.compressobj()
         yield compobj.compress(self._header())
@@ -295,7 +301,7 @@ class ShaFile(object):
     def as_raw_chunks(self):
         """Return chunks with serialization of the object.
 
-        :return: List of strings, not necessarily one per line
+        Returns: List of strings, not necessarily one per line
         """
         if self._needs_serialization:
             self._sha = None
@@ -306,7 +312,7 @@ class ShaFile(object):
     def as_raw_string(self):
         """Return raw string with serialization of the object.
 
-        :return: String object
+        Returns: String object
         """
         return b''.join(self.as_raw_chunks())
 
@@ -417,9 +423,10 @@ class ShaFile(object):
     def from_raw_string(type_num, string, sha=None):
         """Creates an object of the indicated type from the raw string given.
 
-        :param type_num: The numeric type of the object.
-        :param string: The raw uncompressed contents.
-        :param sha: Optional known sha for the object
+        Args:
+          type_num: The numeric type of the object.
+          string: The raw uncompressed contents.
+          sha: Optional known sha for the object
         """
         obj = object_class(type_num)()
         obj.set_raw_string(string, sha)
@@ -429,9 +436,10 @@ class ShaFile(object):
     def from_raw_chunks(type_num, chunks, sha=None):
         """Creates an object of the indicated type from the raw chunks given.
 
-        :param type_num: The numeric type of the object.
-        :param chunks: An iterable of the raw uncompressed contents.
-        :param sha: Optional known sha for the object
+        Args:
+          type_num: The numeric type of the object.
+          chunks: An iterable of the raw uncompressed contents.
+          sha: Optional known sha for the object
         """
         obj = object_class(type_num)()
         obj.set_raw_chunks(chunks, sha)
@@ -447,9 +455,11 @@ class ShaFile(object):
     def _check_has_member(self, member, error_msg):
         """Check that the object has a given member variable.
 
-        :param member: the member variable to check for
-        :param error_msg: the message for an error if the member is missing
-        :raise ObjectFormatException: with the given error_msg if member is
+        Args:
+          member: the member variable to check for
+          error_msg: the message for an error if the member is missing
+        Raises:
+          ObjectFormatException: with the given error_msg if member is
             missing or is None
         """
         if getattr(self, member, None) is None:
@@ -458,8 +468,9 @@ class ShaFile(object):
     def check(self):
         """Check this object for internal consistency.
 
-        :raise ObjectFormatException: if the object is malformed in some way
-        :raise ChecksumMismatch: if the object was created with a SHA that does
+        Raises:
+          ObjectFormatException: if the object is malformed in some way
+          ChecksumMismatch: if the object was created with a SHA that does
             not match its contents
         """
         # TODO: if we find that error-checking during object parsing is a
@@ -603,7 +614,8 @@ class Blob(ShaFile):
     def check(self):
         """Check this object for internal consistency.
 
-        :raise ObjectFormatException: if the object is malformed in some way
+        Raises:
+          ObjectFormatException: if the object is malformed in some way
         """
         super(Blob, self).check()
 
@@ -638,8 +650,9 @@ class Blob(ShaFile):
 def _parse_message(chunks):
     """Parse a message with a list of fields and a body.
 
-    :param chunks: the raw chunks of the tag or commit object.
-    :return: iterator of tuples of (field, value), one per header line, in the
+    Args:
+      chunks: the raw chunks of the tag or commit object.
+    Returns: iterator of tuples of (field, value), one per header line, in the
         order read from the text, possibly including duplicates. Includes a
         field named None for the freeform tag/commit text.
     """
@@ -716,7 +729,8 @@ class Tag(ShaFile):
     def check(self):
         """Check this object for internal consistency.
 
-        :raise ObjectFormatException: if the object is malformed in some way
+        Raises:
+          ObjectFormatException: if the object is malformed in some way
         """
         super(Tag, self).check()
         self._check_has_member("_object_sha", "missing object sha")
@@ -807,7 +821,7 @@ class Tag(ShaFile):
     def _get_object(self):
         """Get the object pointed to by this tag.
 
-        :return: tuple of (object class, sha).
+        Returns: tuple of (object class, sha).
         """
         return (self._object_class, self._object_sha)
 
@@ -848,9 +862,11 @@ class TreeEntry(namedtuple('TreeEntry', ['path', 'mode', 'sha'])):
 def parse_tree(text, strict=False):
     """Parse a tree text.
 
-    :param text: Serialized text to parse
-    :return: iterator of tuples of (name, mode, sha)
-    :raise ObjectFormatException: if the object was malformed in some way
+    Args:
+      text: Serialized text to parse
+    Returns: iterator of tuples of (name, mode, sha)
+    Raises:
+      ObjectFormatException: if the object was malformed in some way
     """
     count = 0
     length = len(text)
@@ -876,8 +892,9 @@ def parse_tree(text, strict=False):
 def serialize_tree(items):
     """Serialize the items in a tree to a text.
 
-    :param items: Sorted iterable over (name, mode, sha) tuples
-    :return: Serialized tree text as chunks
+    Args:
+      items: Sorted iterable over (name, mode, sha) tuples
+    Returns: Serialized tree text as chunks
     """
     for name, mode, hexsha in items:
         yield (("%04o" % mode).encode('ascii') + b' ' + name +
@@ -887,11 +904,12 @@ def serialize_tree(items):
 def sorted_tree_items(entries, name_order):
     """Iterate over a tree entries dictionary.
 
-    :param name_order: If True, iterate entries in order of their name. If
+    Args:
+      name_order: If True, iterate entries in order of their name. If
         False, iterate entries in tree order, that is, treat subtree entries as
         having '/' appended.
-    :param entries: Dictionary mapping names to (mode, sha) tuples
-    :return: Iterator over (name, mode, hexsha)
+      entries: Dictionary mapping names to (mode, sha) tuples
+    Returns: Iterator over (name, mode, hexsha)
     """
     key_func = name_order and key_entry_name_order or key_entry
     for name, entry in sorted(entries.items(), key=key_func):
@@ -906,7 +924,8 @@ def sorted_tree_items(entries, name_order):
 def key_entry(entry):
     """Sort key for tree entry.
 
-    :param entry: (name, value) tuplee
+    Args:
+      entry: (name, value) tuple
     """
     (name, value) = entry
     if stat.S_ISDIR(value[0]):
@@ -922,10 +941,11 @@ def key_entry_name_order(entry):
 def pretty_format_tree_entry(name, mode, hexsha, encoding="utf-8"):
     """Pretty format tree entry.
 
-    :param name: Name of the directory entry
-    :param mode: Mode of entry
-    :param hexsha: Hexsha of the referenced object
-    :return: string describing the tree entry
+    Args:
+      name: Name of the directory entry
+      mode: Mode of entry
+      hexsha: Hexsha of the referenced object
+    Returns: string describing the tree entry
     """
     if mode & stat.S_IFDIR:
         kind = "tree"
@@ -964,8 +984,9 @@ class Tree(ShaFile):
     def __setitem__(self, name, value):
         """Set a tree entry by name.
 
-        :param name: The name of the entry, as a string.
-        :param value: A tuple of (mode, hexsha), where mode is the mode of the
+        Args:
+          name: The name of the entry, as a string.
+          value: A tuple of (mode, hexsha), where mode is the mode of the
             entry as an integral type and hexsha is the hex SHA of the entry as
             a string.
         """
@@ -986,10 +1007,11 @@ class Tree(ShaFile):
     def add(self, name, mode, hexsha):
         """Add an entry to the tree.
 
-        :param mode: The mode of the entry as an integral type. Not all
+        Args:
+          mode: The mode of the entry as an integral type. Not all
             possible modes are supported by git; see check() for details.
-        :param name: The name of the entry, as a string.
-        :param hexsha: The hex SHA of the entry as a string.
+          name: The name of the entry, as a string.
+          hexsha: The hex SHA of the entry as a string.
         """
         if isinstance(name, int) and isinstance(mode, bytes):
             (name, mode) = (mode, name)
@@ -1002,16 +1024,17 @@ class Tree(ShaFile):
     def iteritems(self, name_order=False):
         """Iterate over entries.
 
-        :param name_order: If True, iterate in name order instead of tree
+        Args:
+          name_order: If True, iterate in name order instead of tree
             order.
-        :return: Iterator over (name, mode, sha) tuples
+        Returns: Iterator over (name, mode, sha) tuples
         """
         return sorted_tree_items(self._entries, name_order)
 
     def items(self):
         """Return the sorted entries in this tree.
 
-        :return: List with (name, mode, sha) tuples
+        Returns: List with (name, mode, sha) tuples
         """
         return list(self.iteritems())
 
@@ -1029,7 +1052,8 @@ class Tree(ShaFile):
     def check(self):
         """Check this object for internal consistency.
 
-        :raise ObjectFormatException: if the object is malformed in some way
+        Raises:
+          ObjectFormatException: if the object is malformed in some way
         """
         super(Tree, self).check()
         last = None
@@ -1068,9 +1092,10 @@ class Tree(ShaFile):
     def lookup_path(self, lookup_obj, path):
         """Look up an object in a Git tree.
 
-        :param lookup_obj: Callback for retrieving object by SHA1
-        :param path: Path to lookup
-        :return: A tuple of (mode, SHA) of the resulting path.
+        Args:
+          lookup_obj: Callback for retrieving object by SHA1
+          path: Path to lookup
+        Returns: A tuple of (mode, SHA) of the resulting path.
         """
         parts = path.split(b'/')
         sha = self.id
@@ -1088,8 +1113,9 @@ class Tree(ShaFile):
 def parse_timezone(text):
     """Parse a timezone text fragment (e.g. '+0100').
 
-    :param text: Text to parse.
-    :return: Tuple with timezone as seconds difference to UTC
+    Args:
+      text: Text to parse.
+    Returns: Tuple with timezone as seconds difference to UTC
         and a boolean indicating whether this was a UTC timezone
         prefixed with a negative sign (-0000).
     """
@@ -1114,8 +1140,9 @@ def parse_timezone(text):
 def format_timezone(offset, unnecessary_negative_timezone=False):
     """Format a timezone for Git serialization.
 
-    :param offset: Timezone offset as seconds difference to UTC
-    :param unnecessary_negative_timezone: Whether to use a minus sign for
+    Args:
+      offset: Timezone offset as seconds difference to UTC
+      unnecessary_negative_timezone: Whether to use a minus sign for
        UTC or positive timezones (-0000 and -0700 rather than +0000 / +0700).
     """
     if offset % 60 != 0:
@@ -1132,10 +1159,12 @@ def format_timezone(offset, unnecessary_negative_timezone=False):
 def parse_time_entry(value):
     """Parse time entry behavior
 
-    :param value: Bytes representing a git commit/tag line
-    :raise: ObjectFormatException in case of parsing error (malformed
-            field date)
-    :return: Tuple of (author, time, (timezone, timezone_neg_utc))
+    Args:
+      value: Bytes representing a git commit/tag line
+    Raises:
+      ObjectFormatException: in case of parsing error (malformed
+      field date)
+    Returns: Tuple of (author, time, (timezone, timezone_neg_utc))
     """
     try:
         sep = value.rindex(b'> ')
@@ -1155,8 +1184,9 @@ def parse_time_entry(value):
 def parse_commit(chunks):
     """Parse a commit object from chunks.
 
-    :param chunks: Chunks to parse
-    :return: Tuple of (tree, parents, author_info, commit_info,
+    Args:
+      chunks: Chunks to parse
+    Returns: Tuple of (tree, parents, author_info, commit_info,
         encoding, mergetag, gpgsig, message, extra)
     """
     parents = []
@@ -1234,7 +1264,8 @@ class Commit(ShaFile):
     def check(self):
         """Check this object for internal consistency.
 
-        :raise ObjectFormatException: if the object is malformed in some way
+        Raises:
+          ObjectFormatException: if the object is malformed in some way
         """
         super(Commit, self).check()
         self._check_has_member("_tree", "missing tree")

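The timezone helpers above round-trip cleanly, which makes the offset convention easy to verify:

    from dulwich.objects import format_timezone, parse_timezone

    # +0130 is 1 hour 30 minutes east of UTC, i.e. 5400 seconds.
    offset, negative_utc = parse_timezone(b"+0130")
    assert offset == 5400 and not negative_utc
    assert format_timezone(offset) == b"+0130"
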
+ 52 - 36
dulwich/objectspec.py

@@ -30,10 +30,12 @@ def to_bytes(text):
 def parse_object(repo, objectish):
     """Parse a string referring to an object.
 
-    :param repo: A `Repo` object
-    :param objectish: A string referring to an object
-    :return: A git object
-    :raise KeyError: If the object can not be found
+    Args:
+      repo: A `Repo` object
+      objectish: A string referring to an object
+    Returns: A git object
+    Raises:
+      KeyError: If the object can not be found
     """
     objectish = to_bytes(objectish)
     return repo[objectish]
@@ -42,10 +44,12 @@ def parse_object(repo, objectish):
 def parse_tree(repo, treeish):
     """Parse a string referring to a tree.
 
-    :param repo: A `Repo` object
-    :param treeish: A string referring to a tree
-    :return: A git object
-    :raise KeyError: If the object can not be found
+    Args:
+      repo: A `Repo` object
+      treeish: A string referring to a tree
+    Returns: A git object
+    Raises:
+      KeyError: If the object can not be found
     """
     treeish = to_bytes(treeish)
     o = repo[treeish]
@@ -57,10 +61,12 @@ def parse_tree(repo, treeish):
 def parse_ref(container, refspec):
     """Parse a string referring to a reference.
 
-    :param container: A RefsContainer object
-    :param refspec: A string referring to a ref
-    :return: A ref
-    :raise KeyError: If the ref can not be found
+    Args:
+      container: A RefsContainer object
+      refspec: A string referring to a ref
+    Returns: A ref
+    Raises:
+      KeyError: If the ref can not be found
     """
     refspec = to_bytes(refspec)
     possible_refs = [
@@ -80,11 +86,13 @@ def parse_ref(container, refspec):
 def parse_reftuple(lh_container, rh_container, refspec):
     """Parse a reftuple spec.
 
-    :param lh_container: A RefsContainer object
-    :param hh_container: A RefsContainer object
-    :param refspec: A string
-    :return: A tuple with left and right ref
-    :raise KeyError: If one of the refs can not be found
+    Args:
+      lh_container: A RefsContainer object
+      rh_container: A RefsContainer object
+      refspec: A string
+    Returns: A tuple with left and right ref
+    Raises:
+      KeyError: If one of the refs can not be found
     """
     refspec = to_bytes(refspec)
     if refspec.startswith(b"+"):
@@ -115,11 +123,13 @@ def parse_reftuple(lh_container, rh_container, refspec):
 def parse_reftuples(lh_container, rh_container, refspecs):
     """Parse a list of reftuple specs to a list of reftuples.
 
-    :param lh_container: A RefsContainer object
-    :param hh_container: A RefsContainer object
-    :param refspecs: A list of refspecs or a string
-    :return: A list of refs
-    :raise KeyError: If one of the refs can not be found
+    Args:
+      lh_container: A RefsContainer object
+      rh_container: A RefsContainer object
+      refspecs: A list of refspecs or a string
+    Returns: A list of refs
+    Raises:
+      KeyError: If one of the refs can not be found
     """
     if not isinstance(refspecs, list):
         refspecs = [refspecs]
@@ -133,10 +143,12 @@ def parse_reftuples(lh_container, rh_container, refspecs):
 def parse_refs(container, refspecs):
     """Parse a list of refspecs to a list of refs.
 
-    :param container: A RefsContainer object
-    :param refspecs: A list of refspecs or a string
-    :return: A list of refs
-    :raise KeyError: If one of the refs can not be found
+    Args:
+      container: A RefsContainer object
+      refspecs: A list of refspecs or a string
+    Returns: A list of refs
+    Raises:
+      KeyError: If one of the refs can not be found
     """
     # TODO: Support * in refspecs
     if not isinstance(refspecs, list):
@@ -150,11 +162,13 @@ def parse_refs(container, refspecs):
 def parse_commit_range(repo, committishs):
     """Parse a string referring to a range of commits.
 
-    :param repo: A `Repo` object
-    :param committishs: A string referring to a range of commits.
-    :return: An iterator over `Commit` objects
-    :raise KeyError: When the reference commits can not be found
-    :raise ValueError: If the range can not be parsed
+    Args:
+      repo: A `Repo` object
+      committishs: A string referring to a range of commits.
+    Returns: An iterator over `Commit` objects
+    Raises:
+      KeyError: When the referenced commits can not be found
+      ValueError: If the range can not be parsed
     """
     committishs = to_bytes(committishs)
     # TODO(jelmer): Support more than a single commit..
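
A usage sketch (repo as in the parse_object example above; only a single
committish is handled, per the TODO):

    >>> from dulwich.objectspec import parse_commit_range
    >>> for commit in parse_commit_range(repo, 'HEAD'):
    ...     print(commit.id)
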
@@ -187,11 +201,13 @@ def scan_for_short_id(object_store, prefix):
 def parse_commit(repo, committish):
     """Parse a string referring to a single commit.
 
-    :param repo: A` Repo` object
-    :param commitish: A string referring to a single commit.
-    :return: A Commit object
-    :raise KeyError: When the reference commits can not be found
-    :raise ValueError: If the range can not be parsed
+    Args:
+      repo: A `Repo` object
+      committish: A string referring to a single commit.
+    Returns: A Commit object
+    Raises:
+      KeyError: When the referenced commit can not be found
+      ValueError: If the commit can not be parsed
     """
     committish = to_bytes(committish)
     try:

+ 153 - 117
dulwich/pack.py

@@ -100,7 +100,8 @@ DEFAULT_PACK_DELTA_WINDOW_SIZE = 10
 def take_msb_bytes(read, crc32=None):
     """Read bytes marked with most significant bit.
 
-    :param read: Read function
+    Args:
+      read: Read function
     """
     ret = []
     while len(ret) == 0 or ret[-1] & 0x80:
@@ -206,9 +207,10 @@ def read_zlib_chunks(read_some, unpacked, include_comp=False,
     This function requires that the buffer have additional data following the
     compressed data, which is guaranteed to be the case for git pack files.
 
-    :param read_some: Read function that returns at least one byte, but may
+    Args:
+      read_some: Read function that returns at least one byte, but may
         return less than the requested size.
-    :param unpacked: An UnpackedObject to write result data to. If its crc32
+      unpacked: An UnpackedObject to write result data to. If its crc32
         attr is not None, the CRC32 of the compressed bytes will be computed
         using this starting CRC32.
         After this function, it will have the following attrs set:
@@ -216,10 +218,11 @@ def read_zlib_chunks(read_some, unpacked, include_comp=False,
         * decomp_chunks
         * decomp_len
         * crc32
-    :param include_comp: If True, include compressed data in the result.
-    :param buffer_size: Size of the read buffer.
-    :return: Leftover unused data from the decompression.
-    :raise zlib.error: if a decompression error occurred.
+      include_comp: If True, include compressed data in the result.
+      buffer_size: Size of the read buffer.
+    Returns: Leftover unused data from the decompression.
+    Raises:
+      zlib.error: if a decompression error occurred.
     """
     if unpacked.decomp_len <= -1:
         raise ValueError('non-negative zlib data stream size expected')
@@ -263,8 +266,9 @@ def read_zlib_chunks(read_some, unpacked, include_comp=False,
 def iter_sha1(iter):
     """Return the hexdigest of the SHA1 over a set of names.
 
-    :param iter: Iterator over string objects
-    :return: 40-byte hex sha1 digest
+    Args:
+      iter: Iterator over string objects
+    Returns: 40-byte hex sha1 digest
     """
     sha = sha1()
     for name in iter:
@@ -275,8 +279,9 @@ def iter_sha1(iter):
 def load_pack_index(path):
     """Load an index file by path.
 
-    :param filename: Path to the index file
-    :return: A PackIndex loaded from the given path
+    Args:
+      path: Path to the index file
+    Returns: A PackIndex loaded from the given path
     """
     with GitFile(path, 'rb') as f:
         return load_pack_index_file(path, f)
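
A usage sketch (the .idx path is hypothetical):

    >>> from dulwich.pack import load_pack_index
    >>> idx = load_pack_index('/path/to/pack-abc123.idx')
    >>> len(idx), idx.get_pack_checksum()   # object count, 20-byte digest
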
@@ -307,9 +312,10 @@ def _load_file_contents(f, size=None):
 def load_pack_index_file(path, f):
     """Load an index file from a file-like object.
 
-    :param path: Path for the index file
-    :param f: File-like object
-    :return: A PackIndex loaded from the given file
+    Args:
+      path: Path for the index file
+      f: File-like object
+    Returns: A PackIndex loaded from the given file
     """
     contents, size = _load_file_contents(f)
     if contents[:4] == b'\377tOc':
@@ -326,11 +332,12 @@ def load_pack_index_file(path, f):
 def bisect_find_sha(start, end, sha, unpack_name):
     """Find a SHA in a data blob with sorted SHAs.
 
-    :param start: Start index of range to search
-    :param end: End index of range to search
-    :param sha: Sha to find
-    :param unpack_name: Callback to retrieve SHA by index
-    :return: Index of the SHA, or None if it wasn't found
+    Args:
+      start: Start index of range to search
+      end: End index of range to search
+      sha: Sha to find
+      unpack_name: Callback to retrieve SHA by index
+    Returns: Index of the SHA, or None if it wasn't found
     """
     assert start <= end
     while start <= end:
@@ -376,7 +383,7 @@ class PackIndex(object):
     def iterentries(self):
         """Iterate over the entries in this pack index.
 
-        :return: iterator over tuples with object name, offset in packfile and
+        Returns: iterator over tuples with object name, offset in packfile and
             crc32 checksum.
         """
         raise NotImplementedError(self.iterentries)
@@ -384,7 +391,7 @@ class PackIndex(object):
     def get_pack_checksum(self):
         """Return the SHA1 checksum stored for the corresponding packfile.
 
-        :return: 20-byte binary digest
+        Returns: 20-byte binary digest
         """
         raise NotImplementedError(self.get_pack_checksum)
 
@@ -418,14 +425,15 @@ class PackIndex(object):
     def _object_index(self, sha):
         """See object_index.
 
-        :param sha: A *binary* SHA string. (20 characters long)_
+        Args:
+          sha: A *binary* SHA string (20 bytes long).
         """
         raise NotImplementedError(self._object_index)
 
     def objects_sha1(self):
         """Return the hex SHA1 over all the shas of all objects in this pack.
 
-        :note: This is used for the filename of the pack.
+        Note: This is used for the filename of the pack.
         """
         return iter_sha1(self._itersha())
 
@@ -440,8 +448,9 @@ class MemoryPackIndex(PackIndex):
     def __init__(self, entries, pack_checksum=None):
         """Create a new MemoryPackIndex.
 
-        :param entries: Sequence of name, idx, crc32 (sorted)
-        :param pack_checksum: Optional pack checksum
+        Args:
+          entries: Sequence of name, idx, crc32 (sorted)
+          pack_checksum: Optional pack checksum
         """
         self._by_sha = {}
         self._by_index = {}
@@ -524,7 +533,7 @@ class FilePackIndex(PackIndex):
     def _unpack_entry(self, i):
         """Unpack the i-th entry in the index file.
 
-        :return: Tuple with object name (SHA), offset in pack file and CRC32
+        Returns: Tuple with object name (SHA), offset in pack file and CRC32
             checksum (if known).
         """
         raise NotImplementedError(self._unpack_entry)
@@ -549,7 +558,7 @@ class FilePackIndex(PackIndex):
     def iterentries(self):
         """Iterate over the entries in this pack index.
 
-        :return: iterator over tuples with object name, offset in packfile and
+        Returns: iterator over tuples with object name, offset in packfile and
             crc32 checksum.
         """
         for i in range(len(self)):
@@ -573,28 +582,29 @@ class FilePackIndex(PackIndex):
     def calculate_checksum(self):
         """Calculate the SHA1 checksum over this pack index.
 
-        :return: This is a 20-byte binary digest
+        Returns: 20-byte binary digest
         """
         return sha1(self._contents[:-20]).digest()
 
     def get_pack_checksum(self):
         """Return the SHA1 checksum stored for the corresponding packfile.
 
-        :return: 20-byte binary digest
+        Returns: 20-byte binary digest
         """
         return bytes(self._contents[-40:-20])
 
     def get_stored_checksum(self):
         """Return the SHA1 checksum stored for this index.
 
-        :return: 20-byte binary digest
+        Returns: 20-byte binary digest
         """
         return bytes(self._contents[-20:])
 
     def _object_index(self, sha):
         """See object_index.
 
-        :param sha: A *binary* SHA string. (20 characters long)_
+        Args:
+          sha: A *binary* SHA string (20 bytes long).
         """
         assert len(sha) == 20
         idx = ord(sha[:1])
@@ -679,8 +689,9 @@ class PackIndex2(FilePackIndex):
 def read_pack_header(read):
     """Read the header of a pack file.
 
-    :param read: Read function
-    :return: Tuple of (pack version, number of objects). If no data is
+    Args:
+      read: Read function
+    Returns: Tuple of (pack version, number of objects). If no data is
         available to read, returns (None, None).
     """
     header = read(12)
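
The 12-byte header parsed here is the b'PACK' magic followed by two
big-endian 32-bit integers; a stdlib-only sketch of constructing one:

    >>> import struct
    >>> struct.pack(b'>4sLL', b'PACK', 2, 3)   # version 2, 3 objects
    b'PACK\x00\x00\x00\x02\x00\x00\x00\x03'
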
@@ -706,15 +717,16 @@ def unpack_object(read_all, read_some=None, compute_crc32=False,
                   include_comp=False, zlib_bufsize=_ZLIB_BUFSIZE):
     """Unpack a Git object.
 
-    :param read_all: Read function that blocks until the number of requested
+    Args:
+      read_all: Read function that blocks until the number of requested
         bytes are read.
-    :param read_some: Read function that returns at least one byte, but may not
+      read_some: Read function that returns at least one byte, but may not
         return the number of bytes requested.
-    :param compute_crc32: If True, compute the CRC32 of the compressed data. If
+      compute_crc32: If True, compute the CRC32 of the compressed data. If
         False, the returned CRC32 will be None.
-    :param include_comp: If True, include compressed data in the result.
-    :param zlib_bufsize: An optional buffer size for zlib operations.
-    :return: A tuple of (unpacked, unused), where unused is the unused data
+      include_comp: If True, include compressed data in the result.
+      zlib_bufsize: An optional buffer size for zlib operations.
+    Returns: A tuple of (unpacked, unused), where unused is the unused data
         leftover from decompression, and unpacked is an UnpackedObject with
         the following attrs set:
 
@@ -799,8 +811,9 @@ class PackStreamReader(object):
         As a side effect, update the verifier's hash (excluding the last 20
         bytes read).
 
-        :param read: The read callback to read from.
-        :param size: The maximum number of bytes to read; the particular
+        Args:
+          read: The read callback to read from.
+          size: The maximum number of bytes to read; the particular
             behavior is callback-specific.
         """
         data = read(size)
@@ -860,9 +873,10 @@ class PackStreamReader(object):
     def read_objects(self, compute_crc32=False):
         """Read the objects in this pack file.
 
-        :param compute_crc32: If True, compute the CRC32 of the compressed
+        Args:
+          compute_crc32: If True, compute the CRC32 of the compressed
             data. If False, the returned CRC32 will be None.
-        :return: Iterator over UnpackedObjects with the following members set:
+        Returns: Iterator over UnpackedObjects with the following members set:
             offset
             obj_type_num
             obj_chunks (for non-delta types)
@@ -870,10 +884,11 @@ class PackStreamReader(object):
             decomp_chunks
             decomp_len
             crc32 (if compute_crc32 is True)
-        :raise ChecksumMismatch: if the checksum of the pack contents does not
+        Raises:
+          ChecksumMismatch: if the checksum of the pack contents does not
             match the checksum in the pack trailer.
-        :raise zlib.error: if an error occurred during zlib decompression.
-        :raise IOError: if an error occurred writing to the output file.
+          zlib.error: if an error occurred during zlib decompression.
+          IOError: if an error occurred writing to the output file.
         """
         pack_version, self._num_objects = read_pack_header(self.read)
         if pack_version is None:
@@ -917,12 +932,13 @@ class PackStreamCopier(PackStreamReader):
     def __init__(self, read_all, read_some, outfile, delta_iter=None):
         """Initialize the copier.
 
-        :param read_all: Read function that blocks until the number of
+        Args:
+          read_all: Read function that blocks until the number of
             requested bytes are read.
-        :param read_some: Read function that returns at least one byte, but may
+          read_some: Read function that returns at least one byte, but may
             not return the number of bytes requested.
-        :param outfile: File-like object to write output through.
-        :param delta_iter: Optional DeltaChainIterator to record deltas as we
+          outfile: File-like object to write output through.
+          delta_iter: Optional DeltaChainIterator to record deltas as we
             read them.
         """
         super(PackStreamCopier, self).__init__(read_all, read_some=read_some)
@@ -964,12 +980,13 @@ def obj_sha(type, chunks):
 def compute_file_sha(f, start_ofs=0, end_ofs=0, buffer_size=1 << 16):
     """Hash a portion of a file into a new SHA.
 
-    :param f: A file-like object to read from that supports seek().
-    :param start_ofs: The offset in the file to start reading at.
-    :param end_ofs: The offset in the file to end reading at, relative to the
+    Args:
+      f: A file-like object to read from that supports seek().
+      start_ofs: The offset in the file to start reading at.
+      end_ofs: The offset in the file to end reading at, relative to the
         end of the file.
-    :param buffer_size: A buffer size for reading.
-    :return: A new SHA object updated with data read from the file.
+      buffer_size: A buffer size for reading.
+    Returns: A new SHA object updated with data read from the file.
     """
     sha = sha1()
     f.seek(0, SEEK_END)
@@ -1078,7 +1095,7 @@ class PackData(object):
     def calculate_checksum(self):
         """Calculate the checksum for this pack.
 
-        :return: 20-byte binary SHA1 digest
+        Returns: 20-byte binary SHA1 digest
         """
         return compute_file_sha(self._file, end_ofs=-20).digest()
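
A self-check of the end_ofs convention (a negative offset excludes a
trailer), using only the stdlib plus compute_file_sha:

    >>> import hashlib
    >>> from io import BytesIO
    >>> from dulwich.pack import compute_file_sha
    >>> data = b'pack-data' + b'\x00' * 20   # payload plus a fake 20-byte trailer
    >>> (compute_file_sha(BytesIO(data), end_ofs=-20).digest() ==
    ...  hashlib.sha1(b'pack-data').digest())
    True
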
 
@@ -1102,7 +1119,7 @@ class PackData(object):
     def resolve_object(self, offset, type, obj, get_ref=None):
         """Resolve an object, possibly resolving deltas when necessary.
 
-        :return: Tuple with object type and contents.
+        Returns: Tuple with object type and contents.
         """
         # Walk down the delta chain, building a stack of deltas to reach
         # the requested object.
@@ -1174,9 +1191,10 @@ class PackData(object):
     def iterentries(self, progress=None):
         """Yield entries summarizing the contents of this pack.
 
-        :param progress: Progress function, called with current and total
+        Args:
+          progress: Progress function, called with current and total
             object count.
-        :return: iterator of tuples with (sha, offset, crc32)
+        Returns: iterator of tuples with (sha, offset, crc32)
         """
         num_objects = self._num_objects
         resolve_ext_ref = (
@@ -1191,9 +1209,10 @@ class PackData(object):
     def sorted_entries(self, progress=None):
         """Return entries in this pack, sorted by SHA.
 
-        :param progress: Progress function, called with current and total
+        Args:
+          progress: Progress function, called with current and total
             object count
-        :return: List of tuples with (sha, offset, crc32)
+        Returns: List of tuples with (sha, offset, crc32)
         """
         ret = sorted(self.iterentries(progress=progress))
         return ret
@@ -1201,9 +1220,10 @@ class PackData(object):
     def create_index_v1(self, filename, progress=None):
         """Create a version 1 file for this data file.
 
-        :param filename: Index filename.
-        :param progress: Progress report function
-        :return: Checksum of index file
+        Args:
+          filename: Index filename.
+          progress: Progress report function
+        Returns: Checksum of index file
         """
         entries = self.sorted_entries(progress=progress)
         with GitFile(filename, 'wb') as f:
@@ -1212,9 +1232,10 @@ class PackData(object):
     def create_index_v2(self, filename, progress=None):
         """Create a version 2 index file for this data file.
 
-        :param filename: Index filename.
-        :param progress: Progress report function
-        :return: Checksum of index file
+        Args:
+          filename: Index filename.
+          progress: Progress report function
+        Returns: Checksum of index file
         """
         entries = self.sorted_entries(progress=progress)
         with GitFile(filename, 'wb') as f:
@@ -1224,9 +1245,10 @@ class PackData(object):
                      version=2):
         """Create an  index file for this data file.
 
-        :param filename: Index filename.
-        :param progress: Progress report function
-        :return: Checksum of index file
+        Args:
+          filename: Index filename.
+          progress: Progress report function
+          version: Index format version
+        Returns: Checksum of index file
         """
         if version == 1:
             return self.create_index_v1(filename, progress)
@@ -1481,10 +1503,11 @@ class SHA1Writer(object):
 def pack_object_header(type_num, delta_base, size):
     """Create a pack object header for the given object info.
 
-    :param type_num: Numeric type of the object.
-    :param delta_base: Delta base offset or ref, or None for whole objects.
-    :param size: Uncompressed object size.
-    :return: A header for a packed object.
+    Args:
+      type_num: Numeric type of the object.
+      delta_base: Delta base offset or ref, or None for whole objects.
+      size: Uncompressed object size.
+    Returns: A header for a packed object.
     """
     header = []
     c = (type_num << 4) | (size & 15)
@@ -1511,10 +1534,11 @@ def pack_object_header(type_num, delta_base, size):
 def write_pack_object(f, type, object, sha=None):
     """Write pack object to a file.
 
-    :param f: File to write to
-    :param type: Numeric type of the object
-    :param object: Object to write
-    :return: Tuple with offset at which the object was written, and crc32
+    Args:
+      f: File to write to
+      type: Numeric type of the object
+      object: Object to write
+      sha: Optional SHA-1 object to update with the data written
+    Returns: Tuple with offset at which the object was written, and crc32
     """
     if type in DELTA_TYPES:
         delta_base, object = object
@@ -1534,12 +1558,13 @@ def write_pack_object(f, type, object, sha=None):
 def write_pack(filename, objects, deltify=None, delta_window_size=None):
     """Write a new pack data file.
 
-    :param filename: Path to the new pack file (without .pack extension)
-    :param objects: Iterable of (object, path) tuples to write.
+    Args:
+      filename: Path to the new pack file (without .pack extension)
+      objects: Iterable of (object, path) tuples to write.
         Should provide __len__
-    :param window_size: Delta window size
-    :param deltify: Whether to deltify pack objects
-    :return: Tuple with checksum of pack file and index file
+      delta_window_size: Delta window size
+      deltify: Whether to deltify pack objects
+    Returns: Tuple with checksum of pack file and index file
     """
     with GitFile(filename + '.pack', 'wb') as f:
         entries, data_sum = write_pack_objects(
@@ -1559,9 +1584,10 @@ def write_pack_header(f, num_objects):
 def deltify_pack_objects(objects, window_size=None):
     """Generate deltas for pack objects.
 
-    :param objects: An iterable of (object, path) tuples to deltify.
-    :param window_size: Window size; None for default
-    :return: Iterator over type_num, object id, delta_base, content
+    Args:
+      objects: An iterable of (object, path) tuples to deltify.
+      window_size: Window size; None for default
+    Returns: Iterator over type_num, object id, delta_base, content
         delta_base is None for full text entries
     """
     # TODO(jelmer): Use threads
@@ -1596,8 +1622,9 @@ def deltify_pack_objects(objects, window_size=None):
 def pack_objects_to_data(objects):
     """Create pack data from objects
 
-    :param objects: Pack objects
-    :return: Tuples with (type_num, hexdigest, delta base, object chunks)
+    Args:
+      objects: Pack objects
+    Returns: Tuples with (type_num, hexdigest, delta base, object chunks)
     """
     count = len(objects)
     return (count,
@@ -1608,13 +1635,14 @@ def pack_objects_to_data(objects):
 def write_pack_objects(f, objects, delta_window_size=None, deltify=None):
     """Write a new pack data file.
 
-    :param f: File to write to
-    :param objects: Iterable of (object, path) tuples to write.
+    Args:
+      f: File to write to
+      objects: Iterable of (object, path) tuples to write.
         Should provide __len__
-    :param window_size: Sliding window size for searching for deltas;
+      delta_window_size: Sliding window size for searching for deltas;
                         Set to None for default window size.
-    :param deltify: Whether to deltify objects
-    :return: Dict mapping id -> (offset, crc32 checksum), pack checksum
+      deltify: Whether to deltify objects
+    Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     if deltify is None:
         # PERFORMANCE/TODO(jelmer): This should be enabled but is *much* too
@@ -1632,11 +1660,12 @@ def write_pack_objects(f, objects, delta_window_size=None, deltify=None):
 def write_pack_data(f, num_records, records, progress=None):
     """Write a new pack data file.
 
-    :param f: File to write to
-    :param num_records: Number of records
-    :param records: Iterator over type_num, object_id, delta_base, raw
-    :param progress: Function to report progress to
-    :return: Dict mapping id -> (offset, crc32 checksum), pack checksum
+    Args:
+      f: File to write to
+      num_records: Number of records
+      records: Iterator over type_num, object_id, delta_base, raw
+      progress: Function to report progress to
+    Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     # Write the pack
     entries = {}
@@ -1665,11 +1694,12 @@ def write_pack_data(f, num_records, records, progress=None):
 def write_pack_index_v1(f, entries, pack_checksum):
     """Write a new pack index file.
 
-    :param f: A file-like object to write to
-    :param entries: List of tuples with object name (sha), offset_in_pack,
+    Args:
+      f: A file-like object to write to
+      entries: List of tuples with object name (sha), offset_in_pack,
         and crc32_checksum.
-    :param pack_checksum: Checksum of the pack file.
-    :return: The SHA of the written index file
+      pack_checksum: Checksum of the pack file.
+    Returns: The SHA of the written index file
     """
     f = SHA1Writer(f)
     fan_out_table = defaultdict(lambda: 0)
@@ -1723,8 +1753,9 @@ def _encode_copy_operation(start, length):
 def create_delta(base_buf, target_buf):
     """Use python difflib to work out how to transform base_buf to target_buf.
 
-    :param base_buf: Base buffer
-    :param target_buf: Target buffer
+    Args:
+      base_buf: Base buffer
+      target_buf: Target buffer
     """
     assert isinstance(base_buf, bytes)
     assert isinstance(target_buf, bytes)
@@ -1766,8 +1797,9 @@ def create_delta(base_buf, target_buf):
 def apply_delta(src_buf, delta):
     """Based on the similar function in git's patch-delta.c.
 
-    :param src_buf: Source buffer
-    :param delta: Delta instructions
+    Args:
+      src_buf: Source buffer
+      delta: Delta instructions
     """
     if not isinstance(src_buf, bytes):
         src_buf = b''.join(src_buf)
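
A round-trip sketch of the two functions (apply_delta returns a list of
chunks, so join before comparing; assumes create_delta returns the delta
as bytes, as in this version):

    >>> from dulwich.pack import apply_delta, create_delta
    >>> base, target = b'the quick brown fox', b'the quick red fox'
    >>> b''.join(apply_delta(base, create_delta(base, target))) == target
    True
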
@@ -1833,11 +1865,12 @@ def apply_delta(src_buf, delta):
 def write_pack_index_v2(f, entries, pack_checksum):
     """Write a new pack index file.
 
-    :param f: File-like object to write to
-    :param entries: List of tuples with object name (sha), offset_in_pack, and
+    Args:
+      f: File-like object to write to
+      entries: List of tuples with object name (sha), offset_in_pack, and
         crc32_checksum.
-    :param pack_checksum: Checksum of the pack file.
-    :return: The SHA of the index file written
+      pack_checksum: Checksum of the pack file.
+    Returns: The SHA of the index file written
     """
     f = SHA1Writer(f)
     f.write(b'\377tOc')  # Magic!
@@ -1917,7 +1950,7 @@ class Pack(object):
     def index(self):
         """The index being used.
 
-        :note: This may be an in-memory index
+        Note: This may be an in-memory index
         """
         if self._idx is None:
             self._idx = self._idx_load()
@@ -1961,7 +1994,8 @@ class Pack(object):
     def check(self):
         """Check the integrity of this pack.
 
-        :raise ChecksumMismatch: if a checksum for the index or data is wrong
+        Raises:
+          ChecksumMismatch: if a checksum for the index or data is wrong
         """
         self.index.check()
         self.data.check()
@@ -1983,8 +2017,9 @@ class Pack(object):
     def get_raw_unresolved(self, sha1):
         """Get raw unresolved data for a SHA.
 
-        :param sha1: SHA to return data for
-        :return: Tuple with pack object type, delta base (if applicable),
+        Args:
+          sha1: SHA to return data for
+        Returns: Tuple with pack object type, delta base (if applicable),
             list of data chunks
         """
         offset = self.index.object_index(sha1)
@@ -2015,7 +2050,7 @@ class Pack(object):
     def pack_tuples(self):
         """Provide an iterable for use with write_pack_objects.
 
-        :return: Object that can iterate over (object, path) tuples
+        Returns: Object that can iterate over (object, path) tuples
             and provides __len__
         """
         class PackTupleIterable(object):
@@ -2034,9 +2069,10 @@ class Pack(object):
     def keep(self, msg=None):
         """Add a .keep file for the pack, preventing git from garbage collecting it.
 
-        :param msg: A message written inside the .keep file; can be used later
+        Args:
+          msg: A message written inside the .keep file; can be used later
             to determine whether or not a .keep file is obsolete.
-        :return: The path of the .keep file, as a string.
+        Returns: The path of the .keep file, as a string.
         """
         keepfile_name = '%s.keep' % self._basename
         with GitFile(keepfile_name, 'wb') as keepfile:
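
A closing sketch of the keep() API (pack basename is hypothetical; note
the basename carries no .pack/.idx extension):

    >>> from dulwich.pack import Pack
    >>> pack = Pack('/path/to/pack-abc123')
    >>> pack.keep(msg=b'pinned during migration')
    '/path/to/pack-abc123.keep'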