fastexport.py

# fastexport.py -- Fast export/import functionality
# Copyright (C) 2010-2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
  21. """Fast export/import functionality."""
  22. import stat
  23. from fastimport import commands, parser, processor
  24. from fastimport import errors as fastimport_errors
  25. from .index import commit_tree
  26. from .object_store import iter_tree_contents
  27. from .objects import ZERO_SHA, Blob, Commit, Tag
def split_email(text):
    """Split a b"Name <email>" identity into (name, email) bytes."""
    # TODO(jelmer): Dedupe this and the same functionality in
    # format_annotate_line.
    (name, email) = text.rsplit(b" <", 1)
    return (name, email.rstrip(b">"))


class GitFastExporter:
    """Generate a fast-export output stream for Git objects."""

    def __init__(self, outf, store) -> None:
        self.outf = outf
        self.store = store
        self.markers: dict[bytes, bytes] = {}
        self._marker_idx = 0

    def print_cmd(self, cmd) -> None:
        self.outf.write(getattr(cmd, "__bytes__", cmd.__repr__)() + b"\n")

    def _allocate_marker(self):
        self._marker_idx += 1
        return str(self._marker_idx).encode("ascii")

    def _export_blob(self, blob):
        marker = self._allocate_marker()
        self.markers[marker] = blob.id
        return (commands.BlobCommand(marker, blob.data), marker)

    def emit_blob(self, blob):
        (cmd, marker) = self._export_blob(blob)
        self.print_cmd(cmd)
        return marker

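    # _iter_files translates the tree diff reported by
    # store.tree_changes(base_tree, new_tree) into fastimport file commands:
    # deletions become FileDeleteCommand, renames FileRenameCommand, and
    # content or mode changes FileModifyCommand.  Blobs for new contents are
    # emitted up front via emit_blob and referenced by a b":<mark>" marker.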
    def _iter_files(self, base_tree, new_tree):
        for (
            (old_path, new_path),
            (old_mode, new_mode),
            (old_hexsha, new_hexsha),
        ) in self.store.tree_changes(base_tree, new_tree):
            if new_path is None:
                yield commands.FileDeleteCommand(old_path)
                continue
            if not stat.S_ISDIR(new_mode):
                blob = self.store[new_hexsha]
                marker = self.emit_blob(blob)
            if old_path != new_path and old_path is not None:
                yield commands.FileRenameCommand(old_path, new_path)
            if old_mode != new_mode or old_hexsha != new_hexsha:
                prefixed_marker = b":" + marker
                yield commands.FileModifyCommand(
                    new_path, new_mode, prefixed_marker, None
                )

    def _export_commit(self, commit, ref, base_tree=None):
        file_cmds = list(self._iter_files(base_tree, commit.tree))
        marker = self._allocate_marker()
        if commit.parents:
            from_ = commit.parents[0]
            merges = commit.parents[1:]
        else:
            from_ = None
            merges = []
        author, author_email = split_email(commit.author)
        committer, committer_email = split_email(commit.committer)
        cmd = commands.CommitCommand(
            ref,
            marker,
            (author, author_email, commit.author_time, commit.author_timezone),
            (
                committer,
                committer_email,
                commit.commit_time,
                commit.commit_timezone,
            ),
            commit.message,
            from_,
            merges,
            file_cmds,
        )
        return (cmd, marker)

    def emit_commit(self, commit, ref, base_tree=None):
        cmd, marker = self._export_commit(commit, ref, base_tree)
        self.print_cmd(cmd)
        return marker


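# A minimal usage sketch (illustrative only, not part of this module's API;
# assumes repo is a dulwich.repo.Repo and outf a binary file-like object such
# as sys.stdout.buffer):
#
#     exporter = GitFastExporter(outf, repo.object_store)
#     exporter.emit_commit(repo[repo.head()], b"refs/heads/master")
#
# With the default base_tree=None the commit's entire tree is exported as
# additions; to export a range of history, emit commits oldest-first and pass
# the parent commit's tree as base_tree so only the diff is written.

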
class GitImportProcessor(processor.ImportProcessor):
    """An import processor that imports into a Git repository using Dulwich."""

    # FIXME: Batch creation of objects?
    def __init__(self, repo, params=None, verbose=False, outf=None) -> None:
        processor.ImportProcessor.__init__(self, params, verbose)
        self.repo = repo
        self.last_commit = ZERO_SHA
        self.markers: dict[bytes, bytes] = {}
        self._contents: dict[bytes, tuple[int, bytes]] = {}

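    # markers maps fast-import marks (stored without the leading b":") to the
    # SHAs of the objects created for them; _contents mirrors the tree of
    # last_commit as a path -> (mode, sha) mapping, rebuilt by _reset_base and
    # updated by the file commands of each CommitCommand.
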
    def lookup_object(self, objectish):
        if objectish.startswith(b":"):
            return self.markers[objectish[1:]]
        return objectish

    def import_stream(self, stream):
        p = parser.ImportParser(stream)
        self.process(p.iter_commands)
        return self.markers

    def blob_handler(self, cmd) -> None:
        """Process a BlobCommand."""
        blob = Blob.from_string(cmd.data)
        self.repo.object_store.add_object(blob)
        if cmd.mark:
            self.markers[cmd.mark] = blob.id

    def checkpoint_handler(self, cmd) -> None:
        """Process a CheckpointCommand."""

    def commit_handler(self, cmd) -> None:
        """Process a CommitCommand."""
        commit = Commit()
        if cmd.author is not None:
            author = cmd.author
        else:
            author = cmd.committer
        (author_name, author_email, author_timestamp, author_timezone) = author
        (
            committer_name,
            committer_email,
            commit_timestamp,
            commit_timezone,
        ) = cmd.committer
        commit.author = author_name + b" <" + author_email + b">"
        commit.author_timezone = author_timezone
        commit.author_time = int(author_timestamp)
        commit.committer = committer_name + b" <" + committer_email + b">"
        commit.commit_timezone = commit_timezone
        commit.commit_time = int(commit_timestamp)
        commit.message = cmd.message
        commit.parents = []
        if cmd.from_:
            cmd.from_ = self.lookup_object(cmd.from_)
            self._reset_base(cmd.from_)
        for filecmd in cmd.iter_files():
            if filecmd.name == b"filemodify":
                if filecmd.data is not None:
                    blob = Blob.from_string(filecmd.data)
                    self.repo.object_store.add_object(blob)
                    blob_id = blob.id
                else:
                    blob_id = self.lookup_object(filecmd.dataref)
                self._contents[filecmd.path] = (filecmd.mode, blob_id)
            elif filecmd.name == b"filedelete":
                del self._contents[filecmd.path]
            elif filecmd.name == b"filecopy":
                self._contents[filecmd.dest_path] = self._contents[filecmd.src_path]
            elif filecmd.name == b"filerename":
                self._contents[filecmd.new_path] = self._contents[filecmd.old_path]
                del self._contents[filecmd.old_path]
            elif filecmd.name == b"filedeleteall":
                self._contents = {}
            else:
                raise Exception(f"Command {filecmd.name} not supported")
        commit.tree = commit_tree(
            self.repo.object_store,
            ((path, hexsha, mode) for (path, (mode, hexsha)) in self._contents.items()),
        )
        if self.last_commit != ZERO_SHA:
            commit.parents.append(self.last_commit)
        for merge in cmd.merges:
            commit.parents.append(self.lookup_object(merge))
        self.repo.object_store.add_object(commit)
        self.repo[cmd.ref] = commit.id
        self.last_commit = commit.id
        if cmd.mark:
            self.markers[cmd.mark] = commit.id

    def progress_handler(self, cmd) -> None:
        """Process a ProgressCommand."""

    def _reset_base(self, commit_id) -> None:
        if self.last_commit == commit_id:
            return
        self._contents = {}
        self.last_commit = commit_id
        if commit_id != ZERO_SHA:
            tree_id = self.repo[commit_id].tree
            for (
                path,
                mode,
                hexsha,
            ) in iter_tree_contents(self.repo.object_store, tree_id):
                self._contents[path] = (mode, hexsha)

    def reset_handler(self, cmd) -> None:
        """Process a ResetCommand."""
        if cmd.from_ is None:
            from_ = ZERO_SHA
        else:
            from_ = self.lookup_object(cmd.from_)
        self._reset_base(from_)
        self.repo.refs[cmd.ref] = from_

    def tag_handler(self, cmd) -> None:
        """Process a TagCommand."""
        tag = Tag()
        tag.tagger = cmd.tagger
        tag.message = cmd.message
        tag.name = cmd.tag
        self.repo.object_store.add_object(tag)
        self.repo.refs[b"refs/tags/" + tag.name] = tag.id

    def feature_handler(self, cmd):
        """Process a FeatureCommand."""
        raise fastimport_errors.UnknownFeature(cmd.feature_name)
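

# A minimal import sketch (illustrative only, not part of this module's API;
# assumes repo is a dulwich.repo.Repo and stream an open binary file containing
# a `git fast-export` stream):
#
#     processor = GitImportProcessor(repo)
#     marks = processor.import_stream(stream)
#
# import_stream drives fastimport.parser.ImportParser over the stream,
# dispatching each command to the *_handler methods above, and returns the
# accumulated mark -> SHA mapping.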