# fastexport.py
# __init__.py -- Fast export/import functionality
# Copyright (C) 2010-2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Fast export/import functionality."""
import stat
from collections.abc import Generator
from typing import TYPE_CHECKING, Any, BinaryIO

from fastimport import commands, parser, processor
from fastimport import errors as fastimport_errors

from .index import commit_tree
from .object_store import iter_tree_contents
from .objects import ZERO_SHA, Blob, Commit, ObjectID, Tag
from .refs import Ref

if TYPE_CHECKING:
    from .object_store import BaseObjectStore
    from .repo import BaseRepo
  34. def split_email(text: bytes) -> tuple[bytes, bytes]:
  35. """Split email address from name.
  36. Args:
  37. text: Full name and email (e.g. b"John Doe <john@example.com>")
  38. Returns:
  39. Tuple of (name, email)
  40. """
  41. # TODO(jelmer): Dedupe this and the same functionality in
  42. # format_annotate_line.
  43. (name, email) = text.rsplit(b" <", 1)
  44. return (name, email.rstrip(b">"))
  45. class GitFastExporter:
  46. """Generate a fast-export output stream for Git objects."""
  47. def __init__(self, outf: BinaryIO, store: "BaseObjectStore") -> None:
  48. """Initialize the fast exporter.
  49. Args:
  50. outf: Output file to write to
  51. store: Object store to export from
  52. """
  53. self.outf = outf
  54. self.store = store
  55. self.markers: dict[bytes, bytes] = {}
  56. self._marker_idx = 0
  57. def print_cmd(self, cmd: object) -> None:
  58. """Print a command to the output stream.
  59. Args:
  60. cmd: Command object to print
  61. """
  62. if hasattr(cmd, "__bytes__"):
  63. output = cmd.__bytes__()
  64. else:
  65. output = cmd.__repr__().encode("utf-8")
  66. self.outf.write(output + b"\n")
  67. def _allocate_marker(self) -> bytes:
  68. """Allocate a new marker.
  69. Returns:
  70. New marker as bytes
  71. """
  72. self._marker_idx += 1
  73. return str(self._marker_idx).encode("ascii")
  74. def _export_blob(self, blob: Blob) -> tuple[Any, bytes]:
  75. """Export a blob object.
  76. Args:
  77. blob: Blob object to export
  78. Returns:
  79. Tuple of (BlobCommand, marker)
  80. """
  81. marker = self._allocate_marker()
  82. self.markers[marker] = blob.id
  83. return (commands.BlobCommand(marker, blob.data), marker) # type: ignore[no-untyped-call,unused-ignore]
  84. def emit_blob(self, blob: Blob) -> bytes:
  85. """Emit a blob to the output stream.
  86. Args:
  87. blob: Blob object to emit
  88. Returns:
  89. Marker for the blob
  90. """
  91. (cmd, marker) = self._export_blob(blob)
  92. self.print_cmd(cmd)
  93. return marker
  94. def _iter_files(
  95. self, base_tree: bytes | None, new_tree: bytes | None
  96. ) -> Generator[Any, None, None]:
  97. for (
  98. (old_path, new_path),
  99. (old_mode, new_mode),
  100. (old_hexsha, new_hexsha),
  101. ) in self.store.tree_changes(base_tree, new_tree):
  102. if new_path is None:
  103. if old_path is not None:
  104. yield commands.FileDeleteCommand(old_path) # type: ignore[no-untyped-call,unused-ignore]
  105. continue
  106. marker = b""
  107. if new_mode is not None and not stat.S_ISDIR(new_mode):
  108. if new_hexsha is not None:
  109. blob = self.store[new_hexsha]
  110. from .objects import Blob
  111. if isinstance(blob, Blob):
  112. marker = self.emit_blob(blob)
  113. if old_path != new_path and old_path is not None:
  114. yield commands.FileRenameCommand(old_path, new_path) # type: ignore[no-untyped-call,unused-ignore]
  115. if old_mode != new_mode or old_hexsha != new_hexsha:
  116. prefixed_marker = b":" + marker
  117. assert new_mode is not None
  118. yield commands.FileModifyCommand( # type: ignore[no-untyped-call,unused-ignore]
  119. new_path, new_mode, prefixed_marker, None
  120. )
  121. def _export_commit(
  122. self, commit: Commit, ref: Ref, base_tree: ObjectID | None = None
  123. ) -> tuple[Any, bytes]:
  124. file_cmds = list(self._iter_files(base_tree, commit.tree))
  125. marker = self._allocate_marker()
  126. if commit.parents:
  127. from_ = commit.parents[0]
  128. merges = commit.parents[1:]
  129. else:
  130. from_ = None
  131. merges = []
  132. author, author_email = split_email(commit.author)
  133. committer, committer_email = split_email(commit.committer)
  134. cmd = commands.CommitCommand( # type: ignore[no-untyped-call,unused-ignore]
  135. ref,
  136. marker,
  137. (author, author_email, commit.author_time, commit.author_timezone),
  138. (
  139. committer,
  140. committer_email,
  141. commit.commit_time,
  142. commit.commit_timezone,
  143. ),
  144. commit.message,
  145. from_,
  146. merges,
  147. file_cmds,
  148. )
  149. return (cmd, marker)
  150. def emit_commit(
  151. self, commit: Commit, ref: Ref, base_tree: ObjectID | None = None
  152. ) -> bytes:
  153. """Emit a commit in fast-export format.
  154. Args:
  155. commit: Commit object to export
  156. ref: Reference name for the commit
  157. base_tree: Base tree for incremental export
  158. Returns:
  159. Marker for the commit
  160. """
  161. cmd, marker = self._export_commit(commit, ref, base_tree)
  162. self.print_cmd(cmd)
  163. return marker
  164. class GitImportProcessor(processor.ImportProcessor): # type: ignore[misc,unused-ignore]
  165. """An import processor that imports into a Git repository using Dulwich."""
  166. # FIXME: Batch creation of objects?
  167. def __init__(
  168. self,
  169. repo: "BaseRepo",
  170. params: Any | None = None, # noqa: ANN401
  171. verbose: bool = False,
  172. outf: BinaryIO | None = None,
  173. ) -> None:
  174. """Initialize GitImportProcessor.
  175. Args:
  176. repo: Repository to import into
  177. params: Import parameters
  178. verbose: Whether to enable verbose output
  179. outf: Output file for verbose messages
  180. """
  181. processor.ImportProcessor.__init__(self, params, verbose) # type: ignore[no-untyped-call,unused-ignore]
  182. self.repo = repo
  183. self.last_commit = ZERO_SHA
  184. self.markers: dict[bytes, bytes] = {}
  185. self._contents: dict[bytes, tuple[int, bytes]] = {}
  186. def lookup_object(self, objectish: bytes) -> ObjectID:
  187. """Look up an object by reference or marker.
  188. Args:
  189. objectish: Object reference or marker
  190. Returns:
  191. Object ID
  192. """
  193. if objectish.startswith(b":"):
  194. return self.markers[objectish[1:]]
  195. return objectish
  196. def import_stream(self, stream: BinaryIO) -> dict[bytes, bytes]:
  197. """Import from a fast-import stream.
  198. Args:
  199. stream: Stream to import from
  200. Returns:
  201. Dictionary of markers to object IDs
  202. """
  203. p = parser.ImportParser(stream) # type: ignore[no-untyped-call,unused-ignore]
  204. self.process(p.iter_commands) # type: ignore[no-untyped-call,unused-ignore]
  205. return self.markers
  206. def blob_handler(self, cmd: commands.BlobCommand) -> None:
  207. """Process a BlobCommand."""
  208. blob = Blob.from_string(cmd.data)
  209. self.repo.object_store.add_object(blob)
  210. if cmd.mark:
  211. self.markers[cmd.mark] = blob.id
  212. def checkpoint_handler(self, cmd: commands.CheckpointCommand) -> None:
  213. """Process a CheckpointCommand."""
  214. def commit_handler(self, cmd: commands.CommitCommand) -> None:
  215. """Process a CommitCommand."""
  216. commit = Commit()
  217. if cmd.author is not None:
  218. (author_name, author_email, author_timestamp, author_timezone) = cmd.author
  219. else:
  220. (author_name, author_email, author_timestamp, author_timezone) = (
  221. cmd.committer
  222. )
  223. (
  224. committer_name,
  225. committer_email,
  226. commit_timestamp,
  227. commit_timezone,
  228. ) = cmd.committer
  229. if isinstance(author_name, str):
  230. author_name = author_name.encode("utf-8")
  231. if isinstance(author_email, str):
  232. author_email = author_email.encode("utf-8")
  233. commit.author = author_name + b" <" + author_email + b">"
  234. commit.author_timezone = author_timezone
  235. commit.author_time = int(author_timestamp)
  236. if isinstance(committer_name, str):
  237. committer_name = committer_name.encode("utf-8")
  238. if isinstance(committer_email, str):
  239. committer_email = committer_email.encode("utf-8")
  240. commit.committer = committer_name + b" <" + committer_email + b">"
  241. commit.commit_timezone = commit_timezone
  242. commit.commit_time = int(commit_timestamp)
  243. commit.message = cmd.message
  244. commit.parents = []
  245. if cmd.from_:
  246. cmd.from_ = self.lookup_object(cmd.from_)
  247. self._reset_base(cmd.from_)
  248. for filecmd in cmd.iter_files(): # type: ignore[no-untyped-call,unused-ignore]
  249. if filecmd.name == b"filemodify":
  250. assert isinstance(filecmd, commands.FileModifyCommand)
  251. if filecmd.data is not None:
  252. blob = Blob.from_string(filecmd.data)
  253. self.repo.object_store.add_object(blob)
  254. blob_id = blob.id
  255. else:
  256. assert filecmd.dataref is not None
  257. blob_id = self.lookup_object(filecmd.dataref)
  258. self._contents[filecmd.path] = (filecmd.mode, blob_id)
  259. elif filecmd.name == b"filedelete":
  260. assert isinstance(filecmd, commands.FileDeleteCommand)
  261. del self._contents[filecmd.path]
  262. elif filecmd.name == b"filecopy":
  263. assert isinstance(filecmd, commands.FileCopyCommand)
  264. self._contents[filecmd.dest_path] = self._contents[filecmd.src_path]
  265. elif filecmd.name == b"filerename":
  266. assert isinstance(filecmd, commands.FileRenameCommand)
  267. self._contents[filecmd.new_path] = self._contents[filecmd.old_path]
  268. del self._contents[filecmd.old_path]
  269. elif filecmd.name == b"filedeleteall":
  270. self._contents = {}
  271. else:
  272. raise Exception(f"Command {filecmd.name!r} not supported")
  273. commit.tree = commit_tree(
  274. self.repo.object_store,
  275. ((path, hexsha, mode) for (path, (mode, hexsha)) in self._contents.items()),
  276. )
  277. if self.last_commit != ZERO_SHA:
  278. commit.parents.append(self.last_commit)
  279. for merge in cmd.merges:
  280. commit.parents.append(self.lookup_object(merge))
  281. self.repo.object_store.add_object(commit)
  282. self.repo[cmd.ref] = commit.id
  283. self.last_commit = commit.id
  284. if cmd.mark:
  285. mark_bytes = (
  286. cmd.mark
  287. if isinstance(cmd.mark, bytes)
  288. else str(cmd.mark).encode("ascii")
  289. )
  290. self.markers[mark_bytes] = commit.id
  291. def progress_handler(self, cmd: commands.ProgressCommand) -> None:
  292. """Process a ProgressCommand."""
  293. def _reset_base(self, commit_id: ObjectID) -> None:
  294. if self.last_commit == commit_id:
  295. return
  296. self._contents = {}
  297. self.last_commit = commit_id
  298. if commit_id != ZERO_SHA:
  299. from .objects import Commit
  300. commit = self.repo[commit_id]
  301. tree_id = commit.tree if isinstance(commit, Commit) else None
  302. if tree_id is None:
  303. return
  304. for (
  305. path,
  306. mode,
  307. hexsha,
  308. ) in iter_tree_contents(self.repo.object_store, tree_id):
  309. assert path is not None and mode is not None and hexsha is not None
  310. self._contents[path] = (mode, hexsha)
  311. def reset_handler(self, cmd: commands.ResetCommand) -> None:
  312. """Process a ResetCommand."""
  313. if cmd.from_ is None:
  314. from_ = ZERO_SHA
  315. else:
  316. from_ = self.lookup_object(cmd.from_)
  317. self._reset_base(from_)
  318. self.repo.refs[cmd.ref] = from_
  319. def tag_handler(self, cmd: commands.TagCommand) -> None:
  320. """Process a TagCommand."""
  321. tag = Tag()
  322. tag.tagger = cmd.tagger
  323. tag.message = cmd.message
  324. tag.name = cmd.from_
  325. self.repo.object_store.add_object(tag)
  326. self.repo.refs["refs/tags/" + tag.name] = tag.id
  327. def feature_handler(self, cmd: commands.FeatureCommand) -> None:
  328. """Process a FeatureCommand."""
  329. feature_name = (
  330. cmd.feature_name.decode("utf-8")
  331. if isinstance(cmd.feature_name, bytes)
  332. else cmd.feature_name
  333. )
  334. raise fastimport_errors.UnknownFeature(feature_name) # type: ignore[no-untyped-call,unused-ignore]