porcelain.py 82 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662
  1. # porcelain.py -- Porcelain-like layer on top of Dulwich
  2. # Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  5. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  6. # General Public License as published by the Free Software Foundation; version 2.0
  7. # or (at your option) any later version. You can redistribute it and/or
  8. # modify it under the terms of either of these two licenses.
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. # You should have received a copy of the licenses; if not, see
  17. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  18. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  19. # License, Version 2.0.
  20. #
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * branch{_create,_delete,_list}
  26. * check_ignore
  27. * checkout_branch
  28. * clone
  29. * cone mode{_init, _set, _add}
  30. * commit
  31. * commit_tree
  32. * daemon
  33. * describe
  34. * diff_tree
  35. * fetch
  36. * for_each_ref
  37. * init
  38. * ls_files
  39. * ls_remote
  40. * ls_tree
  41. * merge
  42. * pull
  43. * push
  44. * rm
  45. * remote{_add}
  46. * receive_pack
  47. * reset
  48. * sparse_checkout
  49. * submodule_add
  50. * submodule_init
  51. * submodule_list
  52. * rev_list
  53. * tag{_create,_delete,_list}
  54. * upload_pack
  55. * update_server_info
  56. * status
  57. * symbolic_ref
  58. These functions are meant to behave similarly to the git subcommands.
  59. Differences in behaviour are considered bugs.
  60. Note: one of the consequences of this is that paths tend to be
  61. interpreted relative to the current working directory rather than relative
  62. to the repository root.
  63. Functions should generally accept both unicode strings and bytestrings
  64. """
  65. import datetime
  66. import fnmatch
  67. import os
  68. import posixpath
  69. import stat
  70. import sys
  71. import time
  72. from collections import namedtuple
  73. from contextlib import closing, contextmanager
  74. from io import BytesIO, RawIOBase
  75. from pathlib import Path
  76. from typing import Optional, Union
  77. from .archive import tar_stream
  78. from .client import get_transport_and_path
  79. from .config import Config, ConfigFile, StackedConfig, read_submodules
  80. from .diff_tree import (
  81. CHANGE_ADD,
  82. CHANGE_COPY,
  83. CHANGE_DELETE,
  84. CHANGE_MODIFY,
  85. CHANGE_RENAME,
  86. RENAME_CHANGE_TYPES,
  87. )
  88. from .errors import SendPackError
  89. from .file import ensure_dir_exists
  90. from .graph import can_fast_forward
  91. from .ignore import IgnoreFilterManager
  92. from .index import (
  93. _fs_to_tree_path,
  94. blob_from_path_and_stat,
  95. build_file_from_blob,
  96. get_unstaged_changes,
  97. index_entry_from_stat,
  98. update_working_tree,
  99. )
  100. from .object_store import iter_tree_contents, tree_lookup_path
  101. from .objects import (
  102. Commit,
  103. Tag,
  104. format_timezone,
  105. parse_timezone,
  106. pretty_format_tree_entry,
  107. )
  108. from .objectspec import (
  109. parse_commit,
  110. parse_object,
  111. parse_ref,
  112. parse_reftuples,
  113. parse_tree,
  114. to_bytes,
  115. )
  116. from .pack import write_pack_from_container, write_pack_index
  117. from .patch import write_tree_diff
  118. from .protocol import ZERO_SHA, Protocol
  119. from .refs import (
  120. LOCAL_BRANCH_PREFIX,
  121. LOCAL_REMOTE_PREFIX,
  122. LOCAL_TAG_PREFIX,
  123. Ref,
  124. _import_remote_refs,
  125. )
  126. from .repo import BaseRepo, Repo, get_user_identity
  127. from .server import (
  128. FileSystemBackend,
  129. ReceivePackHandler,
  130. TCPGitServer,
  131. UploadPackHandler,
  132. )
  133. from .server import update_server_info as server_update_server_info
  134. from .sparse_patterns import (
  135. SparseCheckoutConflictError,
  136. apply_included_paths,
  137. determine_included_paths,
  138. )
  139. # Module level tuple definition for status output
  140. GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
class NoneStream(RawIOBase):
    """Fallback if stdout or stderr are unavailable, does nothing."""

    def read(self, size=-1) -> None:
        # ``None`` means "no data available" under the RawIOBase contract.
        return None

    def readall(self) -> bytes:
        return b""

    def readinto(self, b) -> None:
        # Never fills ``b``; ``None`` signals that no bytes were read.
        return None

    def write(self, b) -> None:
        # Silently discard all output; ``None`` means nothing was consumed.
        return None
# Binary stdout/stderr when available, otherwise a do-nothing stream
# (sys.stdout/sys.stderr may lack ``buffer`` when replaced, e.g. in tests).
default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream()
default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream()

# Encoding applied when callers pass str where bytes are needed.
DEFAULT_ENCODING = "utf-8"
class Error(Exception):
    """Porcelain-based error.

    Base class for all errors raised by this module.
    """

    def __init__(self, msg) -> None:
        super().__init__(msg)
class RemoteExists(Error):
    """Raised when the remote already exists."""
class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""
class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""
  164. def parse_timezone_format(tz_str):
  165. """Parse given string and attempt to return a timezone offset.
  166. Different formats are considered in the following order:
  167. - Git internal format: <unix timestamp> <timezone offset>
  168. - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
  169. - ISO 8601: e.g. 1995-11-20T19:12:08-0500
  170. Args:
  171. tz_str: datetime string
  172. Returns: Timezone offset as integer
  173. Raises:
  174. TimezoneFormatError: if timezone information cannot be extracted
  175. """
  176. import re
  177. # Git internal format
  178. internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
  179. if re.match(internal_format_pattern, tz_str):
  180. try:
  181. tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
  182. return tz_internal[0]
  183. except ValueError:
  184. pass
  185. # RFC 2822
  186. import email.utils
  187. rfc_2822 = email.utils.parsedate_tz(tz_str)
  188. if rfc_2822:
  189. return rfc_2822[9]
  190. # ISO 8601
  191. # Supported offsets:
  192. # sHHMM, sHH:MM, sHH
  193. iso_8601_pattern = re.compile(
  194. "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
  195. )
  196. match = re.search(iso_8601_pattern, tz_str)
  197. total_secs = 0
  198. if match:
  199. sign, hours, minutes = match.groups()
  200. total_secs += int(hours) * 3600
  201. if minutes:
  202. total_secs += int(minutes) * 60
  203. total_secs = -total_secs if sign == "-" else total_secs
  204. return total_secs
  205. # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
  206. raise TimezoneFormatError(tz_str)
  207. def get_user_timezones():
  208. """Retrieve local timezone as described in
  209. https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
  210. Returns: A tuple containing author timezone, committer timezone.
  211. """
  212. local_timezone = time.localtime().tm_gmtoff
  213. if os.environ.get("GIT_AUTHOR_DATE"):
  214. author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
  215. else:
  216. author_timezone = local_timezone
  217. if os.environ.get("GIT_COMMITTER_DATE"):
  218. commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
  219. else:
  220. commit_timezone = local_timezone
  221. return author_timezone, commit_timezone
  222. def open_repo(path_or_repo):
  223. """Open an argument that can be a repository or a path for a repository."""
  224. if isinstance(path_or_repo, BaseRepo):
  225. return path_or_repo
  226. return Repo(path_or_repo)
  227. @contextmanager
  228. def _noop_context_manager(obj):
  229. """Context manager that has the same api as closing but does nothing."""
  230. yield obj
  231. def open_repo_closing(path_or_repo):
  232. """Open an argument that can be a repository or a path for a repository.
  233. returns a context manager that will close the repo on exit if the argument
  234. is a path, else does nothing if the argument is a repo.
  235. """
  236. if isinstance(path_or_repo, BaseRepo):
  237. return _noop_context_manager(path_or_repo)
  238. return closing(Repo(path_or_repo))
def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING):
    """Convert a path to a path usable in an index, e.g. bytes and relative to
    the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding for the returned tree path (used on Windows)
    Returns: A path formatted for use in e.g. an index
    """
    # Resolve might returns a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    path = Path(path)
    resolved_path = path.resolve()

    # Resolve and abspath seems to behave differently regarding symlinks,
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise
    if sys.platform == "win32":
        # Index paths always use "/" separators and are stored as bytes.
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha, new_sha) -> None:
        # Keep both SHAs so callers can report what diverged from what.
        self.current_sha = current_sha
        self.new_sha = new_sha
  278. def check_diverged(repo, current_sha, new_sha) -> None:
  279. """Check if updating to a sha can be done with fast forwarding.
  280. Args:
  281. repo: Repository object
  282. current_sha: Current head sha
  283. new_sha: New head sha
  284. """
  285. try:
  286. can = can_fast_forward(repo, current_sha, new_sha)
  287. except KeyError:
  288. can = False
  289. if not can:
  290. raise DivergedBranches(current_sha, new_sha)
  291. def archive(
  292. repo,
  293. committish=None,
  294. outstream=default_bytes_out_stream,
  295. errstream=default_bytes_err_stream,
  296. ) -> None:
  297. """Create an archive.
  298. Args:
  299. repo: Path of repository for which to generate an archive.
  300. committish: Commit SHA1 or ref to use
  301. outstream: Output stream (defaults to stdout)
  302. errstream: Error stream (defaults to stderr)
  303. """
  304. if committish is None:
  305. committish = "HEAD"
  306. with open_repo_closing(repo) as repo_obj:
  307. c = parse_commit(repo_obj, committish)
  308. for chunk in tar_stream(
  309. repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time
  310. ):
  311. outstream.write(chunk)
def update_server_info(repo=".") -> None:
    """Update server info files for a repository.

    Args:
      repo: path to the repository
    """
    with open_repo_closing(repo) as r:
        server_update_server_info(r)
def symbolic_ref(repo, ref_name, force=False) -> None:
    """Set git symbolic ref into HEAD.

    Args:
      repo: path to the repository
      ref_name: short name of the new ref
      force: force settings without checking if it exists in refs/heads
    Raises:
      Error: if the ref does not exist and ``force`` is not set
    """
    with open_repo_closing(repo) as repo_obj:
        ref_path = _make_branch_ref(ref_name)
        if not force and ref_path not in repo_obj.refs.keys():
            raise Error(f"fatal: ref `{ref_name}` is not a ref")
        repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
def pack_refs(repo, all=False) -> None:
    """Pack loose refs into the repository's packed-refs storage.

    Args:
      repo: path to the repository (or repository object)
      all: pack all refs; by default only tags are packed
        (note: parameter name shadows the builtin but is kept for
        interface compatibility)
    """
    with open_repo_closing(repo) as repo_obj:
        refs = repo_obj.refs
        # HEAD is deliberately never packed.
        packed_refs = {
            ref: refs[ref]
            for ref in refs
            if (all or ref.startswith(LOCAL_TAG_PREFIX)) and ref != b"HEAD"
        }
        refs.add_packed_refs(packed_refs)
def commit(
    repo=".",
    message=None,
    author=None,
    author_timezone=None,
    committer=None,
    commit_timezone=None,
    encoding=None,
    no_verify=False,
    signoff=False,
):
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message (str or bytes)
      author: Optional author name and email
      author_timezone: Author timestamp timezone
      committer: Optional committer name and email
      commit_timezone: Commit timestamp timezone
      encoding: Encoding used to encode str arguments (defaults to utf-8)
      no_verify: Skip pre-commit and commit-msg hooks
      signoff: GPG Sign the commit (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)
    Returns: SHA1 of the new commit
    """
    # FIXME: Support --all argument
    # Duck-typed check: anything with .encode (i.e. str) is encoded to bytes.
    if getattr(message, "encode", None):
        message = message.encode(encoding or DEFAULT_ENCODING)
    if getattr(author, "encode", None):
        author = author.encode(encoding or DEFAULT_ENCODING)
    if getattr(committer, "encode", None):
        committer = committer.encode(encoding or DEFAULT_ENCODING)
    # Timezones not supplied explicitly fall back to GIT_*_DATE / local time.
    local_timezone = get_user_timezones()
    if author_timezone is None:
        author_timezone = local_timezone[0]
    if commit_timezone is None:
        commit_timezone = local_timezone[1]
    with open_repo_closing(repo) as r:
        return r.do_commit(
            message=message,
            author=author,
            author_timezone=author_timezone,
            committer=committer,
            commit_timezone=commit_timezone,
            encoding=encoding,
            no_verify=no_verify,
            # signoff doubles as the signing-key selector; non-str/bool
            # values disable signing entirely.
            sign=signoff if isinstance(signoff, (str, bool)) else None,
        )
def commit_tree(repo, tree, message=None, author=None, committer=None):
    """Create a new commit object from an existing tree.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Optional commit message
      author: Optional author name and email
      committer: Optional committer name and email
    Returns: SHA1 of the new commit
    """
    with open_repo_closing(repo) as r:
        return r.do_commit(
            message=message, tree=tree, committer=committer, author=author
        )
  400. def init(path=".", *, bare=False, symlinks: Optional[bool] = None):
  401. """Create a new git repository.
  402. Args:
  403. path: Path to repository.
  404. bare: Whether to create a bare repository.
  405. symlinks: Whether to create actual symlinks (defaults to autodetect)
  406. Returns: A Repo instance
  407. """
  408. if not os.path.exists(path):
  409. os.mkdir(path)
  410. if bare:
  411. return Repo.init_bare(path)
  412. else:
  413. return Repo.init(path, symlinks=symlinks)
def clone(
    source,
    target=None,
    bare=False,
    checkout=None,
    errstream=default_bytes_err_stream,
    outstream=None,
    origin: Optional[str] = "origin",
    depth: Optional[int] = None,
    branch: Optional[Union[str, bytes]] = None,
    config: Optional[Config] = None,
    filter_spec=None,
    protocol_version: Optional[int] = None,
    **kwargs,
):
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      kwargs: Extra keyword arguments, forwarded to get_transport_and_path
        (NOTE(review): previous docs mentioned a ``refspecs`` argument here;
        only get_transport_and_path receives these — confirm against callers)
    Returns: The new repository
    Raises:
      Error: when both ``checkout`` and ``bare`` are requested
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
        # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()
    if checkout is None:
        # Working trees are checked out by default except for bare clones.
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        # Mimic git: default target directory is the last path segment.
        target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    mkdir = not os.path.exists(target)

    (client, path) = get_transport_and_path(source, config=config, **kwargs)

    if filter_spec:
        filter_spec = filter_spec.encode("ascii")

    return client.clone(
        path,
        target,
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch,
        progress=errstream.write,
        depth=depth,
        filter_spec=filter_spec,
        protocol_version=protocol_version,
    )
def add(repo=".", paths=None):
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add.  No value passed stages all modified files.
    Returns: Tuple of (list of staged relative paths, set of ignored paths)

    If the repository contains ignored directories, the returned set will
    contain the path to an ignored directory (with trailing slash). Individual
    files within ignored directories will not be returned.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        if not paths:
            # No explicit paths: stage every untracked path under the cwd.
            paths = list(
                get_untracked_paths(
                    str(Path(os.getcwd()).resolve()),
                    str(repo_path),
                    r.open_index(),
                )
            )
        relpaths = []
        if not isinstance(paths, list):
            paths = [paths]
        for p in paths:
            path = Path(p)
            relpath = str(path.resolve().relative_to(repo_path))
            # FIXME: Support patterns
            if path.is_dir():
                # Directories get a trailing separator so ignore rules
                # written for directories (e.g. "build/") match them.
                relpath = os.path.join(relpath, "")
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)
        r.stage(relpaths)
    return (relpaths, ignored)
  523. def _is_subdir(subdir, parentdir):
  524. """Check whether subdir is parentdir or a subdir of parentdir.
  525. If parentdir or subdir is a relative path, it will be disamgibuated
  526. relative to the pwd.
  527. """
  528. parentdir_abs = os.path.realpath(parentdir) + os.path.sep
  529. subdir_abs = os.path.realpath(subdir) + os.path.sep
  530. return subdir_abs.startswith(parentdir_abs)
# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo=".", target_dir=None) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    Raises:
      Error: if target_dir is outside the repository's working directory
    """
    if target_dir is None:
        target_dir = os.getcwd()
    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")
        config = r.get_config_stack()
        # NOTE(review): clean.requireForce is read but the value is currently
        # discarded — see the TODO below.
        config.get_boolean((b"clean",), b"requireForce", True)
        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index
                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)
                if not is_tracked and not is_ignored:
                    os.remove(ap)
def remove(repo=".", paths=None, cached=False) -> None:
    """Remove files from the index (and, unless ``cached``, from disk).

    Args:
      repo: Repository for the files
      paths: Paths to remove
      cached: Only unstage; leave the working-tree file in place
    Raises:
      Error: if a path is not in the index, or if removing it would lose
        staged or unstaged content (mirrors ``git rm`` safety checks)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            full_path = os.fsencode(os.path.abspath(p))
            tree_path = path_to_tree_path(r.path, p)
            try:
                index_sha = index[tree_path].sha
            except KeyError as exc:
                raise Error(f"{p} did not match any files") from exc
            if not cached:
                try:
                    st = os.lstat(full_path)
                except OSError:
                    # Already gone from disk: nothing to safety-check/delete.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path, st)
                    except OSError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path
                            )[1]
                        except KeyError:
                            # Path is staged but not yet in HEAD.
                            committed_sha = None
                        # Refuse deletion when the staged copy differs from
                        # both the working file and HEAD.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p}"
                            )
                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p}")
                        os.remove(full_path)
            del index[tree_path]
        index.write()
  609. rm = remove
  610. def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
  611. if commit.encoding:
  612. encoding = commit.encoding.decode("ascii")
  613. else:
  614. encoding = default_encoding
  615. return contents.decode(encoding, "replace")
  616. def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
  617. if commit.encoding:
  618. encoding = commit.encoding.decode("ascii")
  619. else:
  620. encoding = default_encoding
  621. return contents.encode(encoding)
  622. def print_commit(commit, decode, outstream=sys.stdout) -> None:
  623. """Write a human-readable commit log entry.
  624. Args:
  625. commit: A `Commit` object
  626. outstream: A stream file to write to
  627. """
  628. outstream.write("-" * 50 + "\n")
  629. outstream.write("commit: " + commit.id.decode("ascii") + "\n")
  630. if len(commit.parents) > 1:
  631. outstream.write(
  632. "merge: "
  633. + "...".join([c.decode("ascii") for c in commit.parents[1:]])
  634. + "\n"
  635. )
  636. outstream.write("Author: " + decode(commit.author) + "\n")
  637. if commit.author != commit.committer:
  638. outstream.write("Committer: " + decode(commit.committer) + "\n")
  639. time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
  640. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  641. timezone_str = format_timezone(commit.author_timezone).decode("ascii")
  642. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  643. if commit.message:
  644. outstream.write("\n")
  645. outstream.write(decode(commit.message) + "\n")
  646. outstream.write("\n")
  647. def print_tag(tag, decode, outstream=sys.stdout) -> None:
  648. """Write a human-readable tag.
  649. Args:
  650. tag: A `Tag` object
  651. decode: Function for decoding bytes to unicode string
  652. outstream: A stream to write to
  653. """
  654. outstream.write("Tagger: " + decode(tag.tagger) + "\n")
  655. time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
  656. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  657. timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
  658. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  659. outstream.write("\n")
  660. outstream.write(decode(tag.message))
  661. outstream.write("\n")
  662. def show_blob(repo, blob, decode, outstream=sys.stdout) -> None:
  663. """Write a blob to a stream.
  664. Args:
  665. repo: A `Repo` object
  666. blob: A `Blob` object
  667. decode: Function for decoding bytes to unicode string
  668. outstream: A stream file to write to
  669. """
  670. outstream.write(decode(blob.data))
  671. def show_commit(repo, commit, decode, outstream=sys.stdout) -> None:
  672. """Show a commit to a stream.
  673. Args:
  674. repo: A `Repo` object
  675. commit: A `Commit` object
  676. decode: Function for decoding bytes to unicode string
  677. outstream: Stream to write to
  678. """
  679. print_commit(commit, decode=decode, outstream=outstream)
  680. if commit.parents:
  681. parent_commit = repo[commit.parents[0]]
  682. base_tree = parent_commit.tree
  683. else:
  684. base_tree = None
  685. diffstream = BytesIO()
  686. write_tree_diff(diffstream, repo.object_store, base_tree, commit.tree)
  687. diffstream.seek(0)
  688. outstream.write(commit_decode(commit, diffstream.getvalue()))
  689. def show_tree(repo, tree, decode, outstream=sys.stdout) -> None:
  690. """Print a tree to a stream.
  691. Args:
  692. repo: A `Repo` object
  693. tree: A `Tree` object
  694. decode: Function for decoding bytes to unicode string
  695. outstream: Stream to write to
  696. """
  697. for n in tree:
  698. outstream.write(decode(n) + "\n")
  699. def show_tag(repo, tag, decode, outstream=sys.stdout) -> None:
  700. """Print a tag to a stream.
  701. Args:
  702. repo: A `Repo` object
  703. tag: A `Tag` object
  704. decode: Function for decoding bytes to unicode string
  705. outstream: Stream to write to
  706. """
  707. print_tag(tag, decode, outstream)
  708. show_object(repo, repo[tag.object[1]], decode, outstream)
  709. def show_object(repo, obj, decode, outstream):
  710. return {
  711. b"tree": show_tree,
  712. b"blob": show_blob,
  713. b"commit": show_commit,
  714. b"tag": show_tag,
  715. }[obj.type_name](repo, obj, decode, outstream)
  716. def print_name_status(changes):
  717. """Print a simple status summary, listing changed files."""
  718. for change in changes:
  719. if not change:
  720. continue
  721. if isinstance(change, list):
  722. change = change[0]
  723. if change.type == CHANGE_ADD:
  724. path1 = change.new.path
  725. path2 = ""
  726. kind = "A"
  727. elif change.type == CHANGE_DELETE:
  728. path1 = change.old.path
  729. path2 = ""
  730. kind = "D"
  731. elif change.type == CHANGE_MODIFY:
  732. path1 = change.new.path
  733. path2 = ""
  734. kind = "M"
  735. elif change.type in RENAME_CHANGE_TYPES:
  736. path1 = change.old.path
  737. path2 = change.new.path
  738. if change.type == CHANGE_RENAME:
  739. kind = "R"
  740. elif change.type == CHANGE_COPY:
  741. kind = "C"
  742. yield "%-8s%-20s%-20s" % (kind, path1, path2) # noqa: UP031
  743. def log(
  744. repo=".",
  745. paths=None,
  746. outstream=sys.stdout,
  747. max_entries=None,
  748. reverse=False,
  749. name_status=False,
  750. ) -> None:
  751. """Write commit logs.
  752. Args:
  753. repo: Path to repository
  754. paths: Optional set of specific paths to print entries for
  755. outstream: Stream to write log output to
  756. reverse: Reverse order in which entries are printed
  757. name_status: Print name status
  758. max_entries: Optional maximum number of entries to display
  759. """
  760. with open_repo_closing(repo) as r:
  761. try:
  762. include = [r.head()]
  763. except KeyError:
  764. include = []
  765. walker = r.get_walker(
  766. include=include, max_entries=max_entries, paths=paths, reverse=reverse
  767. )
  768. for entry in walker:
  769. def decode(x):
  770. return commit_decode(entry.commit, x)
  771. print_commit(entry.commit, decode, outstream)
  772. if name_status:
  773. outstream.writelines(
  774. [line + "\n" for line in print_name_status(entry.changes())]
  775. )
  776. # TODO(jelmer): better default for encoding?
  777. def show(
  778. repo=".",
  779. objects=None,
  780. outstream=sys.stdout,
  781. default_encoding=DEFAULT_ENCODING,
  782. ) -> None:
  783. """Print the changes in a commit.
  784. Args:
  785. repo: Path to repository
  786. objects: Objects to show (defaults to [HEAD])
  787. outstream: Stream to write to
  788. default_encoding: Default encoding to use if none is set in the
  789. commit
  790. """
  791. if objects is None:
  792. objects = ["HEAD"]
  793. if not isinstance(objects, list):
  794. objects = [objects]
  795. with open_repo_closing(repo) as r:
  796. for objectish in objects:
  797. o = parse_object(r, objectish)
  798. if isinstance(o, Commit):
  799. def decode(x):
  800. return commit_decode(o, x, default_encoding)
  801. else:
  802. def decode(x):
  803. return x.decode(default_encoding)
  804. show_object(r, o, decode, outstream)
  805. def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream) -> None:
  806. """Compares the content and mode of blobs found via two tree objects.
  807. Args:
  808. repo: Path to repository
  809. old_tree: Id of old tree
  810. new_tree: Id of new tree
  811. outstream: Stream to write to
  812. """
  813. with open_repo_closing(repo) as r:
  814. write_tree_diff(outstream, r.object_store, old_tree, new_tree)
  815. def rev_list(repo, commits, outstream=sys.stdout) -> None:
  816. """Lists commit objects in reverse chronological order.
  817. Args:
  818. repo: Path to repository
  819. commits: Commits over which to iterate
  820. outstream: Stream to write to
  821. """
  822. with open_repo_closing(repo) as r:
  823. for entry in r.get_walker(include=[r[c].id for c in commits]):
  824. outstream.write(entry.commit.id + b"\n")
  825. def _canonical_part(url: str) -> str:
  826. name = url.rsplit("/", 1)[-1]
  827. if name.endswith(".git"):
  828. name = name[:-4]
  829. return name
def submodule_add(repo, url, path=None, name=None) -> None:
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live
      name: Name of the submodule entry (defaults to path)
    """
    with open_repo_closing(repo) as r:
        if path is None:
            # Derive the checkout path from the last URL component.
            # NOTE(review): relpath of a bare name against r.path looks odd
            # here -- presumably it normalizes the value; confirm intent.
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = path

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            # No .gitmodules yet; create one in memory and point it at the
            # expected location so write_to_path() lands in the work tree.
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", path)
        config.write_to_path()
  852. def submodule_init(repo) -> None:
  853. """Initialize submodules.
  854. Args:
  855. repo: Path to repository
  856. """
  857. with open_repo_closing(repo) as r:
  858. config = r.get_config()
  859. gitmodules_path = os.path.join(r.path, ".gitmodules")
  860. for path, url, name in read_submodules(gitmodules_path):
  861. config.set((b"submodule", name), b"active", True)
  862. config.set((b"submodule", name), b"url", url)
  863. config.write_to_path()
  864. def submodule_list(repo):
  865. """List submodules.
  866. Args:
  867. repo: Path to repository
  868. """
  869. from .submodule import iter_cached_submodules
  870. with open_repo_closing(repo) as r:
  871. for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
  872. yield path, sha.decode(DEFAULT_ENCODING)
def tag_create(
    repo,
    tag: Union[str, bytes],
    author: Optional[Union[str, bytes]] = None,
    message: Optional[Union[str, bytes]] = None,
    annotated=False,
    objectish: Union[str, bytes] = "HEAD",
    tag_time=None,
    tag_timezone=None,
    sign: bool = False,
    encoding: str = DEFAULT_ENCODING,
) -> None:
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG Sign the tag (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)
      encoding: Encoding used when converting str arguments to bytes
    """
    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)

        if isinstance(tag, str):
            tag = tag.encode(encoding)
        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                # Fall back to the identity configured for this repository.
                author = get_user_identity(r.get_config_stack())
            elif isinstance(author, str):
                author = author.encode(encoding)
            else:
                assert isinstance(author, bytes)
            tag_obj.tagger = author
            if isinstance(message, str):
                message = message.encode(encoding)
            elif isinstance(message, bytes):
                pass
            else:
                # No message supplied; use an empty one.
                message = b""
            # Annotated tag messages end with a newline, matching git.
            tag_obj.message = message + "\n".encode(encoding)
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                # get_user_timezones() returns (author_tz, committer_tz).
                tag_timezone = get_user_timezones()[1]
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone)
            tag_obj.tag_timezone = tag_timezone
            if sign:
                # True selects the default GPG key; a str selects a key id.
                tag_obj.sign(sign if isinstance(sign, str) else None)

            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            # Lightweight tag: the ref points straight at the target object.
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id
  937. def tag_list(repo, outstream=sys.stdout):
  938. """List all tags.
  939. Args:
  940. repo: Path to repository
  941. outstream: Stream to write tags to
  942. """
  943. with open_repo_closing(repo) as r:
  944. tags = sorted(r.refs.as_dict(b"refs/tags"))
  945. return tags
  946. def tag_delete(repo, name) -> None:
  947. """Remove a tag.
  948. Args:
  949. repo: Path to repository
  950. name: Name of tag to remove
  951. """
  952. with open_repo_closing(repo) as r:
  953. if isinstance(name, bytes):
  954. names = [name]
  955. elif isinstance(name, list):
  956. names = name
  957. else:
  958. raise Error(f"Unexpected tag name type {name!r}")
  959. for name in names:
  960. del r.refs[_make_tag_ref(name)]
  961. def reset(repo, mode, treeish="HEAD") -> None:
  962. """Reset current HEAD to the specified state.
  963. Args:
  964. repo: Path to repository
  965. mode: Mode ("hard", "soft", "mixed")
  966. treeish: Treeish to reset to
  967. """
  968. if mode != "hard":
  969. raise Error("hard is the only mode currently supported")
  970. with open_repo_closing(repo) as r:
  971. tree = parse_tree(r, treeish)
  972. # Get current HEAD tree for comparison
  973. try:
  974. current_head = r.refs[b"HEAD"]
  975. current_tree = r[current_head].tree
  976. except KeyError:
  977. current_tree = None
  978. # Get configuration for working directory update
  979. config = r.get_config()
  980. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
  981. # Import validation functions
  982. from .index import validate_path_element_default, validate_path_element_ntfs
  983. if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
  984. validate_path_element = validate_path_element_ntfs
  985. else:
  986. validate_path_element = validate_path_element_default
  987. # Import symlink function
  988. from .index import symlink
  989. if config.get_boolean(b"core", b"symlinks", True):
  990. symlink_fn = symlink
  991. else:
  992. def symlink_fn(source, target) -> None:
  993. mode = "w" + ("b" if isinstance(source, bytes) else "")
  994. with open(target, mode) as f:
  995. f.write(source)
  996. # Update working tree and index
  997. update_working_tree(
  998. r,
  999. current_tree,
  1000. tree.id,
  1001. honor_filemode=honor_filemode,
  1002. validate_path_element=validate_path_element,
  1003. symlink_fn=symlink_fn,
  1004. force_remove_untracked=True,
  1005. )
  1006. def get_remote_repo(
  1007. repo: Repo, remote_location: Optional[Union[str, bytes]] = None
  1008. ) -> tuple[Optional[str], str]:
  1009. config = repo.get_config()
  1010. if remote_location is None:
  1011. remote_location = get_branch_remote(repo)
  1012. if isinstance(remote_location, str):
  1013. encoded_location = remote_location.encode()
  1014. else:
  1015. encoded_location = remote_location
  1016. section = (b"remote", encoded_location)
  1017. remote_name: Optional[str] = None
  1018. if config.has_section(section):
  1019. remote_name = encoded_location.decode()
  1020. encoded_location = config.get(section, "url")
  1021. else:
  1022. remote_name = None
  1023. return (remote_name, encoded_location.decode())
def push(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    force=False,
    **kwargs,
) -> None:
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote (defaults to the active branch)
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        if refspecs is None:
            refspecs = [active_branch(r)]
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Get the client and path
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )

        selected_refs = []
        remote_changed_refs = {}

        # Callback invoked by send_pack with the remote's current refs;
        # returns the desired new remote ref values.
        def update_refs(refs):
            selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
            new_refs = {}
            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    # No local side: this refspec deletes the remote ref.
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error(f"No valid ref {lh} in local repository") from exc
                    # Without --force, refuse non-fast-forward updates.
                    if not force_ref and rh in refs:
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:
            result = client.send_pack(
                path,
                update_refs,
                generate_pack_data=r.generate_pack_data,
                progress=errstream.write,
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        # Report per-ref status returned by the server.
        for ref, error in (result.ref_status or {}).items():
            if error is not None:
                errstream.write(
                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
                )
            else:
                errstream.write(b"Ref %s updated\n" % ref)

        # Mirror what was pushed into refs/remotes/<remote_name>/.
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)
def pull(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    fast_forward=True,
    ff_only=False,
    force=False,
    filter_spec=None,
    protocol_version=None,
    **kwargs,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: Force updating of local refs even when diverged
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []
        if refspecs is None:
            refspecs = [b"HEAD"]

        # Callback for the client: record the matched refspecs and ask only
        # for objects we do not already have.
        def determine_wants(remote_refs, *args, **kwargs):
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, refspecs, force=force)
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        if filter_spec:
            filter_spec = filter_spec.encode("ascii")
        fetch_result = client.fetch(
            path,
            r,
            progress=errstream.write,
            determine_wants=determine_wants,
            filter_spec=filter_spec,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[b"HEAD"]
            old_tree_id = r[old_head].tree
        except KeyError:
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh in r.refs:
                try:
                    check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                except DivergedBranches as exc:
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        merge_result, conflicts = _do_merge(r, fetch_result.refs[lh])
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            r.refs[rh] = fetch_result.refs[lh]

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]

        # Update working tree to match the new HEAD
        # Skip if merge was performed as merge already updates the working tree
        if not merged and old_tree_id is not None:
            new_tree_id = r[b"HEAD"].tree
            update_working_tree(r, old_tree_id, new_tree_id)

        # Mirror the fetched refs into refs/remotes/<remote_name>/.
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)
  1190. def status(repo=".", ignored=False, untracked_files="all"):
  1191. """Returns staged, unstaged, and untracked changes relative to the HEAD.
  1192. Args:
  1193. repo: Path to repository or repository object
  1194. ignored: Whether to include ignored files in untracked
  1195. untracked_files: How to handle untracked files, defaults to "all":
  1196. "no": do not return untracked files
  1197. "all": include all files in untracked directories
  1198. Using untracked_files="no" can be faster than "all" when the worktreee
  1199. contains many untracked files/directories.
  1200. Note: untracked_files="normal" (git's default) is not implemented.
  1201. Returns: GitStatus tuple,
  1202. staged - dict with lists of staged paths (diff index/HEAD)
  1203. unstaged - list of unstaged paths (diff index/working-tree)
  1204. untracked - list of untracked, un-ignored & non-.git paths
  1205. """
  1206. with open_repo_closing(repo) as r:
  1207. # 1. Get status of staged
  1208. tracked_changes = get_tree_changes(r)
  1209. # 2. Get status of unstaged
  1210. index = r.open_index()
  1211. normalizer = r.get_blob_normalizer()
  1212. filter_callback = normalizer.checkin_normalize
  1213. unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))
  1214. untracked_paths = get_untracked_paths(
  1215. r.path,
  1216. r.path,
  1217. index,
  1218. exclude_ignored=not ignored,
  1219. untracked_files=untracked_files,
  1220. )
  1221. if sys.platform == "win32":
  1222. untracked_changes = [
  1223. path.replace(os.path.sep, "/") for path in untracked_paths
  1224. ]
  1225. else:
  1226. untracked_changes = list(untracked_paths)
  1227. return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
  1228. def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
  1229. """Get path, is_dir for files in working dir from frompath.
  1230. Args:
  1231. frompath: Path to begin walk
  1232. basepath: Path to compare to
  1233. prune_dirnames: Optional callback to prune dirnames during os.walk
  1234. dirnames will be set to result of prune_dirnames(dirpath, dirnames)
  1235. """
  1236. for dirpath, dirnames, filenames in os.walk(frompath):
  1237. # Skip .git and below.
  1238. if ".git" in dirnames:
  1239. dirnames.remove(".git")
  1240. if dirpath != basepath:
  1241. continue
  1242. if ".git" in filenames:
  1243. filenames.remove(".git")
  1244. if dirpath != basepath:
  1245. continue
  1246. if dirpath != frompath:
  1247. yield dirpath, True
  1248. for filename in filenames:
  1249. filepath = os.path.join(dirpath, filename)
  1250. yield filepath, False
  1251. if prune_dirnames:
  1252. dirnames[:] = prune_dirnames(dirpath, dirnames)
def get_untracked_paths(
    frompath, basepath, index, exclude_ignored=False, untracked_files="all"
):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": Not implemented

    Raises:
      NotImplementedError: for untracked_files="normal"
      ValueError: for any other unrecognized untracked_files value

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files == "normal":
        raise NotImplementedError("normal is not yet supported")

    if untracked_files not in ("no", "all"):
        raise ValueError("untracked_files must be one of (no, all)")

    if untracked_files == "no":
        return

    # basepath doubles as the repository location for ignore handling.
    with open_repo_closing(basepath) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []

    # Remove ignored directories from the walk in-place; optionally record
    # them so they can be reported (as directories) at the end.
    def prune_dirnames(dirpath, dirnames):
        # Iterate backwards so deletion by index stays valid.
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            # Trailing separator: ignore rules match directories with "dir/".
            ip = os.path.join(os.path.relpath(path, basepath), "")
            if ignore_manager.is_ignored(ip):
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath), "")
                    )
                del dirnames[i]
        return dirnames

    for ap, is_dir in _walk_working_dir_paths(
        frompath, basepath, prune_dirnames=prune_dirnames
    ):
        if not is_dir:
            ip = path_to_tree_path(basepath, ap)
            if ip not in index:
                if not exclude_ignored or not ignore_manager.is_ignored(
                    os.path.relpath(ap, basepath)
                ):
                    yield os.path.relpath(ap, frompath)

    yield from ignored_dirs
  1301. def get_tree_changes(repo):
  1302. """Return add/delete/modify changes to tree by comparing index to HEAD.
  1303. Args:
  1304. repo: repo path or object
  1305. Returns: dict with lists for each type of change
  1306. """
  1307. with open_repo_closing(repo) as r:
  1308. index = r.open_index()
  1309. # Compares the Index to the HEAD & determines changes
  1310. # Iterate through the changes and report add/delete/modify
  1311. # TODO: call out to dulwich.diff_tree somehow.
  1312. tracked_changes = {
  1313. "add": [],
  1314. "delete": [],
  1315. "modify": [],
  1316. }
  1317. try:
  1318. tree_id = r[b"HEAD"].tree
  1319. except KeyError:
  1320. tree_id = None
  1321. for change in index.changes_from_tree(r.object_store, tree_id):
  1322. if not change[0][0]:
  1323. tracked_changes["add"].append(change[0][1])
  1324. elif not change[0][1]:
  1325. tracked_changes["delete"].append(change[0][0])
  1326. elif change[0][0] == change[0][1]:
  1327. tracked_changes["modify"].append(change[0][0])
  1328. else:
  1329. raise NotImplementedError("git mv ops not yet supported")
  1330. return tracked_changes
  1331. def daemon(path=".", address=None, port=None) -> None:
  1332. """Run a daemon serving Git requests over TCP/IP.
  1333. Args:
  1334. path: Path to the directory to serve.
  1335. address: Optional address to listen on (defaults to ::)
  1336. port: Optional port to listen on (defaults to TCP_GIT_PORT)
  1337. """
  1338. # TODO(jelmer): Support git-daemon-export-ok and --export-all.
  1339. backend = FileSystemBackend(path)
  1340. server = TCPGitServer(backend, address, port)
  1341. server.serve_forever()
  1342. def web_daemon(path=".", address=None, port=None) -> None:
  1343. """Run a daemon serving Git requests over HTTP.
  1344. Args:
  1345. path: Path to the directory to serve
  1346. address: Optional address to listen on (defaults to ::)
  1347. port: Optional port to listen on (defaults to 80)
  1348. """
  1349. from .web import (
  1350. WSGIRequestHandlerLogger,
  1351. WSGIServerLogger,
  1352. make_server,
  1353. make_wsgi_chain,
  1354. )
  1355. backend = FileSystemBackend(path)
  1356. app = make_wsgi_chain(backend)
  1357. server = make_server(
  1358. address,
  1359. port,
  1360. app,
  1361. handler_class=WSGIRequestHandlerLogger,
  1362. server_class=WSGIServerLogger,
  1363. )
  1364. server.serve_forever()
  1365. def upload_pack(path=".", inf=None, outf=None) -> int:
  1366. """Upload a pack file after negotiating its contents using smart protocol.
  1367. Args:
  1368. path: Path to the repository
  1369. inf: Input stream to communicate with client
  1370. outf: Output stream to communicate with client
  1371. """
  1372. if outf is None:
  1373. outf = getattr(sys.stdout, "buffer", sys.stdout)
  1374. if inf is None:
  1375. inf = getattr(sys.stdin, "buffer", sys.stdin)
  1376. path = os.path.expanduser(path)
  1377. backend = FileSystemBackend(path)
  1378. def send_fn(data) -> None:
  1379. outf.write(data)
  1380. outf.flush()
  1381. proto = Protocol(inf.read, send_fn)
  1382. handler = UploadPackHandler(backend, [path], proto)
  1383. # FIXME: Catch exceptions and write a single-line summary to outf.
  1384. handler.handle()
  1385. return 0
  1386. def receive_pack(path=".", inf=None, outf=None) -> int:
  1387. """Receive a pack file after negotiating its contents using smart protocol.
  1388. Args:
  1389. path: Path to the repository
  1390. inf: Input stream to communicate with client
  1391. outf: Output stream to communicate with client
  1392. """
  1393. if outf is None:
  1394. outf = getattr(sys.stdout, "buffer", sys.stdout)
  1395. if inf is None:
  1396. inf = getattr(sys.stdin, "buffer", sys.stdin)
  1397. path = os.path.expanduser(path)
  1398. backend = FileSystemBackend(path)
  1399. def send_fn(data) -> None:
  1400. outf.write(data)
  1401. outf.flush()
  1402. proto = Protocol(inf.read, send_fn)
  1403. handler = ReceivePackHandler(backend, [path], proto)
  1404. # FIXME: Catch exceptions and write a single-line summary to outf.
  1405. handler.handle()
  1406. return 0
  1407. def _make_branch_ref(name: Union[str, bytes]) -> Ref:
  1408. if isinstance(name, str):
  1409. name = name.encode(DEFAULT_ENCODING)
  1410. return LOCAL_BRANCH_PREFIX + name
  1411. def _make_tag_ref(name: Union[str, bytes]) -> Ref:
  1412. if isinstance(name, str):
  1413. name = name.encode(DEFAULT_ENCODING)
  1414. return LOCAL_TAG_PREFIX + name
  1415. def branch_delete(repo, name) -> None:
  1416. """Delete a branch.
  1417. Args:
  1418. repo: Path to the repository
  1419. name: Name of the branch
  1420. """
  1421. with open_repo_closing(repo) as r:
  1422. if isinstance(name, list):
  1423. names = name
  1424. else:
  1425. names = [name]
  1426. for name in names:
  1427. del r.refs[_make_branch_ref(name)]
  1428. def branch_create(repo, name, objectish=None, force=False) -> None:
  1429. """Create a branch.
  1430. Args:
  1431. repo: Path to the repository
  1432. name: Name of the new branch
  1433. objectish: Target object to point new branch at (defaults to HEAD)
  1434. force: Force creation of branch, even if it already exists
  1435. """
  1436. with open_repo_closing(repo) as r:
  1437. if objectish is None:
  1438. objectish = "HEAD"
  1439. object = parse_object(r, objectish)
  1440. refname = _make_branch_ref(name)
  1441. ref_message = b"branch: Created from " + objectish.encode(DEFAULT_ENCODING)
  1442. if force:
  1443. r.refs.set_if_equals(refname, None, object.id, message=ref_message)
  1444. else:
  1445. if not r.refs.add_if_new(refname, object.id, message=ref_message):
  1446. raise Error(f"Branch with name {name} already exists.")
  1447. def branch_list(repo):
  1448. """List all branches.
  1449. Args:
  1450. repo: Path to the repository
  1451. """
  1452. with open_repo_closing(repo) as r:
  1453. return r.refs.keys(base=LOCAL_BRANCH_PREFIX)
  1454. def active_branch(repo):
  1455. """Return the active branch in the repository, if any.
  1456. Args:
  1457. repo: Repository to open
  1458. Returns:
  1459. branch name
  1460. Raises:
  1461. KeyError: if the repository does not have a working tree
  1462. IndexError: if HEAD is floating
  1463. """
  1464. with open_repo_closing(repo) as r:
  1465. active_ref = r.refs.follow(b"HEAD")[0][1]
  1466. if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
  1467. raise ValueError(active_ref)
  1468. return active_ref[len(LOCAL_BRANCH_PREFIX) :]
  1469. def get_branch_remote(repo):
  1470. """Return the active branch's remote name, if any.
  1471. Args:
  1472. repo: Repository to open
  1473. Returns:
  1474. remote name
  1475. Raises:
  1476. KeyError: if the repository does not have a working tree
  1477. """
  1478. with open_repo_closing(repo) as r:
  1479. branch_name = active_branch(r.path)
  1480. config = r.get_config()
  1481. try:
  1482. remote_name = config.get((b"branch", branch_name), b"remote")
  1483. except KeyError:
  1484. remote_name = b"origin"
  1485. return remote_name
  1486. def fetch(
  1487. repo,
  1488. remote_location=None,
  1489. outstream=sys.stdout,
  1490. errstream=default_bytes_err_stream,
  1491. message=None,
  1492. depth=None,
  1493. prune=False,
  1494. prune_tags=False,
  1495. force=False,
  1496. **kwargs,
  1497. ):
  1498. """Fetch objects from a remote server.
  1499. Args:
  1500. repo: Path to the repository
  1501. remote_location: String identifying a remote server
  1502. outstream: Output stream (defaults to stdout)
  1503. errstream: Error stream (defaults to stderr)
  1504. message: Reflog message (defaults to b"fetch: from <remote_name>")
  1505. depth: Depth to fetch at
  1506. prune: Prune remote removed refs
  1507. prune_tags: Prune reomte removed tags
  1508. Returns:
  1509. Dictionary with refs on the remote
  1510. """
  1511. with open_repo_closing(repo) as r:
  1512. (remote_name, remote_location) = get_remote_repo(r, remote_location)
  1513. if message is None:
  1514. message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
  1515. client, path = get_transport_and_path(
  1516. remote_location, config=r.get_config_stack(), **kwargs
  1517. )
  1518. fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth)
  1519. if remote_name is not None:
  1520. _import_remote_refs(
  1521. r.refs,
  1522. remote_name,
  1523. fetch_result.refs,
  1524. message,
  1525. prune=prune,
  1526. prune_tags=prune_tags,
  1527. )
  1528. return fetch_result
  1529. def for_each_ref(
  1530. repo: Union[Repo, str] = ".",
  1531. pattern: Optional[Union[str, bytes]] = None,
  1532. ) -> list[tuple[bytes, bytes, bytes]]:
  1533. """Iterate over all refs that match the (optional) pattern.
  1534. Args:
  1535. repo: Path to the repository
  1536. pattern: Optional glob (7) patterns to filter the refs with
  1537. Returns:
  1538. List of bytes tuples with: (sha, object_type, ref_name)
  1539. """
  1540. if isinstance(pattern, str):
  1541. pattern = os.fsencode(pattern)
  1542. with open_repo_closing(repo) as r:
  1543. refs = r.get_refs()
  1544. if pattern:
  1545. matching_refs: dict[bytes, bytes] = {}
  1546. pattern_parts = pattern.split(b"/")
  1547. for ref, sha in refs.items():
  1548. matches = False
  1549. # git for-each-ref uses glob (7) style patterns, but fnmatch
  1550. # is greedy and also matches slashes, unlike glob.glob.
  1551. # We have to check parts of the pattern individually.
  1552. # See https://github.com/python/cpython/issues/72904
  1553. ref_parts = ref.split(b"/")
  1554. if len(ref_parts) > len(pattern_parts):
  1555. continue
  1556. for pat, ref_part in zip(pattern_parts, ref_parts):
  1557. matches = fnmatch.fnmatchcase(ref_part, pat)
  1558. if not matches:
  1559. break
  1560. if matches:
  1561. matching_refs[ref] = sha
  1562. refs = matching_refs
  1563. ret: list[tuple[bytes, bytes, bytes]] = [
  1564. (sha, r.get_object(sha).type_name, ref)
  1565. for ref, sha in sorted(
  1566. refs.items(),
  1567. key=lambda ref_sha: ref_sha[0],
  1568. )
  1569. if ref != b"HEAD"
  1570. ]
  1571. return ret
  1572. def ls_remote(remote, config: Optional[Config] = None, **kwargs):
  1573. """List the refs in a remote.
  1574. Args:
  1575. remote: Remote repository location
  1576. config: Configuration to use
  1577. Returns:
  1578. Dictionary with remote refs
  1579. """
  1580. if config is None:
  1581. config = StackedConfig.default()
  1582. client, host_path = get_transport_and_path(remote, config=config, **kwargs)
  1583. return client.get_refs(host_path)
  1584. def repack(repo) -> None:
  1585. """Repack loose files in a repository.
  1586. Currently this only packs loose objects.
  1587. Args:
  1588. repo: Path to the repository
  1589. """
  1590. with open_repo_closing(repo) as r:
  1591. r.object_store.pack_loose_objects()
  1592. def pack_objects(
  1593. repo,
  1594. object_ids,
  1595. packf,
  1596. idxf,
  1597. delta_window_size=None,
  1598. deltify=None,
  1599. reuse_deltas=True,
  1600. ) -> None:
  1601. """Pack objects into a file.
  1602. Args:
  1603. repo: Path to the repository
  1604. object_ids: List of object ids to write
  1605. packf: File-like object to write to
  1606. idxf: File-like object to write to (can be None)
  1607. delta_window_size: Sliding window size for searching for deltas;
  1608. Set to None for default window size.
  1609. deltify: Whether to deltify objects
  1610. reuse_deltas: Allow reuse of existing deltas while deltifying
  1611. """
  1612. with open_repo_closing(repo) as r:
  1613. entries, data_sum = write_pack_from_container(
  1614. packf.write,
  1615. r.object_store,
  1616. [(oid, None) for oid in object_ids],
  1617. deltify=deltify,
  1618. delta_window_size=delta_window_size,
  1619. reuse_deltas=reuse_deltas,
  1620. )
  1621. if idxf is not None:
  1622. entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
  1623. write_pack_index(idxf, entries, data_sum)
  1624. def ls_tree(
  1625. repo,
  1626. treeish=b"HEAD",
  1627. outstream=sys.stdout,
  1628. recursive=False,
  1629. name_only=False,
  1630. ) -> None:
  1631. """List contents of a tree.
  1632. Args:
  1633. repo: Path to the repository
  1634. treeish: Tree id to list
  1635. outstream: Output stream (defaults to stdout)
  1636. recursive: Whether to recursively list files
  1637. name_only: Only print item name
  1638. """
  1639. def list_tree(store, treeid, base) -> None:
  1640. for name, mode, sha in store[treeid].iteritems():
  1641. if base:
  1642. name = posixpath.join(base, name)
  1643. if name_only:
  1644. outstream.write(name + b"\n")
  1645. else:
  1646. outstream.write(pretty_format_tree_entry(name, mode, sha))
  1647. if stat.S_ISDIR(mode) and recursive:
  1648. list_tree(store, sha, name)
  1649. with open_repo_closing(repo) as r:
  1650. tree = parse_tree(r, treeish)
  1651. list_tree(r.object_store, tree.id, "")
  1652. def remote_add(repo, name: Union[bytes, str], url: Union[bytes, str]) -> None:
  1653. """Add a remote.
  1654. Args:
  1655. repo: Path to the repository
  1656. name: Remote name
  1657. url: Remote URL
  1658. """
  1659. if not isinstance(name, bytes):
  1660. name = name.encode(DEFAULT_ENCODING)
  1661. if not isinstance(url, bytes):
  1662. url = url.encode(DEFAULT_ENCODING)
  1663. with open_repo_closing(repo) as r:
  1664. c = r.get_config()
  1665. section = (b"remote", name)
  1666. if c.has_section(section):
  1667. raise RemoteExists(section)
  1668. c.set(section, b"url", url)
  1669. c.write_to_path()
  1670. def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
  1671. """Remove a remote.
  1672. Args:
  1673. repo: Path to the repository
  1674. name: Remote name
  1675. """
  1676. if not isinstance(name, bytes):
  1677. name = name.encode(DEFAULT_ENCODING)
  1678. with open_repo_closing(repo) as r:
  1679. c = r.get_config()
  1680. section = (b"remote", name)
  1681. del c[section]
  1682. c.write_to_path()
  1683. def check_ignore(repo, paths, no_index=False):
  1684. """Debug gitignore files.
  1685. Args:
  1686. repo: Path to the repository
  1687. paths: List of paths to check for
  1688. no_index: Don't check index
  1689. Returns: List of ignored files
  1690. """
  1691. with open_repo_closing(repo) as r:
  1692. index = r.open_index()
  1693. ignore_manager = IgnoreFilterManager.from_repo(r)
  1694. for path in paths:
  1695. if not no_index and path_to_tree_path(r.path, path) in index:
  1696. continue
  1697. if os.path.isabs(path):
  1698. path = os.path.relpath(path, r.path)
  1699. if ignore_manager.is_ignored(path):
  1700. yield path
  1701. def update_head(repo, target, detached=False, new_branch=None) -> None:
  1702. """Update HEAD to point at a new branch/commit.
  1703. Note that this does not actually update the working tree.
  1704. Args:
  1705. repo: Path to the repository
  1706. detached: Create a detached head
  1707. target: Branch or committish to switch to
  1708. new_branch: New branch to create
  1709. """
  1710. with open_repo_closing(repo) as r:
  1711. if new_branch is not None:
  1712. to_set = _make_branch_ref(new_branch)
  1713. else:
  1714. to_set = b"HEAD"
  1715. if detached:
  1716. # TODO(jelmer): Provide some way so that the actual ref gets
  1717. # updated rather than what it points to, so the delete isn't
  1718. # necessary.
  1719. del r.refs[to_set]
  1720. r.refs[to_set] = parse_commit(r, target).id
  1721. else:
  1722. r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
  1723. if new_branch is not None:
  1724. r.refs.set_symbolic_ref(b"HEAD", to_set)
  1725. def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None) -> None:
  1726. """Reset the file to specific commit or branch.
  1727. Args:
  1728. repo: dulwich Repo object
  1729. file_path: file to reset, relative to the repository path
  1730. target: branch or commit or b'HEAD' to reset
  1731. """
  1732. tree = parse_tree(repo, treeish=target)
  1733. tree_path = _fs_to_tree_path(file_path)
  1734. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  1735. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  1736. blob = repo.object_store[file_entry[1]]
  1737. mode = file_entry[0]
  1738. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
  1739. def _update_head_during_checkout_branch(repo, target):
  1740. checkout_target = None
  1741. if target == b"HEAD": # Do not update head while trying to checkout to HEAD.
  1742. pass
  1743. elif target in repo.refs.keys(base=LOCAL_BRANCH_PREFIX):
  1744. update_head(repo, target)
  1745. else:
  1746. # If checking out a remote branch, create a local one without the remote name prefix.
  1747. config = repo.get_config()
  1748. name = target.split(b"/")[0]
  1749. section = (b"remote", name)
  1750. if config.has_section(section):
  1751. checkout_target = target.replace(name + b"/", b"")
  1752. try:
  1753. branch_create(
  1754. repo, checkout_target, (LOCAL_REMOTE_PREFIX + target).decode()
  1755. )
  1756. except Error:
  1757. pass
  1758. update_head(repo, LOCAL_BRANCH_PREFIX + checkout_target)
  1759. else:
  1760. update_head(repo, target, detached=True)
  1761. return checkout_target
def checkout_branch(repo, target: Union[bytes, str], force: bool = False) -> None:
    """Switch branches or restore working tree files.

    The implementation of this function will probably not scale well
    for branches with lots of local changes.
    This is due to the analysis of a diff between branches before any
    changes are applied.

    Args:
      repo: dulwich Repo object
      target: branch name or commit sha to checkout
      force: true or not to force checkout
    """
    target = to_bytes(target)

    current_tree = parse_tree(repo, repo.head())
    target_tree = parse_tree(repo, target)

    if force:
        # Forced checkout: clobber the index and working tree outright.
        repo.reset_index(target_tree.id)
        _update_head_during_checkout_branch(repo, target)
    else:
        # Collect every locally changed path (staged adds/deletes/modifies
        # plus unstaged changes) so we can decide what must be preserved.
        status_report = status(repo)
        changes = list(
            set(
                status_report[0]["add"]
                + status_report[0]["delete"]
                + status_report[0]["modify"]
                + status_report[1]
            )
        )
        index = 0
        while index < len(changes):
            change = changes[index]
            try:
                # Path exists in the current tree...
                current_tree.lookup_path(repo.object_store.__getitem__, change)
                try:
                    # ...and in the target tree: keep it in `changes` so the
                    # local modification survives the checkout.
                    target_tree.lookup_path(repo.object_store.__getitem__, change)
                    index += 1
                except KeyError:
                    # Exists here but not in the target: checking out would
                    # silently drop the local edit, so refuse.
                    raise CheckoutError(
                        "Your local changes to the following files would be overwritten by checkout: "
                        + change.decode()
                    )
            except KeyError:
                # Path is new (not in the current tree); it cannot conflict,
                # drop it from the list (note: pop, so index is not advanced).
                changes.pop(index)

        # Update head.
        checkout_target = _update_head_during_checkout_branch(repo, target)
        if checkout_target is not None:
            # A local tracking branch was created; re-resolve the tree.
            target_tree = parse_tree(repo, checkout_target)

        dealt_with = set()
        repo_index = repo.open_index()
        # Materialize every file of the target tree, skipping paths with
        # local changes (they keep their working-tree content).
        for entry in iter_tree_contents(repo.object_store, target_tree.id):
            dealt_with.add(entry.path)
            if entry.path in changes:
                continue
            full_path = os.path.join(os.fsencode(repo.path), entry.path)
            blob = repo.object_store[entry.sha]
            ensure_dir_exists(os.path.dirname(full_path))
            st = build_file_from_blob(blob, entry.mode, full_path)
            repo_index[entry.path] = index_entry_from_stat(st, entry.sha)

        repo_index.write()

        # Paths only present in the old tree are no longer tracked.
        for entry in iter_tree_contents(repo.object_store, current_tree.id):
            if entry.path not in dealt_with:
                repo.unstage([entry.path])

    # Remove the untracked files which are in the current_file_set.
    repo_index = repo.open_index()
    for change in repo_index.changes_from_tree(repo.object_store, current_tree.id):
        path_change = change[0]
        # (old_path, new_path) with new_path None means the file was removed.
        if path_change[1] is None:
            file_name = path_change[0]
            full_path = os.path.join(repo.path, file_name.decode())
            if os.path.isfile(full_path):
                os.remove(full_path)
            # Prune now-empty parent directories up to the repo root.
            dir_path = os.path.dirname(full_path)
            while dir_path != repo.path:
                is_empty = len(os.listdir(dir_path)) == 0
                if is_empty:
                    os.rmdir(dir_path)
                dir_path = os.path.dirname(dir_path)
  1838. def sparse_checkout(
  1839. repo, patterns=None, force: bool = False, cone: Union[bool, None] = None
  1840. ):
  1841. """Perform a sparse checkout in the repository (either 'full' or 'cone mode').
  1842. Perform sparse checkout in either 'cone' (directory-based) mode or
  1843. 'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.
  1844. If ``cone`` is ``None``, the mode is inferred from the repository's
  1845. ``core.sparseCheckoutCone`` config setting.
  1846. Steps:
  1847. 1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
  1848. 2) Determine which paths in the index are included vs. excluded.
  1849. - If ``cone=True``, use "cone-compatible" directory-based logic.
  1850. - If ``cone=False``, use standard .gitignore-style matching.
  1851. 3) Update the index's skip-worktree bits and add/remove files in
  1852. the working tree accordingly.
  1853. 4) If ``force=False``, refuse to remove files that have local modifications.
  1854. Args:
  1855. repo: Path to the repository or a Repo object.
  1856. patterns: Optional list of sparse-checkout patterns to write.
  1857. force: Whether to force removal of locally modified files (default False).
  1858. cone: Boolean indicating cone mode (True/False). If None, read from config.
  1859. Returns:
  1860. None
  1861. """
  1862. with open_repo_closing(repo) as repo_obj:
  1863. # --- 0) Possibly infer 'cone' from config ---
  1864. if cone is None:
  1865. cone = repo_obj.infer_cone_mode()
  1866. # --- 1) Read or write patterns ---
  1867. if patterns is None:
  1868. lines = repo_obj.get_sparse_checkout_patterns()
  1869. if lines is None:
  1870. raise Error("No sparse checkout patterns found.")
  1871. else:
  1872. lines = patterns
  1873. repo_obj.set_sparse_checkout_patterns(patterns)
  1874. # --- 2) Determine the set of included paths ---
  1875. included_paths = determine_included_paths(repo_obj, lines, cone)
  1876. # --- 3) Apply those results to the index & working tree ---
  1877. try:
  1878. apply_included_paths(repo_obj, included_paths, force=force)
  1879. except SparseCheckoutConflictError as exc:
  1880. raise CheckoutError(*exc.args) from exc
  1881. def cone_mode_init(repo):
  1882. """Initialize a repository to use sparse checkout in 'cone' mode.
  1883. Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
  1884. Writes an initial ``.git/info/sparse-checkout`` file that includes only
  1885. top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
  1886. Then performs a sparse checkout to update the working tree accordingly.
  1887. If no directories are specified, then only top-level files are included:
  1888. https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
  1889. Args:
  1890. repo: Path to the repository or a Repo object.
  1891. Returns:
  1892. None
  1893. """
  1894. with open_repo_closing(repo) as repo_obj:
  1895. repo_obj.configure_for_cone_mode()
  1896. patterns = ["/*", "!/*/"] # root-level files only
  1897. sparse_checkout(repo_obj, patterns, force=True, cone=True)
  1898. def cone_mode_set(repo, dirs, force=False):
  1899. """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
  1900. Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
  1901. Writes new patterns so that only the specified directories (and top-level files)
  1902. remain in the working tree, and applies the sparse checkout update.
  1903. Args:
  1904. repo: Path to the repository or a Repo object.
  1905. dirs: List of directory names to include.
  1906. force: Whether to forcibly discard local modifications (default False).
  1907. Returns:
  1908. None
  1909. """
  1910. with open_repo_closing(repo) as repo_obj:
  1911. repo_obj.configure_for_cone_mode()
  1912. repo_obj.set_cone_mode_patterns(dirs=dirs)
  1913. new_patterns = repo_obj.get_sparse_checkout_patterns()
  1914. # Finally, apply the patterns and update the working tree
  1915. sparse_checkout(repo_obj, new_patterns, force=force, cone=True)
  1916. def cone_mode_add(repo, dirs, force=False):
  1917. """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
  1918. Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
  1919. lines to include the specified directories, and then performs a sparse
  1920. checkout to update the working tree accordingly.
  1921. Args:
  1922. repo: Path to the repository or a Repo object.
  1923. dirs: List of directory names to add to the sparse-checkout.
  1924. force: Whether to forcibly discard local modifications (default False).
  1925. Returns:
  1926. None
  1927. """
  1928. with open_repo_closing(repo) as repo_obj:
  1929. repo_obj.configure_for_cone_mode()
  1930. # Do not pass base patterns as dirs
  1931. base_patterns = ["/*", "!/*/"]
  1932. existing_dirs = [
  1933. pat.strip("/")
  1934. for pat in repo_obj.get_sparse_checkout_patterns()
  1935. if pat not in base_patterns
  1936. ]
  1937. added_dirs = existing_dirs + (dirs or [])
  1938. repo_obj.set_cone_mode_patterns(dirs=added_dirs)
  1939. new_patterns = repo_obj.get_sparse_checkout_patterns()
  1940. sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
  1941. def check_mailmap(repo, contact):
  1942. """Check canonical name and email of contact.
  1943. Args:
  1944. repo: Path to the repository
  1945. contact: Contact name and/or email
  1946. Returns: Canonical contact data
  1947. """
  1948. with open_repo_closing(repo) as r:
  1949. from .mailmap import Mailmap
  1950. try:
  1951. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  1952. except FileNotFoundError:
  1953. mailmap = Mailmap()
  1954. return mailmap.lookup(contact)
  1955. def fsck(repo):
  1956. """Check a repository.
  1957. Args:
  1958. repo: A path to the repository
  1959. Returns: Iterator over errors/warnings
  1960. """
  1961. with open_repo_closing(repo) as r:
  1962. # TODO(jelmer): check pack files
  1963. # TODO(jelmer): check graph
  1964. # TODO(jelmer): check refs
  1965. for sha in r.object_store:
  1966. o = r.object_store[sha]
  1967. try:
  1968. o.check()
  1969. except Exception as e:
  1970. yield (sha, e)
  1971. def stash_list(repo):
  1972. """List all stashes in a repository."""
  1973. with open_repo_closing(repo) as r:
  1974. from .stash import Stash
  1975. stash = Stash.from_repo(r)
  1976. return enumerate(list(stash.stashes()))
  1977. def stash_push(repo) -> None:
  1978. """Push a new stash onto the stack."""
  1979. with open_repo_closing(repo) as r:
  1980. from .stash import Stash
  1981. stash = Stash.from_repo(r)
  1982. stash.push()
  1983. def stash_pop(repo) -> None:
  1984. """Pop a stash from the stack."""
  1985. with open_repo_closing(repo) as r:
  1986. from .stash import Stash
  1987. stash = Stash.from_repo(r)
  1988. stash.pop()
  1989. def stash_drop(repo, index) -> None:
  1990. """Drop a stash from the stack."""
  1991. with open_repo_closing(repo) as r:
  1992. from .stash import Stash
  1993. stash = Stash.from_repo(r)
  1994. stash.drop(index)
  1995. def ls_files(repo):
  1996. """List all files in an index."""
  1997. with open_repo_closing(repo) as r:
  1998. return sorted(r.open_index())
  1999. def find_unique_abbrev(object_store, object_id):
  2000. """For now, just return 7 characters."""
  2001. # TODO(jelmer): Add some logic here to return a number of characters that
  2002. # scales relative with the size of the repository
  2003. return object_id.decode("ascii")[:7]
def describe(repo, abbrev=None):
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7
    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key = key.decode()
            obj = r.get_object(value)
            # Only consider tag refs (name contains "tags").
            if "tags" not in key:
                continue
            _, tag = key.rsplit("/", 1)

            try:
                # Annotated tag: dereference to the tagged object.
                commit = obj.object
            except AttributeError:
                # Not an annotated tag object (no .object attribute); skip.
                continue
            else:
                # obj.object is a (type, sha) pair; fetch the tagged commit.
                commit = r.get_object(commit[1])
            # Record commit time (for sorting) and full hex id per tag name.
            tags[tag] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]

        # Newest tagged commit first.
        sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)

        # Get the latest commit
        latest_commit = r[r.head()]

        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"

        # We're now 0 commits from the top
        commit_count = 0

        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag in sorted_tags:
                tag_name = tag[0]
                tag_commit = tag[1][1]
                if commit_id == tag_commit:
                    # HEAD itself is tagged: just the tag name.
                    if commit_count == 0:
                        return tag_name
                    else:
                        # <tag>-<commits since tag>-g<abbreviated sha>
                        return "{}-{}-g{}".format(
                            tag_name,
                            commit_count,
                            latest_commit.id.decode("ascii")[abbrev_slice],
                        )

            commit_count += 1

        # Return plain commit if no parent tag can be found
        return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
  2064. def get_object_by_path(repo, path, committish=None):
  2065. """Get an object by path.
  2066. Args:
  2067. repo: A path to the repository
  2068. path: Path to look up
  2069. committish: Commit to look up path in
  2070. Returns: A `ShaFile` object
  2071. """
  2072. if committish is None:
  2073. committish = "HEAD"
  2074. # Get the repository
  2075. with open_repo_closing(repo) as r:
  2076. commit = parse_commit(r, committish)
  2077. base_tree = commit.tree
  2078. if not isinstance(path, bytes):
  2079. path = commit_encode(commit, path)
  2080. (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  2081. return r[sha]
  2082. def write_tree(repo):
  2083. """Write a tree object from the index.
  2084. Args:
  2085. repo: Repository for which to write tree
  2086. Returns: tree id for the tree that was written
  2087. """
  2088. with open_repo_closing(repo) as r:
  2089. return r.open_index().commit(r.object_store)
def _do_merge(
    r,
    merge_commit_id,
    no_commit=False,
    no_ff=False,
    message=None,
    author=None,
    committer=None,
):
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_merge_base
    from .merge import three_way_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    merge_commit = r[merge_commit_id]

    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])

    if not merge_bases:
        raise Error("No common ancestor found")

    # Use the first merge base
    base_commit_id = merge_bases[0]

    # Check for fast-forward: HEAD is an ancestor of the merge commit.
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[b"HEAD"] = merge_commit_id

        # Update the working directory
        update_working_tree(r, head_commit.tree, merge_commit.tree)
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # Already up to date: merge commit is an ancestor of HEAD.
        return (None, [])

    # Perform three-way merge
    base_commit = r[base_commit_id]
    merged_tree, conflicts = three_way_merge(
        r.object_store, base_commit, head_commit, merge_commit
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory (even when conflicted, so the
    # user can inspect and resolve the result).
    update_working_tree(r, head_commit.tree, merged_tree.id)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author

    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n"
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id

    return (merge_commit_obj.id, [])
  2176. def merge(
  2177. repo,
  2178. committish,
  2179. no_commit=False,
  2180. no_ff=False,
  2181. message=None,
  2182. author=None,
  2183. committer=None,
  2184. ):
  2185. """Merge a commit into the current branch.
  2186. Args:
  2187. repo: Repository to merge into
  2188. committish: Commit to merge
  2189. no_commit: If True, do not create a merge commit
  2190. no_ff: If True, force creation of a merge commit
  2191. message: Optional merge commit message
  2192. author: Optional author for merge commit
  2193. committer: Optional committer for merge commit
  2194. Returns:
  2195. Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
  2196. if no_commit=True or there were conflicts
  2197. Raises:
  2198. Error: If there is no HEAD reference or commit cannot be found
  2199. """
  2200. with open_repo_closing(repo) as r:
  2201. # Parse the commit to merge
  2202. try:
  2203. merge_commit_id = parse_commit(r, committish)
  2204. except KeyError:
  2205. raise Error(f"Cannot find commit '{committish}'")
  2206. return _do_merge(
  2207. r, merge_commit_id, no_commit, no_ff, message, author, committer
  2208. )