porcelain.py 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305
  1. # porcelain.py -- Porcelain-like layer on top of Dulwich
  2. # Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
  6. # or (at your option) any later version. You can redistribute it and/or
  7. # modify it under the terms of either of these two licenses.
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. #
  15. # You should have received a copy of the licenses; if not, see
  16. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  17. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  18. # License, Version 2.0.
  19. #
  20. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  21. Currently implemented:
  22. * archive
  23. * add
  24. * branch{_create,_delete,_list}
  25. * check-ignore
  26. * checkout_branch
  27. * clone
  28. * commit
  29. * commit-tree
  30. * daemon
  31. * describe
  32. * diff-tree
  33. * fetch
  34. * for-each-ref
  35. * init
  36. * ls-files
  37. * ls-remote
  38. * ls-tree
  39. * pull
  40. * push
  41. * rm
  42. * remote{_add}
  43. * receive-pack
  44. * reset
  45. * submodule_add
  46. * submodule_init
  47. * submodule_list
  48. * rev-list
  49. * tag{_create,_delete,_list}
  50. * upload-pack
  51. * update-server-info
  52. * status
  53. * symbolic-ref
  54. These functions are meant to behave similarly to the git subcommands.
  55. Differences in behaviour are considered bugs.
  56. Note: one of the consequences of this is that paths tend to be
  57. interpreted relative to the current working directory rather than relative
  58. to the repository root.
  59. Functions should generally accept both unicode strings and bytestrings
  60. """
  61. import datetime
  62. import fnmatch
  63. import os
  64. import posixpath
  65. import stat
  66. import sys
  67. import time
  68. from collections import namedtuple
  69. from contextlib import closing, contextmanager
  70. from io import BytesIO, RawIOBase
  71. from pathlib import Path
  72. from typing import Optional, Union
  73. from .archive import tar_stream
  74. from .client import get_transport_and_path
  75. from .config import Config, ConfigFile, StackedConfig, read_submodules
  76. from .diff_tree import (
  77. CHANGE_ADD,
  78. CHANGE_COPY,
  79. CHANGE_DELETE,
  80. CHANGE_MODIFY,
  81. CHANGE_RENAME,
  82. RENAME_CHANGE_TYPES,
  83. )
  84. from .errors import SendPackError
  85. from .file import ensure_dir_exists
  86. from .graph import can_fast_forward
  87. from .ignore import IgnoreFilterManager
  88. from .index import (
  89. _fs_to_tree_path,
  90. blob_from_path_and_stat,
  91. build_file_from_blob,
  92. get_unstaged_changes,
  93. index_entry_from_stat,
  94. )
  95. from .object_store import iter_tree_contents, tree_lookup_path
  96. from .objects import (
  97. Commit,
  98. Tag,
  99. format_timezone,
  100. parse_timezone,
  101. pretty_format_tree_entry,
  102. )
  103. from .objectspec import (
  104. parse_commit,
  105. parse_object,
  106. parse_ref,
  107. parse_reftuples,
  108. parse_tree,
  109. to_bytes,
  110. )
  111. from .pack import write_pack_from_container, write_pack_index
  112. from .patch import write_tree_diff
  113. from .protocol import ZERO_SHA, Protocol
  114. from .refs import (
  115. LOCAL_BRANCH_PREFIX,
  116. LOCAL_REMOTE_PREFIX,
  117. LOCAL_TAG_PREFIX,
  118. _import_remote_refs,
  119. )
  120. from .repo import BaseRepo, Repo, get_user_identity
  121. from .server import (
  122. FileSystemBackend,
  123. ReceivePackHandler,
  124. TCPGitServer,
  125. UploadPackHandler,
  126. )
  127. from .server import update_server_info as server_update_server_info
# Module level tuple definition for status output.
# Fields: staged, unstaged, untracked -- collections of paths reported by the
# status porcelain (exact value shapes are defined by the producer).
GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
  130. class NoneStream(RawIOBase):
  131. """Fallback if stdout or stderr are unavailable, does nothing."""
  132. def read(self, size=-1):
  133. return None
  134. def readall(self):
  135. return None
  136. def readinto(self, b):
  137. return None
  138. def write(self, b):
  139. return None
# Prefer the binary buffer of the std streams; fall back to a no-op stream
# when the interpreter has no usable stdout/stderr (e.g. pythonw, GUI apps).
default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream()
default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream()
# Encoding used whenever a caller does not specify one explicitly.
DEFAULT_ENCODING = "utf-8"
class Error(Exception):
    """Porcelain-based error.

    Base class for all errors raised by the porcelain layer.
    """

    def __init__(self, msg) -> None:
        super().__init__(msg)
class RemoteExists(Error):
    """Raised when trying to add a remote that already exists."""
class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""
class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""
  153. def parse_timezone_format(tz_str):
  154. """Parse given string and attempt to return a timezone offset.
  155. Different formats are considered in the following order:
  156. - Git internal format: <unix timestamp> <timezone offset>
  157. - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
  158. - ISO 8601: e.g. 1995-11-20T19:12:08-0500
  159. Args:
  160. tz_str: datetime string
  161. Returns: Timezone offset as integer
  162. Raises:
  163. TimezoneFormatError: if timezone information cannot be extracted
  164. """
  165. import re
  166. # Git internal format
  167. internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
  168. if re.match(internal_format_pattern, tz_str):
  169. try:
  170. tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
  171. return tz_internal[0]
  172. except ValueError:
  173. pass
  174. # RFC 2822
  175. import email.utils
  176. rfc_2822 = email.utils.parsedate_tz(tz_str)
  177. if rfc_2822:
  178. return rfc_2822[9]
  179. # ISO 8601
  180. # Supported offsets:
  181. # sHHMM, sHH:MM, sHH
  182. iso_8601_pattern = re.compile(
  183. "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
  184. )
  185. match = re.search(iso_8601_pattern, tz_str)
  186. total_secs = 0
  187. if match:
  188. sign, hours, minutes = match.groups()
  189. total_secs += int(hours) * 3600
  190. if minutes:
  191. total_secs += int(minutes) * 60
  192. total_secs = -total_secs if sign == "-" else total_secs
  193. return total_secs
  194. # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
  195. raise TimezoneFormatError(tz_str)
  196. def get_user_timezones():
  197. """Retrieve local timezone as described in
  198. https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
  199. Returns: A tuple containing author timezone, committer timezone.
  200. """
  201. local_timezone = time.localtime().tm_gmtoff
  202. if os.environ.get("GIT_AUTHOR_DATE"):
  203. author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
  204. else:
  205. author_timezone = local_timezone
  206. if os.environ.get("GIT_COMMITTER_DATE"):
  207. commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
  208. else:
  209. commit_timezone = local_timezone
  210. return author_timezone, commit_timezone
  211. def open_repo(path_or_repo):
  212. """Open an argument that can be a repository or a path for a repository."""
  213. if isinstance(path_or_repo, BaseRepo):
  214. return path_or_repo
  215. return Repo(path_or_repo)
  216. @contextmanager
  217. def _noop_context_manager(obj):
  218. """Context manager that has the same api as closing but does nothing."""
  219. yield obj
  220. def open_repo_closing(path_or_repo):
  221. """Open an argument that can be a repository or a path for a repository.
  222. returns a context manager that will close the repo on exit if the argument
  223. is a path, else does nothing if the argument is a repo.
  224. """
  225. if isinstance(path_or_repo, BaseRepo):
  226. return _noop_context_manager(path_or_repo)
  227. return closing(Repo(path_or_repo))
  228. def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING):
  229. """Convert a path to a path usable in an index, e.g. bytes and relative to
  230. the repository root.
  231. Args:
  232. repopath: Repository path, absolute or relative to the cwd
  233. path: A path, absolute or relative to the cwd
  234. Returns: A path formatted for use in e.g. an index
  235. """
  236. # Resolve might returns a relative path on Windows
  237. # https://bugs.python.org/issue38671
  238. if sys.platform == "win32":
  239. path = os.path.abspath(path)
  240. path = Path(path)
  241. resolved_path = path.resolve()
  242. # Resolve and abspath seems to behave differently regarding symlinks,
  243. # as we are doing abspath on the file path, we need to do the same on
  244. # the repo path or they might not match
  245. if sys.platform == "win32":
  246. repopath = os.path.abspath(repopath)
  247. repopath = Path(repopath).resolve()
  248. try:
  249. relpath = resolved_path.relative_to(repopath)
  250. except ValueError:
  251. # If path is a symlink that points to a file outside the repo, we
  252. # want the relpath for the link itself, not the resolved target
  253. if path.is_symlink():
  254. parent = path.parent.resolve()
  255. relpath = (parent / path.name).relative_to(repopath)
  256. else:
  257. raise
  258. if sys.platform == "win32":
  259. return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
  260. else:
  261. return bytes(relpath)
class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha, new_sha) -> None:
        # Error.__init__ (which sets the message) is intentionally not
        # called; the two SHAs are stored for callers to report.
        self.current_sha = current_sha
        self.new_sha = new_sha
  267. def check_diverged(repo, current_sha, new_sha):
  268. """Check if updating to a sha can be done with fast forwarding.
  269. Args:
  270. repo: Repository object
  271. current_sha: Current head sha
  272. new_sha: New head sha
  273. """
  274. try:
  275. can = can_fast_forward(repo, current_sha, new_sha)
  276. except KeyError:
  277. can = False
  278. if not can:
  279. raise DivergedBranches(current_sha, new_sha)
  280. def archive(
  281. repo,
  282. committish=None,
  283. outstream=default_bytes_out_stream,
  284. errstream=default_bytes_err_stream,
  285. ):
  286. """Create an archive.
  287. Args:
  288. repo: Path of repository for which to generate an archive.
  289. committish: Commit SHA1 or ref to use
  290. outstream: Output stream (defaults to stdout)
  291. errstream: Error stream (defaults to stderr)
  292. """
  293. if committish is None:
  294. committish = "HEAD"
  295. with open_repo_closing(repo) as repo_obj:
  296. c = parse_commit(repo_obj, committish)
  297. for chunk in tar_stream(
  298. repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time
  299. ):
  300. outstream.write(chunk)
  301. def update_server_info(repo="."):
  302. """Update server info files for a repository.
  303. Args:
  304. repo: path to the repository
  305. """
  306. with open_repo_closing(repo) as r:
  307. server_update_server_info(r)
  308. def symbolic_ref(repo, ref_name, force=False):
  309. """Set git symbolic ref into HEAD.
  310. Args:
  311. repo: path to the repository
  312. ref_name: short name of the new ref
  313. force: force settings without checking if it exists in refs/heads
  314. """
  315. with open_repo_closing(repo) as repo_obj:
  316. ref_path = _make_branch_ref(ref_name)
  317. if not force and ref_path not in repo_obj.refs.keys():
  318. raise Error(f"fatal: ref `{ref_name}` is not a ref")
  319. repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
  320. def pack_refs(repo, all=False):
  321. with open_repo_closing(repo) as repo_obj:
  322. refs = repo_obj.refs
  323. packed_refs = {
  324. ref: refs[ref]
  325. for ref in refs
  326. if (all or ref.startswith(LOCAL_TAG_PREFIX)) and ref != b"HEAD"
  327. }
  328. refs.add_packed_refs(packed_refs)
  329. def commit(
  330. repo=".",
  331. message=None,
  332. author=None,
  333. author_timezone=None,
  334. committer=None,
  335. commit_timezone=None,
  336. encoding=None,
  337. no_verify=False,
  338. signoff=False,
  339. ):
  340. """Create a new commit.
  341. Args:
  342. repo: Path to repository
  343. message: Optional commit message
  344. author: Optional author name and email
  345. author_timezone: Author timestamp timezone
  346. committer: Optional committer name and email
  347. commit_timezone: Commit timestamp timezone
  348. no_verify: Skip pre-commit and commit-msg hooks
  349. signoff: GPG Sign the commit (bool, defaults to False,
  350. pass True to use default GPG key,
  351. pass a str containing Key ID to use a specific GPG key)
  352. Returns: SHA1 of the new commit
  353. """
  354. # FIXME: Support --all argument
  355. if getattr(message, "encode", None):
  356. message = message.encode(encoding or DEFAULT_ENCODING)
  357. if getattr(author, "encode", None):
  358. author = author.encode(encoding or DEFAULT_ENCODING)
  359. if getattr(committer, "encode", None):
  360. committer = committer.encode(encoding or DEFAULT_ENCODING)
  361. local_timezone = get_user_timezones()
  362. if author_timezone is None:
  363. author_timezone = local_timezone[0]
  364. if commit_timezone is None:
  365. commit_timezone = local_timezone[1]
  366. with open_repo_closing(repo) as r:
  367. return r.do_commit(
  368. message=message,
  369. author=author,
  370. author_timezone=author_timezone,
  371. committer=committer,
  372. commit_timezone=commit_timezone,
  373. encoding=encoding,
  374. no_verify=no_verify,
  375. sign=signoff if isinstance(signoff, (str, bool)) else None,
  376. )
  377. def commit_tree(repo, tree, message=None, author=None, committer=None):
  378. """Create a new commit object.
  379. Args:
  380. repo: Path to repository
  381. tree: An existing tree object
  382. author: Optional author name and email
  383. committer: Optional committer name and email
  384. """
  385. with open_repo_closing(repo) as r:
  386. return r.do_commit(
  387. message=message, tree=tree, committer=committer, author=author
  388. )
  389. def init(path=".", *, bare=False, symlinks: Optional[bool] = None):
  390. """Create a new git repository.
  391. Args:
  392. path: Path to repository.
  393. bare: Whether to create a bare repository.
  394. symlinks: Whether to create actual symlinks (defaults to autodetect)
  395. Returns: A Repo instance
  396. """
  397. if not os.path.exists(path):
  398. os.mkdir(path)
  399. if bare:
  400. return Repo.init_bare(path)
  401. else:
  402. return Repo.init(path, symlinks=symlinks)
  403. def encode_refspecs(refspecs):
  404. if refspecs is None:
  405. return [b"HEAD"]
  406. def encode_refspec(ref):
  407. if isinstance(ref, bytes):
  408. return ref
  409. else:
  410. return ref.encode(DEFAULT_ENCODING)
  411. encoded_refs = []
  412. if isinstance(refspecs, bytes) or isinstance(refspecs, str):
  413. encoded_refs.append(encode_refspec(refspecs))
  414. else:
  415. for ref in refspecs:
  416. encoded_refs.append(encode_refspec(ref))
  417. return encoded_refs
  418. def clone(
  419. source,
  420. target=None,
  421. bare=False,
  422. checkout=None,
  423. errstream=default_bytes_err_stream,
  424. outstream=None,
  425. origin: Optional[str] = "origin",
  426. depth: Optional[int] = None,
  427. branch: Optional[Union[str, bytes]] = None,
  428. config: Optional[Config] = None,
  429. filter_spec=None,
  430. protocol_version: Optional[int] = None,
  431. **kwargs,
  432. ):
  433. """Clone a local or remote git repository.
  434. Args:
  435. source: Path or URL for source repository
  436. target: Path to target repository (optional)
  437. bare: Whether or not to create a bare repository
  438. checkout: Whether or not to check-out HEAD after cloning
  439. errstream: Optional stream to write progress to
  440. outstream: Optional stream to write progress to (deprecated)
  441. origin: Name of remote from the repository used to clone
  442. depth: Depth to fetch at
  443. branch: Optional branch or tag to be used as HEAD in the new repository
  444. instead of the cloned repository's HEAD.
  445. config: Configuration to use
  446. refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
  447. bytestring/string.
  448. filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
  449. Only used if the server supports the Git protocol-v2 'filter'
  450. feature, and ignored otherwise.
  451. protocol_version: desired Git protocol version. By default the highest
  452. mutually supported protocol version will be used.
  453. Returns: The new repository
  454. """
  455. if outstream is not None:
  456. import warnings
  457. warnings.warn(
  458. "outstream= has been deprecated in favour of errstream=.",
  459. DeprecationWarning,
  460. stacklevel=3,
  461. )
  462. # TODO(jelmer): Capture logging output and stream to errstream
  463. if config is None:
  464. config = StackedConfig.default()
  465. if checkout is None:
  466. checkout = not bare
  467. if checkout and bare:
  468. raise Error("checkout and bare are incompatible")
  469. if target is None:
  470. target = source.split("/")[-1]
  471. if isinstance(branch, str):
  472. branch = branch.encode(DEFAULT_ENCODING)
  473. mkdir = not os.path.exists(target)
  474. (client, path) = get_transport_and_path(source, config=config, **kwargs)
  475. if filter_spec:
  476. filter_spec = filter_spec.encode("ascii")
  477. return client.clone(
  478. path,
  479. target,
  480. mkdir=mkdir,
  481. bare=bare,
  482. origin=origin,
  483. checkout=checkout,
  484. branch=branch,
  485. progress=errstream.write,
  486. depth=depth,
  487. filter_spec=filter_spec,
  488. protocol_version=protocol_version,
  489. **kwargs,
  490. )
  491. def add(repo=".", paths=None):
  492. """Add files to the staging area.
  493. Args:
  494. repo: Repository for the files
  495. paths: Paths to add. No value passed stages all modified files.
  496. Returns: Tuple with set of added files and ignored files
  497. If the repository contains ignored directories, the returned set will
  498. contain the path to an ignored directory (with trailing slash). Individual
  499. files within ignored directories will not be returned.
  500. """
  501. ignored = set()
  502. with open_repo_closing(repo) as r:
  503. repo_path = Path(r.path).resolve()
  504. ignore_manager = IgnoreFilterManager.from_repo(r)
  505. if not paths:
  506. paths = list(
  507. get_untracked_paths(
  508. str(Path(os.getcwd()).resolve()),
  509. str(repo_path),
  510. r.open_index(),
  511. )
  512. )
  513. relpaths = []
  514. if not isinstance(paths, list):
  515. paths = [paths]
  516. for p in paths:
  517. path = Path(p)
  518. relpath = str(path.resolve().relative_to(repo_path))
  519. # FIXME: Support patterns
  520. if path.is_dir():
  521. relpath = os.path.join(relpath, "")
  522. if ignore_manager.is_ignored(relpath):
  523. ignored.add(relpath)
  524. continue
  525. relpaths.append(relpath)
  526. r.stage(relpaths)
  527. return (relpaths, ignored)
  528. def _is_subdir(subdir, parentdir):
  529. """Check whether subdir is parentdir or a subdir of parentdir.
  530. If parentdir or subdir is a relative path, it will be disamgibuated
  531. relative to the pwd.
  532. """
  533. parentdir_abs = os.path.realpath(parentdir) + os.path.sep
  534. subdir_abs = os.path.realpath(subdir) + os.path.sep
  535. return subdir_abs.startswith(parentdir_abs)
# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo=".", target_dir=None):
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()
    with open_repo_closing(repo) as r:
        # Refuse to operate outside the working tree.
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")
        config = r.get_config_stack()
        # NOTE(review): the clean.requireForce value is read but its result
        # is currently discarded -- see the TODO below.
        config.get_boolean((b"clean",), b"requireForce", True)
        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                # Delete only files that are neither tracked nor ignored.
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index
                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)
                if not is_tracked and not is_ignored:
                    os.remove(ap)
def remove(repo=".", paths=None, cached=False):
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove
      cached: When true, only remove from the index and leave the
        working-tree file in place
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            full_path = os.fsencode(os.path.abspath(p))
            tree_path = path_to_tree_path(r.path, p)
            try:
                index_sha = index[tree_path].sha
            except KeyError as exc:
                raise Error(f"{p} did not match any files") from exc
            if not cached:
                try:
                    st = os.lstat(full_path)
                except OSError:
                    # Already absent from the working tree; nothing to delete.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path, st)
                    except OSError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path
                            )[1]
                        except KeyError:
                            # Path exists in the index but not in HEAD.
                            committed_sha = None
                        # Mirror git's safety checks: refuse to delete when the
                        # staged content differs from both the working file and
                        # HEAD, or when there are staged-but-uncommitted changes.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p}"
                            )
                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p}")
                        os.remove(full_path)
            del index[tree_path]
        index.write()


# Alias matching the git subcommand name.
rm = remove
  615. def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
  616. if commit.encoding:
  617. encoding = commit.encoding.decode("ascii")
  618. else:
  619. encoding = default_encoding
  620. return contents.decode(encoding, "replace")
  621. def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
  622. if commit.encoding:
  623. encoding = commit.encoding.decode("ascii")
  624. else:
  625. encoding = default_encoding
  626. return contents.encode(encoding)
def print_commit(commit, decode, outstream=sys.stdout):
    """Write a human-readable commit log entry.

    Args:
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write("-" * 50 + "\n")
    outstream.write("commit: " + commit.id.decode("ascii") + "\n")
    if len(commit.parents) > 1:
        # Merge commit: list every parent beyond the first.
        outstream.write(
            "merge: "
            + "...".join([c.decode("ascii") for c in commit.parents[1:]])
            + "\n"
        )
    outstream.write("Author: " + decode(commit.author) + "\n")
    if commit.author != commit.committer:
        outstream.write("Committer: " + decode(commit.committer) + "\n")

    # Render the author time shifted into the author's own timezone.
    time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(commit.author_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(commit.message) + "\n")
    outstream.write("\n")
def print_tag(tag, decode, outstream=sys.stdout):
    """Write a human-readable tag.

    Args:
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream to write to
    """
    outstream.write("Tagger: " + decode(tag.tagger) + "\n")
    # Render the tag time shifted into the tagger's own timezone.
    time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(tag.message))
    outstream.write("\n")
  666. def show_blob(repo, blob, decode, outstream=sys.stdout):
  667. """Write a blob to a stream.
  668. Args:
  669. repo: A `Repo` object
  670. blob: A `Blob` object
  671. decode: Function for decoding bytes to unicode string
  672. outstream: A stream file to write to
  673. """
  674. outstream.write(decode(blob.data))
def show_commit(repo, commit, decode, outstream=sys.stdout):
    """Show a commit to a stream.

    Writes the log entry followed by the diff against the first parent
    (or against an empty tree for a root commit).

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    print_commit(commit, decode=decode, outstream=outstream)
    if commit.parents:
        # Diff is taken against the first parent only.
        parent_commit = repo[commit.parents[0]]
        base_tree = parent_commit.tree
    else:
        base_tree = None
    diffstream = BytesIO()
    write_tree_diff(diffstream, repo.object_store, base_tree, commit.tree)
    diffstream.seek(0)
    # Decode the raw diff bytes using the commit's declared encoding.
    outstream.write(commit_decode(commit, diffstream.getvalue()))
  693. def show_tree(repo, tree, decode, outstream=sys.stdout):
  694. """Print a tree to a stream.
  695. Args:
  696. repo: A `Repo` object
  697. tree: A `Tree` object
  698. decode: Function for decoding bytes to unicode string
  699. outstream: Stream to write to
  700. """
  701. for n in tree:
  702. outstream.write(decode(n) + "\n")
def show_tag(repo, tag, decode, outstream=sys.stdout):
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    print_tag(tag, decode, outstream)
    # Follow the tag and also show the object it points at.
    show_object(repo, repo[tag.object[1]], decode, outstream)
  713. def show_object(repo, obj, decode, outstream):
  714. return {
  715. b"tree": show_tree,
  716. b"blob": show_blob,
  717. b"commit": show_commit,
  718. b"tag": show_tag,
  719. }[obj.type_name](repo, obj, decode, outstream)
  720. def print_name_status(changes):
  721. """Print a simple status summary, listing changed files."""
  722. for change in changes:
  723. if not change:
  724. continue
  725. if isinstance(change, list):
  726. change = change[0]
  727. if change.type == CHANGE_ADD:
  728. path1 = change.new.path
  729. path2 = ""
  730. kind = "A"
  731. elif change.type == CHANGE_DELETE:
  732. path1 = change.old.path
  733. path2 = ""
  734. kind = "D"
  735. elif change.type == CHANGE_MODIFY:
  736. path1 = change.new.path
  737. path2 = ""
  738. kind = "M"
  739. elif change.type in RENAME_CHANGE_TYPES:
  740. path1 = change.old.path
  741. path2 = change.new.path
  742. if change.type == CHANGE_RENAME:
  743. kind = "R"
  744. elif change.type == CHANGE_COPY:
  745. kind = "C"
  746. yield "%-8s%-20s%-20s" % (kind, path1, path2)
def log(
    repo=".",
    paths=None,
    outstream=sys.stdout,
    max_entries=None,
    reverse=False,
    name_status=False,
):
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      max_entries: Optional maximum number of entries to display
      reverse: Reverse order in which entries are printed
      name_status: Print name status
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker(max_entries=max_entries, paths=paths, reverse=reverse)
        for entry in walker:

            def decode(x):
                # Decode using the encoding declared by this entry's commit.
                return commit_decode(entry.commit, x)

            print_commit(entry.commit, decode, outstream)
            if name_status:
                outstream.writelines(
                    [line + "\n" for line in print_name_status(entry.changes())]
                )
# TODO(jelmer): better default for encoding?
def show(
    repo=".",
    objects=None,
    outstream=sys.stdout,
    default_encoding=DEFAULT_ENCODING,
):
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the
        commit
    """
    if objects is None:
        objects = ["HEAD"]
    if not isinstance(objects, list):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):
                # Commits may declare their own encoding; honour it.
                def decode(x):
                    return commit_decode(o, x, default_encoding)

            else:

                def decode(x):
                    return x.decode(default_encoding)

            show_object(r, o, decode, outstream)
def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream):
    """Compares the content and mode of blobs found via two tree objects.

    Args:
      repo: Path to repository
      old_tree: Id of old tree
      new_tree: Id of new tree
      outstream: Stream to write to (binary)
    """
    with open_repo_closing(repo) as r:
        write_tree_diff(outstream, r.object_store, old_tree, new_tree)
def rev_list(repo, commits, outstream=sys.stdout):
    """Lists commit objects in reverse chronological order.

    Args:
      repo: Path to repository
      commits: Commits over which to iterate
      outstream: Stream to write to (receives bytes)
    """
    with open_repo_closing(repo) as r:
        for entry in r.get_walker(include=[r[c].id for c in commits]):
            outstream.write(entry.commit.id + b"\n")
  823. def _canonical_part(url: str) -> str:
  824. name = url.rsplit("/", 1)[-1]
  825. if name.endswith(".git"):
  826. name = name[:-4]
  827. return name
def submodule_add(repo, url, path=None, name=None):
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live
      name: Name for the submodule (defaults to the path)
    """
    with open_repo_closing(repo) as r:
        if path is None:
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = path

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            # No .gitmodules yet; start a fresh one at that location.
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", path)
        config.write_to_path()
def submodule_init(repo):
    """Initialize submodules.

    Registers each submodule listed in .gitmodules in the repository
    configuration.

    Args:
      repo: Path to repository
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        for path, url, name in read_submodules(gitmodules_path):
            config.set((b"submodule", name), b"active", True)
            config.set((b"submodule", name), b"url", url)
        config.write_to_path()
def submodule_list(repo):
    """List submodules.

    Args:
      repo: Path to repository

    Yields:
      Tuples of (path, sha-string) for each submodule cached in HEAD's tree
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
            yield path, sha.decode(DEFAULT_ENCODING)
  871. def tag_create(
  872. repo,
  873. tag,
  874. author=None,
  875. message=None,
  876. annotated=False,
  877. objectish="HEAD",
  878. tag_time=None,
  879. tag_timezone=None,
  880. sign=False,
  881. encoding=DEFAULT_ENCODING,
  882. ):
  883. """Creates a tag in git via dulwich calls.
  884. Args:
  885. repo: Path to repository
  886. tag: tag string
  887. author: tag author (optional, if annotated is set)
  888. message: tag message (optional)
  889. annotated: whether to create an annotated tag
  890. objectish: object the tag should point at, defaults to HEAD
  891. tag_time: Optional time for annotated tag
  892. tag_timezone: Optional timezone for annotated tag
  893. sign: GPG Sign the tag (bool, defaults to False,
  894. pass True to use default GPG key,
  895. pass a str containing Key ID to use a specific GPG key)
  896. """
  897. with open_repo_closing(repo) as r:
  898. object = parse_object(r, objectish)
  899. if annotated:
  900. # Create the tag object
  901. tag_obj = Tag()
  902. if author is None:
  903. author = get_user_identity(r.get_config_stack())
  904. tag_obj.tagger = author
  905. tag_obj.message = message + "\n".encode(encoding)
  906. tag_obj.name = tag
  907. tag_obj.object = (type(object), object.id)
  908. if tag_time is None:
  909. tag_time = int(time.time())
  910. tag_obj.tag_time = tag_time
  911. if tag_timezone is None:
  912. tag_timezone = get_user_timezones()[1]
  913. elif isinstance(tag_timezone, str):
  914. tag_timezone = parse_timezone(tag_timezone)
  915. tag_obj.tag_timezone = tag_timezone
  916. if sign:
  917. tag_obj.sign(sign if isinstance(sign, str) else None)
  918. r.object_store.add_object(tag_obj)
  919. tag_id = tag_obj.id
  920. else:
  921. tag_id = object.id
  922. r.refs[_make_tag_ref(tag)] = tag_id
  923. def tag_list(repo, outstream=sys.stdout):
  924. """List all tags.
  925. Args:
  926. repo: Path to repository
  927. outstream: Stream to write tags to
  928. """
  929. with open_repo_closing(repo) as r:
  930. tags = sorted(r.refs.as_dict(b"refs/tags"))
  931. return tags
  932. def tag_delete(repo, name):
  933. """Remove a tag.
  934. Args:
  935. repo: Path to repository
  936. name: Name of tag to remove
  937. """
  938. with open_repo_closing(repo) as r:
  939. if isinstance(name, bytes):
  940. names = [name]
  941. elif isinstance(name, list):
  942. names = name
  943. else:
  944. raise Error(f"Unexpected tag name type {name!r}")
  945. for name in names:
  946. del r.refs[_make_tag_ref(name)]
def reset(repo, mode, treeish="HEAD"):
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to

    Raises:
      Error: for any mode other than "hard" (the only one implemented)
    """
    if mode != "hard":
        raise Error("hard is the only mode currently supported")

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        # Hard reset: overwrite the index (and working tree) with the tree.
        r.reset_index(tree.id)
  959. def get_remote_repo(
  960. repo: Repo, remote_location: Optional[Union[str, bytes]] = None
  961. ) -> tuple[Optional[str], str]:
  962. config = repo.get_config()
  963. if remote_location is None:
  964. remote_location = get_branch_remote(repo)
  965. if isinstance(remote_location, str):
  966. encoded_location = remote_location.encode()
  967. else:
  968. encoded_location = remote_location
  969. section = (b"remote", encoded_location)
  970. remote_name: Optional[str] = None
  971. if config.has_section(section):
  972. remote_name = encoded_location.decode()
  973. encoded_location = config.get(section, "url")
  974. else:
  975. remote_name = None
  976. return (remote_name, encoded_location.decode())
def push(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    force=False,
    **kwargs,
):
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        if refspecs is None:
            # Default to pushing the currently checked-out branch.
            refspecs = [active_branch(r)]
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Get the client and path
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )

        selected_refs = []
        remote_changed_refs = {}

        def update_refs(refs):
            # Callback invoked by the client with the remote's current refs;
            # returns the full mapping of refs the remote should end up with.
            selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
            new_refs = {}
            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    # Empty local side of the refspec: delete the remote ref.
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error(f"No valid ref {lh} in local repository") from exc
                    if not force_ref and rh in refs:
                        # Refuse non-fast-forward updates unless forced.
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:
            result = client.send_pack(
                path,
                update_refs,
                generate_pack_data=r.generate_pack_data,
                progress=errstream.write,
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        # Report per-ref outcomes returned by the server, if any.
        for ref, error in (result.ref_status or {}).items():
            if error is not None:
                errstream.write(
                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
                )
            else:
                errstream.write(b"Ref %s updated\n" % ref)

        if remote_name is not None:
            # Mirror the pushed refs into the local remote-tracking refs.
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)
def pull(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    fast_forward=True,
    force=False,
    filter_spec=None,
    protocol_version=None,
    **kwargs,
):
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True (default), diverged branches raise; merging is
        not implemented
      force: Force fetching of diverged refs
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        encoded_refs = encode_refspecs(refspecs)
        selected_refs = []

        def determine_wants(remote_refs, **kwargs):
            # Record which refspecs matched, and request only objects that
            # are not already present locally.
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, encoded_refs, force=force)
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        if filter_spec:
            filter_spec = filter_spec.encode("ascii")
        fetch_result = client.fetch(
            path,
            r,
            progress=errstream.write,
            determine_wants=determine_wants,
            ref_prefix=refspecs,
            filter_spec=filter_spec,
            protocol_version=protocol_version,
        )
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh in r.refs:
                try:
                    check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                except DivergedBranches as exc:
                    if fast_forward:
                        raise
                    else:
                        # A non-fast-forward pull would require a merge.
                        raise NotImplementedError("merge is not yet supported") from exc
            r.refs[rh] = fetch_result.refs[lh]
        if selected_refs:
            # Point HEAD at the first selected ref's new value.
            r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]

        # Perform 'git checkout .' - syncs staged changes
        tree = r[b"HEAD"].tree
        r.reset_index(tree=tree)
        if remote_name is not None:
            # Mirror fetched refs into the local remote-tracking refs.
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)
def status(repo=".", ignored=False, untracked_files="all"):
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "all":
          "no": do not return untracked files
          "all": include all files in untracked directories
        Using untracked_files="no" can be faster than "all" when the worktree
        contains many untracked files/directories.

    Note: untracked_files="normal" (git's default) is not implemented.

    Returns: GitStatus tuple,
        staged -  dict with lists of staged paths (diff index/HEAD)
        unstaged -  list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)
        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            # Normalize Windows separators to git-style forward slashes.
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
    """Get path, is_dir for files in working dir from frompath.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
      prune_dirnames: Optional callback to prune dirnames during os.walk
        dirnames will be set to result of prune_dirnames(dirpath, dirnames)

    Yields:
      Tuples of (path, is_dir)
    """
    for dirpath, dirnames, filenames in os.walk(frompath):
        # Skip .git and below.
        if ".git" in dirnames:
            dirnames.remove(".git")
            # A .git *directory* below the top marks a nested repository:
            # skip its whole parent directory.
            if dirpath != basepath:
                continue

        if ".git" in filenames:
            filenames.remove(".git")
            # A .git *file* below the top marks a submodule checkout:
            # skip its whole parent directory too.
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            yield filepath, False

        # Prune after yielding so the callback controls further descent.
        if prune_dirnames:
            dirnames[:] = prune_dirnames(dirpath, dirnames)
def get_untracked_paths(
    frompath, basepath, index, exclude_ignored=False, untracked_files="all"
):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": Not implemented

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files == "normal":
        raise NotImplementedError("normal is not yet supported")

    if untracked_files not in ("no", "all"):
        raise ValueError("untracked_files must be one of (no, all)")

    if untracked_files == "no":
        return

    with open_repo_closing(basepath) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []

    def prune_dirnames(dirpath, dirnames):
        # Drop ignored directories from the walk; iterate backwards so
        # in-place deletion doesn't skip entries.
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            # Trailing separator so directory-only ignore rules match.
            ip = os.path.join(os.path.relpath(path, basepath), "")
            if ignore_manager.is_ignored(ip):
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath), "")
                    )
                del dirnames[i]
        return dirnames

    for ap, is_dir in _walk_working_dir_paths(
        frompath, basepath, prune_dirnames=prune_dirnames
    ):
        if not is_dir:
            ip = path_to_tree_path(basepath, ap)
            if ip not in index:
                if not exclude_ignored or not ignore_manager.is_ignored(
                    os.path.relpath(ap, basepath)
                ):
                    yield os.path.relpath(ap, frompath)

    yield from ignored_dirs
def get_tree_changes(repo):
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object

    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            tree_id = r[b"HEAD"].tree
        except KeyError:
            # No HEAD yet (empty repo): every index entry is an addition.
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            if not change[0][0]:
                # No old path -> addition.
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                # No new path -> deletion.
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                tracked_changes["modify"].append(change[0][0])
            else:
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes
def daemon(path=".", address=None, port=None):
    """Run a daemon serving Git requests over TCP/IP.

    Blocks forever serving requests.

    Args:
      path: Path to the directory to serve.
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to TCP_GIT_PORT)
    """
    # TODO(jelmer): Support git-daemon-export-ok and --export-all.
    backend = FileSystemBackend(path)
    server = TCPGitServer(backend, address, port)
    server.serve_forever()
def web_daemon(path=".", address=None, port=None):
    """Run a daemon serving Git requests over HTTP.

    Blocks forever serving requests.

    Args:
      path: Path to the directory to serve
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to 80)
    """
    from .web import (
        WSGIRequestHandlerLogger,
        WSGIServerLogger,
        make_server,
        make_wsgi_chain,
    )

    backend = FileSystemBackend(path)
    app = make_wsgi_chain(backend)
    server = make_server(
        address,
        port,
        app,
        handler_class=WSGIRequestHandlerLogger,
        server_class=WSGIServerLogger,
    )
    server.serve_forever()
def upload_pack(path=".", inf=None, outf=None):
    """Upload a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client

    Returns:
      Exit status (always 0)
    """
    if outf is None:
        # Prefer the raw binary buffer when stdout is a text wrapper.
        outf = getattr(sys.stdout, "buffer", sys.stdout)
    if inf is None:
        inf = getattr(sys.stdin, "buffer", sys.stdin)
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data):
        # Flush per write so the client sees protocol data promptly.
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = UploadPackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0
def receive_pack(path=".", inf=None, outf=None):
    """Receive a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client

    Returns:
      Exit status (always 0)
    """
    if outf is None:
        # Prefer the raw binary buffer when stdout is a text wrapper.
        outf = getattr(sys.stdout, "buffer", sys.stdout)
    if inf is None:
        inf = getattr(sys.stdin, "buffer", sys.stdin)
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data):
        # Flush per write so the client sees protocol data promptly.
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = ReceivePackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0
  1338. def _make_branch_ref(name):
  1339. if getattr(name, "encode", None):
  1340. name = name.encode(DEFAULT_ENCODING)
  1341. return LOCAL_BRANCH_PREFIX + name
  1342. def _make_tag_ref(name):
  1343. if getattr(name, "encode", None):
  1344. name = name.encode(DEFAULT_ENCODING)
  1345. return LOCAL_TAG_PREFIX + name
  1346. def branch_delete(repo, name):
  1347. """Delete a branch.
  1348. Args:
  1349. repo: Path to the repository
  1350. name: Name of the branch
  1351. """
  1352. with open_repo_closing(repo) as r:
  1353. if isinstance(name, list):
  1354. names = name
  1355. else:
  1356. names = [name]
  1357. for name in names:
  1358. del r.refs[_make_branch_ref(name)]
def branch_create(repo, name, objectish=None, force=False):
    """Create a branch.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists

    Raises:
      Error: if the branch already exists and force is not set
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"
        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        # NOTE(review): assumes objectish is a str here; a bytes objectish
        # would fail on .encode() — confirm against callers.
        ref_message = b"branch: Created from " + objectish.encode(DEFAULT_ENCODING)
        if force:
            # Overwrite unconditionally (old value check disabled via None).
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                raise Error(f"Branch with name {name} already exists.")
def branch_list(repo):
    """List all branches.

    Args:
      repo: Path to the repository

    Returns:
      Branch names (without the refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        return r.refs.keys(base=LOCAL_BRANCH_PREFIX)
def active_branch(repo):
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open

    Returns:
      branch name

    Raises:
      KeyError: if the repository does not have a working tree
      IndexError: if HEAD is floating
      ValueError: if HEAD does not point under refs/heads/
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(b"HEAD")[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        # Strip refs/heads/ and return just the branch name.
        return active_ref[len(LOCAL_BRANCH_PREFIX) :]
def get_branch_remote(repo):
    """Return the active branch's remote name, if any.

    Args:
      repo: Repository to open

    Returns:
      remote name (falls back to b"origin" when none is configured)

    Raises:
      KeyError: if the repository does not have a working tree
    """
    with open_repo_closing(repo) as r:
        branch_name = active_branch(r.path)
        config = r.get_config()
        try:
            remote_name = config.get((b"branch", branch_name), b"remote")
        except KeyError:
            # No remote configured for this branch; use git's default.
            remote_name = b"origin"
        return remote_name
def fetch(
    repo,
    remote_location=None,
    outstream=sys.stdout,
    errstream=default_bytes_err_stream,
    message=None,
    depth=None,
    prune=False,
    prune_tags=False,
    force=False,
    **kwargs,
):
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags

    Returns:
      Dictionary with refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        if message is None:
            message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth)
        if remote_name is not None:
            # Update the local remote-tracking refs for this remote.
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )
    return fetch_result
def for_each_ref(
    repo: Union[Repo, str] = ".",
    pattern: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with

    Returns:
      List of bytes tuples with: (sha, object_type, ref_name)
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        if pattern:
            matching_refs: dict[bytes, bytes] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False

                # git for-each-ref uses glob (7) style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                if len(ref_parts) > len(pattern_parts):
                    continue
                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break

                if matches:
                    matching_refs[ref] = sha

            refs = matching_refs

        # Sort by ref name; HEAD itself is excluded from the listing.
        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]

    return ret
  1503. def ls_remote(remote, config: Optional[Config] = None, **kwargs):
  1504. """List the refs in a remote.
  1505. Args:
  1506. remote: Remote repository location
  1507. config: Configuration to use
  1508. Returns:
  1509. Dictionary with remote refs
  1510. """
  1511. if config is None:
  1512. config = StackedConfig.default()
  1513. client, host_path = get_transport_and_path(remote, config=config, **kwargs)
  1514. return client.get_refs(host_path)
  1515. def repack(repo):
  1516. """Repack loose files in a repository.
  1517. Currently this only packs loose objects.
  1518. Args:
  1519. repo: Path to the repository
  1520. """
  1521. with open_repo_closing(repo) as r:
  1522. r.object_store.pack_loose_objects()
  1523. def pack_objects(
  1524. repo,
  1525. object_ids,
  1526. packf,
  1527. idxf,
  1528. delta_window_size=None,
  1529. deltify=None,
  1530. reuse_deltas=True,
  1531. ):
  1532. """Pack objects into a file.
  1533. Args:
  1534. repo: Path to the repository
  1535. object_ids: List of object ids to write
  1536. packf: File-like object to write to
  1537. idxf: File-like object to write to (can be None)
  1538. delta_window_size: Sliding window size for searching for deltas;
  1539. Set to None for default window size.
  1540. deltify: Whether to deltify objects
  1541. reuse_deltas: Allow reuse of existing deltas while deltifying
  1542. """
  1543. with open_repo_closing(repo) as r:
  1544. entries, data_sum = write_pack_from_container(
  1545. packf.write,
  1546. r.object_store,
  1547. [(oid, None) for oid in object_ids],
  1548. deltify=deltify,
  1549. delta_window_size=delta_window_size,
  1550. reuse_deltas=reuse_deltas,
  1551. )
  1552. if idxf is not None:
  1553. entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
  1554. write_pack_index(idxf, entries, data_sum)
  1555. def ls_tree(
  1556. repo,
  1557. treeish=b"HEAD",
  1558. outstream=sys.stdout,
  1559. recursive=False,
  1560. name_only=False,
  1561. ):
  1562. """List contents of a tree.
  1563. Args:
  1564. repo: Path to the repository
  1565. treeish: Tree id to list
  1566. outstream: Output stream (defaults to stdout)
  1567. recursive: Whether to recursively list files
  1568. name_only: Only print item name
  1569. """
  1570. def list_tree(store, treeid, base):
  1571. for name, mode, sha in store[treeid].iteritems():
  1572. if base:
  1573. name = posixpath.join(base, name)
  1574. if name_only:
  1575. outstream.write(name + b"\n")
  1576. else:
  1577. outstream.write(pretty_format_tree_entry(name, mode, sha))
  1578. if stat.S_ISDIR(mode) and recursive:
  1579. list_tree(store, sha, name)
  1580. with open_repo_closing(repo) as r:
  1581. tree = parse_tree(r, treeish)
  1582. list_tree(r.object_store, tree.id, "")
  1583. def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]):
  1584. """Add a remote.
  1585. Args:
  1586. repo: Path to the repository
  1587. name: Remote name
  1588. url: Remote URL
  1589. """
  1590. if not isinstance(name, bytes):
  1591. name = name.encode(DEFAULT_ENCODING)
  1592. if not isinstance(url, bytes):
  1593. url = url.encode(DEFAULT_ENCODING)
  1594. with open_repo_closing(repo) as r:
  1595. c = r.get_config()
  1596. section = (b"remote", name)
  1597. if c.has_section(section):
  1598. raise RemoteExists(section)
  1599. c.set(section, b"url", url)
  1600. c.write_to_path()
  1601. def remote_remove(repo: Repo, name: Union[bytes, str]):
  1602. """Remove a remote.
  1603. Args:
  1604. repo: Path to the repository
  1605. name: Remote name
  1606. """
  1607. if not isinstance(name, bytes):
  1608. name = name.encode(DEFAULT_ENCODING)
  1609. with open_repo_closing(repo) as r:
  1610. c = r.get_config()
  1611. section = (b"remote", name)
  1612. del c[section]
  1613. c.write_to_path()
  1614. def check_ignore(repo, paths, no_index=False):
  1615. """Debug gitignore files.
  1616. Args:
  1617. repo: Path to the repository
  1618. paths: List of paths to check for
  1619. no_index: Don't check index
  1620. Returns: List of ignored files
  1621. """
  1622. with open_repo_closing(repo) as r:
  1623. index = r.open_index()
  1624. ignore_manager = IgnoreFilterManager.from_repo(r)
  1625. for path in paths:
  1626. if not no_index and path_to_tree_path(r.path, path) in index:
  1627. continue
  1628. if os.path.isabs(path):
  1629. path = os.path.relpath(path, r.path)
  1630. if ignore_manager.is_ignored(path):
  1631. yield path
  1632. def update_head(repo, target, detached=False, new_branch=None):
  1633. """Update HEAD to point at a new branch/commit.
  1634. Note that this does not actually update the working tree.
  1635. Args:
  1636. repo: Path to the repository
  1637. detached: Create a detached head
  1638. target: Branch or committish to switch to
  1639. new_branch: New branch to create
  1640. """
  1641. with open_repo_closing(repo) as r:
  1642. if new_branch is not None:
  1643. to_set = _make_branch_ref(new_branch)
  1644. else:
  1645. to_set = b"HEAD"
  1646. if detached:
  1647. # TODO(jelmer): Provide some way so that the actual ref gets
  1648. # updated rather than what it points to, so the delete isn't
  1649. # necessary.
  1650. del r.refs[to_set]
  1651. r.refs[to_set] = parse_commit(r, target).id
  1652. else:
  1653. r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
  1654. if new_branch is not None:
  1655. r.refs.set_symbolic_ref(b"HEAD", to_set)
  1656. def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None):
  1657. """Reset the file to specific commit or branch.
  1658. Args:
  1659. repo: dulwich Repo object
  1660. file_path: file to reset, relative to the repository path
  1661. target: branch or commit or b'HEAD' to reset
  1662. """
  1663. tree = parse_tree(repo, treeish=target)
  1664. tree_path = _fs_to_tree_path(file_path)
  1665. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  1666. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  1667. blob = repo.object_store[file_entry[1]]
  1668. mode = file_entry[0]
  1669. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
  1670. def _update_head_during_checkout_branch(repo, target):
  1671. checkout_target = None
  1672. if target == b"HEAD": # Do not update head while trying to checkout to HEAD.
  1673. pass
  1674. elif target in repo.refs.keys(base=LOCAL_BRANCH_PREFIX):
  1675. update_head(repo, target)
  1676. else:
  1677. # If checking out a remote branch, create a local one without the remote name prefix.
  1678. config = repo.get_config()
  1679. name = target.split(b"/")[0]
  1680. section = (b"remote", name)
  1681. if config.has_section(section):
  1682. checkout_target = target.replace(name + b"/", b"")
  1683. try:
  1684. branch_create(
  1685. repo, checkout_target, (LOCAL_REMOTE_PREFIX + target).decode()
  1686. )
  1687. except Error:
  1688. pass
  1689. update_head(repo, LOCAL_BRANCH_PREFIX + checkout_target)
  1690. else:
  1691. update_head(repo, target, detached=True)
  1692. return checkout_target
def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
    """Switch branches or restore working tree files.

    The implementation of this function will probably not scale well
    for branches with lots of local changes.
    This is due to the analysis of a diff between branches before any
    changes are applied.

    Args:
      repo: dulwich Repo object
      target: branch name or commit sha to checkout
      force: true or not to force checkout

    Raises:
      CheckoutError: if local modifications would be overwritten by the
        checkout (only when force is False)
    """
    target = to_bytes(target)

    current_tree = parse_tree(repo, repo.head())
    target_tree = parse_tree(repo, target)

    if force:
        # Forced checkout: discard local state and rebuild the index.
        repo.reset_index(target_tree.id)
        _update_head_during_checkout_branch(repo, target)
    else:
        # Collect every path that differs from HEAD (staged + unstaged).
        status_report = status(repo)
        changes = list(
            set(
                status_report[0]["add"]
                + status_report[0]["delete"]
                + status_report[0]["modify"]
                + status_report[1]
            )
        )
        # Filter `changes` in place: keep only paths present in the current
        # tree; abort if a kept path is absent from the target tree, since
        # the checkout would clobber a local modification.
        index = 0
        while index < len(changes):
            change = changes[index]
            try:
                current_tree.lookup_path(repo.object_store.__getitem__, change)
                try:
                    target_tree.lookup_path(repo.object_store.__getitem__, change)
                    index += 1
                except KeyError:
                    raise CheckoutError(
                        "Your local changes to the following files would be overwritten by checkout: "
                        + change.decode()
                    )
            except KeyError:
                # Path not in the current tree (e.g. newly added file):
                # drop it from the conflict candidates.
                changes.pop(index)

        # Update head.
        checkout_target = _update_head_during_checkout_branch(repo, target)
        if checkout_target is not None:
            # A local branch was created from a remote-tracking ref; use it.
            target_tree = parse_tree(repo, checkout_target)

        # Materialize the target tree into the working directory and index,
        # skipping locally changed paths (which are preserved).
        dealt_with = set()
        repo_index = repo.open_index()
        for entry in iter_tree_contents(repo.object_store, target_tree.id):
            dealt_with.add(entry.path)
            if entry.path in changes:
                continue
            full_path = os.path.join(os.fsencode(repo.path), entry.path)
            blob = repo.object_store[entry.sha]
            ensure_dir_exists(os.path.dirname(full_path))
            st = build_file_from_blob(blob, entry.mode, full_path)
            repo_index[entry.path] = index_entry_from_stat(st, entry.sha)

        repo_index.write()

        # Paths in the current tree but not in the target tree are unstaged.
        for entry in iter_tree_contents(repo.object_store, current_tree.id):
            if entry.path not in dealt_with:
                repo.unstage([entry.path])

    # Remove the untracked files which are in the current_file_set.
    repo_index = repo.open_index()
    for change in repo_index.changes_from_tree(repo.object_store, current_tree.id):
        path_change = change[0]
        if path_change[1] is None:
            # (old_path, None) means the path was deleted from the index.
            file_name = path_change[0]
            full_path = os.path.join(repo.path, file_name.decode())
            if os.path.isfile(full_path):
                os.remove(full_path)
            # Prune now-empty parent directories up to the repo root.
            dir_path = os.path.dirname(full_path)
            while dir_path != repo.path:
                is_empty = len(os.listdir(dir_path)) == 0
                if is_empty:
                    os.rmdir(dir_path)
                dir_path = os.path.dirname(dir_path)
  1769. def check_mailmap(repo, contact):
  1770. """Check canonical name and email of contact.
  1771. Args:
  1772. repo: Path to the repository
  1773. contact: Contact name and/or email
  1774. Returns: Canonical contact data
  1775. """
  1776. with open_repo_closing(repo) as r:
  1777. from .mailmap import Mailmap
  1778. try:
  1779. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  1780. except FileNotFoundError:
  1781. mailmap = Mailmap()
  1782. return mailmap.lookup(contact)
  1783. def fsck(repo):
  1784. """Check a repository.
  1785. Args:
  1786. repo: A path to the repository
  1787. Returns: Iterator over errors/warnings
  1788. """
  1789. with open_repo_closing(repo) as r:
  1790. # TODO(jelmer): check pack files
  1791. # TODO(jelmer): check graph
  1792. # TODO(jelmer): check refs
  1793. for sha in r.object_store:
  1794. o = r.object_store[sha]
  1795. try:
  1796. o.check()
  1797. except Exception as e:
  1798. yield (sha, e)
  1799. def stash_list(repo):
  1800. """List all stashes in a repository."""
  1801. with open_repo_closing(repo) as r:
  1802. from .stash import Stash
  1803. stash = Stash.from_repo(r)
  1804. return enumerate(list(stash.stashes()))
  1805. def stash_push(repo):
  1806. """Push a new stash onto the stack."""
  1807. with open_repo_closing(repo) as r:
  1808. from .stash import Stash
  1809. stash = Stash.from_repo(r)
  1810. stash.push()
  1811. def stash_pop(repo, index):
  1812. """Pop a stash from the stack."""
  1813. with open_repo_closing(repo) as r:
  1814. from .stash import Stash
  1815. stash = Stash.from_repo(r)
  1816. stash.pop(index)
  1817. def stash_drop(repo, index):
  1818. """Drop a stash from the stack."""
  1819. with open_repo_closing(repo) as r:
  1820. from .stash import Stash
  1821. stash = Stash.from_repo(r)
  1822. stash.drop(index)
  1823. def ls_files(repo):
  1824. """List all files in an index."""
  1825. with open_repo_closing(repo) as r:
  1826. return sorted(r.open_index())
  1827. def find_unique_abbrev(object_store, object_id):
  1828. """For now, just return 7 characters."""
  1829. # TODO(jelmer): Add some logic here to return a number of characters that
  1830. # scales relative with the size of the repository
  1831. return object_id.decode("ascii")[:7]
  1832. def describe(repo, abbrev=7):
  1833. """Describe the repository version.
  1834. Args:
  1835. repo: git repository
  1836. abbrev: number of characters of commit to take, default is 7
  1837. Returns: a string description of the current git revision
  1838. Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
  1839. """
  1840. # Get the repository
  1841. with open_repo_closing(repo) as r:
  1842. # Get a list of all tags
  1843. refs = r.get_refs()
  1844. tags = {}
  1845. for key, value in refs.items():
  1846. key = key.decode()
  1847. obj = r.get_object(value)
  1848. if "tags" not in key:
  1849. continue
  1850. _, tag = key.rsplit("/", 1)
  1851. try:
  1852. commit = obj.object
  1853. except AttributeError:
  1854. continue
  1855. else:
  1856. commit = r.get_object(commit[1])
  1857. tags[tag] = [
  1858. datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
  1859. commit.id.decode("ascii"),
  1860. ]
  1861. sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)
  1862. # If there are no tags, return the current commit
  1863. if len(sorted_tags) == 0:
  1864. return f"g{find_unique_abbrev(r.object_store, r[r.head()].id)}"
  1865. # We're now 0 commits from the top
  1866. commit_count = 0
  1867. # Get the latest commit
  1868. latest_commit = r[r.head()]
  1869. # Walk through all commits
  1870. walker = r.get_walker()
  1871. for entry in walker:
  1872. # Check if tag
  1873. commit_id = entry.commit.id.decode("ascii")
  1874. for tag in sorted_tags:
  1875. tag_name = tag[0]
  1876. tag_commit = tag[1][1]
  1877. if commit_id == tag_commit:
  1878. if commit_count == 0:
  1879. return tag_name
  1880. else:
  1881. return "{}-{}-g{}".format(
  1882. tag_name,
  1883. commit_count,
  1884. latest_commit.id.decode("ascii")[:abbrev],
  1885. )
  1886. commit_count += 1
  1887. # Return plain commit if no parent tag can be found
  1888. return "g{}".format(latest_commit.id.decode("ascii")[:abbrev])
  1889. def get_object_by_path(repo, path, committish=None):
  1890. """Get an object by path.
  1891. Args:
  1892. repo: A path to the repository
  1893. path: Path to look up
  1894. committish: Commit to look up path in
  1895. Returns: A `ShaFile` object
  1896. """
  1897. if committish is None:
  1898. committish = "HEAD"
  1899. # Get the repository
  1900. with open_repo_closing(repo) as r:
  1901. commit = parse_commit(r, committish)
  1902. base_tree = commit.tree
  1903. if not isinstance(path, bytes):
  1904. path = commit_encode(commit, path)
  1905. (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  1906. return r[sha]
  1907. def write_tree(repo):
  1908. """Write a tree object from the index.
  1909. Args:
  1910. repo: Repository for which to write tree
  1911. Returns: tree id for the tree that was written
  1912. """
  1913. with open_repo_closing(repo) as r:
  1914. return r.open_index().commit(r.object_store)