porcelain.py 66 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202
  1. # porcelain.py -- Porcelain-like layer on top of Dulwich
  2. # Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  5. # General Public License as public by the Free Software Foundation; version 2.0
  6. # or (at your option) any later version. You can redistribute it and/or
  7. # modify it under the terms of either of these two licenses.
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. #
  15. # You should have received a copy of the licenses; if not, see
  16. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  17. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  18. # License, Version 2.0.
  19. #
  20. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  21. Currently implemented:
  22. * archive
  23. * add
  24. * branch{_create,_delete,_list}
  25. * check-ignore
  26. * checkout_branch
  27. * clone
  28. * commit
  29. * commit-tree
  30. * daemon
  31. * describe
  32. * diff-tree
  33. * fetch
  34. * init
  35. * ls-files
  36. * ls-remote
  37. * ls-tree
  38. * pull
  39. * push
  40. * rm
  41. * remote{_add}
  42. * receive-pack
  43. * reset
  44. * submodule_add
  45. * submodule_init
  46. * submodule_list
  47. * rev-list
  48. * tag{_create,_delete,_list}
  49. * upload-pack
  50. * update-server-info
  51. * status
  52. * symbolic-ref
  53. These functions are meant to behave similarly to the git subcommands.
  54. Differences in behaviour are considered bugs.
  55. Note: one of the consequences of this is that paths tend to be
  56. interpreted relative to the current working directory rather than relative
  57. to the repository root.
  58. Functions should generally accept both unicode strings and bytestrings
  59. """
  60. import datetime
  61. import os
  62. import posixpath
  63. import stat
  64. import sys
  65. import time
  66. from collections import namedtuple
  67. from contextlib import closing, contextmanager
  68. from io import BytesIO, RawIOBase
  69. from pathlib import Path
  70. from typing import Optional, Tuple, Union
  71. from .archive import tar_stream
  72. from .client import get_transport_and_path
  73. from .config import Config, ConfigFile, StackedConfig, read_submodules
  74. from .diff_tree import (
  75. CHANGE_ADD,
  76. CHANGE_COPY,
  77. CHANGE_DELETE,
  78. CHANGE_MODIFY,
  79. CHANGE_RENAME,
  80. RENAME_CHANGE_TYPES,
  81. )
  82. from .errors import SendPackError
  83. from .file import ensure_dir_exists
  84. from .graph import can_fast_forward
  85. from .ignore import IgnoreFilterManager
  86. from .index import (
  87. _fs_to_tree_path,
  88. blob_from_path_and_stat,
  89. build_file_from_blob,
  90. get_unstaged_changes,
  91. index_entry_from_stat,
  92. )
  93. from .object_store import iter_tree_contents, tree_lookup_path
  94. from .objects import (
  95. Commit,
  96. Tag,
  97. format_timezone,
  98. parse_timezone,
  99. pretty_format_tree_entry,
  100. )
  101. from .objectspec import (
  102. parse_commit,
  103. parse_object,
  104. parse_ref,
  105. parse_reftuples,
  106. parse_tree,
  107. to_bytes,
  108. )
  109. from .pack import write_pack_from_container, write_pack_index
  110. from .patch import write_tree_diff
  111. from .protocol import ZERO_SHA, Protocol
  112. from .refs import (
  113. LOCAL_BRANCH_PREFIX,
  114. LOCAL_REMOTE_PREFIX,
  115. LOCAL_TAG_PREFIX,
  116. _import_remote_refs,
  117. )
  118. from .repo import BaseRepo, Repo
  119. from .server import (
  120. FileSystemBackend,
  121. ReceivePackHandler,
  122. TCPGitServer,
  123. UploadPackHandler,
  124. )
  125. from .server import update_server_info as server_update_server_info
# Module level tuple definition for status output: three fields holding the
# staged, unstaged and untracked results reported by the status command.
GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
  128. class NoneStream(RawIOBase):
  129. """Fallback if stdout or stderr are unavailable, does nothing."""
  130. def read(self, size=-1):
  131. return None
  132. def readall(self):
  133. return None
  134. def readinto(self, b):
  135. return None
  136. def write(self, b):
  137. return None
# Binary stdout/stderr for porcelain output: prefer the underlying byte buffer
# of the text streams; fall back to a do-nothing stream when the buffer is
# missing (e.g. stdout/stderr replaced by objects without a ``buffer`` attr).
default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream()
default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream()

# Encoding used throughout this module when the caller does not supply one.
DEFAULT_ENCODING = "utf-8"
  141. class Error(Exception):
  142. """Porcelain-based error."""
  143. def __init__(self, msg) -> None:
  144. super().__init__(msg)
class RemoteExists(Error):
    """Raised when the remote already exists."""
class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""
class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""
  151. def parse_timezone_format(tz_str):
  152. """Parse given string and attempt to return a timezone offset.
  153. Different formats are considered in the following order:
  154. - Git internal format: <unix timestamp> <timezone offset>
  155. - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
  156. - ISO 8601: e.g. 1995-11-20T19:12:08-0500
  157. Args:
  158. tz_str: datetime string
  159. Returns: Timezone offset as integer
  160. Raises:
  161. TimezoneFormatError: if timezone information cannot be extracted
  162. """
  163. import re
  164. # Git internal format
  165. internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
  166. if re.match(internal_format_pattern, tz_str):
  167. try:
  168. tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
  169. return tz_internal[0]
  170. except ValueError:
  171. pass
  172. # RFC 2822
  173. import email.utils
  174. rfc_2822 = email.utils.parsedate_tz(tz_str)
  175. if rfc_2822:
  176. return rfc_2822[9]
  177. # ISO 8601
  178. # Supported offsets:
  179. # sHHMM, sHH:MM, sHH
  180. iso_8601_pattern = re.compile(
  181. "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
  182. )
  183. match = re.search(iso_8601_pattern, tz_str)
  184. total_secs = 0
  185. if match:
  186. sign, hours, minutes = match.groups()
  187. total_secs += int(hours) * 3600
  188. if minutes:
  189. total_secs += int(minutes) * 60
  190. total_secs = -total_secs if sign == "-" else total_secs
  191. return total_secs
  192. # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
  193. raise TimezoneFormatError(tz_str)
  194. def get_user_timezones():
  195. """Retrieve local timezone as described in
  196. https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
  197. Returns: A tuple containing author timezone, committer timezone.
  198. """
  199. local_timezone = time.localtime().tm_gmtoff
  200. if os.environ.get("GIT_AUTHOR_DATE"):
  201. author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
  202. else:
  203. author_timezone = local_timezone
  204. if os.environ.get("GIT_COMMITTER_DATE"):
  205. commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
  206. else:
  207. commit_timezone = local_timezone
  208. return author_timezone, commit_timezone
  209. def open_repo(path_or_repo):
  210. """Open an argument that can be a repository or a path for a repository."""
  211. if isinstance(path_or_repo, BaseRepo):
  212. return path_or_repo
  213. return Repo(path_or_repo)
  214. @contextmanager
  215. def _noop_context_manager(obj):
  216. """Context manager that has the same api as closing but does nothing."""
  217. yield obj
  218. def open_repo_closing(path_or_repo):
  219. """Open an argument that can be a repository or a path for a repository.
  220. returns a context manager that will close the repo on exit if the argument
  221. is a path, else does nothing if the argument is a repo.
  222. """
  223. if isinstance(path_or_repo, BaseRepo):
  224. return _noop_context_manager(path_or_repo)
  225. return closing(Repo(path_or_repo))
  226. def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING):
  227. """Convert a path to a path usable in an index, e.g. bytes and relative to
  228. the repository root.
  229. Args:
  230. repopath: Repository path, absolute or relative to the cwd
  231. path: A path, absolute or relative to the cwd
  232. Returns: A path formatted for use in e.g. an index
  233. """
  234. # Resolve might returns a relative path on Windows
  235. # https://bugs.python.org/issue38671
  236. if sys.platform == "win32":
  237. path = os.path.abspath(path)
  238. path = Path(path)
  239. resolved_path = path.resolve()
  240. # Resolve and abspath seems to behave differently regarding symlinks,
  241. # as we are doing abspath on the file path, we need to do the same on
  242. # the repo path or they might not match
  243. if sys.platform == "win32":
  244. repopath = os.path.abspath(repopath)
  245. repopath = Path(repopath).resolve()
  246. try:
  247. relpath = resolved_path.relative_to(repopath)
  248. except ValueError:
  249. # If path is a symlink that points to a file outside the repo, we
  250. # want the relpath for the link itself, not the resolved target
  251. if path.is_symlink():
  252. parent = path.parent.resolve()
  253. relpath = (parent / path.name).relative_to(repopath)
  254. else:
  255. raise
  256. if sys.platform == "win32":
  257. return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
  258. else:
  259. return bytes(relpath)
class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha, new_sha) -> None:
        # Deliberately does not call Error.__init__ with a message; callers
        # inspect the two sha attributes instead.
        self.current_sha = current_sha
        self.new_sha = new_sha
  265. def check_diverged(repo, current_sha, new_sha):
  266. """Check if updating to a sha can be done with fast forwarding.
  267. Args:
  268. repo: Repository object
  269. current_sha: Current head sha
  270. new_sha: New head sha
  271. """
  272. try:
  273. can = can_fast_forward(repo, current_sha, new_sha)
  274. except KeyError:
  275. can = False
  276. if not can:
  277. raise DivergedBranches(current_sha, new_sha)
  278. def archive(
  279. repo,
  280. committish=None,
  281. outstream=default_bytes_out_stream,
  282. errstream=default_bytes_err_stream,
  283. ):
  284. """Create an archive.
  285. Args:
  286. repo: Path of repository for which to generate an archive.
  287. committish: Commit SHA1 or ref to use
  288. outstream: Output stream (defaults to stdout)
  289. errstream: Error stream (defaults to stderr)
  290. """
  291. if committish is None:
  292. committish = "HEAD"
  293. with open_repo_closing(repo) as repo_obj:
  294. c = parse_commit(repo_obj, committish)
  295. for chunk in tar_stream(
  296. repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time
  297. ):
  298. outstream.write(chunk)
  299. def update_server_info(repo="."):
  300. """Update server info files for a repository.
  301. Args:
  302. repo: path to the repository
  303. """
  304. with open_repo_closing(repo) as r:
  305. server_update_server_info(r)
  306. def symbolic_ref(repo, ref_name, force=False):
  307. """Set git symbolic ref into HEAD.
  308. Args:
  309. repo: path to the repository
  310. ref_name: short name of the new ref
  311. force: force settings without checking if it exists in refs/heads
  312. """
  313. with open_repo_closing(repo) as repo_obj:
  314. ref_path = _make_branch_ref(ref_name)
  315. if not force and ref_path not in repo_obj.refs.keys():
  316. raise Error("fatal: ref `%s` is not a ref" % ref_name)
  317. repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
  318. def pack_refs(repo, all=False):
  319. with open_repo_closing(repo) as repo_obj:
  320. refs = repo_obj.refs
  321. packed_refs = {
  322. ref: refs[ref]
  323. for ref in refs
  324. if (all or ref.startswith(LOCAL_TAG_PREFIX)) and ref != b"HEAD"
  325. }
  326. refs.add_packed_refs(packed_refs)
  327. def commit(
  328. repo=".",
  329. message=None,
  330. author=None,
  331. author_timezone=None,
  332. committer=None,
  333. commit_timezone=None,
  334. encoding=None,
  335. no_verify=False,
  336. signoff=False,
  337. ):
  338. """Create a new commit.
  339. Args:
  340. repo: Path to repository
  341. message: Optional commit message
  342. author: Optional author name and email
  343. author_timezone: Author timestamp timezone
  344. committer: Optional committer name and email
  345. commit_timezone: Commit timestamp timezone
  346. no_verify: Skip pre-commit and commit-msg hooks
  347. signoff: GPG Sign the commit (bool, defaults to False,
  348. pass True to use default GPG key,
  349. pass a str containing Key ID to use a specific GPG key)
  350. Returns: SHA1 of the new commit
  351. """
  352. # FIXME: Support --all argument
  353. if getattr(message, "encode", None):
  354. message = message.encode(encoding or DEFAULT_ENCODING)
  355. if getattr(author, "encode", None):
  356. author = author.encode(encoding or DEFAULT_ENCODING)
  357. if getattr(committer, "encode", None):
  358. committer = committer.encode(encoding or DEFAULT_ENCODING)
  359. local_timezone = get_user_timezones()
  360. if author_timezone is None:
  361. author_timezone = local_timezone[0]
  362. if commit_timezone is None:
  363. commit_timezone = local_timezone[1]
  364. with open_repo_closing(repo) as r:
  365. return r.do_commit(
  366. message=message,
  367. author=author,
  368. author_timezone=author_timezone,
  369. committer=committer,
  370. commit_timezone=commit_timezone,
  371. encoding=encoding,
  372. no_verify=no_verify,
  373. sign=signoff if isinstance(signoff, (str, bool)) else None,
  374. )
  375. def commit_tree(repo, tree, message=None, author=None, committer=None):
  376. """Create a new commit object.
  377. Args:
  378. repo: Path to repository
  379. tree: An existing tree object
  380. author: Optional author name and email
  381. committer: Optional committer name and email
  382. """
  383. with open_repo_closing(repo) as r:
  384. return r.do_commit(
  385. message=message, tree=tree, committer=committer, author=author
  386. )
  387. def init(path=".", *, bare=False, symlinks: Optional[bool] = None):
  388. """Create a new git repository.
  389. Args:
  390. path: Path to repository.
  391. bare: Whether to create a bare repository.
  392. symlinks: Whether to create actual symlinks (defaults to autodetect)
  393. Returns: A Repo instance
  394. """
  395. if not os.path.exists(path):
  396. os.mkdir(path)
  397. if bare:
  398. return Repo.init_bare(path)
  399. else:
  400. return Repo.init(path, symlinks=symlinks)
  401. def clone(
  402. source,
  403. target=None,
  404. bare=False,
  405. checkout=None,
  406. errstream=default_bytes_err_stream,
  407. outstream=None,
  408. origin: Optional[str] = "origin",
  409. depth: Optional[int] = None,
  410. branch: Optional[Union[str, bytes]] = None,
  411. config: Optional[Config] = None,
  412. **kwargs,
  413. ):
  414. """Clone a local or remote git repository.
  415. Args:
  416. source: Path or URL for source repository
  417. target: Path to target repository (optional)
  418. bare: Whether or not to create a bare repository
  419. checkout: Whether or not to check-out HEAD after cloning
  420. errstream: Optional stream to write progress to
  421. outstream: Optional stream to write progress to (deprecated)
  422. origin: Name of remote from the repository used to clone
  423. depth: Depth to fetch at
  424. branch: Optional branch or tag to be used as HEAD in the new repository
  425. instead of the cloned repository's HEAD.
  426. config: Configuration to use
  427. Returns: The new repository
  428. """
  429. if outstream is not None:
  430. import warnings
  431. warnings.warn(
  432. "outstream= has been deprecated in favour of errstream=.",
  433. DeprecationWarning,
  434. stacklevel=3,
  435. )
  436. # TODO(jelmer): Capture logging output and stream to errstream
  437. if config is None:
  438. config = StackedConfig.default()
  439. if checkout is None:
  440. checkout = not bare
  441. if checkout and bare:
  442. raise Error("checkout and bare are incompatible")
  443. if target is None:
  444. target = source.split("/")[-1]
  445. if isinstance(branch, str):
  446. branch = branch.encode(DEFAULT_ENCODING)
  447. mkdir = not os.path.exists(target)
  448. (client, path) = get_transport_and_path(source, config=config, **kwargs)
  449. return client.clone(
  450. path,
  451. target,
  452. mkdir=mkdir,
  453. bare=bare,
  454. origin=origin,
  455. checkout=checkout,
  456. branch=branch,
  457. progress=errstream.write,
  458. depth=depth,
  459. )
def add(repo=".", paths=None):
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. No value passed stages all modified files.
    Returns: Tuple with list of added relative paths and set of ignored paths.

    If the repository contains ignored directories, the returned set will
    contain the path to an ignored directory (with trailing slash). Individual
    files within ignored directories will not be returned.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        if not paths:
            # No explicit paths given: stage every untracked path under the
            # current working directory.
            paths = list(
                get_untracked_paths(
                    str(Path(os.getcwd()).resolve()),
                    str(repo_path),
                    r.open_index(),
                )
            )
        relpaths = []
        if not isinstance(paths, list):
            # Accept a single path as well as a list of paths.
            paths = [paths]
        for p in paths:
            path = Path(p)
            # Paths are interpreted relative to the cwd, then rebased onto
            # the repository root for use in the index.
            relpath = str(path.resolve().relative_to(repo_path))
            # FIXME: Support patterns
            if path.is_dir():
                # Directories get a trailing separator so "dir/" ignore
                # patterns match them.
                relpath = os.path.join(relpath, "")
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)
        r.stage(relpaths)
    return (relpaths, ignored)
  497. def _is_subdir(subdir, parentdir):
  498. """Check whether subdir is parentdir or a subdir of parentdir.
  499. If parentdir or subdir is a relative path, it will be disamgibuated
  500. relative to the pwd.
  501. """
  502. parentdir_abs = os.path.realpath(parentdir) + os.path.sep
  503. subdir_abs = os.path.realpath(subdir) + os.path.sep
  504. return subdir_abs.startswith(parentdir_abs)
# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo=".", target_dir=None):
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()
    with open_repo_closing(repo) as r:
        # Refuse to clean paths outside the repository's working directory.
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")
        config = r.get_config_stack()
        # Read but not yet enforced; see the TODO below.
        require_force = config.get_boolean(  # noqa: F841
            (b"clean",), b"requireForce", True
        )
        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index
                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)
                # Only delete files that are neither tracked nor ignored.
                if not is_tracked and not is_ignored:
                    os.remove(ap)
def remove(repo=".", paths=None, cached=False):
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove
      cached: Only remove from the index, keeping the working-tree file
    Raises:
      Error: if a path is not in the index, or if removing it would lose
        staged or uncommitted content
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            full_path = os.fsencode(os.path.abspath(p))
            tree_path = path_to_tree_path(r.path, p)
            try:
                index_sha = index[tree_path].sha
            except KeyError as exc:
                raise Error("%s did not match any files" % p) from exc
            if not cached:
                try:
                    st = os.lstat(full_path)
                except OSError:
                    # File already gone from the working tree; nothing to delete.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path, st)
                    except OSError:
                        # Unreadable file; skip the safety checks and deletion.
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path
                            )[1]
                        except KeyError:
                            # Path does not exist in HEAD (newly added file).
                            committed_sha = None
                        # Refuse when the staged content matches neither the
                        # working-tree file nor HEAD: deleting would lose it.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                "from both the file and head: %s" % p
                            )
                        # Staged changes relative to HEAD require cached=True.
                        if index_sha != committed_sha:
                            raise Error("file has staged changes: %s" % p)
                        os.remove(full_path)
            del index[tree_path]
        index.write()


# Git-style short alias.
rm = remove
  586. def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
  587. if commit.encoding:
  588. encoding = commit.encoding.decode("ascii")
  589. else:
  590. encoding = default_encoding
  591. return contents.decode(encoding, "replace")
  592. def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
  593. if commit.encoding:
  594. encoding = commit.encoding.decode("ascii")
  595. else:
  596. encoding = default_encoding
  597. return contents.encode(encoding)
  598. def print_commit(commit, decode, outstream=sys.stdout):
  599. """Write a human-readable commit log entry.
  600. Args:
  601. commit: A `Commit` object
  602. outstream: A stream file to write to
  603. """
  604. outstream.write("-" * 50 + "\n")
  605. outstream.write("commit: " + commit.id.decode("ascii") + "\n")
  606. if len(commit.parents) > 1:
  607. outstream.write(
  608. "merge: "
  609. + "...".join([c.decode("ascii") for c in commit.parents[1:]])
  610. + "\n"
  611. )
  612. outstream.write("Author: " + decode(commit.author) + "\n")
  613. if commit.author != commit.committer:
  614. outstream.write("Committer: " + decode(commit.committer) + "\n")
  615. time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
  616. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  617. timezone_str = format_timezone(commit.author_timezone).decode("ascii")
  618. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  619. outstream.write("\n")
  620. outstream.write(decode(commit.message) + "\n")
  621. outstream.write("\n")
  622. def print_tag(tag, decode, outstream=sys.stdout):
  623. """Write a human-readable tag.
  624. Args:
  625. tag: A `Tag` object
  626. decode: Function for decoding bytes to unicode string
  627. outstream: A stream to write to
  628. """
  629. outstream.write("Tagger: " + decode(tag.tagger) + "\n")
  630. time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
  631. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  632. timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
  633. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  634. outstream.write("\n")
  635. outstream.write(decode(tag.message))
  636. outstream.write("\n")
  637. def show_blob(repo, blob, decode, outstream=sys.stdout):
  638. """Write a blob to a stream.
  639. Args:
  640. repo: A `Repo` object
  641. blob: A `Blob` object
  642. decode: Function for decoding bytes to unicode string
  643. outstream: A stream file to write to
  644. """
  645. outstream.write(decode(blob.data))
def show_commit(repo, commit, decode, outstream=sys.stdout):
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    print_commit(commit, decode=decode, outstream=outstream)
    # Diff against the first parent; a root commit diffs against the
    # empty tree (base_tree=None).
    if commit.parents:
        parent_commit = repo[commit.parents[0]]
        base_tree = parent_commit.tree
    else:
        base_tree = None
    # The diff is produced as bytes, then decoded with the commit's own
    # encoding before being written to the (text) outstream.
    diffstream = BytesIO()
    write_tree_diff(diffstream, repo.object_store, base_tree, commit.tree)
    diffstream.seek(0)
    outstream.write(commit_decode(commit, diffstream.getvalue()))
  664. def show_tree(repo, tree, decode, outstream=sys.stdout):
  665. """Print a tree to a stream.
  666. Args:
  667. repo: A `Repo` object
  668. tree: A `Tree` object
  669. decode: Function for decoding bytes to unicode string
  670. outstream: Stream to write to
  671. """
  672. for n in tree:
  673. outstream.write(decode(n) + "\n")
def show_tag(repo, tag, decode, outstream=sys.stdout):
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    print_tag(tag, decode, outstream)
    # tag.object is a (type, sha) pair; recurse into the tagged object.
    show_object(repo, repo[tag.object[1]], decode, outstream)
  684. def show_object(repo, obj, decode, outstream):
  685. return {
  686. b"tree": show_tree,
  687. b"blob": show_blob,
  688. b"commit": show_commit,
  689. b"tag": show_tag,
  690. }[obj.type_name](repo, obj, decode, outstream)
  691. def print_name_status(changes):
  692. """Print a simple status summary, listing changed files."""
  693. for change in changes:
  694. if not change:
  695. continue
  696. if isinstance(change, list):
  697. change = change[0]
  698. if change.type == CHANGE_ADD:
  699. path1 = change.new.path
  700. path2 = ""
  701. kind = "A"
  702. elif change.type == CHANGE_DELETE:
  703. path1 = change.old.path
  704. path2 = ""
  705. kind = "D"
  706. elif change.type == CHANGE_MODIFY:
  707. path1 = change.new.path
  708. path2 = ""
  709. kind = "M"
  710. elif change.type in RENAME_CHANGE_TYPES:
  711. path1 = change.old.path
  712. path2 = change.new.path
  713. if change.type == CHANGE_RENAME:
  714. kind = "R"
  715. elif change.type == CHANGE_COPY:
  716. kind = "C"
  717. yield "%-8s%-20s%-20s" % (kind, path1, path2)
def log(
    repo=".",
    paths=None,
    outstream=sys.stdout,
    max_entries=None,
    reverse=False,
    name_status=False,
):
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      reverse: Reverse order in which entries are printed
      name_status: Print name status
      max_entries: Optional maximum number of entries to display
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker(max_entries=max_entries, paths=paths, reverse=reverse)
        for entry in walker:

            def decode(x):
                # Decode using the encoding recorded in this entry's commit.
                return commit_decode(entry.commit, x)

            print_commit(entry.commit, decode, outstream)
            if name_status:
                outstream.writelines(
                    [line + "\n" for line in print_name_status(entry.changes())]
                )
# TODO(jelmer): better default for encoding?
def show(
    repo=".",
    objects=None,
    outstream=sys.stdout,
    default_encoding=DEFAULT_ENCODING,
):
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the
        commit
    """
    if objects is None:
        objects = ["HEAD"]
    if not isinstance(objects, list):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):
                # Commits may carry their own encoding header; honor it
                # and only fall back to default_encoding.
                def decode(x):
                    return commit_decode(o, x, default_encoding)

            else:

                def decode(x):
                    return x.decode(default_encoding)

            show_object(r, o, decode, outstream)
  774. def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream):
  775. """Compares the content and mode of blobs found via two tree objects.
  776. Args:
  777. repo: Path to repository
  778. old_tree: Id of old tree
  779. new_tree: Id of new tree
  780. outstream: Stream to write to
  781. """
  782. with open_repo_closing(repo) as r:
  783. write_tree_diff(outstream, r.object_store, old_tree, new_tree)
  784. def rev_list(repo, commits, outstream=sys.stdout):
  785. """Lists commit objects in reverse chronological order.
  786. Args:
  787. repo: Path to repository
  788. commits: Commits over which to iterate
  789. outstream: Stream to write to
  790. """
  791. with open_repo_closing(repo) as r:
  792. for entry in r.get_walker(include=[r[c].id for c in commits]):
  793. outstream.write(entry.commit.id + b"\n")
  794. def _canonical_part(url: str) -> str:
  795. name = url.rsplit("/", 1)[-1]
  796. if name.endswith(".git"):
  797. name = name[:-4]
  798. return name
def submodule_add(repo, url, path=None, name=None):
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live (defaults to the canonical
        repository name derived from url)
      name: Name of the submodule (defaults to path)
    """
    with open_repo_closing(repo) as r:
        if path is None:
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = path

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            # No .gitmodules file yet; start a fresh one at that location.
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", path)
        config.write_to_path()
def submodule_init(repo):
    """Initialize submodules.

    Args:
      repo: Path to repository
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        # Copy each submodule declared in .gitmodules into the repository
        # configuration, marking it as active.
        for path, url, name in read_submodules(gitmodules_path):
            config.set((b"submodule", name), b"active", True)
            config.set((b"submodule", name), b"url", url)
        config.write_to_path()
  833. def submodule_list(repo):
  834. """List submodules.
  835. Args:
  836. repo: Path to repository
  837. """
  838. from .submodule import iter_cached_submodules
  839. with open_repo_closing(repo) as r:
  840. for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
  841. yield path, sha.decode(DEFAULT_ENCODING)
  842. def tag_create(
  843. repo,
  844. tag,
  845. author=None,
  846. message=None,
  847. annotated=False,
  848. objectish="HEAD",
  849. tag_time=None,
  850. tag_timezone=None,
  851. sign=False,
  852. encoding=DEFAULT_ENCODING,
  853. ):
  854. """Creates a tag in git via dulwich calls.
  855. Args:
  856. repo: Path to repository
  857. tag: tag string
  858. author: tag author (optional, if annotated is set)
  859. message: tag message (optional)
  860. annotated: whether to create an annotated tag
  861. objectish: object the tag should point at, defaults to HEAD
  862. tag_time: Optional time for annotated tag
  863. tag_timezone: Optional timezone for annotated tag
  864. sign: GPG Sign the tag (bool, defaults to False,
  865. pass True to use default GPG key,
  866. pass a str containing Key ID to use a specific GPG key)
  867. """
  868. with open_repo_closing(repo) as r:
  869. object = parse_object(r, objectish)
  870. if annotated:
  871. # Create the tag object
  872. tag_obj = Tag()
  873. if author is None:
  874. # TODO(jelmer): Don't use repo private method.
  875. author = r._get_user_identity(r.get_config_stack())
  876. tag_obj.tagger = author
  877. tag_obj.message = message + "\n".encode(encoding)
  878. tag_obj.name = tag
  879. tag_obj.object = (type(object), object.id)
  880. if tag_time is None:
  881. tag_time = int(time.time())
  882. tag_obj.tag_time = tag_time
  883. if tag_timezone is None:
  884. tag_timezone = get_user_timezones()[1]
  885. elif isinstance(tag_timezone, str):
  886. tag_timezone = parse_timezone(tag_timezone)
  887. tag_obj.tag_timezone = tag_timezone
  888. if sign:
  889. tag_obj.sign(sign if isinstance(sign, str) else None)
  890. r.object_store.add_object(tag_obj)
  891. tag_id = tag_obj.id
  892. else:
  893. tag_id = object.id
  894. r.refs[_make_tag_ref(tag)] = tag_id
  895. def tag_list(repo, outstream=sys.stdout):
  896. """List all tags.
  897. Args:
  898. repo: Path to repository
  899. outstream: Stream to write tags to
  900. """
  901. with open_repo_closing(repo) as r:
  902. tags = sorted(r.refs.as_dict(b"refs/tags"))
  903. return tags
  904. def tag_delete(repo, name):
  905. """Remove a tag.
  906. Args:
  907. repo: Path to repository
  908. name: Name of tag to remove
  909. """
  910. with open_repo_closing(repo) as r:
  911. if isinstance(name, bytes):
  912. names = [name]
  913. elif isinstance(name, list):
  914. names = name
  915. else:
  916. raise Error("Unexpected tag name type %r" % name)
  917. for name in names:
  918. del r.refs[_make_tag_ref(name)]
  919. def reset(repo, mode, treeish="HEAD"):
  920. """Reset current HEAD to the specified state.
  921. Args:
  922. repo: Path to repository
  923. mode: Mode ("hard", "soft", "mixed")
  924. treeish: Treeish to reset to
  925. """
  926. if mode != "hard":
  927. raise Error("hard is the only mode currently supported")
  928. with open_repo_closing(repo) as r:
  929. tree = parse_tree(r, treeish)
  930. r.reset_index(tree.id)
  931. def get_remote_repo(
  932. repo: Repo, remote_location: Optional[Union[str, bytes]] = None
  933. ) -> Tuple[Optional[str], str]:
  934. config = repo.get_config()
  935. if remote_location is None:
  936. remote_location = get_branch_remote(repo)
  937. if isinstance(remote_location, str):
  938. encoded_location = remote_location.encode()
  939. else:
  940. encoded_location = remote_location
  941. section = (b"remote", encoded_location)
  942. remote_name: Optional[str] = None
  943. if config.has_section(section):
  944. remote_name = encoded_location.decode()
  945. encoded_location = config.get(section, "url")
  946. else:
  947. remote_name = None
  948. return (remote_name, encoded_location.decode())
def push(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    force=False,
    **kwargs,
):
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
    Raises:
      Error: if a refspec names a missing local ref, or the push fails
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        if refspecs is None:
            refspecs = [active_branch(r)]
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Get the client and path
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )

        selected_refs = []
        remote_changed_refs = {}

        def update_refs(refs):
            # Called by send_pack with the remote's current refs; returns
            # the new ref mapping to publish.
            selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
            new_refs = {}
            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    # No local side: this refspec deletes the remote ref.
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error("No valid ref %s in local repository" % lh) from exc
                    if not force_ref and rh in refs:
                        # Refuse non-fast-forward updates unless forced.
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:
            result = client.send_pack(
                path,
                update_refs,
                generate_pack_data=r.generate_pack_data,
                progress=errstream.write,
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        # Report per-ref success/failure from the server's ref status.
        for ref, error in (result.ref_status or {}).items():
            if error is not None:
                errstream.write(
                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
                )
            else:
                errstream.write(b"Ref %s updated\n" % ref)

        if remote_name is not None:
            # Record the changed refs locally under the configured remote name.
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)
def pull(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    fast_forward=True,
    force=False,
    **kwargs,
):
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch (defaults to [b"HEAD"])
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, re-raise DivergedBranches when the update is
        not a fast-forward; merging is not yet supported
      force: Force fetching of refs
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        if refspecs is None:
            refspecs = [b"HEAD"]
        selected_refs = []

        def determine_wants(remote_refs, **kwargs):
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, refspecs, force=force)
            )
            # Only request objects we do not already have locally.
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        fetch_result = client.fetch(
            path, r, progress=errstream.write, determine_wants=determine_wants
        )
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh in r.refs:
                try:
                    check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                except DivergedBranches as exc:
                    if fast_forward:
                        raise
                    else:
                        raise NotImplementedError("merge is not yet supported") from exc
            r.refs[rh] = fetch_result.refs[lh]
        if selected_refs:
            # Point HEAD at the first selected ref's new value.
            r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]

        # Perform 'git checkout .' - syncs staged changes
        tree = r[b"HEAD"].tree
        r.reset_index(tree=tree)
        if remote_name is not None:
            # Record the fetched refs locally under the configured remote name.
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)
  1078. def status(repo=".", ignored=False, untracked_files="all"):
  1079. """Returns staged, unstaged, and untracked changes relative to the HEAD.
  1080. Args:
  1081. repo: Path to repository or repository object
  1082. ignored: Whether to include ignored files in untracked
  1083. untracked_files: How to handle untracked files, defaults to "all":
  1084. "no": do not return untracked files
  1085. "all": include all files in untracked directories
  1086. Using untracked_files="no" can be faster than "all" when the worktreee
  1087. contains many untracked files/directories.
  1088. Note: untracked_files="normal" (git's default) is not implemented.
  1089. Returns: GitStatus tuple,
  1090. staged - dict with lists of staged paths (diff index/HEAD)
  1091. unstaged - list of unstaged paths (diff index/working-tree)
  1092. untracked - list of untracked, un-ignored & non-.git paths
  1093. """
  1094. with open_repo_closing(repo) as r:
  1095. # 1. Get status of staged
  1096. tracked_changes = get_tree_changes(r)
  1097. # 2. Get status of unstaged
  1098. index = r.open_index()
  1099. normalizer = r.get_blob_normalizer()
  1100. filter_callback = normalizer.checkin_normalize
  1101. unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))
  1102. untracked_paths = get_untracked_paths(
  1103. r.path,
  1104. r.path,
  1105. index,
  1106. exclude_ignored=not ignored,
  1107. untracked_files=untracked_files,
  1108. )
  1109. if sys.platform == "win32":
  1110. untracked_changes = [
  1111. path.replace(os.path.sep, "/") for path in untracked_paths
  1112. ]
  1113. else:
  1114. untracked_changes = list(untracked_paths)
  1115. return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
  1116. def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
  1117. """Get path, is_dir for files in working dir from frompath.
  1118. Args:
  1119. frompath: Path to begin walk
  1120. basepath: Path to compare to
  1121. prune_dirnames: Optional callback to prune dirnames during os.walk
  1122. dirnames will be set to result of prune_dirnames(dirpath, dirnames)
  1123. """
  1124. for dirpath, dirnames, filenames in os.walk(frompath):
  1125. # Skip .git and below.
  1126. if ".git" in dirnames:
  1127. dirnames.remove(".git")
  1128. if dirpath != basepath:
  1129. continue
  1130. if ".git" in filenames:
  1131. filenames.remove(".git")
  1132. if dirpath != basepath:
  1133. continue
  1134. if dirpath != frompath:
  1135. yield dirpath, True
  1136. for filename in filenames:
  1137. filepath = os.path.join(dirpath, filename)
  1138. yield filepath, False
  1139. if prune_dirnames:
  1140. dirnames[:] = prune_dirnames(dirpath, dirnames)
def get_untracked_paths(
    frompath, basepath, index, exclude_ignored=False, untracked_files="all"
):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": Not implemented

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files == "normal":
        raise NotImplementedError("normal is not yet supported")

    if untracked_files not in ("no", "all"):
        raise ValueError("untracked_files must be one of (no, all)")

    if untracked_files == "no":
        return

    with open_repo_closing(basepath) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []

    def prune_dirnames(dirpath, dirnames):
        # Drop ignored directories from the walk; iterating backwards so
        # deleting by index stays safe. Remember pruned directories when
        # they should still be reported.
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            # Trailing separator so directory ignore rules (ending in /) match.
            ip = os.path.join(os.path.relpath(path, basepath), "")
            if ignore_manager.is_ignored(ip):
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath), "")
                    )
                del dirnames[i]
        return dirnames

    for ap, is_dir in _walk_working_dir_paths(
        frompath, basepath, prune_dirnames=prune_dirnames
    ):
        if not is_dir:
            ip = path_to_tree_path(basepath, ap)
            if ip not in index:
                # Untracked file: yield unless it is ignored and ignored
                # files were requested to be excluded.
                if not exclude_ignored or not ignore_manager.is_ignored(
                    os.path.relpath(ap, basepath)
                ):
                    yield os.path.relpath(ap, frompath)

    yield from ignored_dirs
def get_tree_changes(repo):
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object
    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            tree_id = r[b"HEAD"].tree
        except KeyError:
            # No HEAD yet (empty repository): everything in the index is new.
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            # change[0] is the (old_path, new_path) pair; a missing side
            # indicates an add or a delete respectively.
            if not change[0][0]:
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                tracked_changes["modify"].append(change[0][0])
            else:
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes
  1219. def daemon(path=".", address=None, port=None):
  1220. """Run a daemon serving Git requests over TCP/IP.
  1221. Args:
  1222. path: Path to the directory to serve.
  1223. address: Optional address to listen on (defaults to ::)
  1224. port: Optional port to listen on (defaults to TCP_GIT_PORT)
  1225. """
  1226. # TODO(jelmer): Support git-daemon-export-ok and --export-all.
  1227. backend = FileSystemBackend(path)
  1228. server = TCPGitServer(backend, address, port)
  1229. server.serve_forever()
  1230. def web_daemon(path=".", address=None, port=None):
  1231. """Run a daemon serving Git requests over HTTP.
  1232. Args:
  1233. path: Path to the directory to serve
  1234. address: Optional address to listen on (defaults to ::)
  1235. port: Optional port to listen on (defaults to 80)
  1236. """
  1237. from .web import (
  1238. WSGIRequestHandlerLogger,
  1239. WSGIServerLogger,
  1240. make_server,
  1241. make_wsgi_chain,
  1242. )
  1243. backend = FileSystemBackend(path)
  1244. app = make_wsgi_chain(backend)
  1245. server = make_server(
  1246. address,
  1247. port,
  1248. app,
  1249. handler_class=WSGIRequestHandlerLogger,
  1250. server_class=WSGIServerLogger,
  1251. )
  1252. server.serve_forever()
  1253. def upload_pack(path=".", inf=None, outf=None):
  1254. """Upload a pack file after negotiating its contents using smart protocol.
  1255. Args:
  1256. path: Path to the repository
  1257. inf: Input stream to communicate with client
  1258. outf: Output stream to communicate with client
  1259. """
  1260. if outf is None:
  1261. outf = getattr(sys.stdout, "buffer", sys.stdout)
  1262. if inf is None:
  1263. inf = getattr(sys.stdin, "buffer", sys.stdin)
  1264. path = os.path.expanduser(path)
  1265. backend = FileSystemBackend(path)
  1266. def send_fn(data):
  1267. outf.write(data)
  1268. outf.flush()
  1269. proto = Protocol(inf.read, send_fn)
  1270. handler = UploadPackHandler(backend, [path], proto)
  1271. # FIXME: Catch exceptions and write a single-line summary to outf.
  1272. handler.handle()
  1273. return 0
  1274. def receive_pack(path=".", inf=None, outf=None):
  1275. """Receive a pack file after negotiating its contents using smart protocol.
  1276. Args:
  1277. path: Path to the repository
  1278. inf: Input stream to communicate with client
  1279. outf: Output stream to communicate with client
  1280. """
  1281. if outf is None:
  1282. outf = getattr(sys.stdout, "buffer", sys.stdout)
  1283. if inf is None:
  1284. inf = getattr(sys.stdin, "buffer", sys.stdin)
  1285. path = os.path.expanduser(path)
  1286. backend = FileSystemBackend(path)
  1287. def send_fn(data):
  1288. outf.write(data)
  1289. outf.flush()
  1290. proto = Protocol(inf.read, send_fn)
  1291. handler = ReceivePackHandler(backend, [path], proto)
  1292. # FIXME: Catch exceptions and write a single-line summary to outf.
  1293. handler.handle()
  1294. return 0
  1295. def _make_branch_ref(name):
  1296. if getattr(name, "encode", None):
  1297. name = name.encode(DEFAULT_ENCODING)
  1298. return LOCAL_BRANCH_PREFIX + name
  1299. def _make_tag_ref(name):
  1300. if getattr(name, "encode", None):
  1301. name = name.encode(DEFAULT_ENCODING)
  1302. return LOCAL_TAG_PREFIX + name
  1303. def branch_delete(repo, name):
  1304. """Delete a branch.
  1305. Args:
  1306. repo: Path to the repository
  1307. name: Name of the branch
  1308. """
  1309. with open_repo_closing(repo) as r:
  1310. if isinstance(name, list):
  1311. names = name
  1312. else:
  1313. names = [name]
  1314. for name in names:
  1315. del r.refs[_make_branch_ref(name)]
  1316. def branch_create(repo, name, objectish=None, force=False):
  1317. """Create a branch.
  1318. Args:
  1319. repo: Path to the repository
  1320. name: Name of the new branch
  1321. objectish: Target object to point new branch at (defaults to HEAD)
  1322. force: Force creation of branch, even if it already exists
  1323. """
  1324. with open_repo_closing(repo) as r:
  1325. if objectish is None:
  1326. objectish = "HEAD"
  1327. object = parse_object(r, objectish)
  1328. refname = _make_branch_ref(name)
  1329. ref_message = b"branch: Created from " + objectish.encode(DEFAULT_ENCODING)
  1330. if force:
  1331. r.refs.set_if_equals(refname, None, object.id, message=ref_message)
  1332. else:
  1333. if not r.refs.add_if_new(refname, object.id, message=ref_message):
  1334. raise Error("Branch with name %s already exists." % name)
def branch_list(repo):
    """List all branches.

    Args:
      repo: Path to the repository
    Returns: Branch ref names as returned by
      ``refs.keys(base=LOCAL_BRANCH_PREFIX)``
    """
    with open_repo_closing(repo) as r:
        return r.refs.keys(base=LOCAL_BRANCH_PREFIX)
def active_branch(repo):
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open
    Returns:
      branch name
    Raises:
      KeyError: if the repository does not have a working tree
      IndexError: if HEAD is floating
      ValueError: if HEAD does not point under refs/heads/
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(b"HEAD")[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        # Strip the refs/heads/ prefix, leaving just the branch name.
        return active_ref[len(LOCAL_BRANCH_PREFIX) :]
  1357. def get_branch_remote(repo):
  1358. """Return the active branch's remote name, if any.
  1359. Args:
  1360. repo: Repository to open
  1361. Returns:
  1362. remote name
  1363. Raises:
  1364. KeyError: if the repository does not have a working tree
  1365. """
  1366. with open_repo_closing(repo) as r:
  1367. branch_name = active_branch(r.path)
  1368. config = r.get_config()
  1369. try:
  1370. remote_name = config.get((b"branch", branch_name), b"remote")
  1371. except KeyError:
  1372. remote_name = b"origin"
  1373. return remote_name
  1374. def fetch(
  1375. repo,
  1376. remote_location=None,
  1377. outstream=sys.stdout,
  1378. errstream=default_bytes_err_stream,
  1379. message=None,
  1380. depth=None,
  1381. prune=False,
  1382. prune_tags=False,
  1383. force=False,
  1384. **kwargs,
  1385. ):
  1386. """Fetch objects from a remote server.
  1387. Args:
  1388. repo: Path to the repository
  1389. remote_location: String identifying a remote server
  1390. outstream: Output stream (defaults to stdout)
  1391. errstream: Error stream (defaults to stderr)
  1392. message: Reflog message (defaults to b"fetch: from <remote_name>")
  1393. depth: Depth to fetch at
  1394. prune: Prune remote removed refs
  1395. prune_tags: Prune reomte removed tags
  1396. Returns:
  1397. Dictionary with refs on the remote
  1398. """
  1399. with open_repo_closing(repo) as r:
  1400. (remote_name, remote_location) = get_remote_repo(r, remote_location)
  1401. if message is None:
  1402. message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
  1403. client, path = get_transport_and_path(
  1404. remote_location, config=r.get_config_stack(), **kwargs
  1405. )
  1406. fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth)
  1407. if remote_name is not None:
  1408. _import_remote_refs(
  1409. r.refs,
  1410. remote_name,
  1411. fetch_result.refs,
  1412. message,
  1413. prune=prune,
  1414. prune_tags=prune_tags,
  1415. )
  1416. return fetch_result
  1417. def ls_remote(remote, config: Optional[Config] = None, **kwargs):
  1418. """List the refs in a remote.
  1419. Args:
  1420. remote: Remote repository location
  1421. config: Configuration to use
  1422. Returns:
  1423. Dictionary with remote refs
  1424. """
  1425. if config is None:
  1426. config = StackedConfig.default()
  1427. client, host_path = get_transport_and_path(remote, config=config, **kwargs)
  1428. return client.get_refs(host_path)
def repack(repo):
    """Repack loose files in a repository.

    Currently this only packs loose objects.

    Args:
      repo: Path to the repository
    """
    with open_repo_closing(repo) as r:
        r.object_store.pack_loose_objects()
def pack_objects(
    repo,
    object_ids,
    packf,
    idxf,
    delta_window_size=None,
    deltify=None,
    reuse_deltas=True,
):
    """Pack objects into a file.

    Args:
      repo: Path to the repository
      object_ids: List of object ids to write
      packf: File-like object to write to
      idxf: File-like object to write to (can be None)
      delta_window_size: Sliding window size for searching for deltas;
        Set to None for default window size.
      deltify: Whether to deltify objects
      reuse_deltas: Allow reuse of existing deltas while deltifying
    """
    with open_repo_closing(repo) as r:
        entries, data_sum = write_pack_from_container(
            packf.write,
            r.object_store,
            [(oid, None) for oid in object_ids],
            deltify=deltify,
            delta_window_size=delta_window_size,
            reuse_deltas=reuse_deltas,
        )
    if idxf is not None:
        # Flatten the entries mapping into sorted (key, v[0], v[1]) tuples
        # for the index writer.
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        write_pack_index(idxf, entries, data_sum)
def ls_tree(
    repo,
    treeish=b"HEAD",
    outstream=sys.stdout,
    recursive=False,
    name_only=False,
):
    """List contents of a tree.

    Args:
      repo: Path to the repository
      treeish: Tree id to list
      outstream: Output stream (defaults to stdout)
      recursive: Whether to recursively list files
      name_only: Only print item name
    """

    def list_tree(store, treeid, base):
        # Print one tree level, recursing into subtrees when requested.
        for name, mode, sha in store[treeid].iteritems():
            if base:
                name = posixpath.join(base, name)
            if name_only:
                outstream.write(name + b"\n")
            else:
                outstream.write(pretty_format_tree_entry(name, mode, sha))
            if stat.S_ISDIR(mode) and recursive:
                list_tree(store, sha, name)

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        list_tree(r.object_store, tree.id, "")
  1497. def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]):
  1498. """Add a remote.
  1499. Args:
  1500. repo: Path to the repository
  1501. name: Remote name
  1502. url: Remote URL
  1503. """
  1504. if not isinstance(name, bytes):
  1505. name = name.encode(DEFAULT_ENCODING)
  1506. if not isinstance(url, bytes):
  1507. url = url.encode(DEFAULT_ENCODING)
  1508. with open_repo_closing(repo) as r:
  1509. c = r.get_config()
  1510. section = (b"remote", name)
  1511. if c.has_section(section):
  1512. raise RemoteExists(section)
  1513. c.set(section, b"url", url)
  1514. c.write_to_path()
  1515. def remote_remove(repo: Repo, name: Union[bytes, str]):
  1516. """Remove a remote.
  1517. Args:
  1518. repo: Path to the repository
  1519. name: Remote name
  1520. """
  1521. if not isinstance(name, bytes):
  1522. name = name.encode(DEFAULT_ENCODING)
  1523. with open_repo_closing(repo) as r:
  1524. c = r.get_config()
  1525. section = (b"remote", name)
  1526. del c[section]
  1527. c.write_to_path()
  1528. def check_ignore(repo, paths, no_index=False):
  1529. """Debug gitignore files.
  1530. Args:
  1531. repo: Path to the repository
  1532. paths: List of paths to check for
  1533. no_index: Don't check index
  1534. Returns: List of ignored files
  1535. """
  1536. with open_repo_closing(repo) as r:
  1537. index = r.open_index()
  1538. ignore_manager = IgnoreFilterManager.from_repo(r)
  1539. for path in paths:
  1540. if not no_index and path_to_tree_path(r.path, path) in index:
  1541. continue
  1542. if os.path.isabs(path):
  1543. path = os.path.relpath(path, r.path)
  1544. if ignore_manager.is_ignored(path):
  1545. yield path
  1546. def update_head(repo, target, detached=False, new_branch=None):
  1547. """Update HEAD to point at a new branch/commit.
  1548. Note that this does not actually update the working tree.
  1549. Args:
  1550. repo: Path to the repository
  1551. detached: Create a detached head
  1552. target: Branch or committish to switch to
  1553. new_branch: New branch to create
  1554. """
  1555. with open_repo_closing(repo) as r:
  1556. if new_branch is not None:
  1557. to_set = _make_branch_ref(new_branch)
  1558. else:
  1559. to_set = b"HEAD"
  1560. if detached:
  1561. # TODO(jelmer): Provide some way so that the actual ref gets
  1562. # updated rather than what it points to, so the delete isn't
  1563. # necessary.
  1564. del r.refs[to_set]
  1565. r.refs[to_set] = parse_commit(r, target).id
  1566. else:
  1567. r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
  1568. if new_branch is not None:
  1569. r.refs.set_symbolic_ref(b"HEAD", to_set)
  1570. def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None):
  1571. """Reset the file to specific commit or branch.
  1572. Args:
  1573. repo: dulwich Repo object
  1574. file_path: file to reset, relative to the repository path
  1575. target: branch or commit or b'HEAD' to reset
  1576. """
  1577. tree = parse_tree(repo, treeish=target)
  1578. tree_path = _fs_to_tree_path(file_path)
  1579. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  1580. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  1581. blob = repo.object_store[file_entry[1]]
  1582. mode = file_entry[0]
  1583. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
  1584. def _update_head_during_checkout_branch(repo, target):
  1585. checkout_target = None
  1586. if target == b"HEAD": # Do not update head while trying to checkout to HEAD.
  1587. pass
  1588. elif target in repo.refs.keys(base=LOCAL_BRANCH_PREFIX):
  1589. update_head(repo, target)
  1590. else:
  1591. # If checking out a remote branch, create a local one without the remote name prefix.
  1592. config = repo.get_config()
  1593. name = target.split(b"/")[0]
  1594. section = (b"remote", name)
  1595. if config.has_section(section):
  1596. checkout_target = target.replace(name + b"/", b"")
  1597. try:
  1598. branch_create(
  1599. repo, checkout_target, (LOCAL_REMOTE_PREFIX + target).decode()
  1600. )
  1601. except Error:
  1602. pass
  1603. update_head(repo, LOCAL_BRANCH_PREFIX + checkout_target)
  1604. else:
  1605. update_head(repo, target, detached=True)
  1606. return checkout_target
def checkout_branch(repo, target: Union[bytes, str], force: bool = False):
    """Switch branches or restore working tree files.

    The implementation of this function will probably not scale well
    for branches with lots of local changes.
    This is due to the analysis of a diff between branches before any
    changes are applied.

    Args:
      repo: dulwich Repo object
      target: branch name or commit sha to checkout
      force: true or not to force checkout

    Raises:
      CheckoutError: if local modifications would be overwritten and
        force is False
    """
    target = to_bytes(target)

    current_tree = parse_tree(repo, repo.head())
    target_tree = parse_tree(repo, target)

    if force:
        # Forced checkout: discard the working tree and index wholesale.
        repo.reset_index(target_tree.id)
        _update_head_during_checkout_branch(repo, target)
    else:
        # Collect every staged (add/delete/modify) and unstaged change.
        status_report = status(repo)
        changes = list(
            set(
                status_report[0]["add"]
                + status_report[0]["delete"]
                + status_report[0]["modify"]
                + status_report[1]
            )
        )
        # Keep only changes to paths that exist in the current tree; abort
        # if any of those paths also exist in the target tree, since the
        # checkout would clobber the local modification.
        index = 0
        while index < len(changes):
            change = changes[index]
            try:
                current_tree.lookup_path(repo.object_store.__getitem__, change)
                try:
                    target_tree.lookup_path(repo.object_store.__getitem__, change)
                    index += 1
                except KeyError:
                    raise CheckoutError(
                        "Your local changes to the following files would be overwritten by checkout: "
                        + change.decode()
                    )
            except KeyError:
                # Path not in the current tree (e.g. newly added): safe to keep.
                changes.pop(index)

        # Update head.
        checkout_target = _update_head_during_checkout_branch(repo, target)
        if checkout_target is not None:
            # A local branch was created from a remote-tracking ref; check
            # out that local branch's tree instead.
            target_tree = parse_tree(repo, checkout_target)

        # Materialize the target tree into the working tree and index,
        # leaving locally changed paths untouched.
        dealt_with = set()
        repo_index = repo.open_index()
        for entry in iter_tree_contents(repo.object_store, target_tree.id):
            dealt_with.add(entry.path)
            if entry.path in changes:
                continue
            full_path = os.path.join(os.fsencode(repo.path), entry.path)
            blob = repo.object_store[entry.sha]
            ensure_dir_exists(os.path.dirname(full_path))
            st = build_file_from_blob(blob, entry.mode, full_path)
            repo_index[entry.path] = index_entry_from_stat(st, entry.sha)
        repo_index.write()

        # Anything in the current tree that was not written above no longer
        # belongs to the target branch: unstage it.
        for entry in iter_tree_contents(repo.object_store, current_tree.id):
            if entry.path not in dealt_with:
                repo.unstage([entry.path])

    # Remove the untracked files which are in the current_file_set.
    repo_index = repo.open_index()
    for change in repo_index.changes_from_tree(repo.object_store, current_tree.id):
        path_change = change[0]
        if path_change[1] is None:
            file_name = path_change[0]
            full_path = os.path.join(repo.path, file_name.decode())
            if os.path.isfile(full_path):
                os.remove(full_path)
            # Prune now-empty parent directories up to the repository root.
            dir_path = os.path.dirname(full_path)
            while dir_path != repo.path:
                is_empty = len(os.listdir(dir_path)) == 0
                if is_empty:
                    os.rmdir(dir_path)
                dir_path = os.path.dirname(dir_path)
  1683. def check_mailmap(repo, contact):
  1684. """Check canonical name and email of contact.
  1685. Args:
  1686. repo: Path to the repository
  1687. contact: Contact name and/or email
  1688. Returns: Canonical contact data
  1689. """
  1690. with open_repo_closing(repo) as r:
  1691. from .mailmap import Mailmap
  1692. try:
  1693. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  1694. except FileNotFoundError:
  1695. mailmap = Mailmap()
  1696. return mailmap.lookup(contact)
  1697. def fsck(repo):
  1698. """Check a repository.
  1699. Args:
  1700. repo: A path to the repository
  1701. Returns: Iterator over errors/warnings
  1702. """
  1703. with open_repo_closing(repo) as r:
  1704. # TODO(jelmer): check pack files
  1705. # TODO(jelmer): check graph
  1706. # TODO(jelmer): check refs
  1707. for sha in r.object_store:
  1708. o = r.object_store[sha]
  1709. try:
  1710. o.check()
  1711. except Exception as e:
  1712. yield (sha, e)
  1713. def stash_list(repo):
  1714. """List all stashes in a repository."""
  1715. with open_repo_closing(repo) as r:
  1716. from .stash import Stash
  1717. stash = Stash.from_repo(r)
  1718. return enumerate(list(stash.stashes()))
  1719. def stash_push(repo):
  1720. """Push a new stash onto the stack."""
  1721. with open_repo_closing(repo) as r:
  1722. from .stash import Stash
  1723. stash = Stash.from_repo(r)
  1724. stash.push()
  1725. def stash_pop(repo, index):
  1726. """Pop a stash from the stack."""
  1727. with open_repo_closing(repo) as r:
  1728. from .stash import Stash
  1729. stash = Stash.from_repo(r)
  1730. stash.pop(index)
  1731. def stash_drop(repo, index):
  1732. """Drop a stash from the stack."""
  1733. with open_repo_closing(repo) as r:
  1734. from .stash import Stash
  1735. stash = Stash.from_repo(r)
  1736. stash.drop(index)
  1737. def ls_files(repo):
  1738. """List all files in an index."""
  1739. with open_repo_closing(repo) as r:
  1740. return sorted(r.open_index())
  1741. def find_unique_abbrev(object_store, object_id):
  1742. """For now, just return 7 characters."""
  1743. # TODO(jelmer): Add some logic here to return a number of characters that
  1744. # scales relative with the size of the repository
  1745. return object_id.decode("ascii")[:7]
  1746. def describe(repo, abbrev=7):
  1747. """Describe the repository version.
  1748. Args:
  1749. repo: git repository
  1750. abbrev: number of characters of commit to take, default is 7
  1751. Returns: a string description of the current git revision
  1752. Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
  1753. """
  1754. # Get the repository
  1755. with open_repo_closing(repo) as r:
  1756. # Get a list of all tags
  1757. refs = r.get_refs()
  1758. tags = {}
  1759. for key, value in refs.items():
  1760. key = key.decode()
  1761. obj = r.get_object(value)
  1762. if "tags" not in key:
  1763. continue
  1764. _, tag = key.rsplit("/", 1)
  1765. try:
  1766. commit = obj.object
  1767. except AttributeError:
  1768. continue
  1769. else:
  1770. commit = r.get_object(commit[1])
  1771. tags[tag] = [
  1772. datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
  1773. commit.id.decode("ascii"),
  1774. ]
  1775. sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)
  1776. # If there are no tags, return the current commit
  1777. if len(sorted_tags) == 0:
  1778. return f"g{find_unique_abbrev(r.object_store, r[r.head()].id)}"
  1779. # We're now 0 commits from the top
  1780. commit_count = 0
  1781. # Get the latest commit
  1782. latest_commit = r[r.head()]
  1783. # Walk through all commits
  1784. walker = r.get_walker()
  1785. for entry in walker:
  1786. # Check if tag
  1787. commit_id = entry.commit.id.decode("ascii")
  1788. for tag in sorted_tags:
  1789. tag_name = tag[0]
  1790. tag_commit = tag[1][1]
  1791. if commit_id == tag_commit:
  1792. if commit_count == 0:
  1793. return tag_name
  1794. else:
  1795. return "{}-{}-g{}".format(
  1796. tag_name,
  1797. commit_count,
  1798. latest_commit.id.decode("ascii")[:abbrev],
  1799. )
  1800. commit_count += 1
  1801. # Return plain commit if no parent tag can be found
  1802. return "g{}".format(latest_commit.id.decode("ascii")[:abbrev])
  1803. def get_object_by_path(repo, path, committish=None):
  1804. """Get an object by path.
  1805. Args:
  1806. repo: A path to the repository
  1807. path: Path to look up
  1808. committish: Commit to look up path in
  1809. Returns: A `ShaFile` object
  1810. """
  1811. if committish is None:
  1812. committish = "HEAD"
  1813. # Get the repository
  1814. with open_repo_closing(repo) as r:
  1815. commit = parse_commit(r, committish)
  1816. base_tree = commit.tree
  1817. if not isinstance(path, bytes):
  1818. path = commit_encode(commit, path)
  1819. (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  1820. return r[sha]
  1821. def write_tree(repo):
  1822. """Write a tree object from the index.
  1823. Args:
  1824. repo: Repository for which to write tree
  1825. Returns: tree id for the tree that was written
  1826. """
  1827. with open_repo_closing(repo) as r:
  1828. return r.open_index().commit(r.object_store)