repo.py 77 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317
  1. # repo.py -- For dealing with git repositories.
  2. # Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
  3. # Copyright (C) 2008-2013 Jelmer Vernooij <jelmer@jelmer.uk>
  4. #
  5. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  6. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  7. # General Public License as published by the Free Software Foundation; version 2.0
  8. # or (at your option) any later version. You can redistribute it and/or
  9. # modify it under the terms of either of these two licenses.
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. #
  17. # You should have received a copy of the licenses; if not, see
  18. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  19. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  20. # License, Version 2.0.
  21. #
  22. """Repository access.
  23. This module contains the base class for git repositories
  24. (BaseRepo) and an implementation which uses a repository on
  25. local disk (Repo).
  26. """
  27. import os
  28. import stat
  29. import sys
  30. import time
  31. import warnings
  32. from collections.abc import Iterable
  33. from io import BytesIO
  34. from typing import (
  35. TYPE_CHECKING,
  36. Any,
  37. BinaryIO,
  38. Callable,
  39. Optional,
  40. Union,
  41. )
  42. if TYPE_CHECKING:
  43. # There are no circular imports here, but we try to defer imports as long
  44. # as possible to reduce start-up time for anything that doesn't need
  45. # these imports.
  46. from .attrs import GitAttributes
  47. from .config import ConditionMatcher, ConfigFile, StackedConfig
  48. from .index import Index
  49. from .notes import Notes
  50. from .errors import (
  51. CommitError,
  52. HookError,
  53. NoIndexPresent,
  54. NotBlobError,
  55. NotCommitError,
  56. NotGitRepository,
  57. NotTagError,
  58. NotTreeError,
  59. RefFormatError,
  60. )
  61. from .file import GitFile
  62. from .hooks import (
  63. CommitMsgShellHook,
  64. Hook,
  65. PostCommitShellHook,
  66. PostReceiveShellHook,
  67. PreCommitShellHook,
  68. )
  69. from .line_ending import BlobNormalizer, TreeBlobNormalizer
  70. from .object_store import (
  71. DiskObjectStore,
  72. MemoryObjectStore,
  73. MissingObjectFinder,
  74. ObjectStoreGraphWalker,
  75. PackBasedObjectStore,
  76. find_shallow,
  77. peel_sha,
  78. )
  79. from .objects import (
  80. Blob,
  81. Commit,
  82. ObjectID,
  83. ShaFile,
  84. Tag,
  85. Tree,
  86. check_hexsha,
  87. valid_hexsha,
  88. )
  89. from .pack import generate_unpacked_objects
  90. from .refs import (
  91. ANNOTATED_TAG_SUFFIX, # noqa: F401
  92. LOCAL_BRANCH_PREFIX,
  93. LOCAL_TAG_PREFIX, # noqa: F401
  94. SYMREF, # noqa: F401
  95. DictRefsContainer,
  96. DiskRefsContainer,
  97. InfoRefsContainer, # noqa: F401
  98. Ref,
  99. RefsContainer,
  100. _set_default_branch,
  101. _set_head,
  102. _set_origin_head,
  103. check_ref_format, # noqa: F401
  104. read_packed_refs, # noqa: F401
  105. read_packed_refs_with_peeled, # noqa: F401
  106. serialize_refs,
  107. write_packed_refs, # noqa: F401
  108. )
# Well-known names of files and directories inside a git control directory.
CONTROLDIR = ".git"
OBJECTDIR = "objects"
REFSDIR = "refs"
REFSDIR_TAGS = "tags"
REFSDIR_HEADS = "heads"
INDEX_FILENAME = "index"
# Worktree support files: "commondir" points back at the main control dir,
# "gitdir" points from the main repo at the worktree's git dir.
COMMONDIR = "commondir"
GITDIR = "gitdir"
WORKTREES = "worktrees"

# Directory skeleton created inside a fresh control directory.
BASE_DIRECTORIES = [
    ["branches"],
    [REFSDIR],
    [REFSDIR, REFSDIR_TAGS],
    [REFSDIR, REFSDIR_HEADS],
    ["hooks"],
    ["info"],
]

# Branch name used for HEAD in newly-created repositories.
DEFAULT_BRANCH = b"master"
  127. class InvalidUserIdentity(Exception):
  128. """User identity is not of the format 'user <email>'."""
  129. def __init__(self, identity) -> None:
  130. self.identity = identity
# Raised when neither the environment nor the passwd database yields a
# usable username for building a default committer/author identity.
class DefaultIdentityNotFound(Exception):
    """Default identity could not be determined."""
  133. # TODO(jelmer): Cache?
  134. def _get_default_identity() -> tuple[str, str]:
  135. import socket
  136. for name in ("LOGNAME", "USER", "LNAME", "USERNAME"):
  137. username = os.environ.get(name)
  138. if username:
  139. break
  140. else:
  141. username = None
  142. try:
  143. import pwd
  144. except ImportError:
  145. fullname = None
  146. else:
  147. try:
  148. entry = pwd.getpwuid(os.getuid()) # type: ignore
  149. except KeyError:
  150. fullname = None
  151. else:
  152. if getattr(entry, "gecos", None):
  153. fullname = entry.pw_gecos.split(",")[0]
  154. else:
  155. fullname = None
  156. if username is None:
  157. username = entry.pw_name
  158. if not fullname:
  159. if username is None:
  160. raise DefaultIdentityNotFound("no username found")
  161. fullname = username
  162. email = os.environ.get("EMAIL")
  163. if email is None:
  164. if username is None:
  165. raise DefaultIdentityNotFound("no username found")
  166. email = f"{username}@{socket.gethostname()}"
  167. return (fullname, email)
  168. def get_user_identity(config: "StackedConfig", kind: Optional[str] = None) -> bytes:
  169. """Determine the identity to use for new commits.
  170. If kind is set, this first checks
  171. GIT_${KIND}_NAME and GIT_${KIND}_EMAIL.
  172. If those variables are not set, then it will fall back
  173. to reading the user.name and user.email settings from
  174. the specified configuration.
  175. If that also fails, then it will fall back to using
  176. the current users' identity as obtained from the host
  177. system (e.g. the gecos field, $EMAIL, $USER@$(hostname -f).
  178. Args:
  179. kind: Optional kind to return identity for,
  180. usually either "AUTHOR" or "COMMITTER".
  181. Returns:
  182. A user identity
  183. """
  184. user: Optional[bytes] = None
  185. email: Optional[bytes] = None
  186. if kind:
  187. user_uc = os.environ.get("GIT_" + kind + "_NAME")
  188. if user_uc is not None:
  189. user = user_uc.encode("utf-8")
  190. email_uc = os.environ.get("GIT_" + kind + "_EMAIL")
  191. if email_uc is not None:
  192. email = email_uc.encode("utf-8")
  193. if user is None:
  194. try:
  195. user = config.get(("user",), "name")
  196. except KeyError:
  197. user = None
  198. if email is None:
  199. try:
  200. email = config.get(("user",), "email")
  201. except KeyError:
  202. email = None
  203. default_user, default_email = _get_default_identity()
  204. if user is None:
  205. user = default_user.encode("utf-8")
  206. if email is None:
  207. email = default_email.encode("utf-8")
  208. if email.startswith(b"<") and email.endswith(b">"):
  209. email = email[1:-1]
  210. return user + b" <" + email + b">"
  211. def check_user_identity(identity) -> None:
  212. """Verify that a user identity is formatted correctly.
  213. Args:
  214. identity: User identity bytestring
  215. Raises:
  216. InvalidUserIdentity: Raised when identity is invalid
  217. """
  218. try:
  219. fst, snd = identity.split(b" <", 1)
  220. except ValueError as exc:
  221. raise InvalidUserIdentity(identity) from exc
  222. if b">" not in snd:
  223. raise InvalidUserIdentity(identity)
  224. if b"\0" in identity or b"\n" in identity:
  225. raise InvalidUserIdentity(identity)
  226. def parse_graftpoints(
  227. graftpoints: Iterable[bytes],
  228. ) -> dict[bytes, list[bytes]]:
  229. """Convert a list of graftpoints into a dict.
  230. Args:
  231. graftpoints: Iterator of graftpoint lines
  232. Each line is formatted as:
  233. <commit sha1> <parent sha1> [<parent sha1>]*
  234. Resulting dictionary is:
  235. <commit sha1>: [<parent sha1>*]
  236. https://git.wiki.kernel.org/index.php/GraftPoint
  237. """
  238. grafts = {}
  239. for line in graftpoints:
  240. raw_graft = line.split(None, 1)
  241. commit = raw_graft[0]
  242. if len(raw_graft) == 2:
  243. parents = raw_graft[1].split()
  244. else:
  245. parents = []
  246. for sha in [commit, *parents]:
  247. check_hexsha(sha, "Invalid graftpoint")
  248. grafts[commit] = parents
  249. return grafts
  250. def serialize_graftpoints(graftpoints: dict[bytes, list[bytes]]) -> bytes:
  251. """Convert a dictionary of grafts into string.
  252. The graft dictionary is:
  253. <commit sha1>: [<parent sha1>*]
  254. Each line is formatted as:
  255. <commit sha1> <parent sha1> [<parent sha1>]*
  256. https://git.wiki.kernel.org/index.php/GraftPoint
  257. """
  258. graft_lines = []
  259. for commit, parents in graftpoints.items():
  260. if parents:
  261. graft_lines.append(commit + b" " + b" ".join(parents))
  262. else:
  263. graft_lines.append(commit)
  264. return b"\n".join(graft_lines)
  265. def _set_filesystem_hidden(path) -> None:
  266. """Mark path as to be hidden if supported by platform and filesystem.
  267. On win32 uses SetFileAttributesW api:
  268. <https://docs.microsoft.com/windows/desktop/api/fileapi/nf-fileapi-setfileattributesw>
  269. """
  270. if sys.platform == "win32":
  271. import ctypes
  272. from ctypes.wintypes import BOOL, DWORD, LPCWSTR
  273. FILE_ATTRIBUTE_HIDDEN = 2
  274. SetFileAttributesW = ctypes.WINFUNCTYPE(BOOL, LPCWSTR, DWORD)(
  275. ("SetFileAttributesW", ctypes.windll.kernel32)
  276. )
  277. if isinstance(path, bytes):
  278. path = os.fsdecode(path)
  279. if not SetFileAttributesW(path, FILE_ATTRIBUTE_HIDDEN):
  280. pass # Could raise or log `ctypes.WinError()` here
  281. # Could implement other platform specific filesystem hiding here
  282. class ParentsProvider:
  283. def __init__(self, store, grafts={}, shallows=[]) -> None:
  284. self.store = store
  285. self.grafts = grafts
  286. self.shallows = set(shallows)
  287. # Get commit graph once at initialization for performance
  288. self.commit_graph = store.get_commit_graph()
  289. def get_parents(self, commit_id, commit=None):
  290. try:
  291. return self.grafts[commit_id]
  292. except KeyError:
  293. pass
  294. if commit_id in self.shallows:
  295. return []
  296. # Try to use commit graph for faster parent lookup
  297. if self.commit_graph:
  298. parents = self.commit_graph.get_parents(commit_id)
  299. if parents is not None:
  300. return parents
  301. # Fallback to reading the commit object
  302. if commit is None:
  303. commit = self.store[commit_id]
  304. return commit.parents
  305. class BaseRepo:
  306. """Base class for a git repository.
  307. This base class is meant to be used for Repository implementations that e.g.
  308. work on top of a different transport than a standard filesystem path.
  309. Attributes:
  310. object_store: Dictionary-like object for accessing
  311. the objects
  312. refs: Dictionary-like object with the refs in this
  313. repository
  314. """
  315. def __init__(self, object_store: PackBasedObjectStore, refs: RefsContainer) -> None:
  316. """Open a repository.
  317. This shouldn't be called directly, but rather through one of the
  318. base classes, such as MemoryRepo or Repo.
  319. Args:
  320. object_store: Object store to use
  321. refs: Refs container to use
  322. """
  323. self.object_store = object_store
  324. self.refs = refs
  325. self._graftpoints: dict[bytes, list[bytes]] = {}
  326. self.hooks: dict[str, Hook] = {}
    def _determine_file_mode(self) -> bool:
        """Probe the file-system to determine whether permissions can be trusted.

        Returns: True if permissions can be trusted, False otherwise.
        """
        # Storage-specific; concrete subclasses implement the actual probe.
        raise NotImplementedError(self._determine_file_mode)

    def _determine_symlinks(self) -> bool:
        """Probe the filesystem to determine whether symlinks can be created.

        Returns: True if symlinks can be created, False otherwise.
        """
        # For now, just mimic the old behaviour: assume symlinks work
        # everywhere except Windows, without probing the filesystem.
        return sys.platform != "win32"
  338. def _init_files(
  339. self, bare: bool, symlinks: Optional[bool] = None, format: Optional[int] = None
  340. ) -> None:
  341. """Initialize a default set of named files."""
  342. from .config import ConfigFile
  343. self._put_named_file("description", b"Unnamed repository")
  344. f = BytesIO()
  345. cf = ConfigFile()
  346. if format is None:
  347. format = 0
  348. if format not in (0, 1):
  349. raise ValueError(f"Unsupported repository format version: {format}")
  350. cf.set("core", "repositoryformatversion", str(format))
  351. if self._determine_file_mode():
  352. cf.set("core", "filemode", True)
  353. else:
  354. cf.set("core", "filemode", False)
  355. if symlinks is None and not bare:
  356. symlinks = self._determine_symlinks()
  357. if symlinks is False:
  358. cf.set("core", "symlinks", symlinks)
  359. cf.set("core", "bare", bare)
  360. cf.set("core", "logallrefupdates", True)
  361. cf.write_to_file(f)
  362. self._put_named_file("config", f.getvalue())
  363. self._put_named_file(os.path.join("info", "exclude"), b"")
    # The following four methods are the storage primitives for files in the
    # control directory; concrete subclasses (disk- or memory-backed) provide
    # the implementations.
    def get_named_file(self, path: str) -> Optional[BinaryIO]:
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-based Repo, the object returned need not be
        pointing to a file in that location.

        Args:
          path: The path to the file, relative to the control dir.
        Returns: An open file object, or None if the file does not exist.
        """
        raise NotImplementedError(self.get_named_file)

    def _put_named_file(self, path: str, contents: bytes) -> None:
        """Write a file to the control dir with the given name and contents.

        Args:
          path: The path to the file, relative to the control dir.
          contents: A string to write to the file.
        """
        raise NotImplementedError(self._put_named_file)

    def _del_named_file(self, path: str) -> None:
        """Delete a file in the control directory with the given name."""
        raise NotImplementedError(self._del_named_file)

    def open_index(self) -> "Index":
        """Open the index for this repository.

        Raises:
          NoIndexPresent: If no index is present
        Returns: The matching `Index`
        """
        raise NotImplementedError(self.open_index)
  391. def fetch(
  392. self, target, determine_wants=None, progress=None, depth: Optional[int] = None
  393. ):
  394. """Fetch objects into another repository.
  395. Args:
  396. target: The target repository
  397. determine_wants: Optional function to determine what refs to
  398. fetch.
  399. progress: Optional progress function
  400. depth: Optional shallow fetch depth
  401. Returns: The local refs
  402. """
  403. if determine_wants is None:
  404. determine_wants = target.object_store.determine_wants_all
  405. count, pack_data = self.fetch_pack_data(
  406. determine_wants,
  407. target.get_graph_walker(),
  408. progress=progress,
  409. depth=depth,
  410. )
  411. target.object_store.add_pack_data(count, pack_data, progress)
  412. return self.get_refs()
  413. def fetch_pack_data(
  414. self,
  415. determine_wants,
  416. graph_walker,
  417. progress,
  418. *,
  419. get_tagged=None,
  420. depth: Optional[int] = None,
  421. ):
  422. """Fetch the pack data required for a set of revisions.
  423. Args:
  424. determine_wants: Function that takes a dictionary with heads
  425. and returns the list of heads to fetch.
  426. graph_walker: Object that can iterate over the list of revisions
  427. to fetch and has an "ack" method that will be called to acknowledge
  428. that a revision is present.
  429. progress: Simple progress function that will be called with
  430. updated progress strings.
  431. get_tagged: Function that returns a dict of pointed-to sha ->
  432. tag sha for including tags.
  433. depth: Shallow fetch depth
  434. Returns: count and iterator over pack data
  435. """
  436. missing_objects = self.find_missing_objects(
  437. determine_wants, graph_walker, progress, get_tagged=get_tagged, depth=depth
  438. )
  439. if missing_objects is None:
  440. return 0, iter([])
  441. remote_has = missing_objects.get_remote_has()
  442. object_ids = list(missing_objects)
  443. return len(object_ids), generate_unpacked_objects(
  444. self.object_store, object_ids, progress=progress, other_haves=remote_has
  445. )
    def find_missing_objects(
        self,
        determine_wants,
        graph_walker,
        progress,
        *,
        get_tagged=None,
        depth: Optional[int] = None,
    ) -> Optional[MissingObjectFinder]:
        """Fetch the missing objects required for a set of revisions.

        Args:
          determine_wants: Function that takes a dictionary with heads
            and returns the list of heads to fetch.
          graph_walker: Object that can iterate over the list of revisions
            to fetch and has an "ack" method that will be called to acknowledge
            that a revision is present.
          progress: Simple progress function that will be called with
            updated progress strings.
          get_tagged: Function that returns a dict of pointed-to sha ->
            tag sha for including tags.
          depth: Shallow fetch depth
        Returns: iterator over objects, with __len__ implemented
        """
        # Peeled/serialized view of all refs is what determine_wants expects.
        refs = serialize_refs(self.object_store, self.get_refs())
        wants = determine_wants(refs)
        if not isinstance(wants, list):
            raise TypeError("determine_wants() did not return a list")
        # Snapshot the shallow set before we mutate it below, so we can
        # compute which commits became newly shallow/unshallow.
        current_shallow = set(getattr(graph_walker, "shallow", set()))
        if depth not in (None, 0):
            shallow, not_shallow = find_shallow(self.object_store, wants, depth)
            # Only update if graph_walker has shallow attribute
            if hasattr(graph_walker, "shallow"):
                graph_walker.shallow.update(shallow - not_shallow)
                new_shallow = graph_walker.shallow - current_shallow
                unshallow = graph_walker.unshallow = not_shallow & current_shallow
                if hasattr(graph_walker, "update_shallow"):
                    graph_walker.update_shallow(new_shallow, unshallow)
        else:
            # No depth requested: keep whatever unshallow state the walker
            # already carries (if any).
            unshallow = getattr(graph_walker, "unshallow", frozenset())
        if wants == []:
            # TODO(dborowitz): find a way to short-circuit that doesn't change
            # this interface.
            if getattr(graph_walker, "shallow", set()) or unshallow:
                # Do not send a pack in shallow short-circuit path
                return None

            # Empty stand-in so callers can still take len()/iterate.
            class DummyMissingObjectFinder:
                def get_remote_has(self) -> None:
                    return None

                def __len__(self) -> int:
                    return 0

                def __iter__(self):
                    yield from []

            return DummyMissingObjectFinder()  # type: ignore
        # If the graph walker is set up with an implementation that can
        # ACK/NAK to the wire, it will write data to the client through
        # this call as a side-effect.
        haves = self.object_store.find_common_revisions(graph_walker)
        # Deal with shallow requests separately because the haves do
        # not reflect what objects are missing
        if getattr(graph_walker, "shallow", set()) or unshallow:
            # TODO: filter the haves commits from iter_shas. the specific
            # commits aren't missing.
            haves = []
        parents_provider = ParentsProvider(self.object_store, shallows=current_shallow)

        def get_parents(commit):
            # Adapter: MissingObjectFinder passes commit objects, the
            # provider wants (id, commit).
            return parents_provider.get_parents(commit.id, commit)

        return MissingObjectFinder(
            self.object_store,
            haves=haves,
            wants=wants,
            shallow=getattr(graph_walker, "shallow", set()),
            progress=progress,
            get_tagged=get_tagged,
            get_parents=get_parents,
        )
  521. def generate_pack_data(
  522. self,
  523. have: list[ObjectID],
  524. want: list[ObjectID],
  525. progress: Optional[Callable[[str], None]] = None,
  526. ofs_delta: Optional[bool] = None,
  527. ):
  528. """Generate pack data objects for a set of wants/haves.
  529. Args:
  530. have: List of SHA1s of objects that should not be sent
  531. want: List of SHA1s of objects that should be sent
  532. ofs_delta: Whether OFS deltas can be included
  533. progress: Optional progress reporting method
  534. """
  535. return self.object_store.generate_pack_data(
  536. have,
  537. want,
  538. shallow=self.get_shallow(),
  539. progress=progress,
  540. ofs_delta=ofs_delta,
  541. )
  542. def get_graph_walker(
  543. self, heads: Optional[list[ObjectID]] = None
  544. ) -> ObjectStoreGraphWalker:
  545. """Retrieve a graph walker.
  546. A graph walker is used by a remote repository (or proxy)
  547. to find out which objects are present in this repository.
  548. Args:
  549. heads: Repository heads to use (optional)
  550. Returns: A graph walker object
  551. """
  552. if heads is None:
  553. heads = [
  554. sha
  555. for sha in self.refs.as_dict(b"refs/heads").values()
  556. if sha in self.object_store
  557. ]
  558. parents_provider = ParentsProvider(self.object_store)
  559. return ObjectStoreGraphWalker(
  560. heads,
  561. parents_provider.get_parents,
  562. shallow=self.get_shallow(),
  563. update_shallow=self.update_shallow,
  564. )
  565. def get_refs(self) -> dict[bytes, bytes]:
  566. """Get dictionary with all refs.
  567. Returns: A ``dict`` mapping ref names to SHA1s
  568. """
  569. return self.refs.as_dict()
  570. def head(self) -> bytes:
  571. """Return the SHA1 pointed at by HEAD."""
  572. return self.refs[b"HEAD"]
  573. def _get_object(self, sha, cls):
  574. assert len(sha) in (20, 40)
  575. ret = self.get_object(sha)
  576. if not isinstance(ret, cls):
  577. if cls is Commit:
  578. raise NotCommitError(ret)
  579. elif cls is Blob:
  580. raise NotBlobError(ret)
  581. elif cls is Tree:
  582. raise NotTreeError(ret)
  583. elif cls is Tag:
  584. raise NotTagError(ret)
  585. else:
  586. raise Exception(f"Type invalid: {ret.type_name!r} != {cls.type_name!r}")
  587. return ret
  588. def get_object(self, sha: bytes) -> ShaFile:
  589. """Retrieve the object with the specified SHA.
  590. Args:
  591. sha: SHA to retrieve
  592. Returns: A ShaFile object
  593. Raises:
  594. KeyError: when the object can not be found
  595. """
  596. return self.object_store[sha]
  597. def parents_provider(self) -> ParentsProvider:
  598. return ParentsProvider(
  599. self.object_store,
  600. grafts=self._graftpoints,
  601. shallows=self.get_shallow(),
  602. )
  603. def get_parents(self, sha: bytes, commit: Optional[Commit] = None) -> list[bytes]:
  604. """Retrieve the parents of a specific commit.
  605. If the specific commit is a graftpoint, the graft parents
  606. will be returned instead.
  607. Args:
  608. sha: SHA of the commit for which to retrieve the parents
  609. commit: Optional commit matching the sha
  610. Returns: List of parents
  611. """
  612. return self.parents_provider().get_parents(sha, commit)
    # Configuration and metadata accessors; storage-specific subclasses
    # provide the implementations.
    def get_config(self) -> "ConfigFile":
        """Retrieve the config object.

        Returns: `ConfigFile` object for the ``.git/config`` file.
        """
        raise NotImplementedError(self.get_config)

    def get_worktree_config(self) -> "ConfigFile":
        """Retrieve the worktree config object."""
        raise NotImplementedError(self.get_worktree_config)

    def get_description(self) -> Optional[str]:
        """Retrieve the description for this repository.

        Returns: String with the description of the repository
          as set by the user.
        """
        raise NotImplementedError(self.get_description)

    def set_description(self, description) -> None:
        """Set the description for this repository.

        Args:
          description: Text to set as description for this repository.
        """
        raise NotImplementedError(self.set_description)

    def get_rebase_state_manager(self):
        """Get the appropriate rebase state manager for this repository.

        Returns: RebaseStateManager instance
        """
        raise NotImplementedError(self.get_rebase_state_manager)
  638. def get_config_stack(self) -> "StackedConfig":
  639. """Return a config stack for this repository.
  640. This stack accesses the configuration for both this repository
  641. itself (.git/config) and the global configuration, which usually
  642. lives in ~/.gitconfig.
  643. Returns: `Config` instance for this repository
  644. """
  645. from .config import ConfigFile, StackedConfig
  646. local_config = self.get_config()
  647. backends: list[ConfigFile] = [local_config]
  648. if local_config.get_boolean((b"extensions",), b"worktreeconfig", False):
  649. backends.append(self.get_worktree_config())
  650. backends += StackedConfig.default_backends()
  651. return StackedConfig(backends, writable=local_config)
  652. def get_shallow(self) -> set[ObjectID]:
  653. """Get the set of shallow commits.
  654. Returns: Set of shallow commits.
  655. """
  656. f = self.get_named_file("shallow")
  657. if f is None:
  658. return set()
  659. with f:
  660. return {line.strip() for line in f}
  661. def update_shallow(self, new_shallow, new_unshallow) -> None:
  662. """Update the list of shallow objects.
  663. Args:
  664. new_shallow: Newly shallow objects
  665. new_unshallow: Newly no longer shallow objects
  666. """
  667. shallow = self.get_shallow()
  668. if new_shallow:
  669. shallow.update(new_shallow)
  670. if new_unshallow:
  671. shallow.difference_update(new_unshallow)
  672. if shallow:
  673. self._put_named_file("shallow", b"".join([sha + b"\n" for sha in shallow]))
  674. else:
  675. self._del_named_file("shallow")
  676. def get_peeled(self, ref: Ref) -> ObjectID:
  677. """Get the peeled value of a ref.
  678. Args:
  679. ref: The refname to peel.
  680. Returns: The fully-peeled SHA1 of a tag object, after peeling all
  681. intermediate tags; if the original ref does not point to a tag,
  682. this will equal the original SHA1.
  683. """
  684. cached = self.refs.get_peeled(ref)
  685. if cached is not None:
  686. return cached
  687. return peel_sha(self.object_store, self.refs[ref])[1].id
  688. @property
  689. def notes(self) -> "Notes":
  690. """Access notes functionality for this repository.
  691. Returns:
  692. Notes object for accessing notes
  693. """
  694. from .notes import Notes
  695. return Notes(self.object_store, self.refs)
  696. def get_walker(self, include: Optional[list[bytes]] = None, **kwargs):
  697. """Obtain a walker for this repository.
  698. Args:
  699. include: Iterable of SHAs of commits to include along with their
  700. ancestors. Defaults to [HEAD]
  701. Keyword Args:
  702. exclude: Iterable of SHAs of commits to exclude along with their
  703. ancestors, overriding includes.
  704. order: ORDER_* constant specifying the order of results.
  705. Anything other than ORDER_DATE may result in O(n) memory usage.
  706. reverse: If True, reverse the order of output, requiring O(n)
  707. memory.
  708. max_entries: The maximum number of entries to yield, or None for
  709. no limit.
  710. paths: Iterable of file or subtree paths to show entries for.
  711. rename_detector: diff.RenameDetector object for detecting
  712. renames.
  713. follow: If True, follow path across renames/copies. Forces a
  714. default rename_detector.
  715. since: Timestamp to list commits after.
  716. until: Timestamp to list commits before.
  717. queue_cls: A class to use for a queue of commits, supporting the
  718. iterator protocol. The constructor takes a single argument, the
  719. Walker.
  720. Returns: A `Walker` object
  721. """
  722. from .walk import Walker
  723. if include is None:
  724. include = [self.head()]
  725. kwargs["get_parents"] = lambda commit: self.get_parents(commit.id, commit)
  726. return Walker(self.object_store, include, **kwargs)
  727. def __getitem__(self, name: Union[ObjectID, Ref]):
  728. """Retrieve a Git object by SHA1 or ref.
  729. Args:
  730. name: A Git object SHA1 or a ref name
  731. Returns: A `ShaFile` object, such as a Commit or Blob
  732. Raises:
  733. KeyError: when the specified ref or object does not exist
  734. """
  735. if not isinstance(name, bytes):
  736. raise TypeError(f"'name' must be bytestring, not {type(name).__name__:.80}")
  737. if len(name) in (20, 40):
  738. try:
  739. return self.object_store[name]
  740. except (KeyError, ValueError):
  741. pass
  742. try:
  743. return self.object_store[self.refs[name]]
  744. except RefFormatError as exc:
  745. raise KeyError(name) from exc
  746. def __contains__(self, name: bytes) -> bool:
  747. """Check if a specific Git object or ref is present.
  748. Args:
  749. name: Git object SHA1 or ref name
  750. """
  751. if len(name) == 20 or (len(name) == 40 and valid_hexsha(name)):
  752. return name in self.object_store or name in self.refs
  753. else:
  754. return name in self.refs
  755. def __setitem__(self, name: bytes, value: Union[ShaFile, bytes]) -> None:
  756. """Set a ref.
  757. Args:
  758. name: ref name
  759. value: Ref value - either a ShaFile object, or a hex sha
  760. """
  761. if name.startswith(b"refs/") or name == b"HEAD":
  762. if isinstance(value, ShaFile):
  763. self.refs[name] = value.id
  764. elif isinstance(value, bytes):
  765. self.refs[name] = value
  766. else:
  767. raise TypeError(value)
  768. else:
  769. raise ValueError(name)
  770. def __delitem__(self, name: bytes) -> None:
  771. """Remove a ref.
  772. Args:
  773. name: Name of the ref to remove
  774. """
  775. if name.startswith(b"refs/") or name == b"HEAD":
  776. del self.refs[name]
  777. else:
  778. raise ValueError(name)
  779. def _get_user_identity(
  780. self, config: "StackedConfig", kind: Optional[str] = None
  781. ) -> bytes:
  782. """Determine the identity to use for new commits."""
  783. warnings.warn(
  784. "use get_user_identity() rather than Repo._get_user_identity",
  785. DeprecationWarning,
  786. )
  787. return get_user_identity(config)
  788. def _add_graftpoints(self, updated_graftpoints: dict[bytes, list[bytes]]) -> None:
  789. """Add or modify graftpoints.
  790. Args:
  791. updated_graftpoints: Dict of commit shas to list of parent shas
  792. """
  793. # Simple validation
  794. for commit, parents in updated_graftpoints.items():
  795. for sha in [commit, *parents]:
  796. check_hexsha(sha, "Invalid graftpoint")
  797. self._graftpoints.update(updated_graftpoints)
  798. def _remove_graftpoints(self, to_remove: list[bytes] = []) -> None:
  799. """Remove graftpoints.
  800. Args:
  801. to_remove: List of commit shas
  802. """
  803. for sha in to_remove:
  804. del self._graftpoints[sha]
  805. def _read_heads(self, name):
  806. f = self.get_named_file(name)
  807. if f is None:
  808. return []
  809. with f:
  810. return [line.strip() for line in f.readlines() if line.strip()]
def do_commit(
    self,
    message: Optional[bytes] = None,
    committer: Optional[bytes] = None,
    author: Optional[bytes] = None,
    commit_timestamp=None,
    commit_timezone=None,
    author_timestamp=None,
    author_timezone=None,
    tree: Optional[ObjectID] = None,
    encoding: Optional[bytes] = None,
    ref: Optional[Ref] = b"HEAD",
    merge_heads: Optional[list[ObjectID]] = None,
    no_verify: bool = False,
    sign: bool = False,
):
    """Create a new commit.

    If not specified, committer and author default to
    get_user_identity(..., 'COMMITTER')
    and get_user_identity(..., 'AUTHOR') respectively.

    Args:
      message: Commit message
      committer: Committer fullname
      author: Author fullname
      commit_timestamp: Commit timestamp (defaults to now)
      commit_timezone: Commit timestamp timezone (defaults to GMT)
      author_timestamp: Author timestamp (defaults to commit
        timestamp)
      author_timezone: Author timestamp timezone
        (defaults to commit timestamp timezone)
      tree: SHA1 of the tree root to use (if not specified the
        current index will be committed).
      encoding: Encoding
      ref: Optional ref to commit to (defaults to current branch).
        If None, creates a dangling commit without updating any ref.
      merge_heads: Merge heads (defaults to .git/MERGE_HEAD)
      no_verify: Skip pre-commit and commit-msg hooks
      sign: GPG Sign the commit (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)

    Returns:
      New commit SHA1

    Raises:
      CommitError: when a hook fails or the ref moved under us
      ValueError: when no message is given or tree is malformed
    """
    # Run the pre-commit hook first; a missing hook is fine, a failing
    # hook aborts the commit.
    try:
        if not no_verify:
            self.hooks["pre-commit"].execute()
    except HookError as exc:
        raise CommitError(exc) from exc
    except KeyError:  # no hook defined, silent fallthrough
        pass
    c = Commit()
    if tree is None:
        # Commit whatever is currently staged in the index.
        index = self.open_index()
        c.tree = index.commit(self.object_store)
    else:
        if len(tree) != 40:
            raise ValueError("tree must be a 40-byte hex sha string")
        c.tree = tree
    config = self.get_config_stack()
    if merge_heads is None:
        # An in-progress merge leaves its extra parents in MERGE_HEAD.
        merge_heads = self._read_heads("MERGE_HEAD")
    if committer is None:
        committer = get_user_identity(config, kind="COMMITTER")
    check_user_identity(committer)
    c.committer = committer
    if commit_timestamp is None:
        # FIXME: Support GIT_COMMITTER_DATE environment variable
        commit_timestamp = time.time()
    c.commit_time = int(commit_timestamp)
    if commit_timezone is None:
        # FIXME: Use current user timezone rather than UTC
        commit_timezone = 0
    c.commit_timezone = commit_timezone
    if author is None:
        author = get_user_identity(config, kind="AUTHOR")
    c.author = author
    check_user_identity(author)
    if author_timestamp is None:
        # FIXME: Support GIT_AUTHOR_DATE environment variable
        author_timestamp = commit_timestamp
    c.author_time = int(author_timestamp)
    if author_timezone is None:
        author_timezone = commit_timezone
    c.author_timezone = author_timezone
    if encoding is None:
        try:
            encoding = config.get(("i18n",), "commitEncoding")
        except KeyError:
            pass  # No dice
    if encoding is not None:
        c.encoding = encoding
    if message is None:
        # FIXME: Try to read commit message from .git/MERGE_MSG
        raise ValueError("No commit message specified")

    # The commit-msg hook may rewrite the message; None means "keep it".
    try:
        if no_verify:
            c.message = message
        else:
            c.message = self.hooks["commit-msg"].execute(message)
            if c.message is None:
                c.message = message
    except HookError as exc:
        raise CommitError(exc) from exc
    except KeyError:  # no hook defined, message not modified
        c.message = message

    # Check if we should sign the commit
    should_sign = sign
    # NOTE(review): with the declared default ``sign: bool = False`` this
    # branch is unreachable; it only fires if a caller explicitly passes
    # sign=None. Confirm whether the default was meant to be None.
    if sign is None:
        # Check commit.gpgSign configuration when sign is not explicitly set
        config = self.get_config_stack()
        try:
            should_sign = config.get_boolean((b"commit",), b"gpgSign")
        except KeyError:
            should_sign = False  # Default to not signing if no config
    keyid = sign if isinstance(sign, str) else None

    if ref is None:
        # Create a dangling commit
        c.parents = merge_heads
        if should_sign:
            c.sign(keyid)
        self.object_store.add_object(c)
    else:
        try:
            old_head = self.refs[ref]
            c.parents = [old_head, *merge_heads]
            if should_sign:
                c.sign(keyid)
            self.object_store.add_object(c)
            # Atomic compare-and-swap: only move the ref if nobody else
            # moved it since we read old_head.
            ok = self.refs.set_if_equals(
                ref,
                old_head,
                c.id,
                message=b"commit: " + message,
                committer=committer,
                timestamp=commit_timestamp,
                timezone=commit_timezone,
            )
        except KeyError:
            # Ref does not exist yet (e.g. first commit on a branch).
            c.parents = merge_heads
            if should_sign:
                c.sign(keyid)
            self.object_store.add_object(c)
            ok = self.refs.add_if_new(
                ref,
                c.id,
                message=b"commit: " + message,
                committer=committer,
                timestamp=commit_timestamp,
                timezone=commit_timezone,
            )
        if not ok:
            # Fail if the atomic compare-and-swap failed, leaving the
            # commit and all its objects as garbage.
            raise CommitError(f"{ref!r} changed during commit")

    self._del_named_file("MERGE_HEAD")

    try:
        self.hooks["post-commit"].execute()
    except HookError as e:  # silent failure
        warnings.warn(f"post-commit hook failed: {e}", UserWarning)
    except KeyError:  # no hook defined, silent fallthrough
        pass

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    maybe_auto_gc(self)

    return c.id
  976. def read_gitfile(f):
  977. """Read a ``.git`` file.
  978. The first line of the file should start with "gitdir: "
  979. Args:
  980. f: File-like object to read from
  981. Returns: A path
  982. """
  983. cs = f.read()
  984. if not cs.startswith("gitdir: "):
  985. raise ValueError("Expected file to start with 'gitdir: '")
  986. return cs[len("gitdir: ") :].rstrip("\n")
  987. class UnsupportedVersion(Exception):
  988. """Unsupported repository version."""
  989. def __init__(self, version) -> None:
  990. self.version = version
  991. class UnsupportedExtension(Exception):
  992. """Unsupported repository extension."""
  993. def __init__(self, extension) -> None:
  994. self.extension = extension
  995. class Repo(BaseRepo):
  996. """A git repository backed by local disk.
  997. To open an existing repository, call the constructor with
  998. the path of the repository.
  999. To create a new repository, use the Repo.init class method.
  1000. Note that a repository object may hold on to resources such
  1001. as file handles for performance reasons; call .close() to free
  1002. up those resources.
  1003. Attributes:
  1004. path: Path to the working copy (if it exists) or repository control
  1005. directory (if the repository is bare)
  1006. bare: Whether this is a bare repository
  1007. """
  1008. path: str
  1009. bare: bool
def __init__(
    self,
    root: Union[str, bytes, os.PathLike],
    object_store: Optional[PackBasedObjectStore] = None,
    bare: Optional[bool] = None,
) -> None:
    """Open a repository on disk.

    Args:
      root: Path to the repository's root.
      object_store: ObjectStore to use; if omitted, we use the
        repository's default object store
      bare: True if this is a bare repository; autodetected when None.

    Raises:
      NotGitRepository: when root does not look like a repository
      UnsupportedVersion: for repositoryformatversion other than 0 or 1
      UnsupportedExtension: for unknown [extensions] entries
    """
    root = os.fspath(root)
    if isinstance(root, bytes):
        root = os.fsdecode(root)
    hidden_path = os.path.join(root, CONTROLDIR)
    if bare is None:
        # Autodetect: a .git file/dir means non-bare; objects/ and refs/
        # directly under root means bare.
        if os.path.isfile(hidden_path) or os.path.isdir(
            os.path.join(hidden_path, OBJECTDIR)
        ):
            bare = False
        elif os.path.isdir(os.path.join(root, OBJECTDIR)) and os.path.isdir(
            os.path.join(root, REFSDIR)
        ):
            bare = True
        else:
            raise NotGitRepository(
                "No git repository was found at {path}".format(**dict(path=root))
            )

    self.bare = bare
    if bare is False:
        if os.path.isfile(hidden_path):
            # .git is a file (submodule / linked worktree): it points at
            # the real control directory.
            with open(hidden_path) as f:
                path = read_gitfile(f)
            self._controldir = os.path.join(root, path)
        else:
            self._controldir = hidden_path
    else:
        self._controldir = root
    commondir = self.get_named_file(COMMONDIR)
    if commondir is not None:
        with commondir:
            self._commondir = os.path.join(
                self.controldir(),
                os.fsdecode(commondir.read().rstrip(b"\r\n")),
            )
    else:
        self._commondir = self._controldir
    self.path = root

    # Initialize refs early so they're available for config condition matchers
    self.refs = DiskRefsContainer(
        self.commondir(), self._controldir, logger=self._write_reflog
    )

    config = self.get_config()

    try:
        repository_format_version = config.get("core", "repositoryformatversion")
        format_version = (
            0
            if repository_format_version is None
            else int(repository_format_version)
        )
    except KeyError:
        format_version = 0

    if format_version not in (0, 1):
        raise UnsupportedVersion(format_version)

    # Track extensions we encounter
    has_reftable_extension = False
    for extension, value in config.items((b"extensions",)):
        if extension.lower() == b"refstorage":
            if value == b"reftable":
                has_reftable_extension = True
            else:
                raise UnsupportedExtension(f"refStorage = {value.decode()}")
        elif extension.lower() not in (b"worktreeconfig",):
            raise UnsupportedExtension(extension)

    if object_store is None:
        object_store = DiskObjectStore.from_config(
            os.path.join(self.commondir(), OBJECTDIR), config
        )

    # Use reftable if extension is configured
    if has_reftable_extension:
        from .reftable import ReftableRefsContainer

        self.refs = ReftableRefsContainer(self.commondir())
    BaseRepo.__init__(self, object_store, self.refs)

    self._graftpoints = {}
    graft_file = self.get_named_file(
        os.path.join("info", "grafts"), basedir=self.commondir()
    )
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    # The shallow file is parsed with the same helper: shallow boundaries
    # behave like grafts with no parents.
    graft_file = self.get_named_file("shallow", basedir=self.commondir())
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))

    self.hooks["pre-commit"] = PreCommitShellHook(self.path, self.controldir())
    self.hooks["commit-msg"] = CommitMsgShellHook(self.controldir())
    self.hooks["post-commit"] = PostCommitShellHook(self.controldir())
    self.hooks["post-receive"] = PostReceiveShellHook(self.controldir())
  1110. def _write_reflog(
  1111. self, ref, old_sha, new_sha, committer, timestamp, timezone, message
  1112. ) -> None:
  1113. from .reflog import format_reflog_line
  1114. path = os.path.join(self.controldir(), "logs", os.fsdecode(ref))
  1115. try:
  1116. os.makedirs(os.path.dirname(path))
  1117. except FileExistsError:
  1118. pass
  1119. if committer is None:
  1120. config = self.get_config_stack()
  1121. committer = get_user_identity(config)
  1122. check_user_identity(committer)
  1123. if timestamp is None:
  1124. timestamp = int(time.time())
  1125. if timezone is None:
  1126. timezone = 0 # FIXME
  1127. with open(path, "ab") as f:
  1128. f.write(
  1129. format_reflog_line(
  1130. old_sha, new_sha, committer, timestamp, timezone, message
  1131. )
  1132. + b"\n"
  1133. )
  1134. @classmethod
  1135. def discover(cls, start="."):
  1136. """Iterate parent directories to discover a repository.
  1137. Return a Repo object for the first parent directory that looks like a
  1138. Git repository.
  1139. Args:
  1140. start: The directory to start discovery from (defaults to '.')
  1141. """
  1142. remaining = True
  1143. path = os.path.abspath(start)
  1144. while remaining:
  1145. try:
  1146. return cls(path)
  1147. except NotGitRepository:
  1148. path, remaining = os.path.split(path)
  1149. raise NotGitRepository(
  1150. "No git repository was found at {path}".format(**dict(path=start))
  1151. )
  1152. def controldir(self):
  1153. """Return the path of the control directory."""
  1154. return self._controldir
  1155. def commondir(self):
  1156. """Return the path of the common directory.
  1157. For a main working tree, it is identical to controldir().
  1158. For a linked working tree, it is the control directory of the
  1159. main working tree.
  1160. """
  1161. return self._commondir
  1162. def _determine_file_mode(self):
  1163. """Probe the file-system to determine whether permissions can be trusted.
  1164. Returns: True if permissions can be trusted, False otherwise.
  1165. """
  1166. fname = os.path.join(self.path, ".probe-permissions")
  1167. with open(fname, "w") as f:
  1168. f.write("")
  1169. st1 = os.lstat(fname)
  1170. try:
  1171. os.chmod(fname, st1.st_mode ^ stat.S_IXUSR)
  1172. except PermissionError:
  1173. return False
  1174. st2 = os.lstat(fname)
  1175. os.unlink(fname)
  1176. mode_differs = st1.st_mode != st2.st_mode
  1177. st2_has_exec = (st2.st_mode & stat.S_IXUSR) != 0
  1178. return mode_differs and st2_has_exec
  1179. def _determine_symlinks(self):
  1180. """Probe the filesystem to determine whether symlinks can be created.
  1181. Returns: True if symlinks can be created, False otherwise.
  1182. """
  1183. # TODO(jelmer): Actually probe disk / look at filesystem
  1184. return sys.platform != "win32"
  1185. def _put_named_file(self, path, contents) -> None:
  1186. """Write a file to the control dir with the given name and contents.
  1187. Args:
  1188. path: The path to the file, relative to the control dir.
  1189. contents: A string to write to the file.
  1190. """
  1191. path = path.lstrip(os.path.sep)
  1192. with GitFile(os.path.join(self.controldir(), path), "wb") as f:
  1193. f.write(contents)
  1194. def _del_named_file(self, path) -> None:
  1195. try:
  1196. os.unlink(os.path.join(self.controldir(), path))
  1197. except FileNotFoundError:
  1198. return
  1199. def get_named_file(self, path, basedir=None):
  1200. """Get a file from the control dir with a specific name.
  1201. Although the filename should be interpreted as a filename relative to
  1202. the control dir in a disk-based Repo, the object returned need not be
  1203. pointing to a file in that location.
  1204. Args:
  1205. path: The path to the file, relative to the control dir.
  1206. basedir: Optional argument that specifies an alternative to the
  1207. control dir.
  1208. Returns: An open file object, or None if the file does not exist.
  1209. """
  1210. # TODO(dborowitz): sanitize filenames, since this is used directly by
  1211. # the dumb web serving code.
  1212. if basedir is None:
  1213. basedir = self.controldir()
  1214. path = path.lstrip(os.path.sep)
  1215. try:
  1216. return open(os.path.join(basedir, path), "rb")
  1217. except FileNotFoundError:
  1218. return None
  1219. def index_path(self):
  1220. """Return path to the index file."""
  1221. return os.path.join(self.controldir(), INDEX_FILENAME)
  1222. def open_index(self) -> "Index":
  1223. """Open the index for this repository.
  1224. Raises:
  1225. NoIndexPresent: If no index is present
  1226. Returns: The matching `Index`
  1227. """
  1228. from .index import Index
  1229. if not self.has_index():
  1230. raise NoIndexPresent
  1231. # Check for manyFiles feature configuration
  1232. config = self.get_config_stack()
  1233. many_files = config.get_boolean(b"feature", b"manyFiles", False)
  1234. skip_hash = False
  1235. index_version = None
  1236. if many_files:
  1237. # When feature.manyFiles is enabled, set index.version=4 and index.skipHash=true
  1238. try:
  1239. index_version_str = config.get(b"index", b"version")
  1240. index_version = int(index_version_str)
  1241. except KeyError:
  1242. index_version = 4 # Default to version 4 for manyFiles
  1243. skip_hash = config.get_boolean(b"index", b"skipHash", True)
  1244. else:
  1245. # Check for explicit index settings
  1246. try:
  1247. index_version_str = config.get(b"index", b"version")
  1248. index_version = int(index_version_str)
  1249. except KeyError:
  1250. index_version = None
  1251. skip_hash = config.get_boolean(b"index", b"skipHash", False)
  1252. return Index(self.index_path(), skip_hash=skip_hash, version=index_version)
  1253. def has_index(self) -> bool:
  1254. """Check if an index is present."""
  1255. # Bare repos must never have index files; non-bare repos may have a
  1256. # missing index file, which is treated as empty.
  1257. return not self.bare
def stage(
    self,
    fs_paths: Union[
        str, bytes, os.PathLike, Iterable[Union[str, bytes, os.PathLike]]
    ],
) -> None:
    """Stage a set of paths.

    Args:
      fs_paths: List of paths, relative to the repository path

    Raises:
      ValueError: if a path is absolute
    """
    root_path_bytes = os.fsencode(self.path)

    # Accept a single path or an iterable of paths.
    if isinstance(fs_paths, (str, bytes, os.PathLike)):
        fs_paths = [fs_paths]
    fs_paths = list(fs_paths)

    from .index import (
        _fs_to_tree_path,
        blob_from_path_and_stat,
        index_entry_from_directory,
        index_entry_from_stat,
    )

    index = self.open_index()
    blob_normalizer = self.get_blob_normalizer()
    for fs_path in fs_paths:
        if not isinstance(fs_path, bytes):
            fs_path = os.fsencode(fs_path)
        if os.path.isabs(fs_path):
            raise ValueError(
                f"path {fs_path!r} should be relative to "
                "repository root, not absolute"
            )
        tree_path = _fs_to_tree_path(fs_path)
        full_path = os.path.join(root_path_bytes, fs_path)
        try:
            st = os.lstat(full_path)
        except OSError:
            # File no longer exists
            try:
                del index[tree_path]
            except KeyError:
                pass  # already removed
        else:
            if stat.S_ISDIR(st.st_mode):
                # Directories only stage if the helper returns an entry
                # (e.g. a submodule); otherwise drop any stale entry.
                entry = index_entry_from_directory(st, full_path)
                if entry:
                    index[tree_path] = entry
                else:
                    try:
                        del index[tree_path]
                    except KeyError:
                        pass
            elif not stat.S_ISREG(st.st_mode) and not stat.S_ISLNK(st.st_mode):
                # Sockets, FIFOs etc. cannot be tracked: remove them.
                try:
                    del index[tree_path]
                except KeyError:
                    pass
            else:
                # Regular file or symlink: normalize, store the blob and
                # record an index entry pointing at it.
                blob = blob_from_path_and_stat(full_path, st)
                blob = blob_normalizer.checkin_normalize(blob, fs_path)
                self.object_store.add_object(blob)
                index[tree_path] = index_entry_from_stat(st, blob.id)
    index.write()
def unstage(self, fs_paths: list[str]) -> None:
    """Unstage specific files in the index.

    Args:
      fs_paths: a list of files to unstage,
        relative to the repository path.

    Raises:
      KeyError: when a path is neither in HEAD nor in the index
    """
    from .index import IndexEntry, _fs_to_tree_path

    index = self.open_index()
    try:
        tree_id = self[b"HEAD"].tree
    except KeyError:
        # no head mean no commit in the repo: unstaging just removes the
        # entries from the index.
        for fs_path in fs_paths:
            tree_path = _fs_to_tree_path(fs_path)
            del index[tree_path]
        index.write()
        return

    for fs_path in fs_paths:
        tree_path = _fs_to_tree_path(fs_path)
        try:
            tree = self.object_store[tree_id]
            assert isinstance(tree, Tree)
            tree_entry = tree.lookup_path(self.object_store.__getitem__, tree_path)
        except KeyError:
            # if tree_entry didn't exist, this file was being added, so
            # remove index entry
            try:
                del index[tree_path]
                continue
            except KeyError as exc:
                raise KeyError(f"file '{tree_path.decode()}' not in index") from exc

        st = None
        try:
            st = os.lstat(os.path.join(self.path, fs_path))
        except FileNotFoundError:
            pass

        # Rebuild the index entry from the HEAD tree; filesystem stat
        # fields are filled in when the file still exists on disk.
        index_entry = IndexEntry(
            ctime=(self[b"HEAD"].commit_time, 0),
            mtime=(self[b"HEAD"].commit_time, 0),
            dev=st.st_dev if st else 0,
            ino=st.st_ino if st else 0,
            mode=tree_entry[0],
            uid=st.st_uid if st else 0,
            gid=st.st_gid if st else 0,
            size=len(self[tree_entry[1]].data),
            sha=tree_entry[1],
            flags=0,
            extended_flags=0,
        )

        index[tree_path] = index_entry
    index.write()
def clone(
    self,
    target_path,
    *,
    mkdir=True,
    bare=False,
    origin=b"origin",
    checkout=None,
    branch=None,
    progress=None,
    depth: Optional[int] = None,
    symlinks=None,
) -> "Repo":
    """Clone this repository.

    Args:
      target_path: Target path
      mkdir: Create the target directory
      bare: Whether to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      origin: Base name for refs in target repository
        cloned from this repository
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of this repository's HEAD.
      progress: Optional progress function
      depth: Depth at which to fetch
      symlinks: Symlinks setting (default to autodetect)
    Returns: Created repository as `Repo`

    Raises:
      ValueError: when both checkout and bare are requested
    """
    # NOTE(review): ``progress`` is accepted but not used in this body —
    # confirm whether it should be forwarded to fetch().
    encoded_path = os.fsencode(self.path)

    if mkdir:
        os.mkdir(target_path)

    try:
        if not bare:
            target = Repo.init(target_path, symlinks=symlinks)
            if checkout is None:
                checkout = True
        else:
            if checkout:
                raise ValueError("checkout and bare are incompatible")
            target = Repo.init_bare(target_path)

        try:
            # Point the clone's origin remote back at this repository.
            target_config = target.get_config()
            target_config.set((b"remote", origin), b"url", encoded_path)
            target_config.set(
                (b"remote", origin),
                b"fetch",
                b"+refs/heads/*:refs/remotes/" + origin + b"/*",
            )
            target_config.write_to_path()

            ref_message = b"clone: from " + encoded_path
            self.fetch(target, depth=depth)
            # Mirror our branches (under refs/remotes/<origin>) and tags.
            target.refs.import_refs(
                b"refs/remotes/" + origin,
                self.refs.as_dict(b"refs/heads"),
                message=ref_message,
            )
            target.refs.import_refs(
                b"refs/tags", self.refs.as_dict(b"refs/tags"), message=ref_message
            )

            head_chain, origin_sha = self.refs.follow(b"HEAD")
            origin_head = head_chain[-1] if head_chain else None
            if origin_sha and not origin_head:
                # set detached HEAD
                target.refs[b"HEAD"] = origin_sha
            else:
                _set_origin_head(target.refs, origin, origin_head)
            head_ref = _set_default_branch(
                target.refs, origin, origin_head, branch, ref_message
            )

            # Update target head
            if head_ref:
                head = _set_head(target.refs, head_ref, ref_message)
            else:
                head = None
            if checkout and head is not None:
                target.reset_index()
        except BaseException:
            # Close the half-initialized target before re-raising.
            target.close()
            raise
    except BaseException:
        # Undo directory creation if we made it.
        if mkdir:
            import shutil

            shutil.rmtree(target_path)
        raise
    return target
  1455. def reset_index(self, tree: Optional[bytes] = None):
  1456. """Reset the index back to a specific tree.
  1457. Args:
  1458. tree: Tree SHA to reset to, None for current HEAD tree.
  1459. """
  1460. from .index import (
  1461. build_index_from_tree,
  1462. symlink,
  1463. validate_path_element_default,
  1464. validate_path_element_ntfs,
  1465. )
  1466. if tree is None:
  1467. head = self[b"HEAD"]
  1468. if isinstance(head, Tag):
  1469. _cls, obj = head.object
  1470. head = self.get_object(obj)
  1471. tree = head.tree
  1472. config = self.get_config()
  1473. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
  1474. if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
  1475. validate_path_element = validate_path_element_ntfs
  1476. else:
  1477. validate_path_element = validate_path_element_default
  1478. if config.get_boolean(b"core", b"symlinks", True):
  1479. symlink_fn = symlink
  1480. else:
  1481. def symlink_fn(source, target) -> None: # type: ignore
  1482. with open(
  1483. target, "w" + ("b" if isinstance(source, bytes) else "")
  1484. ) as f:
  1485. f.write(source)
  1486. blob_normalizer = self.get_blob_normalizer()
  1487. return build_index_from_tree(
  1488. self.path,
  1489. self.index_path(),
  1490. self.object_store,
  1491. tree,
  1492. honor_filemode=honor_filemode,
  1493. validate_path_element=validate_path_element,
  1494. symlink_fn=symlink_fn,
  1495. blob_normalizer=blob_normalizer,
  1496. )
def _get_config_condition_matchers(self) -> dict[str, "ConditionMatcher"]:
    """Get condition matchers for includeIf conditions.

    Returns a dict of condition prefix (``"gitdir:"``, ``"gitdir/i:"``,
    ``"onbranch:"``) to matcher function.  The matchers close over this
    repository so they can consult its control dir and current HEAD.
    """
    from pathlib import Path

    from .config import ConditionMatcher, match_glob_pattern

    # gitdir matcher: compares a (normalized) glob pattern against this
    # repository's resolved control directory path.
    def match_gitdir(pattern: str, case_sensitive: bool = True) -> bool:
        # Handle relative patterns (starting with ./)
        if pattern.startswith("./"):
            # Can't handle relative patterns without config directory context
            return False

        # Normalize repository path; bail out if it cannot be resolved.
        try:
            repo_path = str(Path(self._controldir).resolve())
        except (OSError, ValueError):
            return False

        # Expand ~ in pattern and normalize
        pattern = os.path.expanduser(pattern)

        # Normalize pattern following Git's rules: forward slashes only,
        # and bare patterns are anchored anywhere via a "**/" prefix.
        pattern = pattern.replace("\\", "/")
        if not pattern.startswith(("~/", "./", "/", "**")):
            # Check for Windows absolute path (e.g. "C:...") — left as-is.
            if len(pattern) >= 2 and pattern[1] == ":":
                pass
            else:
                pattern = "**/" + pattern
        # A trailing slash means "this directory and everything below it".
        if pattern.endswith("/"):
            pattern = pattern + "**"

        # Use the existing _match_gitdir_pattern function
        from .config import _match_gitdir_pattern

        pattern_bytes = pattern.encode("utf-8", errors="replace")
        repo_path_bytes = repo_path.encode("utf-8", errors="replace")

        return _match_gitdir_pattern(
            repo_path_bytes, pattern_bytes, ignorecase=not case_sensitive
        )

    # onbranch matcher: matches the glob against the checked-out branch name.
    def match_onbranch(pattern: str) -> bool:
        try:
            # Get the current branch using refs
            ref_chain, _ = self.refs.follow(b"HEAD")
            head_ref = ref_chain[-1]  # Get the final resolved ref
        except KeyError:
            # No HEAD (e.g. empty repo): no branch to match.
            pass
        else:
            if head_ref and head_ref.startswith(b"refs/heads/"):
                # Extract branch name from ref (strip "refs/heads/")
                branch = head_ref[11:].decode("utf-8", errors="replace")
                return match_glob_pattern(branch, pattern)
        return False

    matchers: dict[str, ConditionMatcher] = {
        "onbranch:": match_onbranch,
        "gitdir:": lambda pattern: match_gitdir(pattern, True),
        "gitdir/i:": lambda pattern: match_gitdir(pattern, False),
    }

    return matchers
  1553. def get_worktree_config(self) -> "ConfigFile":
  1554. from .config import ConfigFile
  1555. path = os.path.join(self.commondir(), "config.worktree")
  1556. try:
  1557. # Pass condition matchers for includeIf evaluation
  1558. condition_matchers = self._get_config_condition_matchers()
  1559. return ConfigFile.from_path(path, condition_matchers=condition_matchers)
  1560. except FileNotFoundError:
  1561. cf = ConfigFile()
  1562. cf.path = path
  1563. return cf
  1564. def get_config(self) -> "ConfigFile":
  1565. """Retrieve the config object.
  1566. Returns: `ConfigFile` object for the ``.git/config`` file.
  1567. """
  1568. from .config import ConfigFile
  1569. path = os.path.join(self._commondir, "config")
  1570. try:
  1571. # Pass condition matchers for includeIf evaluation
  1572. condition_matchers = self._get_config_condition_matchers()
  1573. return ConfigFile.from_path(path, condition_matchers=condition_matchers)
  1574. except FileNotFoundError:
  1575. ret = ConfigFile()
  1576. ret.path = path
  1577. return ret
  1578. def get_rebase_state_manager(self):
  1579. """Get the appropriate rebase state manager for this repository.
  1580. Returns: DiskRebaseStateManager instance
  1581. """
  1582. import os
  1583. from .rebase import DiskRebaseStateManager
  1584. path = os.path.join(self.controldir(), "rebase-merge")
  1585. return DiskRebaseStateManager(path)
  1586. def get_description(self):
  1587. """Retrieve the description of this repository.
  1588. Returns: A string describing the repository or None.
  1589. """
  1590. path = os.path.join(self._controldir, "description")
  1591. try:
  1592. with GitFile(path, "rb") as f:
  1593. return f.read()
  1594. except FileNotFoundError:
  1595. return None
  1596. def __repr__(self) -> str:
  1597. return f"<Repo at {self.path!r}>"
def set_description(self, description) -> None:
    """Set the description for this repository.

    Args:
      description: Text to set as description for this repository.
        Written verbatim to the ``description`` file in the control dir.
    """
    self._put_named_file("description", description)
  1604. @classmethod
  1605. def _init_maybe_bare(
  1606. cls,
  1607. path: Union[str, bytes, os.PathLike],
  1608. controldir: Union[str, bytes, os.PathLike],
  1609. bare,
  1610. object_store=None,
  1611. config=None,
  1612. default_branch=None,
  1613. symlinks: Optional[bool] = None,
  1614. format: Optional[int] = None,
  1615. ):
  1616. path = os.fspath(path)
  1617. if isinstance(path, bytes):
  1618. path = os.fsdecode(path)
  1619. controldir = os.fspath(controldir)
  1620. if isinstance(controldir, bytes):
  1621. controldir = os.fsdecode(controldir)
  1622. for d in BASE_DIRECTORIES:
  1623. os.mkdir(os.path.join(controldir, *d))
  1624. if object_store is None:
  1625. object_store = DiskObjectStore.init(os.path.join(controldir, OBJECTDIR))
  1626. ret = cls(path, bare=bare, object_store=object_store)
  1627. if default_branch is None:
  1628. if config is None:
  1629. from .config import StackedConfig
  1630. config = StackedConfig.default()
  1631. try:
  1632. default_branch = config.get("init", "defaultBranch")
  1633. except KeyError:
  1634. default_branch = DEFAULT_BRANCH
  1635. ret.refs.set_symbolic_ref(b"HEAD", LOCAL_BRANCH_PREFIX + default_branch)
  1636. ret._init_files(bare=bare, symlinks=symlinks, format=format)
  1637. return ret
  1638. @classmethod
  1639. def init(
  1640. cls,
  1641. path: Union[str, bytes, os.PathLike],
  1642. *,
  1643. mkdir: bool = False,
  1644. config=None,
  1645. default_branch=None,
  1646. symlinks: Optional[bool] = None,
  1647. format: Optional[int] = None,
  1648. ) -> "Repo":
  1649. """Create a new repository.
  1650. Args:
  1651. path: Path in which to create the repository
  1652. mkdir: Whether to create the directory
  1653. format: Repository format version (defaults to 0)
  1654. Returns: `Repo` instance
  1655. """
  1656. path = os.fspath(path)
  1657. if isinstance(path, bytes):
  1658. path = os.fsdecode(path)
  1659. if mkdir:
  1660. os.mkdir(path)
  1661. controldir = os.path.join(path, CONTROLDIR)
  1662. os.mkdir(controldir)
  1663. _set_filesystem_hidden(controldir)
  1664. return cls._init_maybe_bare(
  1665. path,
  1666. controldir,
  1667. False,
  1668. config=config,
  1669. default_branch=default_branch,
  1670. symlinks=symlinks,
  1671. format=format,
  1672. )
  1673. @classmethod
  1674. def _init_new_working_directory(
  1675. cls,
  1676. path: Union[str, bytes, os.PathLike],
  1677. main_repo,
  1678. identifier=None,
  1679. mkdir=False,
  1680. ):
  1681. """Create a new working directory linked to a repository.
  1682. Args:
  1683. path: Path in which to create the working tree.
  1684. main_repo: Main repository to reference
  1685. identifier: Worktree identifier
  1686. mkdir: Whether to create the directory
  1687. Returns: `Repo` instance
  1688. """
  1689. path = os.fspath(path)
  1690. if isinstance(path, bytes):
  1691. path = os.fsdecode(path)
  1692. if mkdir:
  1693. os.mkdir(path)
  1694. if identifier is None:
  1695. identifier = os.path.basename(path)
  1696. main_worktreesdir = os.path.join(main_repo.controldir(), WORKTREES)
  1697. worktree_controldir = os.path.join(main_worktreesdir, identifier)
  1698. gitdirfile = os.path.join(path, CONTROLDIR)
  1699. with open(gitdirfile, "wb") as f:
  1700. f.write(b"gitdir: " + os.fsencode(worktree_controldir) + b"\n")
  1701. try:
  1702. os.mkdir(main_worktreesdir)
  1703. except FileExistsError:
  1704. pass
  1705. try:
  1706. os.mkdir(worktree_controldir)
  1707. except FileExistsError:
  1708. pass
  1709. with open(os.path.join(worktree_controldir, GITDIR), "wb") as f:
  1710. f.write(os.fsencode(gitdirfile) + b"\n")
  1711. with open(os.path.join(worktree_controldir, COMMONDIR), "wb") as f:
  1712. f.write(b"../..\n")
  1713. with open(os.path.join(worktree_controldir, "HEAD"), "wb") as f:
  1714. f.write(main_repo.head() + b"\n")
  1715. r = cls(path)
  1716. r.reset_index()
  1717. return r
  1718. @classmethod
  1719. def init_bare(
  1720. cls,
  1721. path: Union[str, bytes, os.PathLike],
  1722. *,
  1723. mkdir=False,
  1724. object_store=None,
  1725. config=None,
  1726. default_branch=None,
  1727. format: Optional[int] = None,
  1728. ):
  1729. """Create a new bare repository.
  1730. ``path`` should already exist and be an empty directory.
  1731. Args:
  1732. path: Path to create bare repository in
  1733. format: Repository format version (defaults to 0)
  1734. Returns: a `Repo` instance
  1735. """
  1736. path = os.fspath(path)
  1737. if isinstance(path, bytes):
  1738. path = os.fsdecode(path)
  1739. if mkdir:
  1740. os.mkdir(path)
  1741. return cls._init_maybe_bare(
  1742. path,
  1743. path,
  1744. True,
  1745. object_store=object_store,
  1746. config=config,
  1747. default_branch=default_branch,
  1748. format=format,
  1749. )
  1750. create = init_bare
def close(self) -> None:
    """Close any files opened by this repository.

    Delegates to the object store, which owns the open pack files.
    """
    self.object_store.close()
def __enter__(self):
    """Enter a context block; the repository itself is the context value."""
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    """Exit a context block, closing open files; exceptions propagate."""
    self.close()
  1758. def get_blob_normalizer(self):
  1759. """Return a BlobNormalizer object."""
  1760. # TODO Parse the git attributes files
  1761. git_attributes = {}
  1762. config_stack = self.get_config_stack()
  1763. try:
  1764. head_sha = self.refs[b"HEAD"]
  1765. # Peel tags to get the underlying commit
  1766. _, obj = peel_sha(self.object_store, head_sha)
  1767. tree = obj.tree
  1768. return TreeBlobNormalizer(
  1769. config_stack,
  1770. git_attributes,
  1771. self.object_store,
  1772. tree,
  1773. )
  1774. except KeyError:
  1775. return BlobNormalizer(config_stack, git_attributes)
  1776. def get_gitattributes(self, tree: Optional[bytes] = None) -> "GitAttributes":
  1777. """Read gitattributes for the repository.
  1778. Args:
  1779. tree: Tree SHA to read .gitattributes from (defaults to HEAD)
  1780. Returns:
  1781. GitAttributes object that can be used to match paths
  1782. """
  1783. from .attrs import (
  1784. GitAttributes,
  1785. Pattern,
  1786. parse_git_attributes,
  1787. )
  1788. patterns = []
  1789. # Read system gitattributes (TODO: implement this)
  1790. # Read global gitattributes (TODO: implement this)
  1791. # Read repository .gitattributes from index/tree
  1792. if tree is None:
  1793. try:
  1794. # Try to get from HEAD
  1795. head = self[b"HEAD"]
  1796. if isinstance(head, Tag):
  1797. _cls, obj = head.object
  1798. head = self.get_object(obj)
  1799. tree = head.tree
  1800. except KeyError:
  1801. # No HEAD, no attributes from tree
  1802. pass
  1803. if tree is not None:
  1804. try:
  1805. tree_obj = self[tree]
  1806. if b".gitattributes" in tree_obj:
  1807. _, attrs_sha = tree_obj[b".gitattributes"]
  1808. attrs_blob = self[attrs_sha]
  1809. if isinstance(attrs_blob, Blob):
  1810. attrs_data = BytesIO(attrs_blob.data)
  1811. for pattern_bytes, attrs in parse_git_attributes(attrs_data):
  1812. pattern = Pattern(pattern_bytes)
  1813. patterns.append((pattern, attrs))
  1814. except (KeyError, NotTreeError):
  1815. pass
  1816. # Read .git/info/attributes
  1817. info_attrs_path = os.path.join(self.controldir(), "info", "attributes")
  1818. if os.path.exists(info_attrs_path):
  1819. with open(info_attrs_path, "rb") as f:
  1820. for pattern_bytes, attrs in parse_git_attributes(f):
  1821. pattern = Pattern(pattern_bytes)
  1822. patterns.append((pattern, attrs))
  1823. return GitAttributes(patterns)
def _sparse_checkout_file_path(self) -> str:
    """Return the path of the sparse-checkout file in this repo's control dir."""
    return os.path.join(self.controldir(), "info", "sparse-checkout")
  1827. def configure_for_cone_mode(self) -> None:
  1828. """Ensure the repository is configured for cone-mode sparse-checkout."""
  1829. config = self.get_config()
  1830. config.set((b"core",), b"sparseCheckout", b"true")
  1831. config.set((b"core",), b"sparseCheckoutCone", b"true")
  1832. config.write_to_path()
  1833. def infer_cone_mode(self) -> bool:
  1834. """Return True if 'core.sparseCheckoutCone' is set to 'true' in config, else False."""
  1835. config = self.get_config()
  1836. try:
  1837. sc_cone = config.get((b"core",), b"sparseCheckoutCone")
  1838. return sc_cone == b"true"
  1839. except KeyError:
  1840. # If core.sparseCheckoutCone is not set, default to False
  1841. return False
  1842. def get_sparse_checkout_patterns(self) -> list[str]:
  1843. """Return a list of sparse-checkout patterns from info/sparse-checkout.
  1844. Returns:
  1845. A list of patterns. Returns an empty list if the file is missing.
  1846. """
  1847. path = self._sparse_checkout_file_path()
  1848. try:
  1849. with open(path, encoding="utf-8") as f:
  1850. return [line.strip() for line in f if line.strip()]
  1851. except FileNotFoundError:
  1852. return []
  1853. def set_sparse_checkout_patterns(self, patterns: list[str]) -> None:
  1854. """Write the given sparse-checkout patterns into info/sparse-checkout.
  1855. Creates the info/ directory if it does not exist.
  1856. Args:
  1857. patterns: A list of gitignore-style patterns to store.
  1858. """
  1859. info_dir = os.path.join(self.controldir(), "info")
  1860. os.makedirs(info_dir, exist_ok=True)
  1861. path = self._sparse_checkout_file_path()
  1862. with open(path, "w", encoding="utf-8") as f:
  1863. for pat in patterns:
  1864. f.write(pat + "\n")
  1865. def set_cone_mode_patterns(self, dirs: Union[list[str], None] = None) -> None:
  1866. """Write the given cone-mode directory patterns into info/sparse-checkout.
  1867. For each directory to include, add an inclusion line that "undoes" the prior
  1868. ``!/*/`` 'exclude' that re-includes that directory and everything under it.
  1869. Never add the same line twice.
  1870. """
  1871. patterns = ["/*", "!/*/"]
  1872. if dirs:
  1873. for d in dirs:
  1874. d = d.strip("/")
  1875. line = f"/{d}/"
  1876. if d and line not in patterns:
  1877. patterns.append(line)
  1878. self.set_sparse_checkout_patterns(patterns)
class MemoryRepo(BaseRepo):
    """Repo that stores refs, objects, and named files in memory.

    MemoryRepos are always bare: they have no working tree and no index, since
    those have a stronger dependency on the filesystem.
    """

    def __init__(self) -> None:
        """Create a new repository in memory."""
        from .config import ConfigFile

        # Reflog entries accumulate as tuples; every ref update is routed
        # through _append_reflog by the DictRefsContainer logger hook.
        self._reflog: list[Any] = []
        refs_container = DictRefsContainer({}, logger=self._append_reflog)
        BaseRepo.__init__(self, MemoryObjectStore(), refs_container)  # type: ignore
        # In-memory stand-in for files in the control directory.
        self._named_files: dict[str, bytes] = {}
        self.bare = True
        self._config = ConfigFile()
        self._description = None
def _append_reflog(self, *args) -> None:
    """Record a ref update as a raw tuple in the in-memory reflog."""
    self._reflog.append(args)
def set_description(self, description) -> None:
    """Set the repository description (kept in memory only)."""
    self._description = description
def get_description(self):
    """Return the repository description, or None if never set."""
    return self._description
def _determine_file_mode(self):
    """Probe the file-system to determine whether permissions can be trusted.

    Returns: True if permissions can be trusted, False otherwise.
    """
    # No real filesystem to probe; assume POSIX semantics except on Windows.
    return sys.platform != "win32"
def _determine_symlinks(self):
    """Probe the file-system to determine whether symlinks can be created.

    Returns: True if symlinks can be used, False otherwise.
    """
    # No real filesystem to probe; assume symlink support except on Windows.
    return sys.platform != "win32"
def _put_named_file(self, path, contents) -> None:
    """Write a file to the control dir with the given name and contents.

    Args:
      path: The path to the file, relative to the control dir.
      contents: Bytes to store for the file.
    """
    self._named_files[path] = contents
  1917. def _del_named_file(self, path) -> None:
  1918. try:
  1919. del self._named_files[path]
  1920. except KeyError:
  1921. pass
  1922. def get_named_file(self, path, basedir=None):
  1923. """Get a file from the control dir with a specific name.
  1924. Although the filename should be interpreted as a filename relative to
  1925. the control dir in a disk-baked Repo, the object returned need not be
  1926. pointing to a file in that location.
  1927. Args:
  1928. path: The path to the file, relative to the control dir.
  1929. Returns: An open file object, or None if the file does not exist.
  1930. """
  1931. contents = self._named_files.get(path, None)
  1932. if contents is None:
  1933. return None
  1934. return BytesIO(contents)
def open_index(self) -> "Index":
    """Fail to open index for this repo, since it is bare.

    Raises:
      NoIndexPresent: Raised when no index is present
    """
    raise NoIndexPresent
def get_config(self):
    """Retrieve the config object.

    Returns: the in-memory `ConfigFile` object created at construction.
    """
    return self._config
def get_rebase_state_manager(self):
    """Get the appropriate rebase state manager for this repository.

    Returns: MemoryRebaseStateManager instance (state kept in memory).
    """
    from .rebase import MemoryRebaseStateManager

    return MemoryRebaseStateManager(self)
  1952. @classmethod
  1953. def init_bare(cls, objects, refs, format: Optional[int] = None):
  1954. """Create a new bare repository in memory.
  1955. Args:
  1956. objects: Objects for the new repository,
  1957. as iterable
  1958. refs: Refs as dictionary, mapping names
  1959. to object SHA1s
  1960. format: Repository format version (defaults to 0)
  1961. """
  1962. ret = cls()
  1963. for obj in objects:
  1964. ret.object_store.add_object(obj)
  1965. for refname, sha in refs.items():
  1966. ret.refs.add_if_new(refname, sha)
  1967. ret._init_files(bare=True, format=format)
  1968. return ret