repo.py 92 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758
  1. # repo.py -- For dealing with git repositories.
  2. # Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
  3. # Copyright (C) 2008-2013 Jelmer Vernooij <jelmer@jelmer.uk>
  4. #
  5. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  6. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  7. # General Public License as published by the Free Software Foundation; version 2.0
  8. # or (at your option) any later version. You can redistribute it and/or
  9. # modify it under the terms of either of these two licenses.
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. #
  17. # You should have received a copy of the licenses; if not, see
  18. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  19. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  20. # License, Version 2.0.
  21. #
  22. """Repository access.
  23. This module contains the base class for git repositories
  24. (BaseRepo) and an implementation which uses a repository on
  25. local disk (Repo).
  26. """
# Explicit public API of this module; anything not listed here is internal.
__all__ = [
    "BASE_DIRECTORIES",
    "COMMONDIR",
    "CONTROLDIR",
    "DEFAULT_BRANCH",
    "DEFAULT_OFS_DELTA",
    "GITDIR",
    "INDEX_FILENAME",
    "OBJECTDIR",
    "REFSDIR",
    "REFSDIR_HEADS",
    "REFSDIR_TAGS",
    "WORKTREES",
    "BaseRepo",
    "DefaultIdentityNotFound",
    "InvalidUserIdentity",
    "MemoryRepo",
    "ParentsProvider",
    "Repo",
    "UnsupportedExtension",
    "UnsupportedVersion",
    "check_user_identity",
    "get_user_identity",
    "parse_graftpoints",
    "parse_shared_repository",
    "read_gitfile",
    "serialize_graftpoints",
]
  55. import os
  56. import stat
  57. import sys
  58. import time
  59. import warnings
  60. from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence
  61. from io import BytesIO
  62. from types import TracebackType
  63. from typing import (
  64. TYPE_CHECKING,
  65. Any,
  66. BinaryIO,
  67. TypeVar,
  68. )
  69. if TYPE_CHECKING:
  70. # There are no circular imports here, but we try to defer imports as long
  71. # as possible to reduce start-up time for anything that doesn't need
  72. # these imports.
  73. from .attrs import GitAttributes
  74. from .config import ConditionMatcher, ConfigFile, StackedConfig
  75. from .diff_tree import RenameDetector
  76. from .filters import FilterBlobNormalizer, FilterContext
  77. from .index import Index
  78. from .notes import Notes
  79. from .object_store import BaseObjectStore, GraphWalker
  80. from .pack import UnpackedObject
  81. from .rebase import RebaseStateManager
  82. from .walk import Walker
  83. from .worktree import WorkTree
  84. from . import reflog, replace_me
  85. from .errors import (
  86. NoIndexPresent,
  87. NotBlobError,
  88. NotCommitError,
  89. NotGitRepository,
  90. NotTagError,
  91. NotTreeError,
  92. RefFormatError,
  93. )
  94. from .file import GitFile
  95. from .hooks import (
  96. CommitMsgShellHook,
  97. Hook,
  98. PostCommitShellHook,
  99. PostReceiveShellHook,
  100. PreCommitShellHook,
  101. )
  102. from .object_store import (
  103. DiskObjectStore,
  104. MemoryObjectStore,
  105. MissingObjectFinder,
  106. ObjectStoreGraphWalker,
  107. PackBasedObjectStore,
  108. PackCapableObjectStore,
  109. find_shallow,
  110. peel_sha,
  111. )
  112. from .objects import (
  113. Blob,
  114. Commit,
  115. ObjectID,
  116. RawObjectID,
  117. ShaFile,
  118. Tag,
  119. Tree,
  120. check_hexsha,
  121. valid_hexsha,
  122. )
  123. from .pack import generate_unpacked_objects
  124. from .refs import (
  125. HEADREF,
  126. LOCAL_TAG_PREFIX, # noqa: F401
  127. SYMREF, # noqa: F401
  128. DictRefsContainer,
  129. DiskRefsContainer,
  130. Ref,
  131. RefsContainer,
  132. _set_default_branch,
  133. _set_head,
  134. _set_origin_head,
  135. check_ref_format, # noqa: F401
  136. extract_branch_name,
  137. is_per_worktree_ref,
  138. local_branch_name,
  139. read_packed_refs, # noqa: F401
  140. read_packed_refs_with_peeled, # noqa: F401
  141. write_packed_refs, # noqa: F401
  142. )
# Name of the control directory of a non-bare repository.
CONTROLDIR = ".git"
# Subdirectory of the control dir that holds the object database.
OBJECTDIR = "objects"
# Whether packs are written with OFS_DELTA entries by default.
DEFAULT_OFS_DELTA = True
# Type variable for APIs that return a specific ShaFile subclass.
T = TypeVar("T", bound="ShaFile")
# Layout of the refs hierarchy inside the control dir.
REFSDIR = "refs"
REFSDIR_TAGS = "tags"
REFSDIR_HEADS = "heads"
# Name of the index file inside the control dir.
INDEX_FILENAME = "index"
# Files/directories used by linked worktrees.
COMMONDIR = "commondir"
GITDIR = "gitdir"
WORKTREES = "worktrees"
# Directories created inside a freshly initialized control dir.
BASE_DIRECTORIES = [
    ["branches"],
    [REFSDIR],
    [REFSDIR, REFSDIR_TAGS],
    [REFSDIR, REFSDIR_HEADS],
    ["hooks"],
    ["info"],
]
# Branch that HEAD points at in a newly created repository.
DEFAULT_BRANCH = b"master"
  163. class InvalidUserIdentity(Exception):
  164. """User identity is not of the format 'user <email>'."""
  165. def __init__(self, identity: str) -> None:
  166. """Initialize InvalidUserIdentity exception."""
  167. self.identity = identity
  168. class DefaultIdentityNotFound(Exception):
  169. """Default identity could not be determined."""
  170. # TODO(jelmer): Cache?
  171. def _get_default_identity() -> tuple[str, str]:
  172. import socket
  173. for name in ("LOGNAME", "USER", "LNAME", "USERNAME"):
  174. username = os.environ.get(name)
  175. if username:
  176. break
  177. else:
  178. username = None
  179. try:
  180. import pwd
  181. except ImportError:
  182. fullname = None
  183. else:
  184. try:
  185. entry = pwd.getpwuid(os.getuid()) # type: ignore[attr-defined,unused-ignore]
  186. except KeyError:
  187. fullname = None
  188. else:
  189. if getattr(entry, "gecos", None):
  190. fullname = entry.pw_gecos.split(",")[0]
  191. else:
  192. fullname = None
  193. if username is None:
  194. username = entry.pw_name
  195. if not fullname:
  196. if username is None:
  197. raise DefaultIdentityNotFound("no username found")
  198. fullname = username
  199. email = os.environ.get("EMAIL")
  200. if email is None:
  201. if username is None:
  202. raise DefaultIdentityNotFound("no username found")
  203. email = f"{username}@{socket.gethostname()}"
  204. return (fullname, email)
  205. def get_user_identity(config: "StackedConfig", kind: str | None = None) -> bytes:
  206. """Determine the identity to use for new commits.
  207. If kind is set, this first checks
  208. GIT_${KIND}_NAME and GIT_${KIND}_EMAIL.
  209. If those variables are not set, then it will fall back
  210. to reading the user.name and user.email settings from
  211. the specified configuration.
  212. If that also fails, then it will fall back to using
  213. the current users' identity as obtained from the host
  214. system (e.g. the gecos field, $EMAIL, $USER@$(hostname -f).
  215. Args:
  216. config: Configuration stack to read from
  217. kind: Optional kind to return identity for,
  218. usually either "AUTHOR" or "COMMITTER".
  219. Returns:
  220. A user identity
  221. """
  222. user: bytes | None = None
  223. email: bytes | None = None
  224. if kind:
  225. user_uc = os.environ.get("GIT_" + kind + "_NAME")
  226. if user_uc is not None:
  227. user = user_uc.encode("utf-8")
  228. email_uc = os.environ.get("GIT_" + kind + "_EMAIL")
  229. if email_uc is not None:
  230. email = email_uc.encode("utf-8")
  231. if user is None:
  232. try:
  233. user = config.get(("user",), "name")
  234. except KeyError:
  235. user = None
  236. if email is None:
  237. try:
  238. email = config.get(("user",), "email")
  239. except KeyError:
  240. email = None
  241. default_user, default_email = _get_default_identity()
  242. if user is None:
  243. user = default_user.encode("utf-8")
  244. if email is None:
  245. email = default_email.encode("utf-8")
  246. if email.startswith(b"<") and email.endswith(b">"):
  247. email = email[1:-1]
  248. return user + b" <" + email + b">"
  249. def check_user_identity(identity: bytes) -> None:
  250. """Verify that a user identity is formatted correctly.
  251. Args:
  252. identity: User identity bytestring
  253. Raises:
  254. InvalidUserIdentity: Raised when identity is invalid
  255. """
  256. try:
  257. _fst, snd = identity.split(b" <", 1)
  258. except ValueError as exc:
  259. raise InvalidUserIdentity(identity.decode("utf-8", "replace")) from exc
  260. if b">" not in snd:
  261. raise InvalidUserIdentity(identity.decode("utf-8", "replace"))
  262. if b"\0" in identity or b"\n" in identity:
  263. raise InvalidUserIdentity(identity.decode("utf-8", "replace"))
  264. def parse_graftpoints(
  265. graftpoints: Iterable[bytes],
  266. ) -> dict[ObjectID, list[ObjectID]]:
  267. """Convert a list of graftpoints into a dict.
  268. Args:
  269. graftpoints: Iterator of graftpoint lines
  270. Each line is formatted as:
  271. <commit sha1> <parent sha1> [<parent sha1>]*
  272. Resulting dictionary is:
  273. <commit sha1>: [<parent sha1>*]
  274. https://git.wiki.kernel.org/index.php/GraftPoint
  275. """
  276. grafts: dict[ObjectID, list[ObjectID]] = {}
  277. for line in graftpoints:
  278. raw_graft = line.split(None, 1)
  279. commit = ObjectID(raw_graft[0])
  280. if len(raw_graft) == 2:
  281. parents = [ObjectID(p) for p in raw_graft[1].split()]
  282. else:
  283. parents = []
  284. for sha in [commit, *parents]:
  285. check_hexsha(sha, "Invalid graftpoint")
  286. grafts[commit] = parents
  287. return grafts
  288. def serialize_graftpoints(graftpoints: Mapping[ObjectID, Sequence[ObjectID]]) -> bytes:
  289. """Convert a dictionary of grafts into string.
  290. The graft dictionary is:
  291. <commit sha1>: [<parent sha1>*]
  292. Each line is formatted as:
  293. <commit sha1> <parent sha1> [<parent sha1>]*
  294. https://git.wiki.kernel.org/index.php/GraftPoint
  295. """
  296. graft_lines = []
  297. for commit, parents in graftpoints.items():
  298. if parents:
  299. graft_lines.append(commit + b" " + b" ".join(parents))
  300. else:
  301. graft_lines.append(commit)
  302. return b"\n".join(graft_lines)
  303. def _set_filesystem_hidden(path: str) -> None:
  304. """Mark path as to be hidden if supported by platform and filesystem.
  305. On win32 uses SetFileAttributesW api:
  306. <https://docs.microsoft.com/windows/desktop/api/fileapi/nf-fileapi-setfileattributesw>
  307. """
  308. if sys.platform == "win32":
  309. import ctypes
  310. from ctypes.wintypes import BOOL, DWORD, LPCWSTR
  311. FILE_ATTRIBUTE_HIDDEN = 2
  312. SetFileAttributesW = ctypes.WINFUNCTYPE(BOOL, LPCWSTR, DWORD)(
  313. ("SetFileAttributesW", ctypes.windll.kernel32)
  314. )
  315. if isinstance(path, bytes):
  316. path = os.fsdecode(path)
  317. if not SetFileAttributesW(path, FILE_ATTRIBUTE_HIDDEN):
  318. pass # Could raise or log `ctypes.WinError()` here
  319. # Could implement other platform specific filesystem hiding here
  320. def parse_shared_repository(
  321. value: str | bytes | bool,
  322. ) -> tuple[int | None, int | None]:
  323. """Parse core.sharedRepository configuration value.
  324. Args:
  325. value: Configuration value (string, bytes, or boolean)
  326. Returns:
  327. tuple of (file_mask, directory_mask) or (None, None) if not shared
  328. The masks are permission bits to apply via chmod.
  329. """
  330. if isinstance(value, bytes):
  331. value = value.decode("utf-8", errors="replace")
  332. # Handle boolean values
  333. if isinstance(value, bool):
  334. if value:
  335. # true = group (same as "group")
  336. return (0o664, 0o2775)
  337. else:
  338. # false = umask (use system umask, no adjustment)
  339. return (None, None)
  340. # Handle string values
  341. value_lower = value.lower()
  342. if value_lower in ("false", "0", ""):
  343. # Use umask (no adjustment)
  344. return (None, None)
  345. if value_lower in ("true", "1", "group"):
  346. # Group writable (with setgid bit)
  347. return (0o664, 0o2775)
  348. if value_lower in ("all", "world", "everybody", "2"):
  349. # World readable/writable (with setgid bit)
  350. return (0o666, 0o2777)
  351. if value_lower == "umask":
  352. # Explicitly use umask
  353. return (None, None)
  354. # Try to parse as octal
  355. if value.startswith("0"):
  356. try:
  357. mode = int(value, 8)
  358. # For directories, add execute bits where read bits are set
  359. # and add setgid bit for shared repositories
  360. dir_mode = mode | 0o2000 # Add setgid bit
  361. if mode & 0o004:
  362. dir_mode |= 0o001
  363. if mode & 0o040:
  364. dir_mode |= 0o010
  365. if mode & 0o400:
  366. dir_mode |= 0o100
  367. return (mode, dir_mode)
  368. except ValueError:
  369. pass
  370. # Default to umask for unrecognized values
  371. return (None, None)
  372. class ParentsProvider:
  373. """Provider for commit parent information."""
  374. def __init__(
  375. self,
  376. store: "BaseObjectStore",
  377. grafts: dict[ObjectID, list[ObjectID]] = {},
  378. shallows: Iterable[ObjectID] = [],
  379. ) -> None:
  380. """Initialize ParentsProvider.
  381. Args:
  382. store: Object store to use
  383. grafts: Graft information
  384. shallows: Shallow commit SHAs
  385. """
  386. self.store = store
  387. self.grafts = grafts
  388. self.shallows = set(shallows)
  389. # Get commit graph once at initialization for performance
  390. self.commit_graph = store.get_commit_graph()
  391. def get_parents(
  392. self, commit_id: ObjectID, commit: Commit | None = None
  393. ) -> list[ObjectID]:
  394. """Get parents for a commit using the parents provider."""
  395. try:
  396. return self.grafts[commit_id]
  397. except KeyError:
  398. pass
  399. if commit_id in self.shallows:
  400. return []
  401. # Try to use commit graph for faster parent lookup
  402. if self.commit_graph:
  403. parents = self.commit_graph.get_parents(commit_id)
  404. if parents is not None:
  405. return parents
  406. # Fallback to reading the commit object
  407. if commit is None:
  408. obj = self.store[commit_id]
  409. assert isinstance(obj, Commit)
  410. commit = obj
  411. result: list[ObjectID] = commit.parents
  412. return result
  413. class BaseRepo:
  414. """Base class for a git repository.
  415. This base class is meant to be used for Repository implementations that e.g.
  416. work on top of a different transport than a standard filesystem path.
  417. Attributes:
  418. object_store: Dictionary-like object for accessing
  419. the objects
  420. refs: Dictionary-like object with the refs in this
  421. repository
  422. """
  423. def __init__(
  424. self, object_store: "PackCapableObjectStore", refs: RefsContainer
  425. ) -> None:
  426. """Open a repository.
  427. This shouldn't be called directly, but rather through one of the
  428. base classes, such as MemoryRepo or Repo.
  429. Args:
  430. object_store: Object store to use
  431. refs: Refs container to use
  432. """
  433. self.object_store = object_store
  434. self.refs = refs
  435. self._graftpoints: dict[ObjectID, list[ObjectID]] = {}
  436. self.hooks: dict[str, Hook] = {}
  437. def _determine_file_mode(self) -> bool:
  438. """Probe the file-system to determine whether permissions can be trusted.
  439. Returns: True if permissions can be trusted, False otherwise.
  440. """
  441. raise NotImplementedError(self._determine_file_mode)
  442. def _determine_symlinks(self) -> bool:
  443. """Probe the filesystem to determine whether symlinks can be created.
  444. Returns: True if symlinks can be created, False otherwise.
  445. """
  446. # For now, just mimic the old behaviour
  447. return sys.platform != "win32"
  448. def _init_files(
  449. self,
  450. bare: bool,
  451. symlinks: bool | None = None,
  452. format: int | None = None,
  453. shared_repository: str | bool | None = None,
  454. ) -> None:
  455. """Initialize a default set of named files."""
  456. from .config import ConfigFile
  457. self._put_named_file("description", b"Unnamed repository")
  458. f = BytesIO()
  459. cf = ConfigFile()
  460. if format is None:
  461. format = 0
  462. if format not in (0, 1):
  463. raise ValueError(f"Unsupported repository format version: {format}")
  464. cf.set("core", "repositoryformatversion", str(format))
  465. if self._determine_file_mode():
  466. cf.set("core", "filemode", True)
  467. else:
  468. cf.set("core", "filemode", False)
  469. if symlinks is None and not bare:
  470. symlinks = self._determine_symlinks()
  471. if symlinks is False:
  472. cf.set("core", "symlinks", symlinks)
  473. cf.set("core", "bare", bare)
  474. cf.set("core", "logallrefupdates", True)
  475. # Set shared repository if specified
  476. if shared_repository is not None:
  477. if isinstance(shared_repository, bool):
  478. cf.set("core", "sharedRepository", shared_repository)
  479. else:
  480. cf.set("core", "sharedRepository", shared_repository)
  481. cf.write_to_file(f)
  482. self._put_named_file("config", f.getvalue())
  483. self._put_named_file(os.path.join("info", "exclude"), b"")
  484. def get_named_file(self, path: str) -> BinaryIO | None:
  485. """Get a file from the control dir with a specific name.
  486. Although the filename should be interpreted as a filename relative to
  487. the control dir in a disk-based Repo, the object returned need not be
  488. pointing to a file in that location.
  489. Args:
  490. path: The path to the file, relative to the control dir.
  491. Returns: An open file object, or None if the file does not exist.
  492. """
  493. raise NotImplementedError(self.get_named_file)
  494. def _put_named_file(self, path: str, contents: bytes) -> None:
  495. """Write a file to the control dir with the given name and contents.
  496. Args:
  497. path: The path to the file, relative to the control dir.
  498. contents: A string to write to the file.
  499. """
  500. raise NotImplementedError(self._put_named_file)
  501. def _del_named_file(self, path: str) -> None:
  502. """Delete a file in the control directory with the given name."""
  503. raise NotImplementedError(self._del_named_file)
  504. def open_index(self) -> "Index":
  505. """Open the index for this repository.
  506. Raises:
  507. NoIndexPresent: If no index is present
  508. Returns: The matching `Index`
  509. """
  510. raise NotImplementedError(self.open_index)
  511. def fetch(
  512. self,
  513. target: "BaseRepo",
  514. determine_wants: Callable[[Mapping[Ref, ObjectID], int | None], list[ObjectID]]
  515. | None = None,
  516. progress: Callable[..., None] | None = None,
  517. depth: int | None = None,
  518. ) -> dict[Ref, ObjectID]:
  519. """Fetch objects into another repository.
  520. Args:
  521. target: The target repository
  522. determine_wants: Optional function to determine what refs to
  523. fetch.
  524. progress: Optional progress function
  525. depth: Optional shallow fetch depth
  526. Returns: The local refs
  527. """
  528. if determine_wants is None:
  529. determine_wants = target.object_store.determine_wants_all
  530. count, pack_data = self.fetch_pack_data(
  531. determine_wants,
  532. target.get_graph_walker(),
  533. progress=progress,
  534. depth=depth,
  535. )
  536. target.object_store.add_pack_data(count, pack_data, progress)
  537. return self.get_refs()
  538. def fetch_pack_data(
  539. self,
  540. determine_wants: Callable[[Mapping[Ref, ObjectID], int | None], list[ObjectID]],
  541. graph_walker: "GraphWalker",
  542. progress: Callable[[bytes], None] | None,
  543. *,
  544. get_tagged: Callable[[], dict[ObjectID, ObjectID]] | None = None,
  545. depth: int | None = None,
  546. ) -> tuple[int, Iterator["UnpackedObject"]]:
  547. """Fetch the pack data required for a set of revisions.
  548. Args:
  549. determine_wants: Function that takes a dictionary with heads
  550. and returns the list of heads to fetch.
  551. graph_walker: Object that can iterate over the list of revisions
  552. to fetch and has an "ack" method that will be called to acknowledge
  553. that a revision is present.
  554. progress: Simple progress function that will be called with
  555. updated progress strings.
  556. get_tagged: Function that returns a dict of pointed-to sha ->
  557. tag sha for including tags.
  558. depth: Shallow fetch depth
  559. Returns: count and iterator over pack data
  560. """
  561. missing_objects = self.find_missing_objects(
  562. determine_wants, graph_walker, progress, get_tagged=get_tagged, depth=depth
  563. )
  564. if missing_objects is None:
  565. return 0, iter([])
  566. remote_has = missing_objects.get_remote_has()
  567. object_ids = list(missing_objects)
  568. return len(object_ids), generate_unpacked_objects(
  569. self.object_store, object_ids, progress=progress, other_haves=remote_has
  570. )
    def find_missing_objects(
        self,
        determine_wants: Callable[[Mapping[Ref, ObjectID], int | None], list[ObjectID]],
        graph_walker: "GraphWalker",
        progress: Callable[[bytes], None] | None,
        *,
        get_tagged: Callable[[], dict[ObjectID, ObjectID]] | None = None,
        depth: int | None = None,
    ) -> MissingObjectFinder | None:
        """Fetch the missing objects required for a set of revisions.

        Args:
          determine_wants: Function that takes a dictionary with heads
            and returns the list of heads to fetch.
          graph_walker: Object that can iterate over the list of revisions
            to fetch and has an "ack" method that will be called to acknowledge
            that a revision is present.
          progress: Simple progress function that will be called with
            updated progress strings.
          get_tagged: Function that returns a dict of pointed-to sha ->
            tag sha for including tags.
          depth: Shallow fetch depth

        Returns: iterator over objects, with __len__ implemented, or None
          when a shallow short-circuit applies and no pack should be sent.
        """
        import logging

        # Filter out refs pointing to missing objects to avoid errors downstream.
        # This makes Dulwich more robust when dealing with broken refs on disk.
        # Previously serialize_refs() did this filtering as a side-effect.
        all_refs = self.get_refs()
        refs: dict[Ref, ObjectID] = {}
        for ref, sha in all_refs.items():
            if sha in self.object_store:
                refs[ref] = sha
            else:
                logging.warning(
                    "ref %s points at non-present sha %s",
                    ref.decode("utf-8", "replace"),
                    sha.decode("ascii"),
                )
        wants = determine_wants(refs, depth)
        if not isinstance(wants, list):
            raise TypeError("determine_wants() did not return a list")
        # Snapshot the walker's shallow set before any depth processing so we
        # can later compute which commits became shallow / unshallow.
        current_shallow = set(getattr(graph_walker, "shallow", set()))
        if depth not in (None, 0):
            assert depth is not None
            shallow, not_shallow = find_shallow(self.object_store, wants, depth)
            # Only update if graph_walker has shallow attribute
            if hasattr(graph_walker, "shallow"):
                graph_walker.shallow.update(shallow - not_shallow)
                new_shallow = graph_walker.shallow - current_shallow
                unshallow = not_shallow & current_shallow
                setattr(graph_walker, "unshallow", unshallow)
                if hasattr(graph_walker, "update_shallow"):
                    graph_walker.update_shallow(new_shallow, unshallow)
        else:
            unshallow = getattr(graph_walker, "unshallow", set())
        if wants == []:
            # TODO(dborowitz): find a way to short-circuit that doesn't change
            # this interface.
            if getattr(graph_walker, "shallow", set()) or unshallow:
                # Do not send a pack in shallow short-circuit path
                return None
            # Return an actual MissingObjectFinder with empty wants
            return MissingObjectFinder(
                self.object_store,
                haves=[],
                wants=[],
            )
        # If the graph walker is set up with an implementation that can
        # ACK/NAK to the wire, it will write data to the client through
        # this call as a side-effect.
        haves = self.object_store.find_common_revisions(graph_walker)
        # Deal with shallow requests separately because the haves do
        # not reflect what objects are missing
        if getattr(graph_walker, "shallow", set()) or unshallow:
            # TODO: filter the haves commits from iter_shas. the specific
            # commits aren't missing.
            haves = []
        parents_provider = ParentsProvider(self.object_store, shallows=current_shallow)

        def get_parents(commit: Commit) -> list[ObjectID]:
            """Get parents for a commit using the parents provider.

            Args:
              commit: Commit object

            Returns:
              List of parent commit SHAs
            """
            return parents_provider.get_parents(commit.id, commit)

        return MissingObjectFinder(
            self.object_store,
            haves=haves,
            wants=wants,
            shallow=getattr(graph_walker, "shallow", set()),
            progress=progress,
            get_tagged=get_tagged,
            get_parents=get_parents,
        )
  666. def generate_pack_data(
  667. self,
  668. have: set[ObjectID],
  669. want: set[ObjectID],
  670. *,
  671. shallow: set[ObjectID] | None = None,
  672. progress: Callable[[str], None] | None = None,
  673. ofs_delta: bool | None = None,
  674. ) -> tuple[int, Iterator["UnpackedObject"]]:
  675. """Generate pack data objects for a set of wants/haves.
  676. Args:
  677. have: List of SHA1s of objects that should not be sent
  678. want: List of SHA1s of objects that should be sent
  679. shallow: Set of shallow commit SHA1s to skip (defaults to repo's shallow commits)
  680. ofs_delta: Whether OFS deltas can be included
  681. progress: Optional progress reporting method
  682. """
  683. if shallow is None:
  684. shallow = self.get_shallow()
  685. return self.object_store.generate_pack_data(
  686. have,
  687. want,
  688. shallow=shallow,
  689. progress=progress,
  690. ofs_delta=ofs_delta if ofs_delta is not None else DEFAULT_OFS_DELTA,
  691. )
  692. def get_graph_walker(
  693. self, heads: list[ObjectID] | None = None
  694. ) -> ObjectStoreGraphWalker:
  695. """Retrieve a graph walker.
  696. A graph walker is used by a remote repository (or proxy)
  697. to find out which objects are present in this repository.
  698. Args:
  699. heads: Repository heads to use (optional)
  700. Returns: A graph walker object
  701. """
  702. if heads is None:
  703. heads = [
  704. sha
  705. for sha in self.refs.as_dict(Ref(b"refs/heads")).values()
  706. if sha in self.object_store
  707. ]
  708. parents_provider = ParentsProvider(self.object_store)
  709. return ObjectStoreGraphWalker(
  710. heads,
  711. parents_provider.get_parents,
  712. shallow=self.get_shallow(),
  713. update_shallow=self.update_shallow,
  714. )
  715. def get_refs(self) -> dict[Ref, ObjectID]:
  716. """Get dictionary with all refs.
  717. Returns: A ``dict`` mapping ref names to SHA1s
  718. """
  719. return self.refs.as_dict()
  720. def head(self) -> ObjectID:
  721. """Return the SHA1 pointed at by HEAD."""
  722. # TODO: move this method to WorkTree
  723. return self.refs[HEADREF]
  724. def _get_object(self, sha: bytes, cls: type[T]) -> T:
  725. assert len(sha) in (20, 40)
  726. obj_id = ObjectID(sha) if len(sha) == 40 else RawObjectID(sha)
  727. ret = self.get_object(obj_id)
  728. if not isinstance(ret, cls):
  729. if cls is Commit:
  730. raise NotCommitError(ret.id)
  731. elif cls is Blob:
  732. raise NotBlobError(ret.id)
  733. elif cls is Tree:
  734. raise NotTreeError(ret.id)
  735. elif cls is Tag:
  736. raise NotTagError(ret.id)
  737. else:
  738. raise Exception(f"Type invalid: {ret.type_name!r} != {cls.type_name!r}")
  739. return ret
  740. def get_object(self, sha: ObjectID | RawObjectID) -> ShaFile:
  741. """Retrieve the object with the specified SHA.
  742. Args:
  743. sha: SHA to retrieve
  744. Returns: A ShaFile object
  745. Raises:
  746. KeyError: when the object can not be found
  747. """
  748. return self.object_store[sha]
  749. def parents_provider(self) -> ParentsProvider:
  750. """Get a parents provider for this repository.
  751. Returns:
  752. ParentsProvider instance configured with grafts and shallows
  753. """
  754. return ParentsProvider(
  755. self.object_store,
  756. grafts=self._graftpoints,
  757. shallows=self.get_shallow(),
  758. )
  759. def get_parents(
  760. self, sha: ObjectID, commit: Commit | None = None
  761. ) -> list[ObjectID]:
  762. """Retrieve the parents of a specific commit.
  763. If the specific commit is a graftpoint, the graft parents
  764. will be returned instead.
  765. Args:
  766. sha: SHA of the commit for which to retrieve the parents
  767. commit: Optional commit matching the sha
  768. Returns: List of parents
  769. """
  770. return self.parents_provider().get_parents(sha, commit)
    def get_config(self) -> "ConfigFile":
        """Retrieve the config object.

        Returns: `ConfigFile` object for the ``.git/config`` file.

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.get_config)
    def get_worktree_config(self) -> "ConfigFile":
        """Retrieve the worktree config object.

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.get_worktree_config)
    def get_description(self) -> bytes | None:
        """Retrieve the description for this repository.

        Returns: Bytes with the description of the repository
          as set by the user.

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.get_description)
    def set_description(self, description: bytes) -> None:
        """Set the description for this repository.

        Args:
          description: Text to set as description for this repository.

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.set_description)
    def get_rebase_state_manager(self) -> "RebaseStateManager":
        """Get the appropriate rebase state manager for this repository.

        Returns: RebaseStateManager instance

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.get_rebase_state_manager)
    def get_blob_normalizer(self) -> "FilterBlobNormalizer":
        """Return a BlobNormalizer object for checkin/checkout operations.

        Returns: BlobNormalizer instance

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.get_blob_normalizer)
    def get_gitattributes(self, tree: bytes | None = None) -> "GitAttributes":
        """Read gitattributes for the repository.

        Args:
          tree: Tree SHA to read .gitattributes from (defaults to HEAD)

        Returns:
          GitAttributes object that can be used to match paths

        Raises:
          NotImplementedError: always; concrete subclasses must override.
        """
        raise NotImplementedError(self.get_gitattributes)
  809. def get_config_stack(self) -> "StackedConfig":
  810. """Return a config stack for this repository.
  811. This stack accesses the configuration for both this repository
  812. itself (.git/config) and the global configuration, which usually
  813. lives in ~/.gitconfig.
  814. Returns: `Config` instance for this repository
  815. """
  816. from .config import ConfigFile, StackedConfig
  817. local_config = self.get_config()
  818. backends: list[ConfigFile] = [local_config]
  819. if local_config.get_boolean((b"extensions",), b"worktreeconfig", False):
  820. backends.append(self.get_worktree_config())
  821. backends += StackedConfig.default_backends()
  822. return StackedConfig(backends, writable=local_config)
  823. def get_shallow(self) -> set[ObjectID]:
  824. """Get the set of shallow commits.
  825. Returns: Set of shallow commits.
  826. """
  827. f = self.get_named_file("shallow")
  828. if f is None:
  829. return set()
  830. with f:
  831. return {ObjectID(line.strip()) for line in f}
  832. def update_shallow(
  833. self, new_shallow: set[ObjectID] | None, new_unshallow: set[ObjectID] | None
  834. ) -> None:
  835. """Update the list of shallow objects.
  836. Args:
  837. new_shallow: Newly shallow objects
  838. new_unshallow: Newly no longer shallow objects
  839. """
  840. shallow = self.get_shallow()
  841. if new_shallow:
  842. shallow.update(new_shallow)
  843. if new_unshallow:
  844. shallow.difference_update(new_unshallow)
  845. if shallow:
  846. self._put_named_file("shallow", b"".join([sha + b"\n" for sha in shallow]))
  847. else:
  848. self._del_named_file("shallow")
  849. def get_peeled(self, ref: Ref) -> ObjectID:
  850. """Get the peeled value of a ref.
  851. Args:
  852. ref: The refname to peel.
  853. Returns: The fully-peeled SHA1 of a tag object, after peeling all
  854. intermediate tags; if the original ref does not point to a tag,
  855. this will equal the original SHA1.
  856. """
  857. cached = self.refs.get_peeled(ref)
  858. if cached is not None:
  859. return cached
  860. return peel_sha(self.object_store, self.refs[ref])[1].id
    @property
    def notes(self) -> "Notes":
        """Access notes functionality for this repository.

        Returns:
          Notes object for accessing notes
        """
        # Imported lazily at property access time.
        from .notes import Notes

        return Notes(self.object_store, self.refs)
  869. def get_walker(
  870. self,
  871. include: Sequence[ObjectID] | None = None,
  872. exclude: Sequence[ObjectID] | None = None,
  873. order: str = "date",
  874. reverse: bool = False,
  875. max_entries: int | None = None,
  876. paths: Sequence[bytes] | None = None,
  877. rename_detector: "RenameDetector | None" = None,
  878. follow: bool = False,
  879. since: int | None = None,
  880. until: int | None = None,
  881. queue_cls: type | None = None,
  882. ) -> "Walker":
  883. """Obtain a walker for this repository.
  884. Args:
  885. include: Iterable of SHAs of commits to include along with their
  886. ancestors. Defaults to [HEAD]
  887. exclude: Iterable of SHAs of commits to exclude along with their
  888. ancestors, overriding includes.
  889. order: ORDER_* constant specifying the order of results.
  890. Anything other than ORDER_DATE may result in O(n) memory usage.
  891. reverse: If True, reverse the order of output, requiring O(n)
  892. memory.
  893. max_entries: The maximum number of entries to yield, or None for
  894. no limit.
  895. paths: Iterable of file or subtree paths to show entries for.
  896. rename_detector: diff.RenameDetector object for detecting
  897. renames.
  898. follow: If True, follow path across renames/copies. Forces a
  899. default rename_detector.
  900. since: Timestamp to list commits after.
  901. until: Timestamp to list commits before.
  902. queue_cls: A class to use for a queue of commits, supporting the
  903. iterator protocol. The constructor takes a single argument, the Walker.
  904. Returns: A `Walker` object
  905. """
  906. from .walk import Walker, _CommitTimeQueue
  907. if include is None:
  908. include = [self.head()]
  909. # Pass all arguments to Walker explicitly to avoid type issues with **kwargs
  910. return Walker(
  911. self.object_store,
  912. include,
  913. exclude=exclude,
  914. order=order,
  915. reverse=reverse,
  916. max_entries=max_entries,
  917. paths=paths,
  918. rename_detector=rename_detector,
  919. follow=follow,
  920. since=since,
  921. until=until,
  922. get_parents=lambda commit: self.get_parents(commit.id, commit),
  923. queue_cls=queue_cls if queue_cls is not None else _CommitTimeQueue,
  924. )
  925. def __getitem__(self, name: ObjectID | Ref | bytes) -> "ShaFile":
  926. """Retrieve a Git object by SHA1 or ref.
  927. Args:
  928. name: A Git object SHA1 or a ref name
  929. Returns: A `ShaFile` object, such as a Commit or Blob
  930. Raises:
  931. KeyError: when the specified ref or object does not exist
  932. """
  933. if not isinstance(name, bytes):
  934. raise TypeError(f"'name' must be bytestring, not {type(name).__name__:.80}")
  935. if len(name) in (20, 40):
  936. try:
  937. # Try as ObjectID/RawObjectID
  938. return self.object_store[
  939. ObjectID(name) if len(name) == 40 else RawObjectID(name)
  940. ]
  941. except (KeyError, ValueError):
  942. pass
  943. try:
  944. return self.object_store[self.refs[Ref(name)]]
  945. except RefFormatError as exc:
  946. raise KeyError(name) from exc
  947. def __contains__(self, name: bytes) -> bool:
  948. """Check if a specific Git object or ref is present.
  949. Args:
  950. name: Git object SHA1 or ref name
  951. """
  952. if len(name) == 20:
  953. return RawObjectID(name) in self.object_store or Ref(name) in self.refs
  954. elif len(name) == 40 and valid_hexsha(name):
  955. return ObjectID(name) in self.object_store or Ref(name) in self.refs
  956. else:
  957. return Ref(name) in self.refs
  958. def __setitem__(self, name: bytes, value: ShaFile | bytes) -> None:
  959. """Set a ref.
  960. Args:
  961. name: ref name
  962. value: Ref value - either a ShaFile object, or a hex sha
  963. """
  964. if name.startswith(b"refs/") or name == HEADREF:
  965. ref_name = Ref(name)
  966. if isinstance(value, ShaFile):
  967. self.refs[ref_name] = value.id
  968. elif isinstance(value, bytes):
  969. self.refs[ref_name] = ObjectID(value)
  970. else:
  971. raise TypeError(value)
  972. else:
  973. raise ValueError(name)
  974. def __delitem__(self, name: bytes) -> None:
  975. """Remove a ref.
  976. Args:
  977. name: Name of the ref to remove
  978. """
  979. if name.startswith(b"refs/") or name == HEADREF:
  980. del self.refs[Ref(name)]
  981. else:
  982. raise ValueError(name)
    def _get_user_identity(
        self, config: "StackedConfig", kind: str | None = None
    ) -> bytes:
        """Determine the identity to use for new commits.

        Deprecated: use the module-level ``get_user_identity()`` instead.

        Args:
          config: Config stack to read the identity from
          kind: Unused; retained for backward compatibility
        """
        warnings.warn(
            "use get_user_identity() rather than Repo._get_user_identity",
            DeprecationWarning,
        )
        return get_user_identity(config)
  992. def _add_graftpoints(
  993. self, updated_graftpoints: dict[ObjectID, list[ObjectID]]
  994. ) -> None:
  995. """Add or modify graftpoints.
  996. Args:
  997. updated_graftpoints: Dict of commit shas to list of parent shas
  998. """
  999. # Simple validation
  1000. for commit, parents in updated_graftpoints.items():
  1001. for sha in [commit, *parents]:
  1002. check_hexsha(sha, "Invalid graftpoint")
  1003. self._graftpoints.update(updated_graftpoints)
  1004. def _remove_graftpoints(self, to_remove: Sequence[ObjectID] = ()) -> None:
  1005. """Remove graftpoints.
  1006. Args:
  1007. to_remove: List of commit shas
  1008. """
  1009. for sha in to_remove:
  1010. del self._graftpoints[sha]
  1011. def _read_heads(self, name: str) -> list[ObjectID]:
  1012. f = self.get_named_file(name)
  1013. if f is None:
  1014. return []
  1015. with f:
  1016. return [ObjectID(line.strip()) for line in f.readlines() if line.strip()]
    def get_worktree(self) -> "WorkTree":
        """Get the working tree for this repository.

        Returns:
          WorkTree instance for performing working tree operations

        Raises:
          NotImplementedError: If the repository doesn't support working trees
        """
        raise NotImplementedError(
            "Working tree operations not supported by this repository type"
        )
    @replace_me(remove_in="0.26.0")
    def do_commit(
        self,
        message: bytes | None = None,
        committer: bytes | None = None,
        author: bytes | None = None,
        commit_timestamp: float | None = None,
        commit_timezone: int | None = None,
        author_timestamp: float | None = None,
        author_timezone: int | None = None,
        tree: ObjectID | None = None,
        encoding: bytes | None = None,
        ref: Ref | None = HEADREF,
        merge_heads: list[ObjectID] | None = None,
        no_verify: bool = False,
        sign: bool = False,
    ) -> bytes:
        """Create a new commit.

        Deprecated: delegates to ``get_worktree().commit()``; call that
        directly instead.

        If not specified, committer and author default to
        get_user_identity(..., 'COMMITTER')
        and get_user_identity(..., 'AUTHOR') respectively.

        Args:
          message: Commit message (bytes or callable that takes (repo, commit)
            and returns bytes)
          committer: Committer fullname
          author: Author fullname
          commit_timestamp: Commit timestamp (defaults to now)
          commit_timezone: Commit timestamp timezone (defaults to GMT)
          author_timestamp: Author timestamp (defaults to commit
            timestamp)
          author_timezone: Author timestamp timezone
            (defaults to commit timestamp timezone)
          tree: SHA1 of the tree root to use (if not specified the
            current index will be committed).
          encoding: Encoding
          ref: Optional ref to commit to (defaults to current branch).
            If None, creates a dangling commit without updating any ref.
          merge_heads: Merge heads (defaults to .git/MERGE_HEAD)
          no_verify: Skip pre-commit and commit-msg hooks
          sign: GPG Sign the commit (bool, defaults to False,
            pass True to use default GPG key,
            pass a str containing Key ID to use a specific GPG key)

        Returns:
          New commit SHA1
        """
        return self.get_worktree().commit(
            message=message,
            committer=committer,
            author=author,
            commit_timestamp=commit_timestamp,
            commit_timezone=commit_timezone,
            author_timestamp=author_timestamp,
            author_timezone=author_timezone,
            tree=tree,
            encoding=encoding,
            ref=ref,
            merge_heads=merge_heads,
            no_verify=no_verify,
            sign=sign,
        )
  1087. def read_gitfile(f: BinaryIO) -> str:
  1088. """Read a ``.git`` file.
  1089. The first line of the file should start with "gitdir: "
  1090. Args:
  1091. f: File-like object to read from
  1092. Returns: A path
  1093. """
  1094. cs = f.read()
  1095. if not cs.startswith(b"gitdir: "):
  1096. raise ValueError("Expected file to start with 'gitdir: '")
  1097. return cs[len(b"gitdir: ") :].rstrip(b"\r\n").decode("utf-8")
  1098. class UnsupportedVersion(Exception):
  1099. """Unsupported repository version."""
  1100. def __init__(self, version: int) -> None:
  1101. """Initialize UnsupportedVersion exception.
  1102. Args:
  1103. version: The unsupported repository version
  1104. """
  1105. self.version = version
  1106. class UnsupportedExtension(Exception):
  1107. """Unsupported repository extension."""
  1108. def __init__(self, extension: str) -> None:
  1109. """Initialize UnsupportedExtension exception.
  1110. Args:
  1111. extension: The unsupported repository extension
  1112. """
  1113. self.extension = extension
  1114. class Repo(BaseRepo):
  1115. """A git repository backed by local disk.
  1116. To open an existing repository, call the constructor with
  1117. the path of the repository.
  1118. To create a new repository, use the Repo.init class method.
  1119. Note that a repository object may hold on to resources such
  1120. as file handles for performance reasons; call .close() to free
  1121. up those resources.
  1122. Attributes:
  1123. path: Path to the working copy (if it exists) or repository control
  1124. directory (if the repository is bare)
  1125. bare: Whether this is a bare repository
  1126. """
  1127. path: str
  1128. bare: bool
  1129. object_store: DiskObjectStore
  1130. filter_context: "FilterContext | None"
    def __init__(
        self,
        root: str | bytes | os.PathLike[str],
        object_store: PackBasedObjectStore | None = None,
        bare: bool | None = None,
    ) -> None:
        """Open a repository on disk.

        Args:
          root: Path to the repository's root.
          object_store: ObjectStore to use; if omitted, we use the
            repository's default object store
          bare: True if this is a bare repository; autodetected when None.
        """
        root = os.fspath(root)
        if isinstance(root, bytes):
            root = os.fsdecode(root)
        hidden_path = os.path.join(root, CONTROLDIR)
        # Autodetect bareness: a .git file/dir means non-bare; objects/ and
        # refs/ directly under root means bare; otherwise not a repository.
        if bare is None:
            if os.path.isfile(hidden_path) or os.path.isdir(
                os.path.join(hidden_path, OBJECTDIR)
            ):
                bare = False
            elif os.path.isdir(os.path.join(root, OBJECTDIR)) and os.path.isdir(
                os.path.join(root, REFSDIR)
            ):
                bare = True
            else:
                raise NotGitRepository(
                    "No git repository was found at {path}".format(**dict(path=root))
                )
        self.bare = bare
        if bare is False:
            # A .git *file* indicates a linked worktree/submodule pointing
            # at the real control dir via "gitdir: ...".
            if os.path.isfile(hidden_path):
                with open(hidden_path, "rb") as f:
                    path = read_gitfile(f)
                self._controldir = os.path.join(root, path)
            else:
                self._controldir = hidden_path
        else:
            self._controldir = root
        # A "commondir" file points a linked worktree at the main
        # worktree's shared control directory.
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                self._commondir = os.path.join(
                    self.controldir(),
                    os.fsdecode(commondir.read().rstrip(b"\r\n")),
                )
        else:
            self._commondir = self._controldir
        self.path = root
        # Initialize refs early so they're available for config condition matchers
        self.refs = DiskRefsContainer(
            self.commondir(), self._controldir, logger=self._write_reflog
        )
        # Initialize worktrees container
        from .worktree import WorkTreeContainer

        self.worktrees = WorkTreeContainer(self)
        config = self.get_config()
        # Missing or unset core.repositoryformatversion means version 0.
        try:
            repository_format_version = config.get("core", "repositoryformatversion")
            format_version = (
                0
                if repository_format_version is None
                else int(repository_format_version)
            )
        except KeyError:
            format_version = 0
        if format_version not in (0, 1):
            raise UnsupportedVersion(format_version)
        # Track extensions we encounter
        has_reftable_extension = False
        for extension, value in config.items((b"extensions",)):
            if extension.lower() == b"refstorage":
                if value == b"reftable":
                    has_reftable_extension = True
                else:
                    raise UnsupportedExtension(f"refStorage = {value.decode()}")
            elif extension.lower() not in (b"worktreeconfig",):
                raise UnsupportedExtension(extension.decode("utf-8"))
        if object_store is None:
            # Get shared repository permissions from config
            try:
                shared_value = config.get(("core",), "sharedRepository")
                file_mode, dir_mode = parse_shared_repository(shared_value)
            except KeyError:
                file_mode, dir_mode = None, None
            object_store = DiskObjectStore.from_config(
                os.path.join(self.commondir(), OBJECTDIR),
                config,
                file_mode=file_mode,
                dir_mode=dir_mode,
            )
        # Use reftable if extension is configured
        if has_reftable_extension:
            from .reftable import ReftableRefsContainer

            self.refs = ReftableRefsContainer(self.commondir())
            # Update worktrees container after refs change
            self.worktrees = WorkTreeContainer(self)
        BaseRepo.__init__(self, object_store, self.refs)
        self._graftpoints = {}
        graft_file = self.get_named_file(
            os.path.join("info", "grafts"), basedir=self.commondir()
        )
        if graft_file:
            with graft_file:
                self._graftpoints.update(parse_graftpoints(graft_file))
        # The shallow file is parsed with the same parser: each line is a
        # lone sha, which presumably yields a graftpoint with no parents —
        # TODO confirm parse_graftpoints handles single-sha lines that way.
        graft_file = self.get_named_file("shallow", basedir=self.commondir())
        if graft_file:
            with graft_file:
                self._graftpoints.update(parse_graftpoints(graft_file))
        self.hooks["pre-commit"] = PreCommitShellHook(self.path, self.controldir())
        self.hooks["commit-msg"] = CommitMsgShellHook(self.controldir())
        self.hooks["post-commit"] = PostCommitShellHook(self.controldir())
        self.hooks["post-receive"] = PostReceiveShellHook(self.controldir())
        # Initialize filter context as None, will be created lazily
        self.filter_context = None
    def get_worktree(self) -> "WorkTree":
        """Get the working tree for this repository.

        Returns:
          WorkTree instance rooted at this repository's working copy path
        """
        # Imported lazily at call time.
        from .worktree import WorkTree

        return WorkTree(self, self.path)
    def _write_reflog(
        self,
        ref: bytes,
        old_sha: bytes,
        new_sha: bytes,
        committer: bytes | None,
        timestamp: int | None,
        timezone: int | None,
        message: bytes,
    ) -> None:
        """Append a reflog entry for *ref*.

        Args:
          ref: Reference name the update applies to
          old_sha: Previous SHA the ref pointed at
          new_sha: New SHA the ref points at
          committer: Identity to record; defaults to the configured user
          timestamp: Entry timestamp; defaults to the current time
          timezone: Timezone offset; defaults to 0
          message: Reflog message
        """
        from .reflog import format_reflog_line

        path = self._reflog_path(ref)
        # Get shared repository permissions
        file_mode, dir_mode = self._get_shared_repository_permissions()
        # Create directory with appropriate permissions
        parent_dir = os.path.dirname(path)
        # Create directory tree, setting permissions on each level if needed.
        # Collect the missing levels innermost-first, then create them
        # outermost-first after reversing.
        parts = []
        current = parent_dir
        while current and not os.path.exists(current):
            parts.append(current)
            current = os.path.dirname(current)
        parts.reverse()
        for part in parts:
            os.mkdir(part)
            # mkdir honours the umask, so chmod explicitly when shared
            # repository permissions are configured.
            if dir_mode is not None:
                os.chmod(part, dir_mode)
        if committer is None:
            config = self.get_config_stack()
            committer = get_user_identity(config)
        check_user_identity(committer)
        if timestamp is None:
            timestamp = int(time.time())
        if timezone is None:
            timezone = 0  # FIXME
        with open(path, "ab") as f:
            f.write(
                format_reflog_line(
                    old_sha, new_sha, committer, timestamp, timezone, message
                )
                + b"\n"
            )
        # Set file permissions (open() respects umask, so we need chmod to set the actual mode)
        # Always chmod to ensure correct permissions even if file already existed
        if file_mode is not None:
            os.chmod(path, file_mode)
  1300. def _reflog_path(self, ref: bytes) -> str:
  1301. if ref.startswith((b"main-worktree/", b"worktrees/")):
  1302. raise NotImplementedError(f"refs {ref.decode()} are not supported")
  1303. base = self.controldir() if is_per_worktree_ref(ref) else self.commondir()
  1304. return os.path.join(base, "logs", os.fsdecode(ref))
  1305. def read_reflog(self, ref: bytes) -> Generator[reflog.Entry, None, None]:
  1306. """Read reflog entries for a reference.
  1307. Args:
  1308. ref: Reference name (e.g. b'HEAD', b'refs/heads/master')
  1309. Yields:
  1310. reflog.Entry objects in chronological order (oldest first)
  1311. """
  1312. from .reflog import read_reflog
  1313. path = self._reflog_path(ref)
  1314. try:
  1315. with open(path, "rb") as f:
  1316. yield from read_reflog(f)
  1317. except FileNotFoundError:
  1318. return
  1319. @classmethod
  1320. def discover(cls, start: str | bytes | os.PathLike[str] = ".") -> "Repo":
  1321. """Iterate parent directories to discover a repository.
  1322. Return a Repo object for the first parent directory that looks like a
  1323. Git repository.
  1324. Args:
  1325. start: The directory to start discovery from (defaults to '.')
  1326. """
  1327. path = os.path.abspath(start)
  1328. while True:
  1329. try:
  1330. return cls(path)
  1331. except NotGitRepository:
  1332. new_path, _tail = os.path.split(path)
  1333. if new_path == path: # Root reached
  1334. break
  1335. path = new_path
  1336. start_str = os.fspath(start)
  1337. if isinstance(start_str, bytes):
  1338. start_str = start_str.decode("utf-8")
  1339. raise NotGitRepository(f"No git repository was found at {start_str}")
    def controldir(self) -> str:
        """Return the path of the control directory.

        For a non-bare repository this is the ``.git`` directory (or the
        directory a ``.git`` file points at); for a bare one it is the root.
        """
        return self._controldir
    def commondir(self) -> str:
        """Return the path of the common directory.

        For a main working tree, it is identical to controldir().

        For a linked working tree, it is the control directory of the
        main working tree.
        """
        return self._commondir
  1350. def _determine_file_mode(self) -> bool:
  1351. """Probe the file-system to determine whether permissions can be trusted.
  1352. Returns: True if permissions can be trusted, False otherwise.
  1353. """
  1354. fname = os.path.join(self.path, ".probe-permissions")
  1355. with open(fname, "w") as f:
  1356. f.write("")
  1357. st1 = os.lstat(fname)
  1358. try:
  1359. os.chmod(fname, st1.st_mode ^ stat.S_IXUSR)
  1360. except PermissionError:
  1361. return False
  1362. st2 = os.lstat(fname)
  1363. os.unlink(fname)
  1364. mode_differs = st1.st_mode != st2.st_mode
  1365. st2_has_exec = (st2.st_mode & stat.S_IXUSR) != 0
  1366. return mode_differs and st2_has_exec
  1367. def _determine_symlinks(self) -> bool:
  1368. """Probe the filesystem to determine whether symlinks can be created.
  1369. Returns: True if symlinks can be created, False otherwise.
  1370. """
  1371. # TODO(jelmer): Actually probe disk / look at filesystem
  1372. return sys.platform != "win32"
  1373. def _get_shared_repository_permissions(
  1374. self,
  1375. ) -> tuple[int | None, int | None]:
  1376. """Get shared repository file and directory permissions from config.
  1377. Returns:
  1378. tuple of (file_mask, directory_mask) or (None, None) if not shared
  1379. """
  1380. try:
  1381. config = self.get_config()
  1382. value = config.get(("core",), "sharedRepository")
  1383. return parse_shared_repository(value)
  1384. except KeyError:
  1385. return (None, None)
  1386. def _put_named_file(self, path: str, contents: bytes) -> None:
  1387. """Write a file to the control dir with the given name and contents.
  1388. Args:
  1389. path: The path to the file, relative to the control dir.
  1390. contents: A string to write to the file.
  1391. """
  1392. path = path.lstrip(os.path.sep)
  1393. # Get shared repository permissions
  1394. file_mode, _ = self._get_shared_repository_permissions()
  1395. # Create file with appropriate permissions
  1396. if file_mode is not None:
  1397. with GitFile(
  1398. os.path.join(self.controldir(), path), "wb", mask=file_mode
  1399. ) as f:
  1400. f.write(contents)
  1401. else:
  1402. with GitFile(os.path.join(self.controldir(), path), "wb") as f:
  1403. f.write(contents)
  1404. def _del_named_file(self, path: str) -> None:
  1405. try:
  1406. os.unlink(os.path.join(self.controldir(), path))
  1407. except FileNotFoundError:
  1408. return
  1409. def get_named_file(
  1410. self,
  1411. path: str | bytes,
  1412. basedir: str | None = None,
  1413. ) -> BinaryIO | None:
  1414. """Get a file from the control dir with a specific name.
  1415. Although the filename should be interpreted as a filename relative to
  1416. the control dir in a disk-based Repo, the object returned need not be
  1417. pointing to a file in that location.
  1418. Args:
  1419. path: The path to the file, relative to the control dir.
  1420. basedir: Optional argument that specifies an alternative to the
  1421. control dir.
  1422. Returns: An open file object, or None if the file does not exist.
  1423. """
  1424. # TODO(dborowitz): sanitize filenames, since this is used directly by
  1425. # the dumb web serving code.
  1426. if basedir is None:
  1427. basedir = self.controldir()
  1428. if isinstance(path, bytes):
  1429. path = path.decode("utf-8")
  1430. path = path.lstrip(os.path.sep)
  1431. try:
  1432. return open(os.path.join(basedir, path), "rb")
  1433. except FileNotFoundError:
  1434. return None
  1435. def index_path(self) -> str:
  1436. """Return path to the index file."""
  1437. return os.path.join(self.controldir(), INDEX_FILENAME)
  1438. def open_index(self) -> "Index":
  1439. """Open the index for this repository.
  1440. Raises:
  1441. NoIndexPresent: If no index is present
  1442. Returns: The matching `Index`
  1443. """
  1444. from .index import Index
  1445. if not self.has_index():
  1446. raise NoIndexPresent
  1447. # Check for manyFiles feature configuration
  1448. config = self.get_config_stack()
  1449. many_files = config.get_boolean(b"feature", b"manyFiles", False)
  1450. skip_hash = False
  1451. index_version = None
  1452. if many_files:
  1453. # When feature.manyFiles is enabled, set index.version=4 and index.skipHash=true
  1454. try:
  1455. index_version_str = config.get(b"index", b"version")
  1456. index_version = int(index_version_str)
  1457. except KeyError:
  1458. index_version = 4 # Default to version 4 for manyFiles
  1459. skip_hash = config.get_boolean(b"index", b"skipHash", True)
  1460. else:
  1461. # Check for explicit index settings
  1462. try:
  1463. index_version_str = config.get(b"index", b"version")
  1464. index_version = int(index_version_str)
  1465. except KeyError:
  1466. index_version = None
  1467. skip_hash = config.get_boolean(b"index", b"skipHash", False)
  1468. # Get shared repository permissions for index file
  1469. file_mode, _ = self._get_shared_repository_permissions()
  1470. return Index(
  1471. self.index_path(),
  1472. skip_hash=skip_hash,
  1473. version=index_version,
  1474. file_mode=file_mode,
  1475. )
    def has_index(self) -> bool:
        """Check if an index is present.

        Returns: True for non-bare repositories, False for bare ones.
        """
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare
    @replace_me(remove_in="0.26.0")
    def stage(
        self,
        fs_paths: str
        | bytes
        | os.PathLike[str]
        | Iterable[str | bytes | os.PathLike[str]],
    ) -> None:
        """Stage a set of paths.

        Deprecated: use ``get_worktree().stage()`` instead; this wrapper is
        scheduled for removal in 0.26.0.

        Args:
          fs_paths: List of paths, relative to the repository path
        """
        return self.get_worktree().stage(fs_paths)
    @replace_me(remove_in="0.26.0")
    def unstage(self, fs_paths: Sequence[str]) -> None:
        """Unstage specific file in the index.

        Deprecated: use ``get_worktree().unstage()`` instead; this wrapper is
        scheduled for removal in 0.26.0.

        Args:
          fs_paths: a list of files to unstage,
            relative to the repository path.
        """
        return self.get_worktree().unstage(fs_paths)
    def clone(
        self,
        target_path: str | bytes | os.PathLike[str],
        *,
        mkdir: bool = True,
        bare: bool = False,
        origin: bytes = b"origin",
        checkout: bool | None = None,
        branch: bytes | None = None,
        progress: Callable[[str], None] | None = None,
        depth: int | None = None,
        symlinks: bool | None = None,
    ) -> "Repo":
        """Clone this repository.

        Args:
          target_path: Target path
          mkdir: Create the target directory
          bare: Whether to create a bare repository
          checkout: Whether or not to check-out HEAD after cloning
          origin: Base name for refs in target repository
            cloned from this repository
          branch: Optional branch or tag to be used as HEAD in the new repository
            instead of this repository's HEAD.
          progress: Optional progress function (currently not used by this
            implementation)
          depth: Depth at which to fetch
          symlinks: Symlinks setting (default to autodetect)

        Returns: Created repository as `Repo`
        """
        encoded_path = os.fsencode(self.path)

        if mkdir:
            os.mkdir(target_path)

        try:
            if not bare:
                target = Repo.init(target_path, symlinks=symlinks)
                if checkout is None:
                    # Non-bare clones check out by default.
                    checkout = True
            else:
                if checkout:
                    raise ValueError("checkout and bare are incompatible")
                target = Repo.init_bare(target_path)

            try:
                # Configure the "origin" remote in the new repository.
                target_config = target.get_config()
                target_config.set((b"remote", origin), b"url", encoded_path)
                target_config.set(
                    (b"remote", origin),
                    b"fetch",
                    b"+refs/heads/*:refs/remotes/" + origin + b"/*",
                )
                target_config.write_to_path()

                ref_message = b"clone: from " + encoded_path
                # Transfer objects first, then mirror branches and tags.
                self.fetch(target, depth=depth)
                target.refs.import_refs(
                    Ref(b"refs/remotes/" + origin),
                    self.refs.as_dict(Ref(b"refs/heads")),
                    message=ref_message,
                )
                target.refs.import_refs(
                    Ref(b"refs/tags"),
                    self.refs.as_dict(Ref(b"refs/tags")),
                    message=ref_message,
                )

                head_chain, origin_sha = self.refs.follow(HEADREF)
                origin_head = head_chain[-1] if head_chain else None
                if origin_sha and not origin_head:
                    # set detached HEAD
                    target.refs[HEADREF] = origin_sha
                else:
                    _set_origin_head(target.refs, origin, origin_head)

                head_ref = _set_default_branch(
                    target.refs, origin, origin_head, branch, ref_message
                )

                # Update target head
                if head_ref:
                    head = _set_head(target.refs, head_ref, ref_message)
                else:
                    head = None

                if checkout and head is not None:
                    # Populate the working tree from the new HEAD.
                    target.get_worktree().reset_index()
            except BaseException:
                # Close the partially-initialised target before re-raising.
                target.close()
                raise
        except BaseException:
            # Remove the target directory only if we created it ourselves.
            if mkdir:
                import shutil

                shutil.rmtree(target_path)
            raise
        return target
    @replace_me(remove_in="0.26.0")
    def reset_index(self, tree: ObjectID | None = None) -> None:
        """Reset the index back to a specific tree.

        Deprecated: use ``get_worktree().reset_index()`` instead; this wrapper
        is scheduled for removal in 0.26.0.

        Args:
          tree: Tree SHA to reset to, None for current HEAD tree.
        """
        return self.get_worktree().reset_index(tree)
    def _get_config_condition_matchers(self) -> dict[str, "ConditionMatcher"]:
        """Get condition matchers for includeIf conditions.

        Returns a dict of condition prefix (e.g. ``"gitdir:"``) to matcher
        function; used when parsing config files with ``includeIf`` sections.
        """
        from pathlib import Path

        from .config import ConditionMatcher, match_glob_pattern

        # Add gitdir matchers
        def match_gitdir(pattern: str, case_sensitive: bool = True) -> bool:
            """Match gitdir against a pattern.

            Args:
              pattern: Pattern to match against
              case_sensitive: Whether to match case-sensitively

            Returns:
              True if gitdir matches pattern
            """
            # Handle relative patterns (starting with ./)
            if pattern.startswith("./"):
                # Can't handle relative patterns without config directory context
                return False

            # Normalize repository path
            try:
                repo_path = str(Path(self._controldir).resolve())
            except (OSError, ValueError):
                # Unresolvable control dir: treat as non-matching.
                return False

            # Expand ~ in pattern and normalize
            pattern = os.path.expanduser(pattern)

            # Normalize pattern following Git's rules
            pattern = pattern.replace("\\", "/")
            if not pattern.startswith(("~/", "./", "/", "**")):
                # Check for Windows absolute path (e.g. "C:...")
                if len(pattern) >= 2 and pattern[1] == ":":
                    pass
                else:
                    # Bare patterns match anywhere under any directory.
                    pattern = "**/" + pattern
            if pattern.endswith("/"):
                # A trailing slash matches everything below that directory.
                pattern = pattern + "**"

            # Use the existing _match_gitdir_pattern function
            from .config import _match_gitdir_pattern

            pattern_bytes = pattern.encode("utf-8", errors="replace")
            repo_path_bytes = repo_path.encode("utf-8", errors="replace")

            return _match_gitdir_pattern(
                repo_path_bytes, pattern_bytes, ignorecase=not case_sensitive
            )

        # Add onbranch matcher
        def match_onbranch(pattern: str) -> bool:
            """Match current branch against a pattern.

            Args:
              pattern: Pattern to match against

            Returns:
              True if current branch matches pattern
            """
            try:
                # Get the current branch using refs
                ref_chain, _ = self.refs.follow(HEADREF)
                head_ref = ref_chain[-1]  # Get the final resolved ref
            except KeyError:
                # No HEAD (e.g. empty repo): no branch to match.
                pass
            else:
                if head_ref and head_ref.startswith(b"refs/heads/"):
                    # Extract branch name from ref
                    branch = extract_branch_name(head_ref).decode(
                        "utf-8", errors="replace"
                    )
                    return match_glob_pattern(branch, pattern)
            return False

        matchers: dict[str, ConditionMatcher] = {
            "onbranch:": match_onbranch,
            # gitdir: matches case-sensitively, gitdir/i: case-insensitively.
            "gitdir:": lambda pattern: match_gitdir(pattern, True),
            "gitdir/i:": lambda pattern: match_gitdir(pattern, False),
        }
        return matchers
  1667. def get_worktree_config(self) -> "ConfigFile":
  1668. """Get the worktree-specific config.
  1669. Returns:
  1670. ConfigFile object for the worktree config
  1671. """
  1672. from .config import ConfigFile
  1673. path = os.path.join(self.commondir(), "config.worktree")
  1674. try:
  1675. # Pass condition matchers for includeIf evaluation
  1676. condition_matchers = self._get_config_condition_matchers()
  1677. return ConfigFile.from_path(path, condition_matchers=condition_matchers)
  1678. except FileNotFoundError:
  1679. cf = ConfigFile()
  1680. cf.path = path
  1681. return cf
  1682. def get_config(self) -> "ConfigFile":
  1683. """Retrieve the config object.
  1684. Returns: `ConfigFile` object for the ``.git/config`` file.
  1685. """
  1686. from .config import ConfigFile
  1687. path = os.path.join(self._commondir, "config")
  1688. try:
  1689. # Pass condition matchers for includeIf evaluation
  1690. condition_matchers = self._get_config_condition_matchers()
  1691. return ConfigFile.from_path(path, condition_matchers=condition_matchers)
  1692. except FileNotFoundError:
  1693. ret = ConfigFile()
  1694. ret.path = path
  1695. return ret
  1696. def get_rebase_state_manager(self) -> "RebaseStateManager":
  1697. """Get the appropriate rebase state manager for this repository.
  1698. Returns: DiskRebaseStateManager instance
  1699. """
  1700. import os
  1701. from .rebase import DiskRebaseStateManager
  1702. path = os.path.join(self.controldir(), "rebase-merge")
  1703. return DiskRebaseStateManager(path)
  1704. def get_description(self) -> bytes | None:
  1705. """Retrieve the description of this repository.
  1706. Returns: Description as bytes or None.
  1707. """
  1708. path = os.path.join(self._controldir, "description")
  1709. try:
  1710. with GitFile(path, "rb") as f:
  1711. return f.read()
  1712. except FileNotFoundError:
  1713. return None
    def __repr__(self) -> str:
        """Return a debug representation including the repository path."""
        return f"<Repo at {self.path!r}>"
    def set_description(self, description: bytes) -> None:
        """Set the description for this repository.

        Args:
          description: Text to set as description for this repository.
        """
        # Stored in the "description" file inside the control directory.
        self._put_named_file("description", description)
    @classmethod
    def _init_maybe_bare(
        cls,
        path: str | bytes | os.PathLike[str],
        controldir: str | bytes | os.PathLike[str],
        bare: bool,
        object_store: PackBasedObjectStore | None = None,
        config: "StackedConfig | None" = None,
        default_branch: bytes | None = None,
        symlinks: bool | None = None,
        format: int | None = None,
        shared_repository: str | bool | None = None,
    ) -> "Repo":
        """Shared implementation behind ``init`` and ``init_bare``.

        Args:
          path: Repository path (working tree root, or control dir if bare).
          controldir: Directory in which the control files are created.
          bare: Whether the new repository is bare.
          object_store: Optional object store to use instead of creating one.
          config: Config used to look up ``init.defaultBranch`` when
            ``default_branch`` is not given.
          default_branch: Initial branch name.
          symlinks: Whether to support symlinks.
          format: Repository format version.
          shared_repository: Shared repository setting (group, all, umask,
            or octal).

        Returns: `Repo` instance
        """
        path = os.fspath(path)
        if isinstance(path, bytes):
            path = os.fsdecode(path)
        controldir = os.fspath(controldir)
        if isinstance(controldir, bytes):
            controldir = os.fsdecode(controldir)
        # Determine shared repository permissions early
        file_mode: int | None = None
        dir_mode: int | None = None
        if shared_repository is not None:
            file_mode, dir_mode = parse_shared_repository(shared_repository)

        # Create base directories with appropriate permissions
        for d in BASE_DIRECTORIES:
            dir_path = os.path.join(controldir, *d)
            os.mkdir(dir_path)
            if dir_mode is not None:
                os.chmod(dir_path, dir_mode)

        if object_store is None:
            object_store = DiskObjectStore.init(
                os.path.join(controldir, OBJECTDIR),
                file_mode=file_mode,
                dir_mode=dir_mode,
            )
        ret = cls(path, bare=bare, object_store=object_store)
        if default_branch is None:
            if config is None:
                from .config import StackedConfig

                config = StackedConfig.default()
            try:
                default_branch = config.get("init", "defaultBranch")
            except KeyError:
                default_branch = DEFAULT_BRANCH
        # Point HEAD at the (still unborn) default branch.
        ret.refs.set_symbolic_ref(HEADREF, local_branch_name(default_branch))
        ret._init_files(
            bare=bare,
            symlinks=symlinks,
            format=format,
            shared_repository=shared_repository,
        )
        return ret
    @classmethod
    def init(
        cls,
        path: str | bytes | os.PathLike[str],
        *,
        mkdir: bool = False,
        config: "StackedConfig | None" = None,
        default_branch: bytes | None = None,
        symlinks: bool | None = None,
        format: int | None = None,
        shared_repository: str | bool | None = None,
    ) -> "Repo":
        """Create a new repository.

        Args:
          path: Path in which to create the repository
          mkdir: Whether to create the directory
          config: Configuration object
          default_branch: Default branch name
          symlinks: Whether to support symlinks
          format: Repository format version (defaults to 0)
          shared_repository: Shared repository setting (group, all, umask, or octal)

        Returns: `Repo` instance
        """
        path = os.fspath(path)
        if isinstance(path, bytes):
            path = os.fsdecode(path)
        if mkdir:
            os.mkdir(path)
        # Non-bare: control files live in a CONTROLDIR (".git") subdirectory.
        controldir = os.path.join(path, CONTROLDIR)
        os.mkdir(controldir)
        # Hide the control directory on platforms that support hiding.
        _set_filesystem_hidden(controldir)
        return cls._init_maybe_bare(
            path,
            controldir,
            False,
            config=config,
            default_branch=default_branch,
            symlinks=symlinks,
            format=format,
            shared_repository=shared_repository,
        )
    @classmethod
    def _init_new_working_directory(
        cls,
        path: str | bytes | os.PathLike[str],
        main_repo: "Repo",
        identifier: str | None = None,
        mkdir: bool = False,
    ) -> "Repo":
        """Create a new working directory linked to a repository.

        Args:
          path: Path in which to create the working tree.
          main_repo: Main repository to reference
          identifier: Worktree identifier
          mkdir: Whether to create the directory

        Returns: `Repo` instance
        """
        path = os.fspath(path)
        if isinstance(path, bytes):
            path = os.fsdecode(path)
        if mkdir:
            os.mkdir(path)
        if identifier is None:
            # Default the worktree identifier to the directory's basename.
            identifier = os.path.basename(path)
        # Ensure we use absolute path for the worktree control directory
        main_controldir = os.path.abspath(main_repo.controldir())
        main_worktreesdir = os.path.join(main_controldir, WORKTREES)
        worktree_controldir = os.path.join(main_worktreesdir, identifier)
        gitdirfile = os.path.join(path, CONTROLDIR)
        # The worktree's ".git" is a file pointing at its control directory.
        with open(gitdirfile, "wb") as f:
            f.write(b"gitdir: " + os.fsencode(worktree_controldir) + b"\n")
        # Get shared repository permissions from main repository
        _, dir_mode = main_repo._get_shared_repository_permissions()
        # Create directories with appropriate permissions
        try:
            os.mkdir(main_worktreesdir)
            if dir_mode is not None:
                os.chmod(main_worktreesdir, dir_mode)
        except FileExistsError:
            pass
        try:
            os.mkdir(worktree_controldir)
            if dir_mode is not None:
                os.chmod(worktree_controldir, dir_mode)
        except FileExistsError:
            pass
        # Cross-link the worktree control dir back to the ".git" file, and
        # record the main repository's control dir as the common dir.
        with open(os.path.join(worktree_controldir, GITDIR), "wb") as f:
            f.write(os.fsencode(gitdirfile) + b"\n")
        with open(os.path.join(worktree_controldir, COMMONDIR), "wb") as f:
            f.write(b"../..\n")
        # Start the new worktree at the main repository's current HEAD.
        with open(os.path.join(worktree_controldir, "HEAD"), "wb") as f:
            f.write(main_repo.head() + b"\n")
        r = cls(os.path.normpath(path))
        # Populate the new working tree from HEAD.
        r.get_worktree().reset_index()
        return r
    @classmethod
    def init_bare(
        cls,
        path: str | bytes | os.PathLike[str],
        *,
        mkdir: bool = False,
        object_store: PackBasedObjectStore | None = None,
        config: "StackedConfig | None" = None,
        default_branch: bytes | None = None,
        format: int | None = None,
        shared_repository: str | bool | None = None,
    ) -> "Repo":
        """Create a new bare repository.

        ``path`` should already exist and be an empty directory.

        Args:
          path: Path to create bare repository in
          mkdir: Whether to create the directory
          object_store: Object store to use
          config: Configuration object
          default_branch: Default branch name
          format: Repository format version (defaults to 0)
          shared_repository: Shared repository setting (group, all, umask, or octal)

        Returns: a `Repo` instance
        """
        path = os.fspath(path)
        if isinstance(path, bytes):
            path = os.fsdecode(path)
        if mkdir:
            os.mkdir(path)
        # Bare: the repository path itself is the control directory.
        return cls._init_maybe_bare(
            path,
            path,
            True,
            object_store=object_store,
            config=config,
            default_branch=default_branch,
            format=format,
            shared_repository=shared_repository,
        )

    # Backwards-compatible alias for init_bare.
    create = init_bare
  1911. def close(self) -> None:
  1912. """Close any files opened by this repository."""
  1913. self.object_store.close()
  1914. # Clean up filter context if it was created
  1915. if self.filter_context is not None:
  1916. self.filter_context.close()
  1917. self.filter_context = None
    def __enter__(self) -> "Repo":
        """Enter context manager.

        Returns: this repository; ``__exit__`` will close it.
        """
        return self
    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        """Exit context manager and close repository."""
        # Close unconditionally; any in-flight exception propagates.
        self.close()
  1929. def _read_gitattributes(self) -> dict[bytes, dict[bytes, bytes]]:
  1930. """Read .gitattributes file from working tree.
  1931. Returns:
  1932. Dictionary mapping file patterns to attributes
  1933. """
  1934. gitattributes = {}
  1935. gitattributes_path = os.path.join(self.path, ".gitattributes")
  1936. if os.path.exists(gitattributes_path):
  1937. with open(gitattributes_path, "rb") as f:
  1938. for line in f:
  1939. line = line.strip()
  1940. if not line or line.startswith(b"#"):
  1941. continue
  1942. parts = line.split()
  1943. if len(parts) < 2:
  1944. continue
  1945. pattern = parts[0]
  1946. attrs = {}
  1947. for attr in parts[1:]:
  1948. if attr.startswith(b"-"):
  1949. # Unset attribute
  1950. attrs[attr[1:]] = b"false"
  1951. elif b"=" in attr:
  1952. # Set to value
  1953. key, value = attr.split(b"=", 1)
  1954. attrs[key] = value
  1955. else:
  1956. # Set attribute
  1957. attrs[attr] = b"true"
  1958. gitattributes[pattern] = attrs
  1959. return gitattributes
  1960. def get_blob_normalizer(self) -> "FilterBlobNormalizer":
  1961. """Return a BlobNormalizer object."""
  1962. from .filters import FilterBlobNormalizer, FilterContext, FilterRegistry
  1963. # Get fresh configuration and GitAttributes
  1964. config_stack = self.get_config_stack()
  1965. git_attributes = self.get_gitattributes()
  1966. # Lazily create FilterContext if needed
  1967. if self.filter_context is None:
  1968. filter_registry = FilterRegistry(config_stack, self)
  1969. self.filter_context = FilterContext(filter_registry)
  1970. else:
  1971. # Refresh the context with current config to handle config changes
  1972. self.filter_context.refresh_config(config_stack)
  1973. # Return a new FilterBlobNormalizer with the context
  1974. return FilterBlobNormalizer(
  1975. config_stack, git_attributes, filter_context=self.filter_context
  1976. )
    def get_gitattributes(self, tree: bytes | None = None) -> "GitAttributes":
        """Read gitattributes for the repository.

        Sources are combined in order: the tree's ``.gitattributes``, then
        ``.git/info/attributes``, then the working directory's
        ``.gitattributes``.

        Args:
          tree: Tree SHA to read .gitattributes from (defaults to HEAD)

        Returns:
          GitAttributes object that can be used to match paths
        """
        from .attrs import (
            GitAttributes,
            Pattern,
            parse_git_attributes,
        )

        patterns = []

        # Read system gitattributes (TODO: implement this)
        # Read global gitattributes (TODO: implement this)

        # Read repository .gitattributes from index/tree
        if tree is None:
            try:
                # Try to get from HEAD
                head = self[b"HEAD"]
                if isinstance(head, Tag):
                    # Peel annotated tags down to the referenced object.
                    _cls, obj = head.object
                    head = self.get_object(obj)
                assert isinstance(head, Commit)
                tree = head.tree
            except KeyError:
                # No HEAD, no attributes from tree
                pass

        if tree is not None:
            try:
                tree_obj = self[tree]
                assert isinstance(tree_obj, Tree)
                if b".gitattributes" in tree_obj:
                    _, attrs_sha = tree_obj[b".gitattributes"]
                    attrs_blob = self[attrs_sha]
                    if isinstance(attrs_blob, Blob):
                        attrs_data = BytesIO(attrs_blob.data)
                        for pattern_bytes, attrs in parse_git_attributes(attrs_data):
                            pattern = Pattern(pattern_bytes)
                            patterns.append((pattern, attrs))
            except (KeyError, NotTreeError):
                # Missing or non-tree object: no tree-level attributes.
                pass

        # Read .git/info/attributes
        info_attrs_path = os.path.join(self.controldir(), "info", "attributes")
        if os.path.exists(info_attrs_path):
            with open(info_attrs_path, "rb") as f:
                for pattern_bytes, attrs in parse_git_attributes(f):
                    pattern = Pattern(pattern_bytes)
                    patterns.append((pattern, attrs))

        # Read .gitattributes from working directory (if it exists)
        working_attrs_path = os.path.join(self.path, ".gitattributes")
        if os.path.exists(working_attrs_path):
            with open(working_attrs_path, "rb") as f:
                for pattern_bytes, attrs in parse_git_attributes(f):
                    pattern = Pattern(pattern_bytes)
                    patterns.append((pattern, attrs))

        return GitAttributes(patterns)
    @replace_me(remove_in="0.26.0")
    def _sparse_checkout_file_path(self) -> str:
        """Return the path of the sparse-checkout file in this repo's control dir.

        Deprecated: use ``get_worktree()._sparse_checkout_file_path()``.
        """
        return self.get_worktree()._sparse_checkout_file_path()
    @replace_me(remove_in="0.26.0")
    def configure_for_cone_mode(self) -> None:
        """Ensure the repository is configured for cone-mode sparse-checkout.

        Deprecated: use ``get_worktree().configure_for_cone_mode()``.
        """
        return self.get_worktree().configure_for_cone_mode()
    @replace_me(remove_in="0.26.0")
    def infer_cone_mode(self) -> bool:
        """Return True if 'core.sparseCheckoutCone' is set to 'true' in config, else False.

        Deprecated: use ``get_worktree().infer_cone_mode()``.
        """
        return self.get_worktree().infer_cone_mode()
    @replace_me(remove_in="0.26.0")
    def get_sparse_checkout_patterns(self) -> list[str]:
        """Return a list of sparse-checkout patterns from info/sparse-checkout.

        Deprecated: use ``get_worktree().get_sparse_checkout_patterns()``.

        Returns:
          A list of patterns. Returns an empty list if the file is missing.
        """
        return self.get_worktree().get_sparse_checkout_patterns()
    @replace_me(remove_in="0.26.0")
    def set_sparse_checkout_patterns(self, patterns: Sequence[str]) -> None:
        """Write the given sparse-checkout patterns into info/sparse-checkout.

        Deprecated: use ``get_worktree().set_sparse_checkout_patterns()``.

        Creates the info/ directory if it does not exist.

        Args:
          patterns: A list of gitignore-style patterns to store.
        """
        return self.get_worktree().set_sparse_checkout_patterns(patterns)
    @replace_me(remove_in="0.26.0")
    def set_cone_mode_patterns(self, dirs: Sequence[str] | None = None) -> None:
        """Write the given cone-mode directory patterns into info/sparse-checkout.

        Deprecated: use ``get_worktree().set_cone_mode_patterns()``.

        For each directory to include, add an inclusion line that "undoes" the prior
        ``!/*/`` 'exclude' that re-includes that directory and everything under it.
        Never add the same line twice.
        """
        return self.get_worktree().set_cone_mode_patterns(dirs)
class MemoryRepo(BaseRepo):
    """Repo that stores refs, objects, and named files in memory.

    MemoryRepos are always bare: they have no working tree and no index, since
    those have a stronger dependency on the filesystem.
    """

    # Lazily created by get_blob_normalizer(); None until first use.
    filter_context: "FilterContext | None"
    def __init__(self) -> None:
        """Create a new repository in memory."""
        from .config import ConfigFile

        # The reflog list must exist before the refs container is created,
        # since the container's logger callback appends to it.
        self._reflog: list[Any] = []
        refs_container = DictRefsContainer({}, logger=self._append_reflog)
        BaseRepo.__init__(self, MemoryObjectStore(), refs_container)
        self._named_files: dict[str, bytes] = {}
        # Memory repositories are always bare (no working tree or index).
        self.bare = True
        self._config = ConfigFile()
        self._description: bytes | None = None
        # Lazily created by get_blob_normalizer().
        self.filter_context = None
  2086. def _append_reflog(
  2087. self,
  2088. ref: bytes,
  2089. old_sha: bytes | None,
  2090. new_sha: bytes | None,
  2091. committer: bytes | None,
  2092. timestamp: int | None,
  2093. timezone: int | None,
  2094. message: bytes | None,
  2095. ) -> None:
  2096. self._reflog.append(
  2097. (ref, old_sha, new_sha, committer, timestamp, timezone, message)
  2098. )
    def set_description(self, description: bytes) -> None:
        """Set the description for this repository.

        Args:
          description: Text to set as description
        """
        # Kept in memory only; nothing is written to disk.
        self._description = description
    def get_description(self) -> bytes | None:
        """Get the description of this repository.

        Returns:
          Repository description as bytes, or None if never set
        """
        return self._description
    def _determine_file_mode(self) -> bool:
        """Probe the file-system to determine whether permissions can be trusted.

        Returns: True if permissions can be trusted, False otherwise.
        """
        # No filesystem to probe; mirror the platform default.
        return sys.platform != "win32"
    def _determine_symlinks(self) -> bool:
        """Probe the file-system to determine whether symlinks can be created.

        Returns: True if symlinks can be created, False otherwise.
        """
        # (Previous docstring was copy-pasted from _determine_file_mode.)
        # No filesystem to probe; mirror the platform default.
        return sys.platform != "win32"
    def _put_named_file(self, path: str, contents: bytes) -> None:
        """Write a file to the control dir with the given name and contents.

        Args:
          path: The path to the file, relative to the control dir.
          contents: A string to write to the file.
        """
        # Stored in the in-memory mapping rather than on disk.
        self._named_files[path] = contents
  2128. def _del_named_file(self, path: str) -> None:
  2129. try:
  2130. del self._named_files[path]
  2131. except KeyError:
  2132. pass
  2133. def get_named_file(
  2134. self,
  2135. path: str | bytes,
  2136. basedir: str | None = None,
  2137. ) -> BytesIO | None:
  2138. """Get a file from the control dir with a specific name.
  2139. Although the filename should be interpreted as a filename relative to
  2140. the control dir in a disk-baked Repo, the object returned need not be
  2141. pointing to a file in that location.
  2142. Args:
  2143. path: The path to the file, relative to the control dir.
  2144. basedir: Optional base directory for the path
  2145. Returns: An open file object, or None if the file does not exist.
  2146. """
  2147. path_str = path.decode() if isinstance(path, bytes) else path
  2148. contents = self._named_files.get(path_str, None)
  2149. if contents is None:
  2150. return None
  2151. return BytesIO(contents)
    def open_index(self) -> "Index":
        """Fail to open index for this repo, since it is bare.

        Raises:
          NoIndexPresent: Always, since memory repositories are bare
        """
        raise NoIndexPresent
    def get_config(self) -> "ConfigFile":
        """Retrieve the config object.

        Returns: the in-memory `ConfigFile` object created in ``__init__``.
        """
        return self._config
    def get_rebase_state_manager(self) -> "RebaseStateManager":
        """Get the appropriate rebase state manager for this repository.

        Returns: MemoryRebaseStateManager instance
        """
        from .rebase import MemoryRebaseStateManager

        # Rebase state is kept in memory alongside the repository itself.
        return MemoryRebaseStateManager(self)
  2169. def get_blob_normalizer(self) -> "FilterBlobNormalizer":
  2170. """Return a BlobNormalizer object for checkin/checkout operations."""
  2171. from .filters import FilterBlobNormalizer, FilterContext, FilterRegistry
  2172. # Get fresh configuration and GitAttributes
  2173. config_stack = self.get_config_stack()
  2174. git_attributes = self.get_gitattributes()
  2175. # Lazily create FilterContext if needed
  2176. if self.filter_context is None:
  2177. filter_registry = FilterRegistry(config_stack, self)
  2178. self.filter_context = FilterContext(filter_registry)
  2179. else:
  2180. # Refresh the context with current config to handle config changes
  2181. self.filter_context.refresh_config(config_stack)
  2182. # Return a new FilterBlobNormalizer with the context
  2183. return FilterBlobNormalizer(
  2184. config_stack, git_attributes, filter_context=self.filter_context
  2185. )
    def get_gitattributes(self, tree: bytes | None = None) -> "GitAttributes":
        """Read gitattributes for the repository.

        Args:
          tree: Ignored here; memory repositories have no attribute sources.

        Returns:
          An empty GitAttributes instance
        """
        from .attrs import GitAttributes

        # Memory repos don't have working trees or gitattributes files
        # Return empty GitAttributes
        return GitAttributes([])
  2192. def close(self) -> None:
  2193. """Close any resources opened by this repository."""
  2194. # Clean up filter context if it was created
  2195. if self.filter_context is not None:
  2196. self.filter_context.close()
  2197. self.filter_context = None
    def do_commit(
        self,
        message: bytes | None = None,
        committer: bytes | None = None,
        author: bytes | None = None,
        commit_timestamp: float | None = None,
        commit_timezone: int | None = None,
        author_timestamp: float | None = None,
        author_timezone: int | None = None,
        tree: ObjectID | None = None,
        encoding: bytes | None = None,
        ref: Ref | None = HEADREF,
        merge_heads: list[ObjectID] | None = None,
        no_verify: bool = False,
        sign: bool = False,
    ) -> bytes:
        """Create a new commit.

        This is a simplified implementation for in-memory repositories that
        doesn't support worktree operations or hooks.

        Args:
          message: Commit message, or a callable taking ``(repo, commit)``
            and returning the message bytes (must not return None)
          committer: Committer fullname (defaults to the configured identity)
          author: Author fullname (defaults to the configured identity)
          commit_timestamp: Commit timestamp (defaults to now)
          commit_timezone: Commit timestamp timezone (defaults to GMT)
          author_timestamp: Author timestamp (defaults to commit timestamp)
          author_timezone: Author timestamp timezone (defaults to commit timezone)
          tree: SHA1 of the tree root to use (required for MemoryRepo, since
            there is no index to build a tree from)
          encoding: Encoding (falls back to the ``i18n.commitEncoding`` config)
          ref: Optional ref to commit to (defaults to current branch).
            If None, creates a dangling commit without updating any ref.
          merge_heads: Merge heads to record as additional parents
          no_verify: Skip pre-commit and commit-msg hooks (ignored for MemoryRepo)
          sign: GPG Sign the commit (ignored for MemoryRepo)

        Returns:
          New commit SHA1

        Raises:
          ValueError: if ``tree`` is missing/malformed, or no message is
            available.
          CommitError: if the target ref changed concurrently during the
            compare-and-swap update.
        """
        import time

        from .objects import Commit

        # MemoryRepo has no index/worktree, so the caller must supply a tree.
        if tree is None:
            raise ValueError("tree must be specified for MemoryRepo")
        c = Commit()
        # Trees are referenced by 40-char hex SHA1 strings.
        if len(tree) != 40:
            raise ValueError("tree must be a 40-byte hex sha string")
        c.tree = tree
        config = self.get_config_stack()
        if merge_heads is None:
            merge_heads = []
        if committer is None:
            committer = get_user_identity(config, kind="COMMITTER")
        check_user_identity(committer)
        c.committer = committer
        if commit_timestamp is None:
            commit_timestamp = time.time()
        c.commit_time = int(commit_timestamp)
        if commit_timezone is None:
            # Timezone offset in seconds east of UTC; 0 == GMT.
            commit_timezone = 0
        c.commit_timezone = commit_timezone
        if author is None:
            author = get_user_identity(config, kind="AUTHOR")
        c.author = author
        check_user_identity(author)
        # Author time defaults to the commit time/timezone when unspecified.
        if author_timestamp is None:
            author_timestamp = commit_timestamp
        c.author_time = int(author_timestamp)
        if author_timezone is None:
            author_timezone = commit_timezone
        c.author_timezone = author_timezone
        if encoding is None:
            try:
                encoding = config.get(("i18n",), "commitEncoding")
            except KeyError:
                pass  # no configured encoding; leave attribute unset
        if encoding is not None:
            c.encoding = encoding
        # The message may be a callable that inspects the nearly-complete
        # commit object (e.g. to build a message from its metadata).
        if callable(message):
            message = message(self, c)
            if message is None:
                raise ValueError("Message callback returned None")
        if message is None:
            raise ValueError("No commit message specified")
        c.message = message
        if ref is None:
            # Create a dangling commit: store the object, update no refs.
            c.parents = merge_heads
            self.object_store.add_object(c)
        else:
            try:
                old_head = self.refs[ref]
                # Ref exists: old head becomes the first parent, and the ref
                # is advanced with compare-and-swap to detect races.
                c.parents = [old_head, *merge_heads]
                self.object_store.add_object(c)
                ok = self.refs.set_if_equals(
                    ref,
                    old_head,
                    c.id,
                    message=b"commit: " + message,
                    committer=committer,
                    timestamp=int(commit_timestamp),
                    timezone=commit_timezone,
                )
            except KeyError:
                # Ref doesn't exist yet (e.g. initial commit): create it,
                # but only if nobody else created it concurrently.
                c.parents = merge_heads
                self.object_store.add_object(c)
                ok = self.refs.add_if_new(
                    ref,
                    c.id,
                    message=b"commit: " + message,
                    committer=committer,
                    timestamp=int(commit_timestamp),
                    timezone=commit_timezone,
                )
            if not ok:
                from .errors import CommitError

                raise CommitError(f"{ref!r} changed during commit")
        return c.id
  2314. @classmethod
  2315. def init_bare(
  2316. cls,
  2317. objects: Iterable[ShaFile],
  2318. refs: Mapping[Ref, ObjectID],
  2319. format: int | None = None,
  2320. ) -> "MemoryRepo":
  2321. """Create a new bare repository in memory.
  2322. Args:
  2323. objects: Objects for the new repository,
  2324. as iterable
  2325. refs: Refs as dictionary, mapping names
  2326. to object SHA1s
  2327. format: Repository format version (defaults to 0)
  2328. """
  2329. ret = cls()
  2330. for obj in objects:
  2331. ret.object_store.add_object(obj)
  2332. for refname, sha in refs.items():
  2333. ret.refs.add_if_new(refname, sha)
  2334. ret._init_files(bare=True, format=format)
  2335. return ret