porcelain.py 138 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097409840994100410141024103410441054106410741084109411041114112411341144115411641174118411941204121412241234124412541264127412841294130413141324133413441354136413741384139414041414142414341444145414641474148414941504151415241534154415541564157415841594160416141624163416441654166416741684169417041714172417341744175417641774178417941804181418241834184418541864187418841894190419141924193419441954196419741984199420042014202420342044205420642074208420942104211421242134214421542164217421842194220422142224223422442254226422742284229423042314232423342344235
  1. # porcelain.py -- Porcelain-like layer on top of Dulwich
  2. # Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  5. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
  7. # or (at your option) any later version. You can redistribute it and/or
  8. # modify it under the terms of either of these two licenses.
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. # You should have received a copy of the licenses; if not, see
  17. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  18. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  19. # License, Version 2.0.
  20. #
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * branch{_create,_delete,_list}
  26. * check_ignore
  27. * checkout
  28. * checkout_branch
  29. * clone
  30. * cone mode{_init, _set, _add}
  31. * commit
  32. * commit_tree
  33. * daemon
  34. * describe
  35. * diff_tree
  36. * fetch
  37. * filter_branch
  38. * for_each_ref
  39. * init
  40. * ls_files
  41. * ls_remote
  42. * ls_tree
  43. * merge
  44. * merge_tree
  45. * mv/move
  46. * prune
  47. * pull
  48. * push
  49. * rm
  50. * remote{_add}
  51. * receive_pack
  52. * reset
  53. * revert
  54. * sparse_checkout
  55. * submodule_add
  56. * submodule_init
  57. * submodule_list
  58. * rev_list
  59. * tag{_create,_delete,_list}
  60. * upload_pack
  61. * update_server_info
  62. * status
  63. * symbolic_ref
  64. These functions are meant to behave similarly to the git subcommands.
  65. Differences in behaviour are considered bugs.
  66. Note: one of the consequences of this is that paths tend to be
  67. interpreted relative to the current working directory rather than relative
  68. to the repository root.
  69. Functions should generally accept both unicode strings and bytestrings
  70. """
  71. import datetime
  72. import fnmatch
  73. import os
  74. import posixpath
  75. import stat
  76. import sys
  77. import time
  78. from collections import namedtuple
  79. from contextlib import closing, contextmanager
  80. from dataclasses import dataclass
  81. from io import BytesIO, RawIOBase
  82. from pathlib import Path
  83. from typing import Optional, Union
  84. from . import replace_me
  85. from .archive import tar_stream
  86. from .client import get_transport_and_path
  87. from .config import Config, ConfigFile, StackedConfig, read_submodules
  88. from .diff_tree import (
  89. CHANGE_ADD,
  90. CHANGE_COPY,
  91. CHANGE_DELETE,
  92. CHANGE_MODIFY,
  93. CHANGE_RENAME,
  94. RENAME_CHANGE_TYPES,
  95. )
  96. from .errors import SendPackError
  97. from .graph import can_fast_forward
  98. from .ignore import IgnoreFilterManager
  99. from .index import (
  100. _fs_to_tree_path,
  101. blob_from_path_and_stat,
  102. build_file_from_blob,
  103. get_unstaged_changes,
  104. update_working_tree,
  105. )
  106. from .object_store import tree_lookup_path
  107. from .objects import (
  108. Commit,
  109. Tag,
  110. format_timezone,
  111. parse_timezone,
  112. pretty_format_tree_entry,
  113. )
  114. from .objectspec import (
  115. parse_commit,
  116. parse_object,
  117. parse_ref,
  118. parse_reftuples,
  119. parse_tree,
  120. )
  121. from .pack import write_pack_from_container, write_pack_index
  122. from .patch import write_tree_diff
  123. from .protocol import ZERO_SHA, Protocol
  124. from .refs import (
  125. LOCAL_BRANCH_PREFIX,
  126. LOCAL_NOTES_PREFIX,
  127. LOCAL_TAG_PREFIX,
  128. Ref,
  129. SymrefLoop,
  130. _import_remote_refs,
  131. )
  132. from .repo import BaseRepo, Repo, get_user_identity
  133. from .server import (
  134. FileSystemBackend,
  135. ReceivePackHandler,
  136. TCPGitServer,
  137. UploadPackHandler,
  138. )
  139. from .server import update_server_info as server_update_server_info
  140. from .sparse_patterns import (
  141. SparseCheckoutConflictError,
  142. apply_included_paths,
  143. determine_included_paths,
  144. )
  145. # Module level tuple definition for status output
  146. GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
@dataclass
class CountObjectsResult:
    """Result of counting objects in a repository.

    Attributes:
      count: Number of loose objects
      size: Total size of loose objects in bytes
      in_pack: Number of objects in pack files
      packs: Number of pack files
      size_pack: Total size of pack files in bytes
    """

    # Loose-object statistics are always populated.
    count: int
    size: int
    # Pack statistics default to None (e.g. when only loose objects
    # were counted); callers must handle the Optional values.
    in_pack: Optional[int] = None
    packs: Optional[int] = None
    size_pack: Optional[int] = None
  162. class NoneStream(RawIOBase):
  163. """Fallback if stdout or stderr are unavailable, does nothing."""
  164. def read(self, size=-1) -> None:
  165. return None
  166. def readall(self) -> bytes:
  167. return b""
  168. def readinto(self, b) -> None:
  169. return None
  170. def write(self, b) -> None:
  171. return None
# Byte-oriented views of the process streams.  ``sys.stdout``/``sys.stderr``
# may lack a ``buffer`` attribute (e.g. when replaced by a non-file object),
# in which case output is silently discarded via NoneStream.
default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream()
default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream()

# Default text encoding used throughout this module when converting
# between str and bytes.
DEFAULT_ENCODING = "utf-8"
  175. class Error(Exception):
  176. """Porcelain-based error."""
  177. def __init__(self, msg) -> None:
  178. super().__init__(msg)
class RemoteExists(Error):
    """Raised when a remote with the given name already exists."""
class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string.

    Raised by ``parse_timezone_format`` when none of the supported date
    formats yields timezone information.
    """
class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""
  185. def parse_timezone_format(tz_str):
  186. """Parse given string and attempt to return a timezone offset.
  187. Different formats are considered in the following order:
  188. - Git internal format: <unix timestamp> <timezone offset>
  189. - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
  190. - ISO 8601: e.g. 1995-11-20T19:12:08-0500
  191. Args:
  192. tz_str: datetime string
  193. Returns: Timezone offset as integer
  194. Raises:
  195. TimezoneFormatError: if timezone information cannot be extracted
  196. """
  197. import re
  198. # Git internal format
  199. internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
  200. if re.match(internal_format_pattern, tz_str):
  201. try:
  202. tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
  203. return tz_internal[0]
  204. except ValueError:
  205. pass
  206. # RFC 2822
  207. import email.utils
  208. rfc_2822 = email.utils.parsedate_tz(tz_str)
  209. if rfc_2822:
  210. return rfc_2822[9]
  211. # ISO 8601
  212. # Supported offsets:
  213. # sHHMM, sHH:MM, sHH
  214. iso_8601_pattern = re.compile(
  215. "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
  216. )
  217. match = re.search(iso_8601_pattern, tz_str)
  218. total_secs = 0
  219. if match:
  220. sign, hours, minutes = match.groups()
  221. total_secs += int(hours) * 3600
  222. if minutes:
  223. total_secs += int(minutes) * 60
  224. total_secs = -total_secs if sign == "-" else total_secs
  225. return total_secs
  226. # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
  227. raise TimezoneFormatError(tz_str)
  228. def get_user_timezones():
  229. """Retrieve local timezone as described in
  230. https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
  231. Returns: A tuple containing author timezone, committer timezone.
  232. """
  233. local_timezone = time.localtime().tm_gmtoff
  234. if os.environ.get("GIT_AUTHOR_DATE"):
  235. author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
  236. else:
  237. author_timezone = local_timezone
  238. if os.environ.get("GIT_COMMITTER_DATE"):
  239. commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
  240. else:
  241. commit_timezone = local_timezone
  242. return author_timezone, commit_timezone
  243. def open_repo(path_or_repo: Union[str, os.PathLike, BaseRepo]):
  244. """Open an argument that can be a repository or a path for a repository."""
  245. if isinstance(path_or_repo, BaseRepo):
  246. return path_or_repo
  247. return Repo(path_or_repo)
  248. @contextmanager
  249. def _noop_context_manager(obj):
  250. """Context manager that has the same api as closing but does nothing."""
  251. yield obj
  252. def open_repo_closing(path_or_repo: Union[str, os.PathLike, BaseRepo]):
  253. """Open an argument that can be a repository or a path for a repository.
  254. returns a context manager that will close the repo on exit if the argument
  255. is a path, else does nothing if the argument is a repo.
  256. """
  257. if isinstance(path_or_repo, BaseRepo):
  258. return _noop_context_manager(path_or_repo)
  259. return closing(Repo(path_or_repo))
  260. def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING):
  261. """Convert a path to a path usable in an index, e.g. bytes and relative to
  262. the repository root.
  263. Args:
  264. repopath: Repository path, absolute or relative to the cwd
  265. path: A path, absolute or relative to the cwd
  266. Returns: A path formatted for use in e.g. an index
  267. """
  268. # Resolve might returns a relative path on Windows
  269. # https://bugs.python.org/issue38671
  270. if sys.platform == "win32":
  271. path = os.path.abspath(path)
  272. path = Path(path)
  273. resolved_path = path.resolve()
  274. # Resolve and abspath seems to behave differently regarding symlinks,
  275. # as we are doing abspath on the file path, we need to do the same on
  276. # the repo path or they might not match
  277. if sys.platform == "win32":
  278. repopath = os.path.abspath(repopath)
  279. repopath = Path(repopath).resolve()
  280. try:
  281. relpath = resolved_path.relative_to(repopath)
  282. except ValueError:
  283. # If path is a symlink that points to a file outside the repo, we
  284. # want the relpath for the link itself, not the resolved target
  285. if path.is_symlink():
  286. parent = path.parent.resolve()
  287. relpath = (parent / path.name).relative_to(repopath)
  288. else:
  289. raise
  290. if sys.platform == "win32":
  291. return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
  292. else:
  293. return bytes(relpath)
class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha, new_sha) -> None:
        # Keep both tips so callers can report or resolve the divergence.
        self.current_sha = current_sha
        self.new_sha = new_sha
  299. def check_diverged(repo, current_sha, new_sha) -> None:
  300. """Check if updating to a sha can be done with fast forwarding.
  301. Args:
  302. repo: Repository object
  303. current_sha: Current head sha
  304. new_sha: New head sha
  305. """
  306. try:
  307. can = can_fast_forward(repo, current_sha, new_sha)
  308. except KeyError:
  309. can = False
  310. if not can:
  311. raise DivergedBranches(current_sha, new_sha)
  312. def archive(
  313. repo,
  314. committish=None,
  315. outstream=default_bytes_out_stream,
  316. errstream=default_bytes_err_stream,
  317. ) -> None:
  318. """Create an archive.
  319. Args:
  320. repo: Path of repository for which to generate an archive.
  321. committish: Commit SHA1 or ref to use
  322. outstream: Output stream (defaults to stdout)
  323. errstream: Error stream (defaults to stderr)
  324. """
  325. if committish is None:
  326. committish = "HEAD"
  327. with open_repo_closing(repo) as repo_obj:
  328. c = parse_commit(repo_obj, committish)
  329. for chunk in tar_stream(
  330. repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time
  331. ):
  332. outstream.write(chunk)
  333. def update_server_info(repo=".") -> None:
  334. """Update server info files for a repository.
  335. Args:
  336. repo: path to the repository
  337. """
  338. with open_repo_closing(repo) as r:
  339. server_update_server_info(r)
  340. def symbolic_ref(repo, ref_name, force=False) -> None:
  341. """Set git symbolic ref into HEAD.
  342. Args:
  343. repo: path to the repository
  344. ref_name: short name of the new ref
  345. force: force settings without checking if it exists in refs/heads
  346. """
  347. with open_repo_closing(repo) as repo_obj:
  348. ref_path = _make_branch_ref(ref_name)
  349. if not force and ref_path not in repo_obj.refs.keys():
  350. raise Error(f"fatal: ref `{ref_name}` is not a ref")
  351. repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
def pack_refs(repo, all=False) -> None:
    """Pack the repository's refs into the packed-refs file.

    Args:
      repo: path to the repository
      all: whether to pack all refs (passed through to the refs
        container's ``pack_refs``)
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.refs.pack_refs(all=all)
  355. def commit(
  356. repo=".",
  357. message=None,
  358. author=None,
  359. author_timezone=None,
  360. committer=None,
  361. commit_timezone=None,
  362. encoding=None,
  363. no_verify=False,
  364. signoff=False,
  365. ):
  366. """Create a new commit.
  367. Args:
  368. repo: Path to repository
  369. message: Optional commit message
  370. author: Optional author name and email
  371. author_timezone: Author timestamp timezone
  372. committer: Optional committer name and email
  373. commit_timezone: Commit timestamp timezone
  374. no_verify: Skip pre-commit and commit-msg hooks
  375. signoff: GPG Sign the commit (bool, defaults to False,
  376. pass True to use default GPG key,
  377. pass a str containing Key ID to use a specific GPG key)
  378. Returns: SHA1 of the new commit
  379. """
  380. # FIXME: Support --all argument
  381. if getattr(message, "encode", None):
  382. message = message.encode(encoding or DEFAULT_ENCODING)
  383. if getattr(author, "encode", None):
  384. author = author.encode(encoding or DEFAULT_ENCODING)
  385. if getattr(committer, "encode", None):
  386. committer = committer.encode(encoding or DEFAULT_ENCODING)
  387. local_timezone = get_user_timezones()
  388. if author_timezone is None:
  389. author_timezone = local_timezone[0]
  390. if commit_timezone is None:
  391. commit_timezone = local_timezone[1]
  392. with open_repo_closing(repo) as r:
  393. return r.do_commit(
  394. message=message,
  395. author=author,
  396. author_timezone=author_timezone,
  397. committer=committer,
  398. commit_timezone=commit_timezone,
  399. encoding=encoding,
  400. no_verify=no_verify,
  401. sign=signoff if isinstance(signoff, (str, bool)) else None,
  402. )
  403. def commit_tree(repo, tree, message=None, author=None, committer=None):
  404. """Create a new commit object.
  405. Args:
  406. repo: Path to repository
  407. tree: An existing tree object
  408. author: Optional author name and email
  409. committer: Optional committer name and email
  410. """
  411. with open_repo_closing(repo) as r:
  412. return r.do_commit(
  413. message=message, tree=tree, committer=committer, author=author
  414. )
  415. def init(
  416. path: Union[str, os.PathLike] = ".", *, bare=False, symlinks: Optional[bool] = None
  417. ):
  418. """Create a new git repository.
  419. Args:
  420. path: Path to repository.
  421. bare: Whether to create a bare repository.
  422. symlinks: Whether to create actual symlinks (defaults to autodetect)
  423. Returns: A Repo instance
  424. """
  425. if not os.path.exists(path):
  426. os.mkdir(path)
  427. if bare:
  428. return Repo.init_bare(path)
  429. else:
  430. return Repo.init(path, symlinks=symlinks)
def clone(
    source,
    target: Optional[Union[str, os.PathLike]] = None,
    bare=False,
    checkout=None,
    errstream=default_bytes_err_stream,
    outstream=None,
    origin: Optional[str] = "origin",
    depth: Optional[int] = None,
    branch: Optional[Union[str, bytes]] = None,
    config: Optional[Config] = None,
    filter_spec=None,
    protocol_version: Optional[int] = None,
    recurse_submodules: bool = False,
    **kwargs,
):
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules

    Keyword Args:
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
        # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()
    if checkout is None:
        # Default: check out a working tree unless cloning bare.
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        # Derive the target directory from the last path segment of the source.
        target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    # Only create the target directory ourselves if it does not exist yet.
    mkdir = not os.path.exists(target)

    (client, path) = get_transport_and_path(source, config=config, **kwargs)

    if filter_spec:
        filter_spec = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        target,
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch,
        progress=errstream.write,
        depth=depth,
        filter_spec=filter_spec,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            import logging

            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing; warn but do not fail the clone.
            import logging

            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
def add(repo: Union[str, os.PathLike, BaseRepo] = ".", paths=None):
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files from the
        current working directory (mimicking 'git add .' behavior).

    Returns: Tuple with set of added files and ignored files

    If the repository contains ignored directories, the returned set will
    contain the path to an ignored directory (with trailing slash). Individual
    files within ignored directories will not be returned.

    Note: When paths=None, this function adds all untracked and modified files
    from the entire repository, mimicking 'git add -A' behavior.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        all_unstaged_paths = list(get_unstaged_changes(index, r.path, filter_callback))

        if not paths:
            # When no paths specified, add all untracked and modified files
            # from repo root
            paths = [str(repo_path)]

        relpaths = []

        if not isinstance(paths, list):
            paths = [paths]

        for p in paths:
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path

            # Don't resolve symlinks completely - only resolve the parent directory
            # to avoid issues when symlinks point outside the repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()

            try:
                # Index paths always use forward slashes, regardless of OS.
                relpath = str(resolved_path.relative_to(repo_path)).replace(os.sep, "/")
            except ValueError as e:
                # Path is not within the repository
                raise ValueError(
                    f"Path {p} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory
                    # we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue
                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)

        r.stage(relpaths)
        # NOTE(review): docstring says "set of added files" but relpaths is a
        # list - confirm intended return type with callers.
        return (relpaths, ignored)
  622. def _is_subdir(subdir, parentdir):
  623. """Check whether subdir is parentdir or a subdir of parentdir.
  624. If parentdir or subdir is a relative path, it will be disamgibuated
  625. relative to the pwd.
  626. """
  627. parentdir_abs = os.path.realpath(parentdir) + os.path.sep
  628. subdir_abs = os.path.realpath(subdir) + os.path.sep
  629. return subdir_abs.startswith(parentdir_abs)
  630. # TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo=".", target_dir=None) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        # NOTE(review): the requireForce value is read but the result is
        # discarded - see the TODO below; confirm whether enforcement is
        # intended here.
        config.get_boolean((b"clean",), b"requireForce", True)

        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                # Only delete files that are neither tracked nor ignored.
                if not is_tracked and not is_ignored:
                    os.remove(ap)
def remove(repo=".", paths=None, cached=False) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the repository root.
      cached: If True, only remove the entries from the index and leave the
        working-tree files in place.
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            # If path is absolute, use it as-is. Otherwise, treat it as
            # relative to repo
            if os.path.isabs(p):
                full_path = p
            else:
                # Treat relative paths as relative to the repository root
                full_path = os.path.join(r.path, p)

            tree_path = path_to_tree_path(r.path, full_path)

            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)

            try:
                index_sha = index[tree_path].sha
            except KeyError as exc:
                raise Error(f"{p} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    # File already gone from disk; only the index entry remains.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                    except OSError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path
                            )[1]
                        except KeyError:
                            committed_sha = None

                        # Refuse to delete when the staged content matches
                        # neither the working-tree file nor HEAD.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()


# Git-style alias.
rm = remove
def mv(
    repo: Union[str, os.PathLike, BaseRepo],
    source: Union[str, bytes, os.PathLike],
    destination: Union[str, bytes, os.PathLike],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination already
        exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)

        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)

        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory (like `git mv file dir/`)
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem.  NOTE(review): the exists/remove/
        # rename sequence is not atomic; a concurrent writer could race it.
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)

        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index: drop the old entry, reuse it under the new path.
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()


# Git-style alias.
move = mv
  797. def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
  798. if commit.encoding:
  799. encoding = commit.encoding.decode("ascii")
  800. else:
  801. encoding = default_encoding
  802. return contents.decode(encoding, "replace")
  803. def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
  804. if commit.encoding:
  805. encoding = commit.encoding.decode("ascii")
  806. else:
  807. encoding = default_encoding
  808. return contents.encode(encoding)
  809. def print_commit(commit, decode, outstream=sys.stdout) -> None:
  810. """Write a human-readable commit log entry.
  811. Args:
  812. commit: A `Commit` object
  813. outstream: A stream file to write to
  814. """
  815. outstream.write("-" * 50 + "\n")
  816. outstream.write("commit: " + commit.id.decode("ascii") + "\n")
  817. if len(commit.parents) > 1:
  818. outstream.write(
  819. "merge: "
  820. + "...".join([c.decode("ascii") for c in commit.parents[1:]])
  821. + "\n"
  822. )
  823. outstream.write("Author: " + decode(commit.author) + "\n")
  824. if commit.author != commit.committer:
  825. outstream.write("Committer: " + decode(commit.committer) + "\n")
  826. time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
  827. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  828. timezone_str = format_timezone(commit.author_timezone).decode("ascii")
  829. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  830. if commit.message:
  831. outstream.write("\n")
  832. outstream.write(decode(commit.message) + "\n")
  833. outstream.write("\n")
  834. def print_tag(tag, decode, outstream=sys.stdout) -> None:
  835. """Write a human-readable tag.
  836. Args:
  837. tag: A `Tag` object
  838. decode: Function for decoding bytes to unicode string
  839. outstream: A stream to write to
  840. """
  841. outstream.write("Tagger: " + decode(tag.tagger) + "\n")
  842. time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
  843. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  844. timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
  845. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  846. outstream.write("\n")
  847. outstream.write(decode(tag.message))
  848. outstream.write("\n")
  849. def show_blob(repo, blob, decode, outstream=sys.stdout) -> None:
  850. """Write a blob to a stream.
  851. Args:
  852. repo: A `Repo` object
  853. blob: A `Blob` object
  854. decode: Function for decoding bytes to unicode string
  855. outstream: A stream file to write to
  856. """
  857. outstream.write(decode(blob.data))
  858. def show_commit(repo, commit, decode, outstream=sys.stdout) -> None:
  859. """Show a commit to a stream.
  860. Args:
  861. repo: A `Repo` object
  862. commit: A `Commit` object
  863. decode: Function for decoding bytes to unicode string
  864. outstream: Stream to write to
  865. """
  866. print_commit(commit, decode=decode, outstream=outstream)
  867. if commit.parents:
  868. parent_commit = repo[commit.parents[0]]
  869. base_tree = parent_commit.tree
  870. else:
  871. base_tree = None
  872. diffstream = BytesIO()
  873. write_tree_diff(diffstream, repo.object_store, base_tree, commit.tree)
  874. diffstream.seek(0)
  875. outstream.write(commit_decode(commit, diffstream.getvalue()))
  876. def show_tree(repo, tree, decode, outstream=sys.stdout) -> None:
  877. """Print a tree to a stream.
  878. Args:
  879. repo: A `Repo` object
  880. tree: A `Tree` object
  881. decode: Function for decoding bytes to unicode string
  882. outstream: Stream to write to
  883. """
  884. for n in tree:
  885. outstream.write(decode(n) + "\n")
  886. def show_tag(repo, tag, decode, outstream=sys.stdout) -> None:
  887. """Print a tag to a stream.
  888. Args:
  889. repo: A `Repo` object
  890. tag: A `Tag` object
  891. decode: Function for decoding bytes to unicode string
  892. outstream: Stream to write to
  893. """
  894. print_tag(tag, decode, outstream)
  895. show_object(repo, repo[tag.object[1]], decode, outstream)
  896. def show_object(repo, obj, decode, outstream):
  897. return {
  898. b"tree": show_tree,
  899. b"blob": show_blob,
  900. b"commit": show_commit,
  901. b"tag": show_tag,
  902. }[obj.type_name](repo, obj, decode, outstream)
  903. def print_name_status(changes):
  904. """Print a simple status summary, listing changed files."""
  905. for change in changes:
  906. if not change:
  907. continue
  908. if isinstance(change, list):
  909. change = change[0]
  910. if change.type == CHANGE_ADD:
  911. path1 = change.new.path
  912. path2 = ""
  913. kind = "A"
  914. elif change.type == CHANGE_DELETE:
  915. path1 = change.old.path
  916. path2 = ""
  917. kind = "D"
  918. elif change.type == CHANGE_MODIFY:
  919. path1 = change.new.path
  920. path2 = ""
  921. kind = "M"
  922. elif change.type in RENAME_CHANGE_TYPES:
  923. path1 = change.old.path
  924. path2 = change.new.path
  925. if change.type == CHANGE_RENAME:
  926. kind = "R"
  927. elif change.type == CHANGE_COPY:
  928. kind = "C"
  929. yield "%-8s%-20s%-20s" % (kind, path1, path2) # noqa: UP031
  930. def log(
  931. repo=".",
  932. paths=None,
  933. outstream=sys.stdout,
  934. max_entries=None,
  935. reverse=False,
  936. name_status=False,
  937. ) -> None:
  938. """Write commit logs.
  939. Args:
  940. repo: Path to repository
  941. paths: Optional set of specific paths to print entries for
  942. outstream: Stream to write log output to
  943. reverse: Reverse order in which entries are printed
  944. name_status: Print name status
  945. max_entries: Optional maximum number of entries to display
  946. """
  947. with open_repo_closing(repo) as r:
  948. try:
  949. include = [r.head()]
  950. except KeyError:
  951. include = []
  952. walker = r.get_walker(
  953. include=include, max_entries=max_entries, paths=paths, reverse=reverse
  954. )
  955. for entry in walker:
  956. def decode(x):
  957. return commit_decode(entry.commit, x)
  958. print_commit(entry.commit, decode, outstream)
  959. if name_status:
  960. outstream.writelines(
  961. [line + "\n" for line in print_name_status(entry.changes())]
  962. )
  963. # TODO(jelmer): better default for encoding?
  964. def show(
  965. repo=".",
  966. objects=None,
  967. outstream=sys.stdout,
  968. default_encoding=DEFAULT_ENCODING,
  969. ) -> None:
  970. """Print the changes in a commit.
  971. Args:
  972. repo: Path to repository
  973. objects: Objects to show (defaults to [HEAD])
  974. outstream: Stream to write to
  975. default_encoding: Default encoding to use if none is set in the
  976. commit
  977. """
  978. if objects is None:
  979. objects = ["HEAD"]
  980. if not isinstance(objects, list):
  981. objects = [objects]
  982. with open_repo_closing(repo) as r:
  983. for objectish in objects:
  984. o = parse_object(r, objectish)
  985. if isinstance(o, Commit):
  986. def decode(x):
  987. return commit_decode(o, x, default_encoding)
  988. else:
  989. def decode(x):
  990. return x.decode(default_encoding)
  991. show_object(r, o, decode, outstream)
  992. def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream) -> None:
  993. """Compares the content and mode of blobs found via two tree objects.
  994. Args:
  995. repo: Path to repository
  996. old_tree: Id of old tree
  997. new_tree: Id of new tree
  998. outstream: Stream to write to
  999. """
  1000. with open_repo_closing(repo) as r:
  1001. write_tree_diff(outstream, r.object_store, old_tree, new_tree)
  1002. def rev_list(repo, commits, outstream=sys.stdout) -> None:
  1003. """Lists commit objects in reverse chronological order.
  1004. Args:
  1005. repo: Path to repository
  1006. commits: Commits over which to iterate
  1007. outstream: Stream to write to
  1008. """
  1009. with open_repo_closing(repo) as r:
  1010. for entry in r.get_walker(include=[r[c].id for c in commits]):
  1011. outstream.write(entry.commit.id + b"\n")
  1012. def _canonical_part(url: str) -> str:
  1013. name = url.rsplit("/", 1)[-1]
  1014. if name.endswith(".git"):
  1015. name = name[:-4]
  1016. return name
def submodule_add(repo, url, path=None, name=None) -> None:
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live
      name: Name for the submodule
    """
    with open_repo_closing(repo) as r:
        if path is None:
            # Derive the path from the last URL segment (minus ".git").
            # NOTE(review): relpath of a bare name against r.path looks
            # suspect - verify this produces the intended in-repo path.
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = path

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            # No .gitmodules yet: start a fresh file at the repository root.
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", path)
        config.write_to_path()
  1040. def submodule_init(repo) -> None:
  1041. """Initialize submodules.
  1042. Args:
  1043. repo: Path to repository
  1044. """
  1045. with open_repo_closing(repo) as r:
  1046. config = r.get_config()
  1047. gitmodules_path = os.path.join(r.path, ".gitmodules")
  1048. for path, url, name in read_submodules(gitmodules_path):
  1049. config.set((b"submodule", name), b"active", True)
  1050. config.set((b"submodule", name), b"url", url)
  1051. config.write_to_path()
  1052. def submodule_list(repo):
  1053. """List submodules.
  1054. Args:
  1055. repo: Path to repository
  1056. """
  1057. from .submodule import iter_cached_submodules
  1058. with open_repo_closing(repo) as r:
  1059. for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
  1060. yield path, sha.decode(DEFAULT_ENCODING)
def submodule_update(repo, paths=None, init=False, force=False, errstream=None) -> None:
    """Update submodules.

    Args:
      repo: Path to repository
      paths: Optional list of specific submodule paths to update. If None, updates all.
      init: If True, initialize submodules first
      force: Force update even if local changes exist
      errstream: Accepted for API symmetry but currently unused in this body
    """
    from .client import get_transport_and_path
    from .index import build_index_from_tree
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        if init:
            submodule_init(r)

        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")

        # Get list of submodules to update
        submodules_to_update = []
        for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )
            if paths is None or path_str in paths:
                submodules_to_update.append((path, sha))

        # Read submodule configuration
        for path, target_sha in submodules_to_update:
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )

            # Find the submodule name from .gitmodules
            submodule_name = None
            for sm_path, sm_url, sm_name in read_submodules(gitmodules_path):
                if sm_path == path:
                    submodule_name = sm_name
                    break

            if not submodule_name:
                # Not declared in .gitmodules; nothing we can do for it.
                continue

            # Get the URL from config
            section = (
                b"submodule",
                submodule_name
                if isinstance(submodule_name, bytes)
                else submodule_name.encode(),
            )
            try:
                url = config.get(section, b"url")
                if isinstance(url, bytes):
                    url = url.decode(DEFAULT_ENCODING)
            except KeyError:
                # URL not in config, skip this submodule
                continue

            # Get or create the submodule repository paths
            submodule_path = os.path.join(r.path, path_str)
            submodule_git_dir = os.path.join(r.path, ".git", "modules", path_str)

            # Clone or fetch the submodule
            if not os.path.exists(submodule_git_dir):
                # Clone the submodule as bare repository
                os.makedirs(os.path.dirname(submodule_git_dir), exist_ok=True)

                # Clone to the git directory
                sub_repo = clone(url, submodule_git_dir, bare=True, checkout=False)
                sub_repo.close()

                # Create the submodule directory if it doesn't exist
                if not os.path.exists(submodule_path):
                    os.makedirs(submodule_path)

                # Create .git file in the submodule directory; it points back
                # into the superproject's .git/modules store via a relative path.
                depth = path_str.count("/") + 1
                relative_git_dir = "../" * depth + ".git/modules/" + path_str
                git_file_path = os.path.join(submodule_path, ".git")
                with open(git_file_path, "w") as f:
                    f.write(f"gitdir: {relative_git_dir}\n")

                # Set up working directory configuration
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    sub_config = sub_repo.get_config()
                    sub_config.set(
                        (b"core",),
                        b"worktree",
                        os.path.abspath(submodule_path).encode(),
                    )
                    sub_config.write_to_path()

                    # Checkout the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Build the index and checkout files
                    tree = sub_repo[target_sha]
                    if hasattr(tree, "tree"):  # If it's a commit, get the tree
                        tree_id = tree.tree
                    else:
                        tree_id = target_sha

                    build_index_from_tree(
                        submodule_path,
                        sub_repo.index_path(),
                        sub_repo.object_store,
                        tree_id,
                    )
            else:
                # Fetch and checkout in existing submodule
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    # Fetch from remote
                    client, path_segments = get_transport_and_path(url)
                    client.fetch(path_segments, sub_repo)

                    # Update to the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Reset the working directory.  NOTE(review): `force` is
                    # not consulted here - the hard reset happens regardless;
                    # confirm whether local-change protection was intended.
                    reset(sub_repo, "hard", target_sha)
def tag_create(
    repo,
    tag: Union[str, bytes],
    author: Optional[Union[str, bytes]] = None,
    message: Optional[Union[str, bytes]] = None,
    annotated=False,
    objectish: Union[str, bytes] = "HEAD",
    tag_time=None,
    tag_timezone=None,
    sign: Optional[Union[bool, str]] = False,
    encoding: str = DEFAULT_ENCODING,
) -> None:
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG Sign the tag (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key,
        pass None to consult the tag.gpgSign configuration)
      encoding: Encoding used for str tag/author/message arguments
    """
    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)

        if isinstance(tag, str):
            tag = tag.encode(encoding)
        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                author = get_user_identity(r.get_config_stack())
            elif isinstance(author, str):
                author = author.encode(encoding)
            else:
                assert isinstance(author, bytes)
            tag_obj.tagger = author
            if isinstance(message, str):
                message = message.encode(encoding)
            elif isinstance(message, bytes):
                pass
            else:
                # No message given; annotated tags still get a trailing newline.
                message = b""
            tag_obj.message = message + "\n".encode(encoding)
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                tag_timezone = get_user_timezones()[1]
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone.encode())
            tag_obj.tag_timezone = tag_timezone

            # Check if we should sign the tag
            should_sign = sign
            if sign is None:
                # Check tag.gpgSign configuration when sign is not explicitly set
                config = r.get_config_stack()
                try:
                    should_sign = config.get_boolean((b"tag",), b"gpgSign")
                except KeyError:
                    should_sign = False  # Default to not signing if no config
            if should_sign:
                keyid = sign if isinstance(sign, str) else None
                # If sign is True but no keyid specified, check user.signingKey config
                if should_sign is True and keyid is None:
                    config = r.get_config_stack()
                    try:
                        keyid = config.get((b"user",), b"signingKey").decode("ascii")
                    except KeyError:
                        # No user.signingKey configured, will use default GPG key
                        pass
                tag_obj.sign(keyid)

            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            # Lightweight tag: the ref points directly at the target object.
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id
  1246. def tag_list(repo, outstream=sys.stdout):
  1247. """List all tags.
  1248. Args:
  1249. repo: Path to repository
  1250. outstream: Stream to write tags to
  1251. """
  1252. with open_repo_closing(repo) as r:
  1253. tags = sorted(r.refs.as_dict(b"refs/tags"))
  1254. return tags
  1255. def tag_delete(repo, name) -> None:
  1256. """Remove a tag.
  1257. Args:
  1258. repo: Path to repository
  1259. name: Name of tag to remove
  1260. """
  1261. with open_repo_closing(repo) as r:
  1262. if isinstance(name, bytes):
  1263. names = [name]
  1264. elif isinstance(name, list):
  1265. names = name
  1266. else:
  1267. raise Error(f"Unexpected tag name type {name!r}")
  1268. for name in names:
  1269. del r.refs[_make_tag_ref(name)]
  1270. def _make_notes_ref(name: bytes) -> bytes:
  1271. """Make a notes ref name."""
  1272. if name.startswith(b"refs/notes/"):
  1273. return name
  1274. return LOCAL_NOTES_PREFIX + name
  1275. def notes_add(
  1276. repo, object_sha, note, ref=b"commits", author=None, committer=None, message=None
  1277. ):
  1278. """Add or update a note for an object.
  1279. Args:
  1280. repo: Path to repository
  1281. object_sha: SHA of the object to annotate
  1282. note: Note content
  1283. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1284. author: Author identity (defaults to committer)
  1285. committer: Committer identity (defaults to config)
  1286. message: Commit message for the notes update
  1287. Returns:
  1288. SHA of the new notes commit
  1289. """
  1290. with open_repo_closing(repo) as r:
  1291. # Parse the object to get its SHA
  1292. obj = parse_object(r, object_sha)
  1293. object_sha = obj.id
  1294. if isinstance(note, str):
  1295. note = note.encode(DEFAULT_ENCODING)
  1296. if isinstance(ref, str):
  1297. ref = ref.encode(DEFAULT_ENCODING)
  1298. notes_ref = _make_notes_ref(ref)
  1299. config = r.get_config_stack()
  1300. return r.notes.set_note(
  1301. object_sha,
  1302. note,
  1303. notes_ref,
  1304. author=author,
  1305. committer=committer,
  1306. message=message,
  1307. config=config,
  1308. )
  1309. def notes_remove(
  1310. repo, object_sha, ref=b"commits", author=None, committer=None, message=None
  1311. ):
  1312. """Remove a note for an object.
  1313. Args:
  1314. repo: Path to repository
  1315. object_sha: SHA of the object to remove notes from
  1316. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1317. author: Author identity (defaults to committer)
  1318. committer: Committer identity (defaults to config)
  1319. message: Commit message for the notes removal
  1320. Returns:
  1321. SHA of the new notes commit, or None if no note existed
  1322. """
  1323. with open_repo_closing(repo) as r:
  1324. # Parse the object to get its SHA
  1325. obj = parse_object(r, object_sha)
  1326. object_sha = obj.id
  1327. if isinstance(ref, str):
  1328. ref = ref.encode(DEFAULT_ENCODING)
  1329. notes_ref = _make_notes_ref(ref)
  1330. config = r.get_config_stack()
  1331. return r.notes.remove_note(
  1332. object_sha,
  1333. notes_ref,
  1334. author=author,
  1335. committer=committer,
  1336. message=message,
  1337. config=config,
  1338. )
  1339. def notes_show(repo, object_sha, ref=b"commits"):
  1340. """Show the note for an object.
  1341. Args:
  1342. repo: Path to repository
  1343. object_sha: SHA of the object
  1344. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1345. Returns:
  1346. Note content as bytes, or None if no note exists
  1347. """
  1348. with open_repo_closing(repo) as r:
  1349. # Parse the object to get its SHA
  1350. obj = parse_object(r, object_sha)
  1351. object_sha = obj.id
  1352. if isinstance(ref, str):
  1353. ref = ref.encode(DEFAULT_ENCODING)
  1354. notes_ref = _make_notes_ref(ref)
  1355. config = r.get_config_stack()
  1356. return r.notes.get_note(object_sha, notes_ref, config=config)
  1357. def notes_list(repo, ref=b"commits"):
  1358. """List all notes in a notes ref.
  1359. Args:
  1360. repo: Path to repository
  1361. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1362. Returns:
  1363. List of tuples of (object_sha, note_content)
  1364. """
  1365. with open_repo_closing(repo) as r:
  1366. if isinstance(ref, str):
  1367. ref = ref.encode(DEFAULT_ENCODING)
  1368. notes_ref = _make_notes_ref(ref)
  1369. config = r.get_config_stack()
  1370. return r.notes.list_notes(notes_ref, config=config)
  1371. def reset(repo, mode, treeish="HEAD") -> None:
  1372. """Reset current HEAD to the specified state.
  1373. Args:
  1374. repo: Path to repository
  1375. mode: Mode ("hard", "soft", "mixed")
  1376. treeish: Treeish to reset to
  1377. """
  1378. with open_repo_closing(repo) as r:
  1379. # Parse the target tree
  1380. tree = parse_tree(r, treeish)
  1381. target_commit = parse_commit(r, treeish)
  1382. # Update HEAD to point to the target commit
  1383. r.refs[b"HEAD"] = target_commit.id
  1384. if mode == "soft":
  1385. # Soft reset: only update HEAD, leave index and working tree unchanged
  1386. return
  1387. elif mode == "mixed":
  1388. # Mixed reset: update HEAD and index, but leave working tree unchanged
  1389. from .index import IndexEntry
  1390. from .object_store import iter_tree_contents
  1391. # Open the index
  1392. index = r.open_index()
  1393. # Clear the current index
  1394. index.clear()
  1395. # Populate index from the target tree
  1396. for entry in iter_tree_contents(r.object_store, tree.id):
  1397. # Create an IndexEntry from the tree entry
  1398. # Use zeros for filesystem-specific fields since we're not touching the working tree
  1399. index_entry = IndexEntry(
  1400. ctime=(0, 0),
  1401. mtime=(0, 0),
  1402. dev=0,
  1403. ino=0,
  1404. mode=entry.mode,
  1405. uid=0,
  1406. gid=0,
  1407. size=0, # Size will be 0 since we're not reading from disk
  1408. sha=entry.sha,
  1409. flags=0,
  1410. )
  1411. index[entry.path] = index_entry
  1412. # Write the updated index
  1413. index.write()
  1414. elif mode == "hard":
  1415. # Hard reset: update HEAD, index, and working tree
  1416. # Get current HEAD tree for comparison
  1417. try:
  1418. current_head = r.refs[b"HEAD"]
  1419. current_tree = r[current_head].tree
  1420. except KeyError:
  1421. current_tree = None
  1422. # Get configuration for working directory update
  1423. config = r.get_config()
  1424. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
  1425. # Import validation functions
  1426. from .index import validate_path_element_default, validate_path_element_ntfs
  1427. if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
  1428. validate_path_element = validate_path_element_ntfs
  1429. else:
  1430. validate_path_element = validate_path_element_default
  1431. if config.get_boolean(b"core", b"symlinks", True):
  1432. # Import symlink function
  1433. from .index import symlink
  1434. symlink_fn = symlink
  1435. else:
  1436. def symlink_fn( # type: ignore
  1437. source, target, target_is_directory=False, *, dir_fd=None
  1438. ) -> None:
  1439. mode = "w" + ("b" if isinstance(source, bytes) else "")
  1440. with open(target, mode) as f:
  1441. f.write(source)
  1442. # Update working tree and index
  1443. blob_normalizer = r.get_blob_normalizer()
  1444. update_working_tree(
  1445. r,
  1446. current_tree,
  1447. tree.id,
  1448. honor_filemode=honor_filemode,
  1449. validate_path_element=validate_path_element,
  1450. symlink_fn=symlink_fn,
  1451. force_remove_untracked=True,
  1452. blob_normalizer=blob_normalizer,
  1453. )
  1454. else:
  1455. raise Error(f"Invalid reset mode: {mode}")
  1456. def get_remote_repo(
  1457. repo: Repo, remote_location: Optional[Union[str, bytes]] = None
  1458. ) -> tuple[Optional[str], str]:
  1459. config = repo.get_config()
  1460. if remote_location is None:
  1461. remote_location = get_branch_remote(repo)
  1462. if isinstance(remote_location, str):
  1463. encoded_location = remote_location.encode()
  1464. else:
  1465. encoded_location = remote_location
  1466. section = (b"remote", encoded_location)
  1467. remote_name: Optional[str] = None
  1468. if config.has_section(section):
  1469. remote_name = encoded_location.decode()
  1470. encoded_location = config.get(section, "url")
  1471. else:
  1472. remote_name = None
  1473. return (remote_name, encoded_location.decode())
  1474. def push(
  1475. repo,
  1476. remote_location=None,
  1477. refspecs=None,
  1478. outstream=default_bytes_out_stream,
  1479. errstream=default_bytes_err_stream,
  1480. force=False,
  1481. **kwargs,
  1482. ):
  1483. """Remote push with dulwich via dulwich.client.
  1484. Args:
  1485. repo: Path to repository
  1486. remote_location: Location of the remote
  1487. refspecs: Refs to push to remote
  1488. outstream: A stream file to write output
  1489. errstream: A stream file to write errors
  1490. force: Force overwriting refs
  1491. """
  1492. # Open the repo
  1493. with open_repo_closing(repo) as r:
  1494. (remote_name, remote_location) = get_remote_repo(r, remote_location)
  1495. # Check if mirror mode is enabled
  1496. mirror_mode = False
  1497. if remote_name:
  1498. try:
  1499. mirror_mode = r.get_config_stack().get_boolean(
  1500. (b"remote", remote_name.encode()), b"mirror"
  1501. )
  1502. except KeyError:
  1503. pass
  1504. if mirror_mode:
  1505. # Mirror mode: push all refs and delete non-existent ones
  1506. refspecs = []
  1507. for ref in r.refs.keys():
  1508. # Push all refs to the same name on remote
  1509. refspecs.append(ref + b":" + ref)
  1510. elif refspecs is None:
  1511. refspecs = [active_branch(r)]
  1512. # Get the client and path
  1513. client, path = get_transport_and_path(
  1514. remote_location, config=r.get_config_stack(), **kwargs
  1515. )
  1516. selected_refs = []
  1517. remote_changed_refs = {}
  1518. def update_refs(refs):
  1519. selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
  1520. new_refs = {}
  1521. # In mirror mode, delete remote refs that don't exist locally
  1522. if mirror_mode:
  1523. local_refs = set(r.refs.keys())
  1524. for remote_ref in refs.keys():
  1525. if remote_ref not in local_refs:
  1526. new_refs[remote_ref] = ZERO_SHA
  1527. remote_changed_refs[remote_ref] = None
  1528. # TODO: Handle selected_refs == {None: None}
  1529. for lh, rh, force_ref in selected_refs:
  1530. if lh is None:
  1531. new_refs[rh] = ZERO_SHA
  1532. remote_changed_refs[rh] = None
  1533. else:
  1534. try:
  1535. localsha = r.refs[lh]
  1536. except KeyError as exc:
  1537. raise Error(f"No valid ref {lh} in local repository") from exc
  1538. if not force_ref and rh in refs:
  1539. check_diverged(r, refs[rh], localsha)
  1540. new_refs[rh] = localsha
  1541. remote_changed_refs[rh] = localsha
  1542. return new_refs
  1543. err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
  1544. remote_location = client.get_url(path)
  1545. try:
  1546. result = client.send_pack(
  1547. path,
  1548. update_refs,
  1549. generate_pack_data=r.generate_pack_data,
  1550. progress=errstream.write,
  1551. )
  1552. except SendPackError as exc:
  1553. raise Error(
  1554. "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
  1555. ) from exc
  1556. else:
  1557. errstream.write(
  1558. b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
  1559. )
  1560. for ref, error in (result.ref_status or {}).items():
  1561. if error is not None:
  1562. errstream.write(
  1563. b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
  1564. )
  1565. else:
  1566. errstream.write(b"Ref %s updated\n" % ref)
  1567. if remote_name is not None:
  1568. _import_remote_refs(r.refs, remote_name, remote_changed_refs)
  1569. return result
  1570. # Trigger auto GC if needed
  1571. from .gc import maybe_auto_gc
  1572. with open_repo_closing(repo) as r:
  1573. maybe_auto_gc(r)
def pull(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    fast_forward=True,
    ff_only=False,
    force=False,
    filter_spec=None,
    protocol_version=None,
    **kwargs,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: If True, allow non-fast-forward updates for matching refspecs
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []

        if refspecs is None:
            refspecs = [b"HEAD"]

        def determine_wants(remote_refs, *args, **kwargs):
            # Record which refs were selected (used again below to update
            # local refs) and request only objects we don't already have.
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, refspecs, force=force)
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        if filter_spec:
            filter_spec = filter_spec.encode("ascii")
        fetch_result = client.fetch(
            path,
            r,
            progress=errstream.write,
            determine_wants=determine_wants,
            filter_spec=filter_spec,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[b"HEAD"]
            old_tree_id = r[old_head].tree
        except KeyError:
            # No HEAD yet (e.g. empty repository)
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh in r.refs:
                try:
                    check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                except DivergedBranches as exc:
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        merge_result, conflicts = _do_merge(r, fetch_result.refs[lh])
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            r.refs[rh] = fetch_result.refs[lh]

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]

        # Update working tree to match the new HEAD
        # Skip if merge was performed as merge already updates the working tree
        if not merged and old_tree_id is not None:
            new_tree_id = r[b"HEAD"].tree
            blob_normalizer = r.get_blob_normalizer()
            update_working_tree(
                r, old_tree_id, new_tree_id, blob_normalizer=blob_normalizer
            )
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
def status(repo=".", ignored=False, untracked_files="normal"):
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
        "no": do not return untracked files
        "normal": return untracked directories, not their contents
        "all": include all files in untracked directories

    Using untracked_files="no" can be faster than "all" when the worktree
    contains many untracked files/directories.

    Using untracked_files="normal" provides a good balance, only showing
    directories that are entirely untracked without listing all their contents.

    Returns: GitStatus tuple,
        staged - dict with lists of staged paths (diff index/HEAD)
        unstaged - list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)
        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            # Normalize separators so results use forward slashes on Windows
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
  1714. def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
  1715. """Get path, is_dir for files in working dir from frompath.
  1716. Args:
  1717. frompath: Path to begin walk
  1718. basepath: Path to compare to
  1719. prune_dirnames: Optional callback to prune dirnames during os.walk
  1720. dirnames will be set to result of prune_dirnames(dirpath, dirnames)
  1721. """
  1722. for dirpath, dirnames, filenames in os.walk(frompath):
  1723. # Skip .git and below.
  1724. if ".git" in dirnames:
  1725. dirnames.remove(".git")
  1726. if dirpath != basepath:
  1727. continue
  1728. if ".git" in filenames:
  1729. filenames.remove(".git")
  1730. if dirpath != basepath:
  1731. continue
  1732. if dirpath != frompath:
  1733. yield dirpath, True
  1734. for filename in filenames:
  1735. filepath = os.path.join(dirpath, filename)
  1736. yield filepath, False
  1737. if prune_dirnames:
  1738. dirnames[:] = prune_dirnames(dirpath, dirnames)
def get_untracked_paths(
    frompath, basepath, index, exclude_ignored=False, untracked_files="all"
):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    with open_repo_closing(basepath) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def prune_dirnames(dirpath, dirnames):
        # Iterate in reverse so in-place deletion doesn't shift unvisited
        # indices.
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            ip = os.path.join(os.path.relpath(path, basepath), "")

            # Check if directory is ignored
            if ignore_manager.is_ignored(ip):
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert directory path to tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(
                        os.path.relpath(path, basepath)
                    )
                    if not exclude_ignored or not is_ignored:
                        rel_path = os.path.join(os.path.relpath(path, frompath), "")
                        untracked_dir_list.append(rel_path)
                        # Prune: no need to walk inside a fully-untracked dir
                        del dirnames[i]
        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath, basepath, prune_dirnames=prune_dirnames
        ):
            if not is_dir:
                ip = path_to_tree_path(basepath, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.relpath(ap, frompath)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath, basepath, prune_dirnames=prune_dirnames
        ):
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)
                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.relpath(ap, frompath)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list
    yield from ignored_dirs
def get_tree_changes(repo):
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object

    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            tree_id = r[b"HEAD"].tree
        except KeyError:
            # No HEAD (e.g. empty repository): diff against an empty tree,
            # so every index entry reports as an addition
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            # change[0] is the (old_path, new_path) pair of the change
            if not change[0][0]:
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                tracked_changes["modify"].append(change[0][0])
            else:
                # A differing old/new path pair would indicate a rename
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes
  1864. def daemon(path=".", address=None, port=None) -> None:
  1865. """Run a daemon serving Git requests over TCP/IP.
  1866. Args:
  1867. path: Path to the directory to serve.
  1868. address: Optional address to listen on (defaults to ::)
  1869. port: Optional port to listen on (defaults to TCP_GIT_PORT)
  1870. """
  1871. # TODO(jelmer): Support git-daemon-export-ok and --export-all.
  1872. backend = FileSystemBackend(path)
  1873. server = TCPGitServer(backend, address, port)
  1874. server.serve_forever()
  1875. def web_daemon(path=".", address=None, port=None) -> None:
  1876. """Run a daemon serving Git requests over HTTP.
  1877. Args:
  1878. path: Path to the directory to serve
  1879. address: Optional address to listen on (defaults to ::)
  1880. port: Optional port to listen on (defaults to 80)
  1881. """
  1882. from .web import (
  1883. WSGIRequestHandlerLogger,
  1884. WSGIServerLogger,
  1885. make_server,
  1886. make_wsgi_chain,
  1887. )
  1888. backend = FileSystemBackend(path)
  1889. app = make_wsgi_chain(backend)
  1890. server = make_server(
  1891. address,
  1892. port,
  1893. app,
  1894. handler_class=WSGIRequestHandlerLogger,
  1895. server_class=WSGIServerLogger,
  1896. )
  1897. server.serve_forever()
  1898. def upload_pack(path=".", inf=None, outf=None) -> int:
  1899. """Upload a pack file after negotiating its contents using smart protocol.
  1900. Args:
  1901. path: Path to the repository
  1902. inf: Input stream to communicate with client
  1903. outf: Output stream to communicate with client
  1904. """
  1905. if outf is None:
  1906. outf = getattr(sys.stdout, "buffer", sys.stdout)
  1907. if inf is None:
  1908. inf = getattr(sys.stdin, "buffer", sys.stdin)
  1909. path = os.path.expanduser(path)
  1910. backend = FileSystemBackend(path)
  1911. def send_fn(data) -> None:
  1912. outf.write(data)
  1913. outf.flush()
  1914. proto = Protocol(inf.read, send_fn)
  1915. handler = UploadPackHandler(backend, [path], proto)
  1916. # FIXME: Catch exceptions and write a single-line summary to outf.
  1917. handler.handle()
  1918. return 0
  1919. def receive_pack(path=".", inf=None, outf=None) -> int:
  1920. """Receive a pack file after negotiating its contents using smart protocol.
  1921. Args:
  1922. path: Path to the repository
  1923. inf: Input stream to communicate with client
  1924. outf: Output stream to communicate with client
  1925. """
  1926. if outf is None:
  1927. outf = getattr(sys.stdout, "buffer", sys.stdout)
  1928. if inf is None:
  1929. inf = getattr(sys.stdin, "buffer", sys.stdin)
  1930. path = os.path.expanduser(path)
  1931. backend = FileSystemBackend(path)
  1932. def send_fn(data) -> None:
  1933. outf.write(data)
  1934. outf.flush()
  1935. proto = Protocol(inf.read, send_fn)
  1936. handler = ReceivePackHandler(backend, [path], proto)
  1937. # FIXME: Catch exceptions and write a single-line summary to outf.
  1938. handler.handle()
  1939. return 0
  1940. def _make_branch_ref(name: Union[str, bytes]) -> Ref:
  1941. if isinstance(name, str):
  1942. name = name.encode(DEFAULT_ENCODING)
  1943. return LOCAL_BRANCH_PREFIX + name
  1944. def _make_tag_ref(name: Union[str, bytes]) -> Ref:
  1945. if isinstance(name, str):
  1946. name = name.encode(DEFAULT_ENCODING)
  1947. return LOCAL_TAG_PREFIX + name
  1948. def branch_delete(repo, name) -> None:
  1949. """Delete a branch.
  1950. Args:
  1951. repo: Path to the repository
  1952. name: Name of the branch
  1953. """
  1954. with open_repo_closing(repo) as r:
  1955. if isinstance(name, list):
  1956. names = name
  1957. else:
  1958. names = [name]
  1959. for name in names:
  1960. del r.refs[_make_branch_ref(name)]
  1961. def branch_create(repo, name, objectish=None, force=False) -> None:
  1962. """Create a branch.
  1963. Args:
  1964. repo: Path to the repository
  1965. name: Name of the new branch
  1966. objectish: Target object to point new branch at (defaults to HEAD)
  1967. force: Force creation of branch, even if it already exists
  1968. """
  1969. with open_repo_closing(repo) as r:
  1970. if objectish is None:
  1971. objectish = "HEAD"
  1972. object = parse_object(r, objectish)
  1973. refname = _make_branch_ref(name)
  1974. ref_message = b"branch: Created from " + objectish.encode(DEFAULT_ENCODING)
  1975. if force:
  1976. r.refs.set_if_equals(refname, None, object.id, message=ref_message)
  1977. else:
  1978. if not r.refs.add_if_new(refname, object.id, message=ref_message):
  1979. raise Error(f"Branch with name {name} already exists.")
def branch_list(repo):
    """List all branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        branches = list(r.refs.keys(base=LOCAL_BRANCH_PREFIX))

        # Check for branch.sort configuration
        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key: a leading "-" requests descending order
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name):
                # Resolve the branch tip commit and read the requested timestamp
                ref = LOCAL_BRANCH_PREFIX + branch_name
                sha = r.refs[ref]
                commit = r.object_store[sha]
                if sort_key == "committerdate":
                    return commit.commit_time
                else:  # authordate
                    return commit.author_time

            # Sort branches by date
            # Note: Python's sort naturally orders smaller values first (ascending)
            # For dates, this means oldest first by default
            # Use a stable sort with branch name as secondary key for consistent ordering
            if reverse:
                # For reverse sort, we want newest dates first but alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key, fall back to default
            branches.sort()

        return branches
  2028. def active_branch(repo):
  2029. """Return the active branch in the repository, if any.
  2030. Args:
  2031. repo: Repository to open
  2032. Returns:
  2033. branch name
  2034. Raises:
  2035. KeyError: if the repository does not have a working tree
  2036. IndexError: if HEAD is floating
  2037. """
  2038. with open_repo_closing(repo) as r:
  2039. active_ref = r.refs.follow(b"HEAD")[0][1]
  2040. if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
  2041. raise ValueError(active_ref)
  2042. return active_ref[len(LOCAL_BRANCH_PREFIX) :]
  2043. def get_branch_remote(repo):
  2044. """Return the active branch's remote name, if any.
  2045. Args:
  2046. repo: Repository to open
  2047. Returns:
  2048. remote name
  2049. Raises:
  2050. KeyError: if the repository does not have a working tree
  2051. """
  2052. with open_repo_closing(repo) as r:
  2053. branch_name = active_branch(r.path)
  2054. config = r.get_config()
  2055. try:
  2056. remote_name = config.get((b"branch", branch_name), b"remote")
  2057. except KeyError:
  2058. remote_name = b"origin"
  2059. return remote_name
  2060. def get_branch_merge(repo, branch_name=None):
  2061. """Return the branch's merge reference (upstream branch), if any.
  2062. Args:
  2063. repo: Repository to open
  2064. branch_name: Name of the branch (defaults to active branch)
  2065. Returns:
  2066. merge reference name (e.g. b"refs/heads/main")
  2067. Raises:
  2068. KeyError: if the branch does not have a merge configuration
  2069. """
  2070. with open_repo_closing(repo) as r:
  2071. if branch_name is None:
  2072. branch_name = active_branch(r.path)
  2073. config = r.get_config()
  2074. return config.get((b"branch", branch_name), b"merge")
  2075. def set_branch_tracking(repo, branch_name, remote_name, remote_ref):
  2076. """Set up branch tracking configuration.
  2077. Args:
  2078. repo: Repository to open
  2079. branch_name: Name of the local branch
  2080. remote_name: Name of the remote (e.g. b"origin")
  2081. remote_ref: Remote reference to track (e.g. b"refs/heads/main")
  2082. """
  2083. with open_repo_closing(repo) as r:
  2084. config = r.get_config()
  2085. config.set((b"branch", branch_name), b"remote", remote_name)
  2086. config.set((b"branch", branch_name), b"merge", remote_ref)
  2087. config.write_to_path()
def fetch(
    repo,
    remote_location=None,
    outstream=sys.stdout,
    errstream=default_bytes_err_stream,
    message=None,
    depth=None,
    prune=False,
    prune_tags=False,
    force=False,
    **kwargs,
):
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags
      force: Accepted for API compatibility; not referenced in this
        function body (NOTE(review): confirm whether it should be
        forwarded to the transport via **kwargs)
      **kwargs: Additional arguments passed to get_transport_and_path
    Returns:
      Dictionary with refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        # Default reflog message records where the objects came from.
        if message is None:
            message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth)
        # Only named remotes get refs/remotes/<name>/* tracking refs imported.
        if remote_name is not None:
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )
    # Trigger auto GC if needed; the repo is reopened so GC sees the
    # fully written fetch result.
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
    return fetch_result
def for_each_ref(
    repo: Union[Repo, str] = ".",
    pattern: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with
    Returns: List of bytes tuples with: (sha, object_type, ref_name),
      sorted by ref name; HEAD is always excluded.
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)
    with open_repo_closing(repo) as r:
        refs = r.get_refs()
        if pattern:
            matching_refs: dict[bytes, bytes] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False
                # git for-each-ref uses glob (7) style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                if len(ref_parts) > len(pattern_parts):
                    continue
                # NOTE(review): a ref with fewer components than the pattern
                # is accepted if its components match a prefix of the pattern
                # (zip stops at the shorter sequence) — confirm intended.
                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break
                if matches:
                    matching_refs[ref] = sha
            refs = matching_refs
        # Resolve each sha to its object type and sort by ref name.
        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]
        return ret
  2177. def ls_remote(remote, config: Optional[Config] = None, **kwargs):
  2178. """List the refs in a remote.
  2179. Args:
  2180. remote: Remote repository location
  2181. config: Configuration to use
  2182. Returns:
  2183. LsRemoteResult object with refs and symrefs
  2184. """
  2185. if config is None:
  2186. config = StackedConfig.default()
  2187. client, host_path = get_transport_and_path(remote, config=config, **kwargs)
  2188. return client.get_refs(host_path)
  2189. def repack(repo) -> None:
  2190. """Repack loose files in a repository.
  2191. Currently this only packs loose objects.
  2192. Args:
  2193. repo: Path to the repository
  2194. """
  2195. with open_repo_closing(repo) as r:
  2196. r.object_store.pack_loose_objects()
  2197. def pack_objects(
  2198. repo,
  2199. object_ids,
  2200. packf,
  2201. idxf,
  2202. delta_window_size=None,
  2203. deltify=None,
  2204. reuse_deltas=True,
  2205. pack_index_version=None,
  2206. ) -> None:
  2207. """Pack objects into a file.
  2208. Args:
  2209. repo: Path to the repository
  2210. object_ids: List of object ids to write
  2211. packf: File-like object to write to
  2212. idxf: File-like object to write to (can be None)
  2213. delta_window_size: Sliding window size for searching for deltas;
  2214. Set to None for default window size.
  2215. deltify: Whether to deltify objects
  2216. reuse_deltas: Allow reuse of existing deltas while deltifying
  2217. pack_index_version: Pack index version to use (1, 2, or 3). If None, uses default version.
  2218. """
  2219. with open_repo_closing(repo) as r:
  2220. entries, data_sum = write_pack_from_container(
  2221. packf.write,
  2222. r.object_store,
  2223. [(oid, None) for oid in object_ids],
  2224. deltify=deltify,
  2225. delta_window_size=delta_window_size,
  2226. reuse_deltas=reuse_deltas,
  2227. )
  2228. if idxf is not None:
  2229. entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
  2230. write_pack_index(idxf, entries, data_sum, version=pack_index_version)
  2231. def ls_tree(
  2232. repo,
  2233. treeish=b"HEAD",
  2234. outstream=sys.stdout,
  2235. recursive=False,
  2236. name_only=False,
  2237. ) -> None:
  2238. """List contents of a tree.
  2239. Args:
  2240. repo: Path to the repository
  2241. treeish: Tree id to list
  2242. outstream: Output stream (defaults to stdout)
  2243. recursive: Whether to recursively list files
  2244. name_only: Only print item name
  2245. """
  2246. def list_tree(store, treeid, base) -> None:
  2247. for name, mode, sha in store[treeid].iteritems():
  2248. if base:
  2249. name = posixpath.join(base, name)
  2250. if name_only:
  2251. outstream.write(name + b"\n")
  2252. else:
  2253. outstream.write(pretty_format_tree_entry(name, mode, sha))
  2254. if stat.S_ISDIR(mode) and recursive:
  2255. list_tree(store, sha, name)
  2256. with open_repo_closing(repo) as r:
  2257. tree = parse_tree(r, treeish)
  2258. list_tree(r.object_store, tree.id, "")
  2259. def remote_add(repo, name: Union[bytes, str], url: Union[bytes, str]) -> None:
  2260. """Add a remote.
  2261. Args:
  2262. repo: Path to the repository
  2263. name: Remote name
  2264. url: Remote URL
  2265. """
  2266. if not isinstance(name, bytes):
  2267. name = name.encode(DEFAULT_ENCODING)
  2268. if not isinstance(url, bytes):
  2269. url = url.encode(DEFAULT_ENCODING)
  2270. with open_repo_closing(repo) as r:
  2271. c = r.get_config()
  2272. section = (b"remote", name)
  2273. if c.has_section(section):
  2274. raise RemoteExists(section)
  2275. c.set(section, b"url", url)
  2276. c.write_to_path()
  2277. def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
  2278. """Remove a remote.
  2279. Args:
  2280. repo: Path to the repository
  2281. name: Remote name
  2282. """
  2283. if not isinstance(name, bytes):
  2284. name = name.encode(DEFAULT_ENCODING)
  2285. with open_repo_closing(repo) as r:
  2286. c = r.get_config()
  2287. section = (b"remote", name)
  2288. del c[section]
  2289. c.write_to_path()
  2290. def _quote_path(path: str) -> str:
  2291. """Quote a path using C-style quoting similar to git's core.quotePath.
  2292. Args:
  2293. path: Path to quote
  2294. Returns:
  2295. Quoted path string
  2296. """
  2297. # Check if path needs quoting (non-ASCII or special characters)
  2298. needs_quoting = False
  2299. for char in path:
  2300. if ord(char) > 127 or char in '"\\':
  2301. needs_quoting = True
  2302. break
  2303. if not needs_quoting:
  2304. return path
  2305. # Apply C-style quoting
  2306. quoted = '"'
  2307. for char in path:
  2308. if ord(char) > 127:
  2309. # Non-ASCII character, encode as octal escape
  2310. utf8_bytes = char.encode("utf-8")
  2311. for byte in utf8_bytes:
  2312. quoted += f"\\{byte:03o}"
  2313. elif char == '"':
  2314. quoted += '\\"'
  2315. elif char == "\\":
  2316. quoted += "\\\\"
  2317. else:
  2318. quoted += char
  2319. quoted += '"'
  2320. return quoted
def check_ignore(repo, paths, no_index=False, quote_path=True):
    r"""Debug gitignore files.

    This is a generator function: it lazily yields the paths from *paths*
    that are ignored by the repository's ignore rules.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
      quote_path: If True, quote non-ASCII characters in returned paths using
        C-style octal escapes (e.g. "тест.txt" becomes "\\321\\202\\320\\265\\321\\201\\321\\202.txt").
        If False, return raw unicode paths.
    Yields: Ignored paths (relative when an absolute path was supplied)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for original_path in paths:
            # Paths already tracked in the index are never reported as
            # ignored (unless the index check is disabled).
            if not no_index and path_to_tree_path(r.path, original_path) in index:
                continue
            # Preserve whether the original path had a trailing slash
            had_trailing_slash = original_path.endswith(("/", os.path.sep))
            if os.path.isabs(original_path):
                # Make absolute paths relative to the repo root.
                path = os.path.relpath(original_path, r.path)
                # Normalize Windows paths to use forward slashes
                if os.path.sep != "/":
                    path = path.replace(os.path.sep, "/")
            else:
                path = original_path
            # Restore trailing slash if it was in the original
            if had_trailing_slash and not path.endswith("/"):
                path = path + "/"
            # For directories, check with trailing slash to get correct ignore behavior
            test_path = path
            path_without_slash = path.rstrip("/")
            is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))
            # If this is a directory path, ensure we test it correctly
            if is_directory and not path.endswith("/"):
                test_path = path + "/"
            if ignore_manager.is_ignored(test_path):
                # Return relative path (like git does) when absolute path was provided
                if os.path.isabs(original_path):
                    output_path = path
                else:
                    output_path = original_path
                yield _quote_path(output_path) if quote_path else output_path
def update_head(repo, target, detached=False, new_branch=None) -> None:
    """Update HEAD to point at a new branch/commit.

    Note that this does not actually update the working tree.

    Args:
      repo: Path to the repository
      detached: Create a detached head
      target: Branch or committish to switch to
      new_branch: New branch to create
    """
    with open_repo_closing(repo) as r:
        # When creating a new branch, the branch ref is written first and
        # HEAD is re-pointed at it at the end.
        if new_branch is not None:
            to_set = _make_branch_ref(new_branch)
        else:
            to_set = b"HEAD"
        if detached:
            # TODO(jelmer): Provide some way so that the actual ref gets
            # updated rather than what it points to, so the delete isn't
            # necessary.
            del r.refs[to_set]
            r.refs[to_set] = parse_commit(r, target).id
        else:
            # Symbolic update: HEAD (or the new branch ref) follows the
            # resolved target ref.
            r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
        if new_branch is not None:
            r.refs.set_symbolic_ref(b"HEAD", to_set)
  2388. def checkout(
  2389. repo,
  2390. target: Union[bytes, str],
  2391. force: bool = False,
  2392. new_branch: Optional[Union[bytes, str]] = None,
  2393. ) -> None:
  2394. """Switch to a branch or commit, updating both HEAD and the working tree.
  2395. This is similar to 'git checkout', allowing you to switch to a branch,
  2396. tag, or specific commit. Unlike update_head, this function also updates
  2397. the working tree to match the target.
  2398. Args:
  2399. repo: Path to repository or repository object
  2400. target: Branch name, tag, or commit SHA to checkout
  2401. force: Force checkout even if there are local changes
  2402. new_branch: Create a new branch at target (like git checkout -b)
  2403. Raises:
  2404. CheckoutError: If checkout cannot be performed due to conflicts
  2405. KeyError: If the target reference cannot be found
  2406. """
  2407. with open_repo_closing(repo) as r:
  2408. if isinstance(target, str):
  2409. target = target.encode(DEFAULT_ENCODING)
  2410. if isinstance(new_branch, str):
  2411. new_branch = new_branch.encode(DEFAULT_ENCODING)
  2412. # Parse the target to get the commit
  2413. target_commit = parse_commit(r, target)
  2414. target_tree_id = target_commit.tree
  2415. # Get current HEAD tree for comparison
  2416. try:
  2417. current_head = r.refs[b"HEAD"]
  2418. current_tree_id = r[current_head].tree
  2419. except KeyError:
  2420. # No HEAD yet (empty repo)
  2421. current_tree_id = None
  2422. # Check for uncommitted changes if not forcing
  2423. if not force and current_tree_id is not None:
  2424. status_report = status(r)
  2425. changes = []
  2426. # staged is a dict with 'add', 'delete', 'modify' keys
  2427. if isinstance(status_report.staged, dict):
  2428. changes.extend(status_report.staged.get("add", []))
  2429. changes.extend(status_report.staged.get("delete", []))
  2430. changes.extend(status_report.staged.get("modify", []))
  2431. # unstaged is a list
  2432. changes.extend(status_report.unstaged)
  2433. if changes:
  2434. # Check if any changes would conflict with checkout
  2435. target_tree = r[target_tree_id]
  2436. for change in changes:
  2437. if isinstance(change, str):
  2438. change = change.encode(DEFAULT_ENCODING)
  2439. try:
  2440. target_tree.lookup_path(r.object_store.__getitem__, change)
  2441. # File exists in target tree - would overwrite local changes
  2442. raise CheckoutError(
  2443. f"Your local changes to '{change.decode()}' would be "
  2444. "overwritten by checkout. Please commit or stash before switching."
  2445. )
  2446. except KeyError:
  2447. # File doesn't exist in target tree - change can be preserved
  2448. pass
  2449. # Get configuration for working directory update
  2450. config = r.get_config()
  2451. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
  2452. # Import validation functions
  2453. from .index import validate_path_element_default, validate_path_element_ntfs
  2454. if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
  2455. validate_path_element = validate_path_element_ntfs
  2456. else:
  2457. validate_path_element = validate_path_element_default
  2458. if config.get_boolean(b"core", b"symlinks", True):
  2459. # Import symlink function
  2460. from .index import symlink
  2461. symlink_fn = symlink
  2462. else:
  2463. def symlink_fn(source, target) -> None: # type: ignore
  2464. mode = "w" + ("b" if isinstance(source, bytes) else "")
  2465. with open(target, mode) as f:
  2466. f.write(source)
  2467. # Update working tree
  2468. update_working_tree(
  2469. r,
  2470. current_tree_id,
  2471. target_tree_id,
  2472. honor_filemode=honor_filemode,
  2473. validate_path_element=validate_path_element,
  2474. symlink_fn=symlink_fn,
  2475. force_remove_untracked=force,
  2476. )
  2477. # Update HEAD
  2478. if new_branch:
  2479. # Create new branch and switch to it
  2480. branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
  2481. update_head(r, new_branch)
  2482. # Set up tracking if creating from a remote branch
  2483. from .refs import LOCAL_REMOTE_PREFIX, parse_remote_ref
  2484. if target.startswith(LOCAL_REMOTE_PREFIX):
  2485. try:
  2486. remote_name, branch_name = parse_remote_ref(target)
  2487. # Set tracking to refs/heads/<branch> on the remote
  2488. set_branch_tracking(
  2489. r, new_branch, remote_name, b"refs/heads/" + branch_name
  2490. )
  2491. except ValueError:
  2492. # Invalid remote ref format, skip tracking setup
  2493. pass
  2494. else:
  2495. # Check if target is a branch name (with or without refs/heads/ prefix)
  2496. branch_ref = None
  2497. if target in r.refs.keys():
  2498. if target.startswith(LOCAL_BRANCH_PREFIX):
  2499. branch_ref = target
  2500. else:
  2501. # Try adding refs/heads/ prefix
  2502. potential_branch = _make_branch_ref(target)
  2503. if potential_branch in r.refs.keys():
  2504. branch_ref = potential_branch
  2505. if branch_ref:
  2506. # It's a branch - update HEAD symbolically
  2507. update_head(r, branch_ref)
  2508. else:
  2509. # It's a tag, other ref, or commit SHA - detached HEAD
  2510. update_head(r, target_commit.id.decode("ascii"), detached=True)
  2511. def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None) -> None:
  2512. """Reset the file to specific commit or branch.
  2513. Args:
  2514. repo: dulwich Repo object
  2515. file_path: file to reset, relative to the repository path
  2516. target: branch or commit or b'HEAD' to reset
  2517. """
  2518. tree = parse_tree(repo, treeish=target)
  2519. tree_path = _fs_to_tree_path(file_path)
  2520. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  2521. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  2522. blob = repo.object_store[file_entry[1]]
  2523. mode = file_entry[0]
  2524. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
  2525. @replace_me(since="0.22.9", remove_in="0.24.0")
  2526. def checkout_branch(repo, target: Union[bytes, str], force: bool = False) -> None:
  2527. """Switch branches or restore working tree files.
  2528. This is now a wrapper around the general checkout() function.
  2529. Preserved for backward compatibility.
  2530. Args:
  2531. repo: dulwich Repo object
  2532. target: branch name or commit sha to checkout
  2533. force: true or not to force checkout
  2534. """
  2535. # Simply delegate to the new checkout function
  2536. return checkout(repo, target, force=force)
def sparse_checkout(
    repo, patterns=None, force: bool = False, cone: Union[bool, None] = None
):
    """Perform a sparse checkout in the repository (either 'full' or 'cone mode').

    Perform sparse checkout in either 'cone' (directory-based) mode or
    'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.

    If ``cone`` is ``None``, the mode is inferred from the repository's
    ``core.sparseCheckoutCone`` config setting.

    Steps:
      1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
      2) Determine which paths in the index are included vs. excluded.
         - If ``cone=True``, use "cone-compatible" directory-based logic.
         - If ``cone=False``, use standard .gitignore-style matching.
      3) Update the index's skip-worktree bits and add/remove files in
         the working tree accordingly.
      4) If ``force=False``, refuse to remove files that have local modifications.

    Args:
      repo: Path to the repository or a Repo object.
      patterns: Optional list of sparse-checkout patterns to write.
      force: Whether to force removal of locally modified files (default False).
      cone: Boolean indicating cone mode (True/False). If None, read from config.

    Returns:
      None

    Raises:
      Error: If no patterns are given and none are stored in the repository.
      CheckoutError: If applying the paths hits a sparse-checkout conflict.
    """
    with open_repo_closing(repo) as repo_obj:
        # --- 0) Possibly infer 'cone' from config ---
        if cone is None:
            cone = repo_obj.infer_cone_mode()

        # --- 1) Read or write patterns ---
        if patterns is None:
            lines = repo_obj.get_sparse_checkout_patterns()
            if lines is None:
                raise Error("No sparse checkout patterns found.")
        else:
            lines = patterns
            repo_obj.set_sparse_checkout_patterns(patterns)

        # --- 2) Determine the set of included paths ---
        included_paths = determine_included_paths(repo_obj, lines, cone)

        # --- 3) Apply those results to the index & working tree ---
        try:
            apply_included_paths(repo_obj, included_paths, force=force)
        except SparseCheckoutConflictError as exc:
            # Re-raise as the porcelain-level checkout error, keeping context.
            raise CheckoutError(*exc.args) from exc
  2580. def cone_mode_init(repo):
  2581. """Initialize a repository to use sparse checkout in 'cone' mode.
  2582. Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
  2583. Writes an initial ``.git/info/sparse-checkout`` file that includes only
  2584. top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
  2585. Then performs a sparse checkout to update the working tree accordingly.
  2586. If no directories are specified, then only top-level files are included:
  2587. https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
  2588. Args:
  2589. repo: Path to the repository or a Repo object.
  2590. Returns:
  2591. None
  2592. """
  2593. with open_repo_closing(repo) as repo_obj:
  2594. repo_obj.configure_for_cone_mode()
  2595. patterns = ["/*", "!/*/"] # root-level files only
  2596. sparse_checkout(repo_obj, patterns, force=True, cone=True)
  2597. def cone_mode_set(repo, dirs, force=False):
  2598. """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
  2599. Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
  2600. Writes new patterns so that only the specified directories (and top-level files)
  2601. remain in the working tree, and applies the sparse checkout update.
  2602. Args:
  2603. repo: Path to the repository or a Repo object.
  2604. dirs: List of directory names to include.
  2605. force: Whether to forcibly discard local modifications (default False).
  2606. Returns:
  2607. None
  2608. """
  2609. with open_repo_closing(repo) as repo_obj:
  2610. repo_obj.configure_for_cone_mode()
  2611. repo_obj.set_cone_mode_patterns(dirs=dirs)
  2612. new_patterns = repo_obj.get_sparse_checkout_patterns()
  2613. # Finally, apply the patterns and update the working tree
  2614. sparse_checkout(repo_obj, new_patterns, force=force, cone=True)
  2615. def cone_mode_add(repo, dirs, force=False):
  2616. """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
  2617. Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
  2618. lines to include the specified directories, and then performs a sparse
  2619. checkout to update the working tree accordingly.
  2620. Args:
  2621. repo: Path to the repository or a Repo object.
  2622. dirs: List of directory names to add to the sparse-checkout.
  2623. force: Whether to forcibly discard local modifications (default False).
  2624. Returns:
  2625. None
  2626. """
  2627. with open_repo_closing(repo) as repo_obj:
  2628. repo_obj.configure_for_cone_mode()
  2629. # Do not pass base patterns as dirs
  2630. base_patterns = ["/*", "!/*/"]
  2631. existing_dirs = [
  2632. pat.strip("/")
  2633. for pat in repo_obj.get_sparse_checkout_patterns()
  2634. if pat not in base_patterns
  2635. ]
  2636. added_dirs = existing_dirs + (dirs or [])
  2637. repo_obj.set_cone_mode_patterns(dirs=added_dirs)
  2638. new_patterns = repo_obj.get_sparse_checkout_patterns()
  2639. sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
  2640. def check_mailmap(repo, contact):
  2641. """Check canonical name and email of contact.
  2642. Args:
  2643. repo: Path to the repository
  2644. contact: Contact name and/or email
  2645. Returns: Canonical contact data
  2646. """
  2647. with open_repo_closing(repo) as r:
  2648. from .mailmap import Mailmap
  2649. try:
  2650. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  2651. except FileNotFoundError:
  2652. mailmap = Mailmap()
  2653. return mailmap.lookup(contact)
  2654. def fsck(repo):
  2655. """Check a repository.
  2656. Args:
  2657. repo: A path to the repository
  2658. Returns: Iterator over errors/warnings
  2659. """
  2660. with open_repo_closing(repo) as r:
  2661. # TODO(jelmer): check pack files
  2662. # TODO(jelmer): check graph
  2663. # TODO(jelmer): check refs
  2664. for sha in r.object_store:
  2665. o = r.object_store[sha]
  2666. try:
  2667. o.check()
  2668. except Exception as e:
  2669. yield (sha, e)
  2670. def stash_list(repo):
  2671. """List all stashes in a repository."""
  2672. with open_repo_closing(repo) as r:
  2673. from .stash import Stash
  2674. stash = Stash.from_repo(r)
  2675. return enumerate(list(stash.stashes()))
  2676. def stash_push(repo) -> None:
  2677. """Push a new stash onto the stack."""
  2678. with open_repo_closing(repo) as r:
  2679. from .stash import Stash
  2680. stash = Stash.from_repo(r)
  2681. stash.push()
  2682. def stash_pop(repo) -> None:
  2683. """Pop a stash from the stack."""
  2684. with open_repo_closing(repo) as r:
  2685. from .stash import Stash
  2686. stash = Stash.from_repo(r)
  2687. stash.pop(0)
  2688. def stash_drop(repo, index) -> None:
  2689. """Drop a stash from the stack."""
  2690. with open_repo_closing(repo) as r:
  2691. from .stash import Stash
  2692. stash = Stash.from_repo(r)
  2693. stash.drop(index)
  2694. def ls_files(repo):
  2695. """List all files in an index."""
  2696. with open_repo_closing(repo) as r:
  2697. return sorted(r.open_index())
  2698. def find_unique_abbrev(object_store, object_id):
  2699. """For now, just return 7 characters."""
  2700. # TODO(jelmer): Add some logic here to return a number of characters that
  2701. # scales relative with the size of the repository
  2702. return object_id.decode("ascii")[:7]
def describe(repo, abbrev=None):
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7
    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key = key.decode()
            obj = r.get_object(value)
            # Only refs whose name contains "tags" are considered.
            if "tags" not in key:
                continue
            _, tag = key.rsplit("/", 1)
            try:
                # Annotated tag case
                commit = obj.object
                commit = r.get_object(commit[1])
            except AttributeError:
                # Lightweight tag case - obj is already the commit
                commit = obj
            # Map tag name -> [commit datetime (UTC), commit sha] so tags
            # can be ordered newest-first below.
            tags[tag] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]
        sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)
        # Get the latest commit
        latest_commit = r[r.head()]
        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
        # We're now 0 commits from the top
        commit_count = 0
        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag in sorted_tags:
                tag_name = tag[0]
                tag_commit = tag[1][1]
                if commit_id == tag_commit:
                    # First tagged ancestor found; format like git describe.
                    if commit_count == 0:
                        return tag_name
                    else:
                        return "{}-{}-g{}".format(
                            tag_name,
                            commit_count,
                            latest_commit.id.decode("ascii")[abbrev_slice],
                        )
            commit_count += 1
        # Return plain commit if no parent tag can be found
        return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
  2764. def get_object_by_path(repo, path, committish=None):
  2765. """Get an object by path.
  2766. Args:
  2767. repo: A path to the repository
  2768. path: Path to look up
  2769. committish: Commit to look up path in
  2770. Returns: A `ShaFile` object
  2771. """
  2772. if committish is None:
  2773. committish = "HEAD"
  2774. # Get the repository
  2775. with open_repo_closing(repo) as r:
  2776. commit = parse_commit(r, committish)
  2777. base_tree = commit.tree
  2778. if not isinstance(path, bytes):
  2779. path = commit_encode(commit, path)
  2780. (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  2781. return r[sha]
  2782. def write_tree(repo):
  2783. """Write a tree object from the index.
  2784. Args:
  2785. repo: Repository for which to write tree
  2786. Returns: tree id for the tree that was written
  2787. """
  2788. with open_repo_closing(repo) as r:
  2789. return r.open_index().commit(r.object_store)
def _do_merge(
    r,
    merge_commit_id,
    no_commit=False,
    no_ff=False,
    message=None,
    author=None,
    committer=None,
):
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts

    Raises:
      Error: if there is no HEAD reference or no common ancestor exists
    """
    from .graph import find_merge_base
    from .merge import three_way_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    merge_commit = r[merge_commit_id]

    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])

    if not merge_bases:
        raise Error("No common ancestor found")

    # Use the first merge base
    base_commit_id = merge_bases[0]

    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Check for fast-forward
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge: HEAD is an ancestor of the merge target,
        # so just advance HEAD (no merge commit is created).
        r.refs[b"HEAD"] = merge_commit_id

        # Update the working directory
        update_working_tree(r, head_commit.tree, merge_commit.tree)
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Perform three-way merge
    base_commit = r[base_commit_id]
    merged_tree, conflicts = three_way_merge(
        r.object_store, base_commit, head_commit, merge_commit
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory (even when conflicted, so the
    # caller can inspect/resolve the conflicted state on disk).
    update_working_tree(r, head_commit.tree, merged_tree.id)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author

    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n"
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id

    return (merge_commit_obj.id, [])
  2880. def merge(
  2881. repo,
  2882. committish,
  2883. no_commit=False,
  2884. no_ff=False,
  2885. message=None,
  2886. author=None,
  2887. committer=None,
  2888. ):
  2889. """Merge a commit into the current branch.
  2890. Args:
  2891. repo: Repository to merge into
  2892. committish: Commit to merge
  2893. no_commit: If True, do not create a merge commit
  2894. no_ff: If True, force creation of a merge commit
  2895. message: Optional merge commit message
  2896. author: Optional author for merge commit
  2897. committer: Optional committer for merge commit
  2898. Returns:
  2899. Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
  2900. if no_commit=True or there were conflicts
  2901. Raises:
  2902. Error: If there is no HEAD reference or commit cannot be found
  2903. """
  2904. with open_repo_closing(repo) as r:
  2905. # Parse the commit to merge
  2906. try:
  2907. merge_commit_id = parse_commit(r, committish).id
  2908. except KeyError:
  2909. raise Error(f"Cannot find commit '{committish}'")
  2910. result = _do_merge(
  2911. r, merge_commit_id, no_commit, no_ff, message, author, committer
  2912. )
  2913. # Trigger auto GC if needed
  2914. from .gc import maybe_auto_gc
  2915. maybe_auto_gc(r)
  2916. return result
  2917. def unpack_objects(pack_path, target="."):
  2918. """Unpack objects from a pack file into the repository.
  2919. Args:
  2920. pack_path: Path to the pack file to unpack
  2921. target: Path to the repository to unpack into
  2922. Returns:
  2923. Number of objects unpacked
  2924. """
  2925. from .pack import Pack
  2926. with open_repo_closing(target) as r:
  2927. pack_basename = os.path.splitext(pack_path)[0]
  2928. with Pack(pack_basename) as pack:
  2929. count = 0
  2930. for unpacked in pack.iter_unpacked():
  2931. obj = unpacked.sha_file()
  2932. r.object_store.add_object(obj)
  2933. count += 1
  2934. return count
  2935. def merge_tree(repo, base_tree, our_tree, their_tree):
  2936. """Perform a three-way tree merge without touching the working directory.
  2937. This is similar to git merge-tree, performing a merge at the tree level
  2938. without creating commits or updating any references.
  2939. Args:
  2940. repo: Repository containing the trees
  2941. base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
  2942. our_tree: Tree-ish of our side of the merge
  2943. their_tree: Tree-ish of their side of the merge
  2944. Returns:
  2945. tuple: A tuple of (merged_tree_id, conflicts) where:
  2946. - merged_tree_id is the SHA-1 of the merged tree
  2947. - conflicts is a list of paths (as bytes) that had conflicts
  2948. Raises:
  2949. KeyError: If any of the tree-ish arguments cannot be resolved
  2950. """
  2951. from .merge import Merger
  2952. with open_repo_closing(repo) as r:
  2953. # Resolve tree-ish arguments to actual trees
  2954. base = parse_tree(r, base_tree) if base_tree else None
  2955. ours = parse_tree(r, our_tree)
  2956. theirs = parse_tree(r, their_tree)
  2957. # Perform the merge
  2958. merger = Merger(r.object_store)
  2959. merged_tree, conflicts = merger.merge_trees(base, ours, theirs)
  2960. # Add the merged tree to the object store
  2961. r.object_store.add_object(merged_tree)
  2962. return merged_tree.id, conflicts
def cherry_pick(
    repo,
    committish,
    no_commit=False,
    continue_=False,
    abort=False,
):
    r"""Cherry-pick a commit onto the current branch.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick
      no_commit: If True, do not create a commit after applying changes
      continue\_: Continue an in-progress cherry-pick after resolving conflicts
      abort: Abort an in-progress cherry-pick

    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from .merge import three_way_merge

    with open_repo_closing(repo) as r:
        # --- Abort mode: discard any saved cherry-pick state and restore HEAD ---
        if abort:
            # Remove the state files; both are best-effort since either may
            # be absent depending on where the previous attempt stopped.
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass
            # Reset index to HEAD (working-tree files are left as-is).
            r.reset_index(r[b"HEAD"].tree)
            return None

        # --- Continue mode: finish a conflicted cherry-pick after resolution ---
        if continue_:
            # CHERRY_PICK_HEAD holds the id of the commit being picked; its
            # absence means there is nothing to continue.
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")

            # Refuse to commit while conflict stages remain in the index.
            conflicts = list(r.open_index().conflicts())
            if conflicts:
                raise Error("Unresolved conflicts remain")

            # Build the tree from the (now resolved) index.
            tree_id = r.open_index().commit(r.object_store)

            # Prefer the message saved at conflict time; fall back to the
            # original commit's message.
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                message = cherry_pick_commit.message

            # Author info is carried over from the picked commit; committer
            # defaults to the current user via do_commit.
            new_commit = r.do_commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )

            # Clean up state files now that the pick is committed.
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass
            return new_commit

        # --- Normal mode: apply the named commit on top of HEAD ---
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        # Resolve the commit to cherry-pick.
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(f"Cannot find commit '{committish}'")

        # A root commit has no parent to diff against, so it cannot be picked.
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")

        # The first parent serves as the merge base so that only the picked
        # commit's own changes are replayed.
        parent_commit = r[cherry_pick_commit.parents[0]]

        # Three-way merge: base = parent of pick, ours = HEAD, theirs = pick.
        try:
            merged_tree, conflicts = three_way_merge(
                r.object_store, parent_commit, head_commit, cherry_pick_commit
            )
        except Exception as e:
            raise Error(f"Cherry-pick failed: {e}")

        # Store the merged tree, then sync index and working tree to it.
        r.object_store.add_object(merged_tree)
        r.reset_index(merged_tree.id)
        update_working_tree(r, head_commit.tree, merged_tree.id)

        if conflicts:
            # Persist state so the user can resolve and run --continue later.
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")
            # Save the commit message for reuse at --continue time.
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)
            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )

        if no_commit:
            # Changes are staged/applied but no commit is created.
            return None

        # Create the commit, preserving the original author identity/time.
        new_commit = r.do_commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )
        return new_commit
def revert(
    repo,
    commits,
    no_commit=False,
    message=None,
    author=None,
    committer=None,
):
    """Revert one or more commits.

    This creates a new commit that undoes the changes introduced by the
    specified commits. Unlike reset, revert creates a new commit that
    preserves history.

    Args:
      repo: Path to repository or repository object
      commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
      no_commit: If True, apply changes to index/working tree but don't commit
      message: Optional commit message (default: "Revert <original subject>")
      author: Optional author for revert commit
      committer: Optional committer for revert commit

    Returns:
      SHA1 of the new revert commit, or None if no_commit=True

    Raises:
      Error: If revert fails due to conflicts or other issues
    """
    from .merge import three_way_merge

    # Accept a single commit-ish as a convenience; normalize to a list.
    if isinstance(commits, (str, bytes)):
        commits = [commits]

    with open_repo_closing(repo) as r:
        # Resolve every commit-ish up front so a bad argument fails before
        # any working-tree changes are made.
        commits_to_revert = []
        for commit_ref in commits:
            if isinstance(commit_ref, str):
                commit_ref = commit_ref.encode("utf-8")
            commit = parse_commit(r, commit_ref)
            commits_to_revert.append(commit)

        # Get current HEAD
        try:
            head_commit_id = r.refs[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        head_commit = r[head_commit_id]
        # current_tree tracks what the working tree currently matches, so
        # each update_working_tree call gets the right "from" tree.
        current_tree = head_commit.tree

        # Process commits in order; each iteration reverts one commit and
        # (unless no_commit) advances HEAD to the new revert commit.
        for commit_to_revert in commits_to_revert:
            # For revert, we want to apply the inverse of the commit
            # This means using the commit's tree as "base" and its parent as "theirs"
            if not commit_to_revert.parents:
                raise Error(
                    f"Cannot revert commit {commit_to_revert.id} - it has no parents"
                )

            # For simplicity, we only handle commits with one parent (no merge commits)
            if len(commit_to_revert.parents) > 1:
                raise Error(
                    f"Cannot revert merge commit {commit_to_revert.id} - not yet implemented"
                )

            parent_commit = r[commit_to_revert.parents[0]]

            # Perform three-way merge:
            # - base: the commit we're reverting (what we want to remove)
            # - ours: current HEAD (what we have now)
            # - theirs: parent of commit being reverted (what we want to go back to)
            merged_tree, conflicts = three_way_merge(
                r.object_store,
                commit_to_revert,  # base
                r[head_commit_id],  # ours
                parent_commit,  # theirs
            )

            if conflicts:
                # Materialize the conflicted result in the working tree, then
                # abort — no revert commit is created for this or later commits.
                update_working_tree(r, current_tree, merged_tree.id)
                conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
                raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")

            # Add merged tree to object store
            r.object_store.add_object(merged_tree)

            # Update working tree
            update_working_tree(r, current_tree, merged_tree.id)
            current_tree = merged_tree.id

            if not no_commit:
                # Create revert commit
                revert_commit = Commit()
                revert_commit.tree = merged_tree.id
                revert_commit.parents = [head_commit_id]

                # Set author/committer (resolved once; reused for every
                # revert commit in this call)
                if author is None:
                    author = get_user_identity(r.get_config_stack())
                if committer is None:
                    committer = author
                revert_commit.author = author
                revert_commit.committer = committer

                # Set timestamps
                timestamp = int(time.time())
                timezone = 0  # UTC
                revert_commit.author_time = timestamp
                revert_commit.author_timezone = timezone
                revert_commit.commit_time = timestamp
                revert_commit.commit_timezone = timezone

                # Set message
                if message is None:
                    # Extract original commit subject
                    original_message = commit_to_revert.message
                    if isinstance(original_message, bytes):
                        original_message = original_message.decode("utf-8", "replace")
                    subject = original_message.split("\n")[0]
                    message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
                elif isinstance(message, str):
                    message = message.encode("utf-8")
                # NOTE(review): `message` is rebound above, so when several
                # commits are reverted with message=None, the message generated
                # for the FIRST revert is reused for all later ones — confirm
                # this is intended.
                revert_commit.message = message

                # Add commit to object store
                r.object_store.add_object(revert_commit)

                # Update HEAD so the next iteration builds on this revert.
                r.refs[b"HEAD"] = revert_commit.id
                head_commit_id = revert_commit.id

        # With no_commit=True nothing was committed, so there is no new sha.
        return head_commit_id if not no_commit else None
  3203. def gc(
  3204. repo,
  3205. auto: bool = False,
  3206. aggressive: bool = False,
  3207. prune: bool = True,
  3208. grace_period: Optional[int] = 1209600, # 2 weeks default
  3209. dry_run: bool = False,
  3210. progress=None,
  3211. ):
  3212. """Run garbage collection on a repository.
  3213. Args:
  3214. repo: Path to the repository or a Repo object
  3215. auto: If True, only run gc if needed
  3216. aggressive: If True, use more aggressive settings
  3217. prune: If True, prune unreachable objects
  3218. grace_period: Grace period in seconds for pruning (default 2 weeks)
  3219. dry_run: If True, only report what would be done
  3220. progress: Optional progress callback
  3221. Returns:
  3222. GCStats object with garbage collection statistics
  3223. """
  3224. from .gc import garbage_collect
  3225. with open_repo_closing(repo) as r:
  3226. return garbage_collect(
  3227. r,
  3228. auto=auto,
  3229. aggressive=aggressive,
  3230. prune=prune,
  3231. grace_period=grace_period,
  3232. dry_run=dry_run,
  3233. progress=progress,
  3234. )
  3235. def prune(
  3236. repo,
  3237. grace_period: Optional[int] = None,
  3238. dry_run: bool = False,
  3239. progress=None,
  3240. ):
  3241. """Prune/clean up a repository's object store.
  3242. This removes temporary files that were left behind by interrupted
  3243. pack operations.
  3244. Args:
  3245. repo: Path to the repository or a Repo object
  3246. grace_period: Grace period in seconds for removing temporary files
  3247. (default 2 weeks)
  3248. dry_run: If True, only report what would be done
  3249. progress: Optional progress callback
  3250. """
  3251. with open_repo_closing(repo) as r:
  3252. if progress:
  3253. progress("Pruning temporary files")
  3254. if not dry_run:
  3255. r.object_store.prune(grace_period=grace_period)
  3256. def count_objects(repo=".", verbose=False) -> CountObjectsResult:
  3257. """Count unpacked objects and their disk usage.
  3258. Args:
  3259. repo: Path to repository or repository object
  3260. verbose: Whether to return verbose information
  3261. Returns:
  3262. CountObjectsResult object with detailed statistics
  3263. """
  3264. with open_repo_closing(repo) as r:
  3265. object_store = r.object_store
  3266. # Count loose objects
  3267. loose_count = 0
  3268. loose_size = 0
  3269. for sha in object_store._iter_loose_objects():
  3270. loose_count += 1
  3271. path = object_store._get_shafile_path(sha)
  3272. try:
  3273. stat_info = os.stat(path)
  3274. # Git uses disk usage, not file size. st_blocks is always in
  3275. # 512-byte blocks per POSIX standard
  3276. if hasattr(stat_info, "st_blocks"):
  3277. # Available on Linux and macOS
  3278. loose_size += stat_info.st_blocks * 512 # type: ignore
  3279. else:
  3280. # Fallback for Windows
  3281. loose_size += stat_info.st_size
  3282. except FileNotFoundError:
  3283. # Object may have been removed between iteration and stat
  3284. pass
  3285. if not verbose:
  3286. return CountObjectsResult(count=loose_count, size=loose_size)
  3287. # Count pack information
  3288. pack_count = len(object_store.packs)
  3289. in_pack_count = 0
  3290. pack_size = 0
  3291. for pack in object_store.packs:
  3292. in_pack_count += len(pack)
  3293. # Get pack file size
  3294. pack_path = pack._data_path
  3295. try:
  3296. pack_size += os.path.getsize(pack_path)
  3297. except FileNotFoundError:
  3298. pass
  3299. # Get index file size
  3300. idx_path = pack._idx_path
  3301. try:
  3302. pack_size += os.path.getsize(idx_path)
  3303. except FileNotFoundError:
  3304. pass
  3305. return CountObjectsResult(
  3306. count=loose_count,
  3307. size=loose_size,
  3308. in_pack=in_pack_count,
  3309. packs=pack_count,
  3310. size_pack=pack_size,
  3311. )
def rebase(
    repo: Union[Repo, str],
    upstream: Union[bytes, str],
    onto: Optional[Union[bytes, str]] = None,
    branch: Optional[Union[bytes, str]] = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
      repo: Repository to rebase in
      upstream: Upstream branch/commit to rebase onto
      onto: Specific commit to rebase onto (defaults to upstream)
      branch: Branch to rebase (defaults to current branch)
      abort: Abort an in-progress rebase
      continue_rebase: Continue an in-progress rebase
      skip: Skip current commit and continue rebase

    Returns:
      List of new commit SHAs created by rebase

    Raises:
      Error: If rebase fails or conflicts occur
    """
    from .rebase import RebaseConflict, RebaseError, Rebaser

    # NOTE(review): the `skip` parameter is accepted but never read in this
    # body — confirm whether skip handling lives elsewhere or is missing.
    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)

        # Abort mode: restore pre-rebase state and stop.
        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))

        # Continue mode: resume after the user resolved conflicts.
        if continue_rebase:
            try:
                result = rebaser.continue_()
                if result is None:
                    # Rebase complete
                    return []
                elif isinstance(result, tuple) and result[1]:
                    # Still have conflicts
                    raise Error(
                        f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                    )
            except RebaseError as e:
                raise Error(str(e))
            # NOTE(review): if continue_() returns a tuple whose conflict list
            # is empty, control falls through and a brand-new rebase is
            # started below — confirm this fall-through is intended.

        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None

        try:
            # Start rebase
            rebaser.start(upstream, onto, branch)
            # Continue rebase automatically
            result = rebaser.continue_()
            if result is not None:
                # Conflicts
                raise RebaseConflict(result[1])
            # Return the SHAs of the rebased commits
            return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))
  3378. def annotate(repo, path, committish=None):
  3379. """Annotate the history of a file.
  3380. :param repo: Path to the repository
  3381. :param path: Path to annotate
  3382. :param committish: Commit id to find path in
  3383. :return: List of ((Commit, TreeChange), line) tuples
  3384. """
  3385. if committish is None:
  3386. committish = "HEAD"
  3387. from dulwich.annotate import annotate_lines
  3388. with open_repo_closing(repo) as r:
  3389. commit_id = parse_commit(r, committish).id
  3390. # Ensure path is bytes
  3391. if isinstance(path, str):
  3392. path = path.encode()
  3393. return annotate_lines(r.object_store, commit_id, path)
  3394. blame = annotate
  3395. def filter_branch(
  3396. repo=".",
  3397. branch="HEAD",
  3398. *,
  3399. filter_fn=None,
  3400. filter_author=None,
  3401. filter_committer=None,
  3402. filter_message=None,
  3403. tree_filter=None,
  3404. index_filter=None,
  3405. parent_filter=None,
  3406. commit_filter=None,
  3407. subdirectory_filter=None,
  3408. prune_empty=False,
  3409. tag_name_filter=None,
  3410. force=False,
  3411. keep_original=True,
  3412. refs=None,
  3413. ):
  3414. """Rewrite branch history by creating new commits with filtered properties.
  3415. This is similar to git filter-branch, allowing you to rewrite commit
  3416. history by modifying trees, parents, author, committer, or commit messages.
  3417. Args:
  3418. repo: Path to repository
  3419. branch: Branch to rewrite (defaults to HEAD)
  3420. filter_fn: Optional callable that takes a Commit object and returns
  3421. a dict of updated fields (author, committer, message, etc.)
  3422. filter_author: Optional callable that takes author bytes and returns
  3423. updated author bytes or None to keep unchanged
  3424. filter_committer: Optional callable that takes committer bytes and returns
  3425. updated committer bytes or None to keep unchanged
  3426. filter_message: Optional callable that takes commit message bytes
  3427. and returns updated message bytes
  3428. tree_filter: Optional callable that takes (tree_sha, temp_dir) and returns
  3429. new tree SHA after modifying working directory
  3430. index_filter: Optional callable that takes (tree_sha, temp_index_path) and
  3431. returns new tree SHA after modifying index
  3432. parent_filter: Optional callable that takes parent list and returns
  3433. modified parent list
  3434. commit_filter: Optional callable that takes (Commit, tree_sha) and returns
  3435. new commit SHA or None to skip commit
  3436. subdirectory_filter: Optional subdirectory path to extract as new root
  3437. prune_empty: Whether to prune commits that become empty
  3438. tag_name_filter: Optional callable to rename tags
  3439. force: Force operation even if branch has been filtered before
  3440. keep_original: Keep original refs under refs/original/
  3441. refs: List of refs to rewrite (defaults to [branch])
  3442. Returns:
  3443. Dict mapping old commit SHAs to new commit SHAs
  3444. Raises:
  3445. Error: If branch is already filtered and force is False
  3446. """
  3447. from .filter_branch import CommitFilter, filter_refs
  3448. with open_repo_closing(repo) as r:
  3449. # Parse branch/committish
  3450. if isinstance(branch, str):
  3451. branch = branch.encode()
  3452. # Determine which refs to process
  3453. if refs is None:
  3454. if branch == b"HEAD":
  3455. # Resolve HEAD to actual branch
  3456. try:
  3457. resolved = r.refs.follow(b"HEAD")
  3458. if resolved and resolved[0]:
  3459. # resolved is a list of (refname, sha) tuples
  3460. resolved_ref = resolved[0][-1]
  3461. if resolved_ref and resolved_ref != b"HEAD":
  3462. refs = [resolved_ref]
  3463. else:
  3464. # HEAD points directly to a commit
  3465. refs = [b"HEAD"]
  3466. else:
  3467. refs = [b"HEAD"]
  3468. except SymrefLoop:
  3469. refs = [b"HEAD"]
  3470. else:
  3471. # Convert branch name to full ref if needed
  3472. if not branch.startswith(b"refs/"):
  3473. branch = b"refs/heads/" + branch
  3474. refs = [branch]
  3475. # Convert subdirectory filter to bytes if needed
  3476. if subdirectory_filter and isinstance(subdirectory_filter, str):
  3477. subdirectory_filter = subdirectory_filter.encode()
  3478. # Create commit filter
  3479. commit_filter = CommitFilter(
  3480. r.object_store,
  3481. filter_fn=filter_fn,
  3482. filter_author=filter_author,
  3483. filter_committer=filter_committer,
  3484. filter_message=filter_message,
  3485. tree_filter=tree_filter,
  3486. index_filter=index_filter,
  3487. parent_filter=parent_filter,
  3488. commit_filter=commit_filter,
  3489. subdirectory_filter=subdirectory_filter,
  3490. prune_empty=prune_empty,
  3491. tag_name_filter=tag_name_filter,
  3492. )
  3493. # Tag callback for renaming tags
  3494. def rename_tag(old_ref, new_ref):
  3495. # Copy tag to new name
  3496. r.refs[new_ref] = r.refs[old_ref]
  3497. # Delete old tag
  3498. del r.refs[old_ref]
  3499. # Filter refs
  3500. try:
  3501. return filter_refs(
  3502. r.refs,
  3503. r.object_store,
  3504. refs,
  3505. commit_filter,
  3506. keep_original=keep_original,
  3507. force=force,
  3508. tag_callback=rename_tag if tag_name_filter else None,
  3509. )
  3510. except ValueError as e:
  3511. raise Error(str(e)) from e