  1. # porcelain.py -- Porcelain-like layer on top of Dulwich
  2. # Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  5. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  6. # General Public License as published by the Free Software Foundation; version 2.0
  7. # or (at your option) any later version. You can redistribute it and/or
  8. # modify it under the terms of either of these two licenses.
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. # You should have received a copy of the licenses; if not, see
  17. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  18. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  19. # License, Version 2.0.
  20. #
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * bisect{_start,_bad,_good,_skip,_reset,_log,_replay}
  26. * branch{_create,_delete,_list}
  27. * check_ignore
  28. * checkout
  29. * checkout_branch
  30. * clone
  31. * cone mode{_init, _set, _add}
  32. * commit
  33. * commit_tree
  34. * daemon
  35. * describe
  36. * diff_tree
  37. * fetch
  38. * filter_branch
  39. * for_each_ref
  40. * init
  41. * ls_files
  42. * ls_remote
  43. * ls_tree
  44. * merge
  45. * merge_tree
  46. * mv/move
  47. * prune
  48. * pull
  49. * push
  50. * rm
  51. * remote{_add}
  52. * receive_pack
  53. * reset
  54. * revert
  55. * sparse_checkout
  56. * submodule_add
  57. * submodule_init
  58. * submodule_list
  59. * rev_list
  60. * tag{_create,_delete,_list}
  61. * upload_pack
  62. * update_server_info
  63. * write_commit_graph
  64. * status
  65. * symbolic_ref
  66. These functions are meant to behave similarly to the git subcommands.
  67. Differences in behaviour are considered bugs.
  68. Note: one of the consequences of this is that paths tend to be
  69. interpreted relative to the current working directory rather than relative
  70. to the repository root.
  71. Functions should generally accept both unicode strings and bytestrings
  72. """
  73. import datetime
  74. import fnmatch
  75. import os
  76. import posixpath
  77. import stat
  78. import sys
  79. import time
  80. from collections import namedtuple
  81. from contextlib import closing, contextmanager
  82. from dataclasses import dataclass
  83. from io import BytesIO, RawIOBase
  84. from pathlib import Path
  85. from typing import Optional, Union
  86. from . import replace_me
  87. from .archive import tar_stream
  88. from .bisect import BisectState
  89. from .client import get_transport_and_path
  90. from .config import Config, ConfigFile, StackedConfig, read_submodules
  91. from .diff_tree import (
  92. CHANGE_ADD,
  93. CHANGE_COPY,
  94. CHANGE_DELETE,
  95. CHANGE_MODIFY,
  96. CHANGE_RENAME,
  97. RENAME_CHANGE_TYPES,
  98. )
  99. from .errors import SendPackError
  100. from .graph import can_fast_forward
  101. from .ignore import IgnoreFilterManager
  102. from .index import (
  103. IndexEntry,
  104. _fs_to_tree_path,
  105. blob_from_path_and_stat,
  106. build_file_from_blob,
  107. build_index_from_tree,
  108. get_unstaged_changes,
  109. index_entry_from_stat,
  110. symlink,
  111. update_working_tree,
  112. validate_path_element_default,
  113. validate_path_element_hfs,
  114. validate_path_element_ntfs,
  115. )
  116. from .object_store import tree_lookup_path
  117. from .objects import (
  118. Blob,
  119. Commit,
  120. Tag,
  121. Tree,
  122. format_timezone,
  123. parse_timezone,
  124. pretty_format_tree_entry,
  125. )
  126. from .objectspec import (
  127. parse_commit,
  128. parse_object,
  129. parse_ref,
  130. parse_reftuples,
  131. parse_tree,
  132. )
  133. from .pack import write_pack_from_container, write_pack_index
  134. from .patch import (
  135. get_summary,
  136. write_commit_patch,
  137. write_object_diff,
  138. write_tree_diff,
  139. )
  140. from .protocol import ZERO_SHA, Protocol
  141. from .refs import (
  142. LOCAL_BRANCH_PREFIX,
  143. LOCAL_NOTES_PREFIX,
  144. LOCAL_TAG_PREFIX,
  145. Ref,
  146. SymrefLoop,
  147. _import_remote_refs,
  148. )
  149. from .repo import BaseRepo, Repo, get_user_identity
  150. from .server import (
  151. FileSystemBackend,
  152. ReceivePackHandler,
  153. TCPGitServer,
  154. UploadPackHandler,
  155. )
  156. from .server import update_server_info as server_update_server_info
  157. from .sparse_patterns import (
  158. SparseCheckoutConflictError,
  159. apply_included_paths,
  160. determine_included_paths,
  161. )
  162. # Module level tuple definition for status output
  163. GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
  164. @dataclass
  165. class CountObjectsResult:
  166. """Result of counting objects in a repository.
  167. Attributes:
  168. count: Number of loose objects
  169. size: Total size of loose objects in bytes
  170. in_pack: Number of objects in pack files
  171. packs: Number of pack files
  172. size_pack: Total size of pack files in bytes
  173. """
  174. count: int
  175. size: int
  176. in_pack: Optional[int] = None
  177. packs: Optional[int] = None
  178. size_pack: Optional[int] = None
  179. class NoneStream(RawIOBase):
  180. """Fallback if stdout or stderr are unavailable, does nothing."""
  181. def read(self, size=-1) -> None:
  182. return None
  183. def readall(self) -> bytes:
  184. return b""
  185. def readinto(self, b) -> None:
  186. return None
  187. def write(self, b) -> None:
  188. return None
  189. default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream()
  190. default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream()
  191. DEFAULT_ENCODING = "utf-8"
  192. class Error(Exception):
  193. """Porcelain-based error."""
  194. def __init__(self, msg) -> None:
  195. super().__init__(msg)
  196. class RemoteExists(Error):
  197. """Raised when the remote already exists."""
  198. class TimezoneFormatError(Error):
  199. """Raised when the timezone cannot be determined from a given string."""
  200. class CheckoutError(Error):
  201. """Indicates that a checkout cannot be performed."""
  202. def parse_timezone_format(tz_str):
  203. """Parse given string and attempt to return a timezone offset.
  204. Different formats are considered in the following order:
  205. - Git internal format: <unix timestamp> <timezone offset>
  206. - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
  207. - ISO 8601: e.g. 1995-11-20T19:12:08-0500
  208. Args:
  209. tz_str: datetime string
  210. Returns: Timezone offset as integer
  211. Raises:
  212. TimezoneFormatError: if timezone information cannot be extracted
  213. """
  214. import re
  215. # Git internal format
  216. internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
  217. if re.match(internal_format_pattern, tz_str):
  218. try:
  219. tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
  220. return tz_internal[0]
  221. except ValueError:
  222. pass
  223. # RFC 2822
  224. import email.utils
  225. rfc_2822 = email.utils.parsedate_tz(tz_str)
  226. if rfc_2822:
  227. return rfc_2822[9]
  228. # ISO 8601
  229. # Supported offsets:
  230. # sHHMM, sHH:MM, sHH
  231. iso_8601_pattern = re.compile(
  232. "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
  233. )
  234. match = re.search(iso_8601_pattern, tz_str)
  235. total_secs = 0
  236. if match:
  237. sign, hours, minutes = match.groups()
  238. total_secs += int(hours) * 3600
  239. if minutes:
  240. total_secs += int(minutes) * 60
  241. total_secs = -total_secs if sign == "-" else total_secs
  242. return total_secs
  243. # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
  244. raise TimezoneFormatError(tz_str)
  245. def get_user_timezones():
  246. """Retrieve local timezone as described in
  247. https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
  248. Returns: A tuple containing author timezone, committer timezone.
  249. """
  250. local_timezone = time.localtime().tm_gmtoff
  251. if os.environ.get("GIT_AUTHOR_DATE"):
  252. author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
  253. else:
  254. author_timezone = local_timezone
  255. if os.environ.get("GIT_COMMITTER_DATE"):
  256. commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
  257. else:
  258. commit_timezone = local_timezone
  259. return author_timezone, commit_timezone
  260. def open_repo(path_or_repo: Union[str, os.PathLike, BaseRepo]):
  261. """Open an argument that can be a repository or a path for a repository."""
  262. if isinstance(path_or_repo, BaseRepo):
  263. return path_or_repo
  264. return Repo(path_or_repo)
  265. @contextmanager
  266. def _noop_context_manager(obj):
  267. """Context manager that has the same api as closing but does nothing."""
  268. yield obj
  269. def open_repo_closing(path_or_repo: Union[str, os.PathLike, BaseRepo]):
  270. """Open an argument that can be a repository or a path for a repository.
  271. returns a context manager that will close the repo on exit if the argument
  272. is a path, else does nothing if the argument is a repo.
  273. """
  274. if isinstance(path_or_repo, BaseRepo):
  275. return _noop_context_manager(path_or_repo)
  276. return closing(Repo(path_or_repo))
def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING):
    """Convert a path to a path usable in an index, e.g. bytes and relative to
    the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding used to encode the result on Windows
    Returns: A path formatted for use in e.g. an index
    """
    # Resolve might returns a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    path = Path(path)
    resolved_path = path.resolve()

    # Resolve and abspath seems to behave differently regarding symlinks,
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise
    if sys.platform == "win32":
        # Tree paths are always "/"-separated; Windows paths use "\\".
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha, new_sha) -> None:
        # Keep both tips so callers can report the conflicting SHAs.
        self.current_sha = current_sha
        self.new_sha = new_sha
  316. def check_diverged(repo, current_sha, new_sha) -> None:
  317. """Check if updating to a sha can be done with fast forwarding.
  318. Args:
  319. repo: Repository object
  320. current_sha: Current head sha
  321. new_sha: New head sha
  322. """
  323. try:
  324. can = can_fast_forward(repo, current_sha, new_sha)
  325. except KeyError:
  326. can = False
  327. if not can:
  328. raise DivergedBranches(current_sha, new_sha)
  329. def archive(
  330. repo,
  331. committish: Optional[Union[str, bytes, Commit, Tag]] = None,
  332. outstream=default_bytes_out_stream,
  333. errstream=default_bytes_err_stream,
  334. ) -> None:
  335. """Create an archive.
  336. Args:
  337. repo: Path of repository for which to generate an archive.
  338. committish: Commit SHA1 or ref to use
  339. outstream: Output stream (defaults to stdout)
  340. errstream: Error stream (defaults to stderr)
  341. """
  342. if committish is None:
  343. committish = "HEAD"
  344. with open_repo_closing(repo) as repo_obj:
  345. c = parse_commit(repo_obj, committish)
  346. for chunk in tar_stream(
  347. repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time
  348. ):
  349. outstream.write(chunk)
  350. def update_server_info(repo=".") -> None:
  351. """Update server info files for a repository.
  352. Args:
  353. repo: path to the repository
  354. """
  355. with open_repo_closing(repo) as r:
  356. server_update_server_info(r)
  357. def write_commit_graph(repo=".", reachable=True) -> None:
  358. """Write a commit graph file for a repository.
  359. Args:
  360. repo: path to the repository or a Repo object
  361. reachable: if True, include all commits reachable from refs.
  362. if False, only include direct ref targets.
  363. """
  364. with open_repo_closing(repo) as r:
  365. # Get all refs
  366. refs = list(r.refs.as_dict().values())
  367. if refs:
  368. r.object_store.write_commit_graph(refs, reachable=reachable)
  369. def symbolic_ref(repo, ref_name, force=False) -> None:
  370. """Set git symbolic ref into HEAD.
  371. Args:
  372. repo: path to the repository
  373. ref_name: short name of the new ref
  374. force: force settings without checking if it exists in refs/heads
  375. """
  376. with open_repo_closing(repo) as repo_obj:
  377. ref_path = _make_branch_ref(ref_name)
  378. if not force and ref_path not in repo_obj.refs.keys():
  379. raise Error(f"fatal: ref `{ref_name}` is not a ref")
  380. repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
def pack_refs(repo, all=False) -> None:
    """Pack a repository's refs (forwarded to ``refs.pack_refs``).

    Args:
      repo: path to the repository or a Repo object
      all: whether to pack all refs (passed through to ``pack_refs``)
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.refs.pack_refs(all=all)
def commit(
    repo=".",
    message=None,
    author=None,
    author_timezone=None,
    committer=None,
    commit_timezone=None,
    encoding=None,
    no_verify=False,
    signoff=False,
):
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message
      author: Optional author name and email
      author_timezone: Author timestamp timezone
      committer: Optional committer name and email
      commit_timezone: Commit timestamp timezone
      encoding: Encoding used when encoding str message/author/committer
        (defaults to DEFAULT_ENCODING)
      no_verify: Skip pre-commit and commit-msg hooks
      signoff: GPG Sign the commit (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)

    Returns: SHA1 of the new commit
    """
    # FIXME: Support --all argument
    # Encode any str arguments to bytes with the requested encoding.
    if getattr(message, "encode", None):
        message = message.encode(encoding or DEFAULT_ENCODING)
    if getattr(author, "encode", None):
        author = author.encode(encoding or DEFAULT_ENCODING)
    if getattr(committer, "encode", None):
        committer = committer.encode(encoding or DEFAULT_ENCODING)
    local_timezone = get_user_timezones()
    # Fall back to the user's local (author, committer) timezones.
    if author_timezone is None:
        author_timezone = local_timezone[0]
    if commit_timezone is None:
        commit_timezone = local_timezone[1]
    with open_repo_closing(repo) as r:
        return r.do_commit(
            message=message,
            author=author,
            author_timezone=author_timezone,
            committer=committer,
            commit_timezone=commit_timezone,
            encoding=encoding,
            no_verify=no_verify,
            sign=signoff if isinstance(signoff, (str, bool)) else None,
        )
def commit_tree(repo, tree, message=None, author=None, committer=None):
    """Create a new commit object.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Optional commit message
      author: Optional author name and email
      committer: Optional committer name and email

    Returns: SHA1 of the new commit
    """
    with open_repo_closing(repo) as r:
        return r.do_commit(
            message=message, tree=tree, committer=committer, author=author
        )
  444. def init(
  445. path: Union[str, os.PathLike] = ".", *, bare=False, symlinks: Optional[bool] = None
  446. ):
  447. """Create a new git repository.
  448. Args:
  449. path: Path to repository.
  450. bare: Whether to create a bare repository.
  451. symlinks: Whether to create actual symlinks (defaults to autodetect)
  452. Returns: A Repo instance
  453. """
  454. if not os.path.exists(path):
  455. os.mkdir(path)
  456. if bare:
  457. return Repo.init_bare(path)
  458. else:
  459. return Repo.init(path, symlinks=symlinks)
def clone(
    source,
    target: Optional[Union[str, os.PathLike]] = None,
    bare=False,
    checkout=None,
    errstream=default_bytes_err_stream,
    outstream=None,
    origin: Optional[str] = "origin",
    depth: Optional[int] = None,
    branch: Optional[Union[str, bytes]] = None,
    config: Optional[Config] = None,
    filter_spec=None,
    protocol_version: Optional[int] = None,
    recurse_submodules: bool = False,
    **kwargs,
):
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules

    Keyword Args:
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
    # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()

    if checkout is None:
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        # Default target: last component of the source path/URL.
        target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    # Only create the target directory if it does not exist yet.
    mkdir = not os.path.exists(target)

    (client, path) = get_transport_and_path(source, config=config, **kwargs)

    if filter_spec:
        filter_spec = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        target,
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch,
        progress=errstream.write,
        depth=depth,
        filter_spec=filter_spec,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            import logging

            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing
            import logging

            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
def add(repo: Union[str, os.PathLike, BaseRepo] = ".", paths=None):
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files from the
        current working directory (mimicking 'git add .' behavior).

    Returns: Tuple with set of added files and ignored files

      If the repository contains ignored directories, the returned set will
      contain the path to an ignored directory (with trailing slash). Individual
      files within ignored directories will not be returned.

    Note: When paths=None, this function adds all untracked and modified files
      from the entire repository, mimicking 'git add -A' behavior.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        all_unstaged_paths = list(get_unstaged_changes(index, r.path, filter_callback))

        if not paths:
            # When no paths specified, add all untracked and modified files from repo root
            paths = [str(repo_path)]

        relpaths = []
        if not isinstance(paths, list):
            paths = [paths]
        for p in paths:
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path
            # Don't resolve symlinks completely - only resolve the parent directory
            # to avoid issues when symlinks point outside the repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()
            try:
                # Tree paths are always "/"-separated, regardless of platform.
                relpath = str(resolved_path.relative_to(repo_path)).replace(os.sep, "/")
            except ValueError as e:
                # Path is not within the repository
                raise ValueError(
                    f"Path {p} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue

                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)
        r.stage(relpaths)
        return (relpaths, ignored)
  651. def _is_subdir(subdir, parentdir):
  652. """Check whether subdir is parentdir or a subdir of parentdir.
  653. If parentdir or subdir is a relative path, it will be disamgibuated
  654. relative to the pwd.
  655. """
  656. parentdir_abs = os.path.realpath(parentdir) + os.path.sep
  657. subdir_abs = os.path.realpath(subdir) + os.path.sep
  658. return subdir_abs.startswith(parentdir_abs)
  659. # TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo=".", target_dir=None) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        # NOTE(review): the returned value is currently unused; see TODO below.
        config.get_boolean((b"clean",), b"requireForce", True)

        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                # Only untracked, non-ignored files are deleted.
                if not is_tracked and not is_ignored:
                    os.remove(ap)
def remove(repo=".", paths=None, cached=False) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the repository root.
      cached: If True, only remove the paths from the index, leaving the
        working-tree files in place.
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        blob_normalizer = r.get_blob_normalizer()
        for p in paths:
            # If path is absolute, use it as-is. Otherwise, treat it as relative to repo
            if os.path.isabs(p):
                full_path = p
            else:
                # Treat relative paths as relative to the repository root
                full_path = os.path.join(r.path, p)
            tree_path = path_to_tree_path(r.path, full_path)
            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)
            try:
                index_sha = index[tree_path].sha
            except KeyError as exc:
                raise Error(f"{p} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    # File already gone from the working tree; nothing to delete.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                        # Apply checkin normalization to compare apples to apples
                        if blob_normalizer is not None:
                            blob = blob_normalizer.checkin_normalize(blob, tree_path)
                    except OSError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path
                            )[1]
                        except KeyError:
                            # Path not present in HEAD's tree.
                            committed_sha = None

                        # Refuse to delete work that exists only in the index.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()


rm = remove
def mv(
    repo: Union[str, os.PathLike, BaseRepo],
    source: Union[str, bytes, os.PathLike],
    destination: Union[str, bytes, os.PathLike],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination already exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)
        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)
        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)
        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()


move = mv
  830. def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
  831. if commit.encoding:
  832. encoding = commit.encoding.decode("ascii")
  833. else:
  834. encoding = default_encoding
  835. return contents.decode(encoding, "replace")
  836. def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
  837. if commit.encoding:
  838. encoding = commit.encoding.decode("ascii")
  839. else:
  840. encoding = default_encoding
  841. return contents.encode(encoding)
  842. def print_commit(commit, decode, outstream=sys.stdout) -> None:
  843. """Write a human-readable commit log entry.
  844. Args:
  845. commit: A `Commit` object
  846. outstream: A stream file to write to
  847. """
  848. outstream.write("-" * 50 + "\n")
  849. outstream.write("commit: " + commit.id.decode("ascii") + "\n")
  850. if len(commit.parents) > 1:
  851. outstream.write(
  852. "merge: "
  853. + "...".join([c.decode("ascii") for c in commit.parents[1:]])
  854. + "\n"
  855. )
  856. outstream.write("Author: " + decode(commit.author) + "\n")
  857. if commit.author != commit.committer:
  858. outstream.write("Committer: " + decode(commit.committer) + "\n")
  859. time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
  860. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  861. timezone_str = format_timezone(commit.author_timezone).decode("ascii")
  862. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  863. if commit.message:
  864. outstream.write("\n")
  865. outstream.write(decode(commit.message) + "\n")
  866. outstream.write("\n")
  867. def print_tag(tag, decode, outstream=sys.stdout) -> None:
  868. """Write a human-readable tag.
  869. Args:
  870. tag: A `Tag` object
  871. decode: Function for decoding bytes to unicode string
  872. outstream: A stream to write to
  873. """
  874. outstream.write("Tagger: " + decode(tag.tagger) + "\n")
  875. time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
  876. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  877. timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
  878. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  879. outstream.write("\n")
  880. outstream.write(decode(tag.message))
  881. outstream.write("\n")
  882. def show_blob(repo, blob, decode, outstream=sys.stdout) -> None:
  883. """Write a blob to a stream.
  884. Args:
  885. repo: A `Repo` object
  886. blob: A `Blob` object
  887. decode: Function for decoding bytes to unicode string
  888. outstream: A stream file to write to
  889. """
  890. outstream.write(decode(blob.data))
  891. def show_commit(repo, commit, decode, outstream=sys.stdout) -> None:
  892. """Show a commit to a stream.
  893. Args:
  894. repo: A `Repo` object
  895. commit: A `Commit` object
  896. decode: Function for decoding bytes to unicode string
  897. outstream: Stream to write to
  898. """
  899. print_commit(commit, decode=decode, outstream=outstream)
  900. if commit.parents:
  901. parent_commit = repo[commit.parents[0]]
  902. base_tree = parent_commit.tree
  903. else:
  904. base_tree = None
  905. diffstream = BytesIO()
  906. write_tree_diff(diffstream, repo.object_store, base_tree, commit.tree)
  907. diffstream.seek(0)
  908. outstream.write(commit_decode(commit, diffstream.getvalue()))
  909. def show_tree(repo, tree, decode, outstream=sys.stdout) -> None:
  910. """Print a tree to a stream.
  911. Args:
  912. repo: A `Repo` object
  913. tree: A `Tree` object
  914. decode: Function for decoding bytes to unicode string
  915. outstream: Stream to write to
  916. """
  917. for n in tree:
  918. outstream.write(decode(n) + "\n")
  919. def show_tag(repo, tag, decode, outstream=sys.stdout) -> None:
  920. """Print a tag to a stream.
  921. Args:
  922. repo: A `Repo` object
  923. tag: A `Tag` object
  924. decode: Function for decoding bytes to unicode string
  925. outstream: Stream to write to
  926. """
  927. print_tag(tag, decode, outstream)
  928. show_object(repo, repo[tag.object[1]], decode, outstream)
  929. def show_object(repo, obj, decode, outstream):
  930. return {
  931. b"tree": show_tree,
  932. b"blob": show_blob,
  933. b"commit": show_commit,
  934. b"tag": show_tag,
  935. }[obj.type_name](repo, obj, decode, outstream)
  936. def print_name_status(changes):
  937. """Print a simple status summary, listing changed files."""
  938. for change in changes:
  939. if not change:
  940. continue
  941. if isinstance(change, list):
  942. change = change[0]
  943. if change.type == CHANGE_ADD:
  944. path1 = change.new.path
  945. path2 = ""
  946. kind = "A"
  947. elif change.type == CHANGE_DELETE:
  948. path1 = change.old.path
  949. path2 = ""
  950. kind = "D"
  951. elif change.type == CHANGE_MODIFY:
  952. path1 = change.new.path
  953. path2 = ""
  954. kind = "M"
  955. elif change.type in RENAME_CHANGE_TYPES:
  956. path1 = change.old.path
  957. path2 = change.new.path
  958. if change.type == CHANGE_RENAME:
  959. kind = "R"
  960. elif change.type == CHANGE_COPY:
  961. kind = "C"
  962. yield "%-8s%-20s%-20s" % (kind, path1, path2) # noqa: UP031
  963. def log(
  964. repo=".",
  965. paths=None,
  966. outstream=sys.stdout,
  967. max_entries=None,
  968. reverse=False,
  969. name_status=False,
  970. ) -> None:
  971. """Write commit logs.
  972. Args:
  973. repo: Path to repository
  974. paths: Optional set of specific paths to print entries for
  975. outstream: Stream to write log output to
  976. reverse: Reverse order in which entries are printed
  977. name_status: Print name status
  978. max_entries: Optional maximum number of entries to display
  979. """
  980. with open_repo_closing(repo) as r:
  981. try:
  982. include = [r.head()]
  983. except KeyError:
  984. include = []
  985. walker = r.get_walker(
  986. include=include, max_entries=max_entries, paths=paths, reverse=reverse
  987. )
  988. for entry in walker:
  989. def decode(x):
  990. return commit_decode(entry.commit, x)
  991. print_commit(entry.commit, decode, outstream)
  992. if name_status:
  993. outstream.writelines(
  994. [line + "\n" for line in print_name_status(entry.changes())]
  995. )
  996. # TODO(jelmer): better default for encoding?
  997. def show(
  998. repo=".",
  999. objects=None,
  1000. outstream=sys.stdout,
  1001. default_encoding=DEFAULT_ENCODING,
  1002. ) -> None:
  1003. """Print the changes in a commit.
  1004. Args:
  1005. repo: Path to repository
  1006. objects: Objects to show (defaults to [HEAD])
  1007. outstream: Stream to write to
  1008. default_encoding: Default encoding to use if none is set in the
  1009. commit
  1010. """
  1011. if objects is None:
  1012. objects = ["HEAD"]
  1013. if not isinstance(objects, list):
  1014. objects = [objects]
  1015. with open_repo_closing(repo) as r:
  1016. for objectish in objects:
  1017. o = parse_object(r, objectish)
  1018. if isinstance(o, Commit):
  1019. def decode(x):
  1020. return commit_decode(o, x, default_encoding)
  1021. else:
  1022. def decode(x):
  1023. return x.decode(default_encoding)
  1024. show_object(r, o, decode, outstream)
  1025. def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream) -> None:
  1026. """Compares the content and mode of blobs found via two tree objects.
  1027. Args:
  1028. repo: Path to repository
  1029. old_tree: Id of old tree
  1030. new_tree: Id of new tree
  1031. outstream: Stream to write to
  1032. """
  1033. with open_repo_closing(repo) as r:
  1034. write_tree_diff(outstream, r.object_store, old_tree, new_tree)
def diff(
    repo=".",
    commit=None,
    commit2=None,
    staged=False,
    paths=None,
    outstream=default_bytes_out_stream,
) -> None:
    """Show diff.

    Args:
      repo: Path to repository
      commit: First commit to compare. If staged is True, compare
        index to this commit. If staged is False, compare working tree
        to this commit. If None, defaults to HEAD for staged and index
        for unstaged.
      commit2: Second commit to compare against first commit. If provided,
        show diff between commit and commit2 (ignoring staged flag).
      staged: If True, show staged changes (index vs commit).
        If False, show unstaged changes (working tree vs commit/index).
        Ignored if commit2 is provided.
      paths: Optional list of paths to limit diff
      outstream: Stream to write to
    """
    from . import diff as diff_module

    with open_repo_closing(repo) as r:
        # Normalize paths to bytes
        if paths is not None and paths:  # Check if paths is not empty
            byte_paths = []
            for p in paths:
                if isinstance(p, str):
                    byte_paths.append(p.encode("utf-8"))
                else:
                    byte_paths.append(p)
            paths = byte_paths
        elif paths == []:  # Convert empty list to None
            paths = None

        # Resolve commit refs to SHAs if provided
        if commit is not None:
            if isinstance(commit, Commit):
                # Already a Commit object
                commit_sha = commit.id
                commit_obj = commit
            else:
                # parse_commit handles both refs and SHAs, and always returns a Commit object
                commit_obj = parse_commit(r, commit)
                commit_sha = commit_obj.id
        else:
            commit_sha = None
            commit_obj = None

        if commit2 is not None:
            # Compare two commits
            if isinstance(commit2, Commit):
                commit2_obj = commit2
            else:
                commit2_obj = parse_commit(r, commit2)

            # Get trees from commits
            old_tree = commit_obj.tree if commit_obj else None
            new_tree = commit2_obj.tree

            # Use tree_changes to get the changes and apply path filtering
            changes = r.object_store.tree_changes(old_tree, new_tree)
            for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
                # Skip if paths are specified and this change doesn't match
                # (a path matches when it equals a filter or lies under it).
                if paths:
                    path_to_check = newpath or oldpath
                    if not any(
                        path_to_check == p or path_to_check.startswith(p + b"/")
                        for p in paths
                    ):
                        continue
                write_object_diff(
                    outstream,
                    r.object_store,
                    (oldpath, oldmode, oldsha),
                    (newpath, newmode, newsha),
                )
        elif staged:
            # Show staged changes (index vs commit)
            diff_module.diff_index_to_tree(r, outstream, commit_sha, paths)
        elif commit is not None:
            # Compare working tree to a specific commit
            assert (
                commit_sha is not None
            )  # mypy: commit_sha is set when commit is not None
            diff_module.diff_working_tree_to_tree(r, outstream, commit_sha, paths)
        else:
            # Compare working tree to index
            diff_module.diff_working_tree_to_index(r, outstream, paths)
  1122. def rev_list(repo, commits, outstream=sys.stdout) -> None:
  1123. """Lists commit objects in reverse chronological order.
  1124. Args:
  1125. repo: Path to repository
  1126. commits: Commits over which to iterate
  1127. outstream: Stream to write to
  1128. """
  1129. with open_repo_closing(repo) as r:
  1130. for entry in r.get_walker(include=[r[c].id for c in commits]):
  1131. outstream.write(entry.commit.id + b"\n")
  1132. def _canonical_part(url: str) -> str:
  1133. name = url.rsplit("/", 1)[-1]
  1134. if name.endswith(".git"):
  1135. name = name[:-4]
  1136. return name
  1137. def submodule_add(repo, url, path=None, name=None) -> None:
  1138. """Add a new submodule.
  1139. Args:
  1140. repo: Path to repository
  1141. url: URL of repository to add as submodule
  1142. path: Path where submodule should live
  1143. name: Name for the submodule
  1144. """
  1145. with open_repo_closing(repo) as r:
  1146. if path is None:
  1147. path = os.path.relpath(_canonical_part(url), r.path)
  1148. if name is None:
  1149. name = path
  1150. # TODO(jelmer): Move this logic to dulwich.submodule
  1151. gitmodules_path = os.path.join(r.path, ".gitmodules")
  1152. try:
  1153. config = ConfigFile.from_path(gitmodules_path)
  1154. except FileNotFoundError:
  1155. config = ConfigFile()
  1156. config.path = gitmodules_path
  1157. config.set(("submodule", name), "url", url)
  1158. config.set(("submodule", name), "path", path)
  1159. config.write_to_path()
  1160. def submodule_init(repo) -> None:
  1161. """Initialize submodules.
  1162. Args:
  1163. repo: Path to repository
  1164. """
  1165. with open_repo_closing(repo) as r:
  1166. config = r.get_config()
  1167. gitmodules_path = os.path.join(r.path, ".gitmodules")
  1168. for path, url, name in read_submodules(gitmodules_path):
  1169. config.set((b"submodule", name), b"active", True)
  1170. config.set((b"submodule", name), b"url", url)
  1171. config.write_to_path()
  1172. def submodule_list(repo):
  1173. """List submodules.
  1174. Args:
  1175. repo: Path to repository
  1176. """
  1177. from .submodule import iter_cached_submodules
  1178. with open_repo_closing(repo) as r:
  1179. for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
  1180. yield path, sha.decode(DEFAULT_ENCODING)
def submodule_update(repo, paths=None, init=False, force=False, errstream=None) -> None:
    """Update submodules.

    Clones each selected submodule into ``.git/modules/<path>`` if it is not
    present yet, or fetches and hard-resets it if it is.

    Args:
      repo: Path to repository
      paths: Optional list of specific submodule paths to update. If None, updates all.
      init: If True, initialize submodules first
      force: Force update even if local changes exist
        (NOTE(review): currently accepted but never read in this body)
      errstream: currently accepted but never read in this body
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        if init:
            submodule_init(r)

        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")

        # Get list of submodules to update: every cached submodule in the
        # HEAD tree, optionally filtered down by the `paths` argument.
        submodules_to_update = []
        for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )
            if paths is None or path_str in paths:
                submodules_to_update.append((path, sha))

        # Read submodule configuration
        for path, target_sha in submodules_to_update:
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )

            # Find the submodule name from .gitmodules (name may differ
            # from the path; config sections are keyed by name).
            submodule_name = None
            for sm_path, sm_url, sm_name in read_submodules(gitmodules_path):
                if sm_path == path:
                    submodule_name = sm_name
                    break

            if not submodule_name:
                # Not declared in .gitmodules; nothing we can do for it.
                continue

            # Get the URL from config
            section = (
                b"submodule",
                submodule_name
                if isinstance(submodule_name, bytes)
                else submodule_name.encode(),
            )
            try:
                url = config.get(section, b"url")
                if isinstance(url, bytes):
                    url = url.decode(DEFAULT_ENCODING)
            except KeyError:
                # URL not in config, skip this submodule
                continue

            # Get or create the submodule repository paths: worktree lives at
            # <repo>/<path>, git dir at <repo>/.git/modules/<path>.
            submodule_path = os.path.join(r.path, path_str)
            submodule_git_dir = os.path.join(r.path, ".git", "modules", path_str)

            # Clone or fetch the submodule
            if not os.path.exists(submodule_git_dir):
                # Clone the submodule as bare repository
                os.makedirs(os.path.dirname(submodule_git_dir), exist_ok=True)

                # Clone to the git directory
                sub_repo = clone(url, submodule_git_dir, bare=True, checkout=False)
                sub_repo.close()

                # Create the submodule directory if it doesn't exist
                if not os.path.exists(submodule_path):
                    os.makedirs(submodule_path)

                # Create .git file in the submodule directory pointing back
                # at the shared git dir (same layout git itself uses).
                depth = path_str.count("/") + 1
                relative_git_dir = "../" * depth + ".git/modules/" + path_str
                git_file_path = os.path.join(submodule_path, ".git")
                with open(git_file_path, "w") as f:
                    f.write(f"gitdir: {relative_git_dir}\n")

                # Set up working directory configuration
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    sub_config = sub_repo.get_config()
                    sub_config.set(
                        (b"core",),
                        b"worktree",
                        os.path.abspath(submodule_path).encode(),
                    )
                    sub_config.write_to_path()

                    # Checkout the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Build the index and checkout files
                    tree = sub_repo[target_sha]
                    if hasattr(tree, "tree"):  # If it's a commit, get the tree
                        tree_id = tree.tree
                    else:
                        tree_id = target_sha

                    build_index_from_tree(
                        submodule_path,
                        sub_repo.index_path(),
                        sub_repo.object_store,
                        tree_id,
                    )
            else:
                # Fetch and checkout in existing submodule
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    # Fetch from remote
                    client, path_segments = get_transport_and_path(url)
                    client.fetch(path_segments, sub_repo)

                    # Update to the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Reset the working directory
                    reset(sub_repo, "hard", target_sha)
  1282. def tag_create(
  1283. repo,
  1284. tag: Union[str, bytes],
  1285. author: Optional[Union[str, bytes]] = None,
  1286. message: Optional[Union[str, bytes]] = None,
  1287. annotated=False,
  1288. objectish: Union[str, bytes] = "HEAD",
  1289. tag_time=None,
  1290. tag_timezone=None,
  1291. sign: bool = False,
  1292. encoding: str = DEFAULT_ENCODING,
  1293. ) -> None:
  1294. """Creates a tag in git via dulwich calls.
  1295. Args:
  1296. repo: Path to repository
  1297. tag: tag string
  1298. author: tag author (optional, if annotated is set)
  1299. message: tag message (optional)
  1300. annotated: whether to create an annotated tag
  1301. objectish: object the tag should point at, defaults to HEAD
  1302. tag_time: Optional time for annotated tag
  1303. tag_timezone: Optional timezone for annotated tag
  1304. sign: GPG Sign the tag (bool, defaults to False,
  1305. pass True to use default GPG key,
  1306. pass a str containing Key ID to use a specific GPG key)
  1307. """
  1308. with open_repo_closing(repo) as r:
  1309. object = parse_object(r, objectish)
  1310. if isinstance(tag, str):
  1311. tag = tag.encode(encoding)
  1312. if annotated:
  1313. # Create the tag object
  1314. tag_obj = Tag()
  1315. if author is None:
  1316. author = get_user_identity(r.get_config_stack())
  1317. elif isinstance(author, str):
  1318. author = author.encode(encoding)
  1319. else:
  1320. assert isinstance(author, bytes)
  1321. tag_obj.tagger = author
  1322. if isinstance(message, str):
  1323. message = message.encode(encoding)
  1324. elif isinstance(message, bytes):
  1325. pass
  1326. else:
  1327. message = b""
  1328. tag_obj.message = message + "\n".encode(encoding)
  1329. tag_obj.name = tag
  1330. tag_obj.object = (type(object), object.id)
  1331. if tag_time is None:
  1332. tag_time = int(time.time())
  1333. tag_obj.tag_time = tag_time
  1334. if tag_timezone is None:
  1335. tag_timezone = get_user_timezones()[1]
  1336. elif isinstance(tag_timezone, str):
  1337. tag_timezone = parse_timezone(tag_timezone.encode())
  1338. tag_obj.tag_timezone = tag_timezone
  1339. # Check if we should sign the tag
  1340. should_sign = sign
  1341. if sign is None:
  1342. # Check tag.gpgSign configuration when sign is not explicitly set
  1343. config = r.get_config_stack()
  1344. try:
  1345. should_sign = config.get_boolean((b"tag",), b"gpgSign")
  1346. except KeyError:
  1347. should_sign = False # Default to not signing if no config
  1348. if should_sign:
  1349. keyid = sign if isinstance(sign, str) else None
  1350. # If sign is True but no keyid specified, check user.signingKey config
  1351. if should_sign is True and keyid is None:
  1352. config = r.get_config_stack()
  1353. try:
  1354. keyid = config.get((b"user",), b"signingKey").decode("ascii")
  1355. except KeyError:
  1356. # No user.signingKey configured, will use default GPG key
  1357. pass
  1358. tag_obj.sign(keyid)
  1359. r.object_store.add_object(tag_obj)
  1360. tag_id = tag_obj.id
  1361. else:
  1362. tag_id = object.id
  1363. r.refs[_make_tag_ref(tag)] = tag_id
  1364. def tag_list(repo, outstream=sys.stdout):
  1365. """List all tags.
  1366. Args:
  1367. repo: Path to repository
  1368. outstream: Stream to write tags to
  1369. """
  1370. with open_repo_closing(repo) as r:
  1371. tags = sorted(r.refs.as_dict(b"refs/tags"))
  1372. return tags
  1373. def tag_delete(repo, name) -> None:
  1374. """Remove a tag.
  1375. Args:
  1376. repo: Path to repository
  1377. name: Name of tag to remove
  1378. """
  1379. with open_repo_closing(repo) as r:
  1380. if isinstance(name, bytes):
  1381. names = [name]
  1382. elif isinstance(name, list):
  1383. names = name
  1384. else:
  1385. raise Error(f"Unexpected tag name type {name!r}")
  1386. for name in names:
  1387. del r.refs[_make_tag_ref(name)]
  1388. def _make_notes_ref(name: bytes) -> bytes:
  1389. """Make a notes ref name."""
  1390. if name.startswith(b"refs/notes/"):
  1391. return name
  1392. return LOCAL_NOTES_PREFIX + name
  1393. def notes_add(
  1394. repo, object_sha, note, ref=b"commits", author=None, committer=None, message=None
  1395. ):
  1396. """Add or update a note for an object.
  1397. Args:
  1398. repo: Path to repository
  1399. object_sha: SHA of the object to annotate
  1400. note: Note content
  1401. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1402. author: Author identity (defaults to committer)
  1403. committer: Committer identity (defaults to config)
  1404. message: Commit message for the notes update
  1405. Returns:
  1406. SHA of the new notes commit
  1407. """
  1408. with open_repo_closing(repo) as r:
  1409. # Parse the object to get its SHA
  1410. obj = parse_object(r, object_sha)
  1411. object_sha = obj.id
  1412. if isinstance(note, str):
  1413. note = note.encode(DEFAULT_ENCODING)
  1414. if isinstance(ref, str):
  1415. ref = ref.encode(DEFAULT_ENCODING)
  1416. notes_ref = _make_notes_ref(ref)
  1417. config = r.get_config_stack()
  1418. return r.notes.set_note(
  1419. object_sha,
  1420. note,
  1421. notes_ref,
  1422. author=author,
  1423. committer=committer,
  1424. message=message,
  1425. config=config,
  1426. )
  1427. def notes_remove(
  1428. repo, object_sha, ref=b"commits", author=None, committer=None, message=None
  1429. ):
  1430. """Remove a note for an object.
  1431. Args:
  1432. repo: Path to repository
  1433. object_sha: SHA of the object to remove notes from
  1434. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1435. author: Author identity (defaults to committer)
  1436. committer: Committer identity (defaults to config)
  1437. message: Commit message for the notes removal
  1438. Returns:
  1439. SHA of the new notes commit, or None if no note existed
  1440. """
  1441. with open_repo_closing(repo) as r:
  1442. # Parse the object to get its SHA
  1443. obj = parse_object(r, object_sha)
  1444. object_sha = obj.id
  1445. if isinstance(ref, str):
  1446. ref = ref.encode(DEFAULT_ENCODING)
  1447. notes_ref = _make_notes_ref(ref)
  1448. config = r.get_config_stack()
  1449. return r.notes.remove_note(
  1450. object_sha,
  1451. notes_ref,
  1452. author=author,
  1453. committer=committer,
  1454. message=message,
  1455. config=config,
  1456. )
  1457. def notes_show(repo, object_sha, ref=b"commits"):
  1458. """Show the note for an object.
  1459. Args:
  1460. repo: Path to repository
  1461. object_sha: SHA of the object
  1462. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1463. Returns:
  1464. Note content as bytes, or None if no note exists
  1465. """
  1466. with open_repo_closing(repo) as r:
  1467. # Parse the object to get its SHA
  1468. obj = parse_object(r, object_sha)
  1469. object_sha = obj.id
  1470. if isinstance(ref, str):
  1471. ref = ref.encode(DEFAULT_ENCODING)
  1472. notes_ref = _make_notes_ref(ref)
  1473. config = r.get_config_stack()
  1474. return r.notes.get_note(object_sha, notes_ref, config=config)
  1475. def notes_list(repo, ref=b"commits"):
  1476. """List all notes in a notes ref.
  1477. Args:
  1478. repo: Path to repository
  1479. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1480. Returns:
  1481. List of tuples of (object_sha, note_content)
  1482. """
  1483. with open_repo_closing(repo) as r:
  1484. if isinstance(ref, str):
  1485. ref = ref.encode(DEFAULT_ENCODING)
  1486. notes_ref = _make_notes_ref(ref)
  1487. config = r.get_config_stack()
  1488. return r.notes.list_notes(notes_ref, config=config)
  1489. def reset(repo, mode, treeish: Union[str, bytes, Commit, Tree, Tag] = "HEAD") -> None:
  1490. """Reset current HEAD to the specified state.
  1491. Args:
  1492. repo: Path to repository
  1493. mode: Mode ("hard", "soft", "mixed")
  1494. treeish: Treeish to reset to
  1495. """
  1496. with open_repo_closing(repo) as r:
  1497. # Parse the target tree
  1498. tree = parse_tree(r, treeish)
  1499. # Only parse as commit if treeish is not a Tree object
  1500. if isinstance(treeish, Tree):
  1501. # For Tree objects, we can't determine the commit, skip updating HEAD
  1502. target_commit = None
  1503. else:
  1504. target_commit = parse_commit(r, treeish)
  1505. # Update HEAD to point to the target commit
  1506. if target_commit is not None:
  1507. r.refs[b"HEAD"] = target_commit.id
  1508. if mode == "soft":
  1509. # Soft reset: only update HEAD, leave index and working tree unchanged
  1510. return
  1511. elif mode == "mixed":
  1512. # Mixed reset: update HEAD and index, but leave working tree unchanged
  1513. from .object_store import iter_tree_contents
  1514. # Open the index
  1515. index = r.open_index()
  1516. # Clear the current index
  1517. index.clear()
  1518. # Populate index from the target tree
  1519. for entry in iter_tree_contents(r.object_store, tree.id):
  1520. # Create an IndexEntry from the tree entry
  1521. # Use zeros for filesystem-specific fields since we're not touching the working tree
  1522. index_entry = IndexEntry(
  1523. ctime=(0, 0),
  1524. mtime=(0, 0),
  1525. dev=0,
  1526. ino=0,
  1527. mode=entry.mode,
  1528. uid=0,
  1529. gid=0,
  1530. size=0, # Size will be 0 since we're not reading from disk
  1531. sha=entry.sha,
  1532. flags=0,
  1533. )
  1534. index[entry.path] = index_entry
  1535. # Write the updated index
  1536. index.write()
  1537. elif mode == "hard":
  1538. # Hard reset: update HEAD, index, and working tree
  1539. # Get current HEAD tree for comparison
  1540. try:
  1541. current_head = r.refs[b"HEAD"]
  1542. current_tree = r[current_head].tree
  1543. except KeyError:
  1544. current_tree = None
  1545. # Get configuration for working directory update
  1546. config = r.get_config()
  1547. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
  1548. if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
  1549. validate_path_element = validate_path_element_ntfs
  1550. elif config.get_boolean(
  1551. b"core", b"core.protectHFS", sys.platform == "darwin"
  1552. ):
  1553. validate_path_element = validate_path_element_hfs
  1554. else:
  1555. validate_path_element = validate_path_element_default
  1556. if config.get_boolean(b"core", b"symlinks", True):
  1557. symlink_fn = symlink
  1558. else:
  1559. def symlink_fn( # type: ignore
  1560. source, target, target_is_directory=False, *, dir_fd=None
  1561. ) -> None:
  1562. mode = "w" + ("b" if isinstance(source, bytes) else "")
  1563. with open(target, mode) as f:
  1564. f.write(source)
  1565. # Update working tree and index
  1566. blob_normalizer = r.get_blob_normalizer()
  1567. update_working_tree(
  1568. r,
  1569. current_tree,
  1570. tree.id,
  1571. honor_filemode=honor_filemode,
  1572. validate_path_element=validate_path_element,
  1573. symlink_fn=symlink_fn,
  1574. force_remove_untracked=True,
  1575. blob_normalizer=blob_normalizer,
  1576. )
  1577. else:
  1578. raise Error(f"Invalid reset mode: {mode}")
  1579. def get_remote_repo(
  1580. repo: Repo, remote_location: Optional[Union[str, bytes]] = None
  1581. ) -> tuple[Optional[str], str]:
  1582. config = repo.get_config()
  1583. if remote_location is None:
  1584. remote_location = get_branch_remote(repo)
  1585. if isinstance(remote_location, str):
  1586. encoded_location = remote_location.encode()
  1587. else:
  1588. encoded_location = remote_location
  1589. section = (b"remote", encoded_location)
  1590. remote_name: Optional[str] = None
  1591. if config.has_section(section):
  1592. remote_name = encoded_location.decode()
  1593. encoded_location = config.get(section, "url")
  1594. else:
  1595. remote_name = None
  1596. return (remote_name, encoded_location.decode())
  1597. def push(
  1598. repo,
  1599. remote_location=None,
  1600. refspecs=None,
  1601. outstream=default_bytes_out_stream,
  1602. errstream=default_bytes_err_stream,
  1603. force=False,
  1604. **kwargs,
  1605. ):
  1606. """Remote push with dulwich via dulwich.client.
  1607. Args:
  1608. repo: Path to repository
  1609. remote_location: Location of the remote
  1610. refspecs: Refs to push to remote
  1611. outstream: A stream file to write output
  1612. errstream: A stream file to write errors
  1613. force: Force overwriting refs
  1614. """
  1615. # Open the repo
  1616. with open_repo_closing(repo) as r:
  1617. (remote_name, remote_location) = get_remote_repo(r, remote_location)
  1618. # Check if mirror mode is enabled
  1619. mirror_mode = False
  1620. if remote_name:
  1621. try:
  1622. mirror_mode = r.get_config_stack().get_boolean(
  1623. (b"remote", remote_name.encode()), b"mirror"
  1624. )
  1625. except KeyError:
  1626. pass
  1627. if mirror_mode:
  1628. # Mirror mode: push all refs and delete non-existent ones
  1629. refspecs = []
  1630. for ref in r.refs.keys():
  1631. # Push all refs to the same name on remote
  1632. refspecs.append(ref + b":" + ref)
  1633. elif refspecs is None:
  1634. refspecs = [active_branch(r)]
  1635. # Get the client and path
  1636. client, path = get_transport_and_path(
  1637. remote_location, config=r.get_config_stack(), **kwargs
  1638. )
  1639. selected_refs = []
  1640. remote_changed_refs = {}
  1641. def update_refs(refs):
  1642. selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
  1643. new_refs = {}
  1644. # In mirror mode, delete remote refs that don't exist locally
  1645. if mirror_mode:
  1646. local_refs = set(r.refs.keys())
  1647. for remote_ref in refs.keys():
  1648. if remote_ref not in local_refs:
  1649. new_refs[remote_ref] = ZERO_SHA
  1650. remote_changed_refs[remote_ref] = None
  1651. # TODO: Handle selected_refs == {None: None}
  1652. for lh, rh, force_ref in selected_refs:
  1653. if lh is None:
  1654. new_refs[rh] = ZERO_SHA
  1655. remote_changed_refs[rh] = None
  1656. else:
  1657. try:
  1658. localsha = r.refs[lh]
  1659. except KeyError as exc:
  1660. raise Error(f"No valid ref {lh} in local repository") from exc
  1661. if not force_ref and rh in refs:
  1662. check_diverged(r, refs[rh], localsha)
  1663. new_refs[rh] = localsha
  1664. remote_changed_refs[rh] = localsha
  1665. return new_refs
  1666. err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
  1667. remote_location = client.get_url(path)
  1668. try:
  1669. result = client.send_pack(
  1670. path,
  1671. update_refs,
  1672. generate_pack_data=r.generate_pack_data,
  1673. progress=errstream.write,
  1674. )
  1675. except SendPackError as exc:
  1676. raise Error(
  1677. "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
  1678. ) from exc
  1679. else:
  1680. errstream.write(
  1681. b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
  1682. )
  1683. for ref, error in (result.ref_status or {}).items():
  1684. if error is not None:
  1685. errstream.write(
  1686. b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
  1687. )
  1688. else:
  1689. errstream.write(b"Ref %s updated\n" % ref)
  1690. if remote_name is not None:
  1691. _import_remote_refs(r.refs, remote_name, remote_changed_refs)
  1692. return result
  1693. # Trigger auto GC if needed
  1694. from .gc import maybe_auto_gc
  1695. with open_repo_closing(repo) as r:
  1696. maybe_auto_gc(r)
def pull(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    fast_forward=True,
    ff_only=False,
    force=False,
    filter_spec=None,
    protocol_version=None,
    **kwargs,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: Force setting of refs even when they have diverged
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []

        if refspecs is None:
            refspecs = [b"HEAD"]

        def determine_wants(remote_refs, *args, **kwargs):
            # Side effect: records the parsed refspec tuples for use after
            # the fetch completes.
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, refspecs, force=force)
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        if filter_spec:
            filter_spec = filter_spec.encode("ascii")
        fetch_result = client.fetch(
            path,
            r,
            progress=errstream.write,
            determine_wants=determine_wants,
            filter_spec=filter_spec,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[b"HEAD"]
            old_tree_id = r[old_head].tree
        except KeyError:
            # Unborn HEAD (e.g. fresh repository): nothing to diff against.
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh in r.refs:
                try:
                    check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                except DivergedBranches as exc:
                    # NOTE(review): the merge path below is only reachable when
                    # BOTH fast_forward and ff_only are False.
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        merge_result, conflicts = _do_merge(r, fetch_result.refs[lh])
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            r.refs[rh] = fetch_result.refs[lh]

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]

        # Update working tree to match the new HEAD
        # Skip if merge was performed as merge already updates the working tree
        if not merged and old_tree_id is not None:
            new_tree_id = r[b"HEAD"].tree
            blob_normalizer = r.get_blob_normalizer()
            update_working_tree(
                r, old_tree_id, new_tree_id, blob_normalizer=blob_normalizer
            )
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
def status(repo=".", ignored=False, untracked_files="normal"):
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
        "no": do not return untracked files
        "normal": return untracked directories, not their contents
        "all": include all files in untracked directories

    Using untracked_files="no" can be faster than "all" when the worktree
    contains many untracked files/directories.

    Using untracked_files="normal" provides a good balance, only showing
    directories that are entirely untracked without listing all their contents.

    Returns: GitStatus tuple,
        staged -    dict with lists of staged paths (diff index/HEAD)
        unstaged -  list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)
        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            # Normalize separators so results use forward slashes like git.
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
  1837. def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
  1838. """Get path, is_dir for files in working dir from frompath.
  1839. Args:
  1840. frompath: Path to begin walk
  1841. basepath: Path to compare to
  1842. prune_dirnames: Optional callback to prune dirnames during os.walk
  1843. dirnames will be set to result of prune_dirnames(dirpath, dirnames)
  1844. """
  1845. for dirpath, dirnames, filenames in os.walk(frompath):
  1846. # Skip .git and below.
  1847. if ".git" in dirnames:
  1848. dirnames.remove(".git")
  1849. if dirpath != basepath:
  1850. continue
  1851. if ".git" in filenames:
  1852. filenames.remove(".git")
  1853. if dirpath != basepath:
  1854. continue
  1855. if dirpath != frompath:
  1856. yield dirpath, True
  1857. for filename in filenames:
  1858. filepath = os.path.join(dirpath, filename)
  1859. yield filepath, False
  1860. if prune_dirnames:
  1861. dirnames[:] = prune_dirnames(dirpath, dirnames)
def get_untracked_paths(
    frompath, basepath, index, exclude_ignored=False, untracked_files="all"
):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Raises:
      ValueError: if untracked_files is not one of "no", "all", "normal"

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    with open_repo_closing(basepath) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def prune_dirnames(dirpath, dirnames):
        # Mutates dirnames in place (iterating backwards so deletion by
        # index is safe) so os.walk skips pruned subtrees entirely.
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            ip = os.path.join(os.path.relpath(path, basepath), "")

            # Check if directory is ignored
            if ignore_manager.is_ignored(ip):
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert directory path to tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(
                        os.path.relpath(path, basepath)
                    )
                    if not exclude_ignored or not is_ignored:
                        rel_path = os.path.join(os.path.relpath(path, frompath), "")
                        untracked_dir_list.append(rel_path)
                    # Prune: the whole subtree is reported as one directory.
                    del dirnames[i]

        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath, basepath, prune_dirnames=prune_dirnames
        ):
            if not is_dir:
                ip = path_to_tree_path(basepath, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.relpath(ap, frompath)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath, basepath, prune_dirnames=prune_dirnames
        ):
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.relpath(ap, frompath)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list

    yield from ignored_dirs
  1957. def get_tree_changes(repo):
  1958. """Return add/delete/modify changes to tree by comparing index to HEAD.
  1959. Args:
  1960. repo: repo path or object
  1961. Returns: dict with lists for each type of change
  1962. """
  1963. with open_repo_closing(repo) as r:
  1964. index = r.open_index()
  1965. # Compares the Index to the HEAD & determines changes
  1966. # Iterate through the changes and report add/delete/modify
  1967. # TODO: call out to dulwich.diff_tree somehow.
  1968. tracked_changes = {
  1969. "add": [],
  1970. "delete": [],
  1971. "modify": [],
  1972. }
  1973. try:
  1974. tree_id = r[b"HEAD"].tree
  1975. except KeyError:
  1976. tree_id = None
  1977. for change in index.changes_from_tree(r.object_store, tree_id):
  1978. if not change[0][0]:
  1979. tracked_changes["add"].append(change[0][1])
  1980. elif not change[0][1]:
  1981. tracked_changes["delete"].append(change[0][0])
  1982. elif change[0][0] == change[0][1]:
  1983. tracked_changes["modify"].append(change[0][0])
  1984. else:
  1985. raise NotImplementedError("git mv ops not yet supported")
  1986. return tracked_changes
  1987. def daemon(path=".", address=None, port=None) -> None:
  1988. """Run a daemon serving Git requests over TCP/IP.
  1989. Args:
  1990. path: Path to the directory to serve.
  1991. address: Optional address to listen on (defaults to ::)
  1992. port: Optional port to listen on (defaults to TCP_GIT_PORT)
  1993. """
  1994. # TODO(jelmer): Support git-daemon-export-ok and --export-all.
  1995. backend = FileSystemBackend(path)
  1996. server = TCPGitServer(backend, address, port)
  1997. server.serve_forever()
  1998. def web_daemon(path=".", address=None, port=None) -> None:
  1999. """Run a daemon serving Git requests over HTTP.
  2000. Args:
  2001. path: Path to the directory to serve
  2002. address: Optional address to listen on (defaults to ::)
  2003. port: Optional port to listen on (defaults to 80)
  2004. """
  2005. from .web import (
  2006. WSGIRequestHandlerLogger,
  2007. WSGIServerLogger,
  2008. make_server,
  2009. make_wsgi_chain,
  2010. )
  2011. backend = FileSystemBackend(path)
  2012. app = make_wsgi_chain(backend)
  2013. server = make_server(
  2014. address,
  2015. port,
  2016. app,
  2017. handler_class=WSGIRequestHandlerLogger,
  2018. server_class=WSGIServerLogger,
  2019. )
  2020. server.serve_forever()
  2021. def upload_pack(path=".", inf=None, outf=None) -> int:
  2022. """Upload a pack file after negotiating its contents using smart protocol.
  2023. Args:
  2024. path: Path to the repository
  2025. inf: Input stream to communicate with client
  2026. outf: Output stream to communicate with client
  2027. """
  2028. if outf is None:
  2029. outf = getattr(sys.stdout, "buffer", sys.stdout)
  2030. if inf is None:
  2031. inf = getattr(sys.stdin, "buffer", sys.stdin)
  2032. path = os.path.expanduser(path)
  2033. backend = FileSystemBackend(path)
  2034. def send_fn(data) -> None:
  2035. outf.write(data)
  2036. outf.flush()
  2037. proto = Protocol(inf.read, send_fn)
  2038. handler = UploadPackHandler(backend, [path], proto)
  2039. # FIXME: Catch exceptions and write a single-line summary to outf.
  2040. handler.handle()
  2041. return 0
  2042. def receive_pack(path=".", inf=None, outf=None) -> int:
  2043. """Receive a pack file after negotiating its contents using smart protocol.
  2044. Args:
  2045. path: Path to the repository
  2046. inf: Input stream to communicate with client
  2047. outf: Output stream to communicate with client
  2048. """
  2049. if outf is None:
  2050. outf = getattr(sys.stdout, "buffer", sys.stdout)
  2051. if inf is None:
  2052. inf = getattr(sys.stdin, "buffer", sys.stdin)
  2053. path = os.path.expanduser(path)
  2054. backend = FileSystemBackend(path)
  2055. def send_fn(data) -> None:
  2056. outf.write(data)
  2057. outf.flush()
  2058. proto = Protocol(inf.read, send_fn)
  2059. handler = ReceivePackHandler(backend, [path], proto)
  2060. # FIXME: Catch exceptions and write a single-line summary to outf.
  2061. handler.handle()
  2062. return 0
  2063. def _make_branch_ref(name: Union[str, bytes]) -> Ref:
  2064. if isinstance(name, str):
  2065. name = name.encode(DEFAULT_ENCODING)
  2066. return LOCAL_BRANCH_PREFIX + name
  2067. def _make_tag_ref(name: Union[str, bytes]) -> Ref:
  2068. if isinstance(name, str):
  2069. name = name.encode(DEFAULT_ENCODING)
  2070. return LOCAL_TAG_PREFIX + name
  2071. def branch_delete(repo, name) -> None:
  2072. """Delete a branch.
  2073. Args:
  2074. repo: Path to the repository
  2075. name: Name of the branch
  2076. """
  2077. with open_repo_closing(repo) as r:
  2078. if isinstance(name, list):
  2079. names = name
  2080. else:
  2081. names = [name]
  2082. for name in names:
  2083. del r.refs[_make_branch_ref(name)]
def branch_create(repo, name, objectish=None, force=False) -> None:
    """Create a branch.

    Also sets up remote tracking configuration for the new branch when the
    target resolves to a remote-tracking ref and ``branch.autoSetupMerge``
    allows it ("true", the default, tracks remote branches only; "always"
    tracks any start point that is a ref).

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists

    Raises:
      Error: if the branch already exists and ``force`` is not set
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"

        # Try to expand branch shorthand before parsing: prefer a
        # remote-tracking ref, then a local branch, over the raw name.
        original_objectish = objectish
        objectish_bytes = (
            objectish.encode(DEFAULT_ENCODING)
            if isinstance(objectish, str)
            else objectish
        )
        if b"refs/remotes/" + objectish_bytes in r.refs:
            objectish = b"refs/remotes/" + objectish_bytes
        elif b"refs/heads/" + objectish_bytes in r.refs:
            objectish = b"refs/heads/" + objectish_bytes

        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        # Reflog message records the (original, unexpanded) start point.
        ref_message = (
            b"branch: Created from " + original_objectish.encode(DEFAULT_ENCODING)
            if isinstance(original_objectish, str)
            else b"branch: Created from " + original_objectish
        )
        if force:
            # Unconditionally (re)point the ref at the target commit.
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            # add_if_new only succeeds when the ref does not exist yet.
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                raise Error(f"Branch with name {name} already exists.")

        # Check if we should set up tracking
        config = r.get_config_stack()
        try:
            auto_setup_merge = config.get((b"branch",), b"autoSetupMerge").decode()
        except KeyError:
            auto_setup_merge = "true"  # Default value

        # Determine if the objectish refers to a remote-tracking branch
        objectish_ref = None
        if original_objectish != "HEAD":
            # Try to resolve objectish as a ref (exact, then remote, then local)
            objectish_bytes = (
                original_objectish.encode(DEFAULT_ENCODING)
                if isinstance(original_objectish, str)
                else original_objectish
            )
            if objectish_bytes in r.refs:
                objectish_ref = objectish_bytes
            elif b"refs/remotes/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/remotes/" + objectish_bytes
            elif b"refs/heads/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/heads/" + objectish_bytes
        else:
            # HEAD might point to a remote-tracking branch
            head_ref = r.refs.follow(b"HEAD")[0][1]
            if head_ref.startswith(b"refs/remotes/"):
                objectish_ref = head_ref

        # Set up tracking if appropriate
        if objectish_ref and (
            (auto_setup_merge == "always")
            or (
                auto_setup_merge == "true"
                and objectish_ref.startswith(b"refs/remotes/")
            )
        ):
            # Extract remote name and branch from the ref
            # (refs/remotes/<remote>/<branch>)
            if objectish_ref.startswith(b"refs/remotes/"):
                parts = objectish_ref[len(b"refs/remotes/") :].split(b"/", 1)
                if len(parts) == 2:
                    remote_name = parts[0]
                    remote_branch = b"refs/heads/" + parts[1]

                    # Persist branch.<name>.remote / branch.<name>.merge
                    config = r.get_config()
                    branch_name_bytes = (
                        name.encode(DEFAULT_ENCODING) if isinstance(name, str) else name
                    )
                    config.set((b"branch", branch_name_bytes), b"remote", remote_name)
                    config.set((b"branch", branch_name_bytes), b"merge", remote_branch)
                    config.write_to_path()
  2166. def branch_list(repo):
  2167. """List all branches.
  2168. Args:
  2169. repo: Path to the repository
  2170. Returns:
  2171. List of branch names (without refs/heads/ prefix)
  2172. """
  2173. with open_repo_closing(repo) as r:
  2174. branches = list(r.refs.keys(base=LOCAL_BRANCH_PREFIX))
  2175. # Check for branch.sort configuration
  2176. config = r.get_config_stack()
  2177. try:
  2178. sort_key = config.get((b"branch",), b"sort").decode()
  2179. except KeyError:
  2180. # Default is refname (alphabetical)
  2181. sort_key = "refname"
  2182. # Parse sort key
  2183. reverse = False
  2184. if sort_key.startswith("-"):
  2185. reverse = True
  2186. sort_key = sort_key[1:]
  2187. # Apply sorting
  2188. if sort_key == "refname":
  2189. # Simple alphabetical sort (default)
  2190. branches.sort(reverse=reverse)
  2191. elif sort_key in ("committerdate", "authordate"):
  2192. # Sort by date
  2193. def get_commit_date(branch_name):
  2194. ref = LOCAL_BRANCH_PREFIX + branch_name
  2195. sha = r.refs[ref]
  2196. commit = r.object_store[sha]
  2197. if sort_key == "committerdate":
  2198. return commit.commit_time
  2199. else: # authordate
  2200. return commit.author_time
  2201. # Sort branches by date
  2202. # Note: Python's sort naturally orders smaller values first (ascending)
  2203. # For dates, this means oldest first by default
  2204. # Use a stable sort with branch name as secondary key for consistent ordering
  2205. if reverse:
  2206. # For reverse sort, we want newest dates first but alphabetical names second
  2207. branches.sort(key=lambda b: (-get_commit_date(b), b))
  2208. else:
  2209. branches.sort(key=lambda b: (get_commit_date(b), b))
  2210. else:
  2211. # Unknown sort key, fall back to default
  2212. branches.sort()
  2213. return branches
  2214. def active_branch(repo):
  2215. """Return the active branch in the repository, if any.
  2216. Args:
  2217. repo: Repository to open
  2218. Returns:
  2219. branch name
  2220. Raises:
  2221. KeyError: if the repository does not have a working tree
  2222. IndexError: if HEAD is floating
  2223. """
  2224. with open_repo_closing(repo) as r:
  2225. active_ref = r.refs.follow(b"HEAD")[0][1]
  2226. if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
  2227. raise ValueError(active_ref)
  2228. return active_ref[len(LOCAL_BRANCH_PREFIX) :]
  2229. def get_branch_remote(repo):
  2230. """Return the active branch's remote name, if any.
  2231. Args:
  2232. repo: Repository to open
  2233. Returns:
  2234. remote name
  2235. Raises:
  2236. KeyError: if the repository does not have a working tree
  2237. """
  2238. with open_repo_closing(repo) as r:
  2239. branch_name = active_branch(r.path)
  2240. config = r.get_config()
  2241. try:
  2242. remote_name = config.get((b"branch", branch_name), b"remote")
  2243. except KeyError:
  2244. remote_name = b"origin"
  2245. return remote_name
  2246. def get_branch_merge(repo, branch_name=None):
  2247. """Return the branch's merge reference (upstream branch), if any.
  2248. Args:
  2249. repo: Repository to open
  2250. branch_name: Name of the branch (defaults to active branch)
  2251. Returns:
  2252. merge reference name (e.g. b"refs/heads/main")
  2253. Raises:
  2254. KeyError: if the branch does not have a merge configuration
  2255. """
  2256. with open_repo_closing(repo) as r:
  2257. if branch_name is None:
  2258. branch_name = active_branch(r.path)
  2259. config = r.get_config()
  2260. return config.get((b"branch", branch_name), b"merge")
  2261. def set_branch_tracking(repo, branch_name, remote_name, remote_ref):
  2262. """Set up branch tracking configuration.
  2263. Args:
  2264. repo: Repository to open
  2265. branch_name: Name of the local branch
  2266. remote_name: Name of the remote (e.g. b"origin")
  2267. remote_ref: Remote reference to track (e.g. b"refs/heads/main")
  2268. """
  2269. with open_repo_closing(repo) as r:
  2270. config = r.get_config()
  2271. config.set((b"branch", branch_name), b"remote", remote_name)
  2272. config.set((b"branch", branch_name), b"merge", remote_ref)
  2273. config.write_to_path()
def fetch(
    repo,
    remote_location=None,
    outstream=sys.stdout,
    errstream=default_bytes_err_stream,
    message=None,
    depth=None,
    prune=False,
    prune_tags=False,
    force=False,
    **kwargs,
):
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags
      force: Accepted for API compatibility; not referenced by this
        implementation
      **kwargs: Extra keyword arguments passed to get_transport_and_path
    Returns:
      Dictionary with refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        if message is None:
            message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth)
        if remote_name is not None:
            # Record fetched refs under the remote's namespace, optionally
            # pruning refs/tags that disappeared upstream.
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )

    # Trigger auto GC if needed (repo is reopened after the fetch completes).
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return fetch_result
def for_each_ref(
    repo: Union[Repo, str] = ".",
    pattern: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with
    Returns: List of bytes tuples with: (sha, object_type, ref_name),
      sorted by ref name; HEAD is always excluded.
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        if pattern:
            matching_refs: dict[bytes, bytes] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False

                # git for-each-ref uses glob (7) style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                # A ref with more path components than the pattern can
                # never match component-wise; skip it.
                if len(ref_parts) > len(pattern_parts):
                    continue
                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break
                if matches:
                    matching_refs[ref] = sha

            refs = matching_refs

        # Sort by ref name and resolve each sha to its object type.
        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]
        return ret
  2363. def ls_remote(remote, config: Optional[Config] = None, **kwargs):
  2364. """List the refs in a remote.
  2365. Args:
  2366. remote: Remote repository location
  2367. config: Configuration to use
  2368. Returns:
  2369. LsRemoteResult object with refs and symrefs
  2370. """
  2371. if config is None:
  2372. config = StackedConfig.default()
  2373. client, host_path = get_transport_and_path(remote, config=config, **kwargs)
  2374. return client.get_refs(host_path)
  2375. def repack(repo) -> None:
  2376. """Repack loose files in a repository.
  2377. Currently this only packs loose objects.
  2378. Args:
  2379. repo: Path to the repository
  2380. """
  2381. with open_repo_closing(repo) as r:
  2382. r.object_store.pack_loose_objects()
  2383. def pack_objects(
  2384. repo,
  2385. object_ids,
  2386. packf,
  2387. idxf,
  2388. delta_window_size=None,
  2389. deltify=None,
  2390. reuse_deltas=True,
  2391. pack_index_version=None,
  2392. ) -> None:
  2393. """Pack objects into a file.
  2394. Args:
  2395. repo: Path to the repository
  2396. object_ids: List of object ids to write
  2397. packf: File-like object to write to
  2398. idxf: File-like object to write to (can be None)
  2399. delta_window_size: Sliding window size for searching for deltas;
  2400. Set to None for default window size.
  2401. deltify: Whether to deltify objects
  2402. reuse_deltas: Allow reuse of existing deltas while deltifying
  2403. pack_index_version: Pack index version to use (1, 2, or 3). If None, uses default version.
  2404. """
  2405. with open_repo_closing(repo) as r:
  2406. entries, data_sum = write_pack_from_container(
  2407. packf.write,
  2408. r.object_store,
  2409. [(oid, None) for oid in object_ids],
  2410. deltify=deltify,
  2411. delta_window_size=delta_window_size,
  2412. reuse_deltas=reuse_deltas,
  2413. )
  2414. if idxf is not None:
  2415. entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
  2416. write_pack_index(idxf, entries, data_sum, version=pack_index_version)
  2417. def ls_tree(
  2418. repo,
  2419. treeish: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
  2420. outstream=sys.stdout,
  2421. recursive=False,
  2422. name_only=False,
  2423. ) -> None:
  2424. """List contents of a tree.
  2425. Args:
  2426. repo: Path to the repository
  2427. treeish: Tree id to list
  2428. outstream: Output stream (defaults to stdout)
  2429. recursive: Whether to recursively list files
  2430. name_only: Only print item name
  2431. """
  2432. def list_tree(store, treeid, base) -> None:
  2433. for name, mode, sha in store[treeid].iteritems():
  2434. if base:
  2435. name = posixpath.join(base, name)
  2436. if name_only:
  2437. outstream.write(name + b"\n")
  2438. else:
  2439. outstream.write(pretty_format_tree_entry(name, mode, sha))
  2440. if stat.S_ISDIR(mode) and recursive:
  2441. list_tree(store, sha, name)
  2442. with open_repo_closing(repo) as r:
  2443. tree = parse_tree(r, treeish)
  2444. list_tree(r.object_store, tree.id, "")
  2445. def remote_add(repo, name: Union[bytes, str], url: Union[bytes, str]) -> None:
  2446. """Add a remote.
  2447. Args:
  2448. repo: Path to the repository
  2449. name: Remote name
  2450. url: Remote URL
  2451. """
  2452. if not isinstance(name, bytes):
  2453. name = name.encode(DEFAULT_ENCODING)
  2454. if not isinstance(url, bytes):
  2455. url = url.encode(DEFAULT_ENCODING)
  2456. with open_repo_closing(repo) as r:
  2457. c = r.get_config()
  2458. section = (b"remote", name)
  2459. if c.has_section(section):
  2460. raise RemoteExists(section)
  2461. c.set(section, b"url", url)
  2462. c.write_to_path()
  2463. def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
  2464. """Remove a remote.
  2465. Args:
  2466. repo: Path to the repository
  2467. name: Remote name
  2468. """
  2469. if not isinstance(name, bytes):
  2470. name = name.encode(DEFAULT_ENCODING)
  2471. with open_repo_closing(repo) as r:
  2472. c = r.get_config()
  2473. section = (b"remote", name)
  2474. del c[section]
  2475. c.write_to_path()
  2476. def _quote_path(path: str) -> str:
  2477. """Quote a path using C-style quoting similar to git's core.quotePath.
  2478. Args:
  2479. path: Path to quote
  2480. Returns:
  2481. Quoted path string
  2482. """
  2483. # Check if path needs quoting (non-ASCII or special characters)
  2484. needs_quoting = False
  2485. for char in path:
  2486. if ord(char) > 127 or char in '"\\':
  2487. needs_quoting = True
  2488. break
  2489. if not needs_quoting:
  2490. return path
  2491. # Apply C-style quoting
  2492. quoted = '"'
  2493. for char in path:
  2494. if ord(char) > 127:
  2495. # Non-ASCII character, encode as octal escape
  2496. utf8_bytes = char.encode("utf-8")
  2497. for byte in utf8_bytes:
  2498. quoted += f"\\{byte:03o}"
  2499. elif char == '"':
  2500. quoted += '\\"'
  2501. elif char == "\\":
  2502. quoted += "\\\\"
  2503. else:
  2504. quoted += char
  2505. quoted += '"'
  2506. return quoted
def check_ignore(repo, paths, no_index=False, quote_path=True):
    r"""Debug gitignore files.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
      quote_path: If True, quote non-ASCII characters in returned paths using
        C-style octal escapes (e.g. "тест.txt" becomes
        "\\321\\202\\320\\265\\321\\201\\321\\202.txt").
        If False, return raw unicode paths.
    Returns: List of ignored files
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for original_path in paths:
            # Paths already tracked in the index are never reported as
            # ignored (unless no_index is set).
            if not no_index and path_to_tree_path(r.path, original_path) in index:
                continue

            # Preserve whether the original path had a trailing slash
            had_trailing_slash = original_path.endswith(("/", os.path.sep))

            if os.path.isabs(original_path):
                path = os.path.relpath(original_path, r.path)
                # Normalize Windows paths to use forward slashes
                if os.path.sep != "/":
                    path = path.replace(os.path.sep, "/")
            else:
                path = original_path

            # Restore trailing slash if it was in the original
            if had_trailing_slash and not path.endswith("/"):
                path = path + "/"

            # For directories, check with trailing slash to get correct
            # ignore behavior (gitignore dir patterns end in "/")
            test_path = path
            path_without_slash = path.rstrip("/")
            is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))

            # If this is a directory path, ensure we test it correctly
            if is_directory and not path.endswith("/"):
                test_path = path + "/"

            if ignore_manager.is_ignored(test_path):
                # Return relative path (like git does) when absolute path
                # was provided
                if os.path.isabs(original_path):
                    output_path = path
                else:
                    output_path = original_path

                yield _quote_path(output_path) if quote_path else output_path
  2550. def update_head(repo, target, detached=False, new_branch=None) -> None:
  2551. """Update HEAD to point at a new branch/commit.
  2552. Note that this does not actually update the working tree.
  2553. Args:
  2554. repo: Path to the repository
  2555. detached: Create a detached head
  2556. target: Branch or committish to switch to
  2557. new_branch: New branch to create
  2558. """
  2559. with open_repo_closing(repo) as r:
  2560. if new_branch is not None:
  2561. to_set = _make_branch_ref(new_branch)
  2562. else:
  2563. to_set = b"HEAD"
  2564. if detached:
  2565. # TODO(jelmer): Provide some way so that the actual ref gets
  2566. # updated rather than what it points to, so the delete isn't
  2567. # necessary.
  2568. del r.refs[to_set]
  2569. r.refs[to_set] = parse_commit(r, target).id
  2570. else:
  2571. r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
  2572. if new_branch is not None:
  2573. r.refs.set_symbolic_ref(b"HEAD", to_set)
  2574. def checkout(
  2575. repo,
  2576. target: Optional[Union[str, bytes, Commit, Tag]] = None,
  2577. force: bool = False,
  2578. new_branch: Optional[Union[bytes, str]] = None,
  2579. paths: Optional[list[Union[bytes, str]]] = None,
  2580. ) -> None:
  2581. """Switch to a branch or commit, updating both HEAD and the working tree.
  2582. This is similar to 'git checkout', allowing you to switch to a branch,
  2583. tag, or specific commit. Unlike update_head, this function also updates
  2584. the working tree to match the target.
  2585. Args:
  2586. repo: Path to repository or repository object
  2587. target: Branch name, tag, or commit SHA to checkout. If None and paths is specified,
  2588. restores files from HEAD
  2589. force: Force checkout even if there are local changes
  2590. new_branch: Create a new branch at target (like git checkout -b)
  2591. paths: List of specific paths to checkout. If specified, only these paths are updated
  2592. and HEAD is not changed
  2593. Raises:
  2594. CheckoutError: If checkout cannot be performed due to conflicts
  2595. KeyError: If the target reference cannot be found
  2596. """
  2597. with open_repo_closing(repo) as r:
  2598. # Store the original target for later reference checks
  2599. original_target = target
  2600. # Handle path-specific checkout (like git checkout -- <paths>)
  2601. if paths is not None:
  2602. # Convert paths to bytes
  2603. byte_paths = []
  2604. for path in paths:
  2605. if isinstance(path, str):
  2606. byte_paths.append(path.encode(DEFAULT_ENCODING))
  2607. else:
  2608. byte_paths.append(path)
  2609. # If no target specified, use HEAD
  2610. if target is None:
  2611. try:
  2612. target = r.refs[b"HEAD"]
  2613. except KeyError:
  2614. raise CheckoutError("No HEAD reference found")
  2615. else:
  2616. if isinstance(target, str):
  2617. target = target.encode(DEFAULT_ENCODING)
  2618. # Get the target commit and tree
  2619. target_commit = parse_commit(r, target)
  2620. target_tree = r[target_commit.tree]
  2621. # Get blob normalizer for line ending conversion
  2622. blob_normalizer = r.get_blob_normalizer()
  2623. # Restore specified paths from target tree
  2624. for path in byte_paths:
  2625. try:
  2626. # Look up the path in the target tree
  2627. mode, sha = target_tree.lookup_path(
  2628. r.object_store.__getitem__, path
  2629. )
  2630. obj = r[sha]
  2631. # Create directories if needed
  2632. # Handle path as string
  2633. if isinstance(path, bytes):
  2634. path_str = path.decode(DEFAULT_ENCODING)
  2635. else:
  2636. path_str = path
  2637. file_path = os.path.join(r.path, path_str)
  2638. os.makedirs(os.path.dirname(file_path), exist_ok=True)
  2639. # Write the file content
  2640. if stat.S_ISREG(mode):
  2641. # Apply checkout filters (smudge)
  2642. if blob_normalizer:
  2643. obj = blob_normalizer.checkout_normalize(obj, path)
  2644. flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
  2645. if sys.platform == "win32":
  2646. flags |= os.O_BINARY
  2647. with os.fdopen(os.open(file_path, flags, mode), "wb") as f:
  2648. f.write(obj.data)
  2649. # Update the index
  2650. r.stage(path)
  2651. except KeyError:
  2652. # Path doesn't exist in target tree
  2653. pass
  2654. return
  2655. # Normal checkout (switching branches/commits)
  2656. if target is None:
  2657. raise ValueError("Target must be specified for branch/commit checkout")
  2658. if isinstance(target, str):
  2659. target_bytes = target.encode(DEFAULT_ENCODING)
  2660. elif isinstance(target, bytes):
  2661. target_bytes = target
  2662. else:
  2663. # For Commit/Tag objects, we'll use their SHA
  2664. target_bytes = target.id
  2665. if isinstance(new_branch, str):
  2666. new_branch = new_branch.encode(DEFAULT_ENCODING)
  2667. # Parse the target to get the commit
  2668. assert (
  2669. original_target is not None
  2670. ) # Guaranteed by earlier check for normal checkout
  2671. target_commit = parse_commit(r, original_target)
  2672. target_tree_id = target_commit.tree
  2673. # Get current HEAD tree for comparison
  2674. try:
  2675. current_head = r.refs[b"HEAD"]
  2676. current_tree_id = r[current_head].tree
  2677. except KeyError:
  2678. # No HEAD yet (empty repo)
  2679. current_tree_id = None
  2680. # Check for uncommitted changes if not forcing
  2681. if not force and current_tree_id is not None:
  2682. status_report = status(r)
  2683. changes = []
  2684. # staged is a dict with 'add', 'delete', 'modify' keys
  2685. if isinstance(status_report.staged, dict):
  2686. changes.extend(status_report.staged.get("add", []))
  2687. changes.extend(status_report.staged.get("delete", []))
  2688. changes.extend(status_report.staged.get("modify", []))
  2689. # unstaged is a list
  2690. changes.extend(status_report.unstaged)
  2691. if changes:
  2692. # Check if any changes would conflict with checkout
  2693. target_tree = r[target_tree_id]
  2694. for change in changes:
  2695. if isinstance(change, str):
  2696. change = change.encode(DEFAULT_ENCODING)
  2697. try:
  2698. target_tree.lookup_path(r.object_store.__getitem__, change)
  2699. # File exists in target tree - would overwrite local changes
  2700. raise CheckoutError(
  2701. f"Your local changes to '{change.decode()}' would be "
  2702. "overwritten by checkout. Please commit or stash before switching."
  2703. )
  2704. except KeyError:
  2705. # File doesn't exist in target tree - change can be preserved
  2706. pass
  2707. # Get configuration for working directory update
  2708. config = r.get_config()
  2709. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
  2710. if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
  2711. validate_path_element = validate_path_element_ntfs
  2712. else:
  2713. validate_path_element = validate_path_element_default
  2714. if config.get_boolean(b"core", b"symlinks", True):
  2715. symlink_fn = symlink
  2716. else:
  2717. def symlink_fn(source, target) -> None: # type: ignore
  2718. mode = "w" + ("b" if isinstance(source, bytes) else "")
  2719. with open(target, mode) as f:
  2720. f.write(source)
  2721. # Get blob normalizer for line ending conversion
  2722. blob_normalizer = r.get_blob_normalizer()
  2723. # Update working tree
  2724. update_working_tree(
  2725. r,
  2726. current_tree_id,
  2727. target_tree_id,
  2728. honor_filemode=honor_filemode,
  2729. validate_path_element=validate_path_element,
  2730. symlink_fn=symlink_fn,
  2731. force_remove_untracked=force,
  2732. blob_normalizer=blob_normalizer,
  2733. )
  2734. # Update HEAD
  2735. if new_branch:
  2736. # Create new branch and switch to it
  2737. branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
  2738. update_head(r, new_branch)
  2739. # Set up tracking if creating from a remote branch
  2740. from .refs import LOCAL_REMOTE_PREFIX, parse_remote_ref
  2741. if isinstance(original_target, bytes) and target_bytes.startswith(
  2742. LOCAL_REMOTE_PREFIX
  2743. ):
  2744. try:
  2745. remote_name, branch_name = parse_remote_ref(target_bytes)
  2746. # Set tracking to refs/heads/<branch> on the remote
  2747. set_branch_tracking(
  2748. r, new_branch, remote_name, b"refs/heads/" + branch_name
  2749. )
  2750. except ValueError:
  2751. # Invalid remote ref format, skip tracking setup
  2752. pass
  2753. else:
  2754. # Check if target is a branch name (with or without refs/heads/ prefix)
  2755. branch_ref = None
  2756. if (
  2757. isinstance(original_target, (str, bytes))
  2758. and target_bytes in r.refs.keys()
  2759. ):
  2760. if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
  2761. branch_ref = target_bytes
  2762. else:
  2763. # Try adding refs/heads/ prefix
  2764. potential_branch = (
  2765. _make_branch_ref(target_bytes)
  2766. if isinstance(original_target, (str, bytes))
  2767. else None
  2768. )
  2769. if potential_branch in r.refs.keys():
  2770. branch_ref = potential_branch
  2771. if branch_ref:
  2772. # It's a branch - update HEAD symbolically
  2773. update_head(r, branch_ref)
  2774. else:
  2775. # It's a tag, other ref, or commit SHA - detached HEAD
  2776. update_head(r, target_commit.id.decode("ascii"), detached=True)
  2777. def reset_file(
  2778. repo,
  2779. file_path: str,
  2780. target: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
  2781. symlink_fn=None,
  2782. ) -> None:
  2783. """Reset the file to specific commit or branch.
  2784. Args:
  2785. repo: dulwich Repo object
  2786. file_path: file to reset, relative to the repository path
  2787. target: branch or commit or b'HEAD' to reset
  2788. """
  2789. tree = parse_tree(repo, treeish=target)
  2790. tree_path = _fs_to_tree_path(file_path)
  2791. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  2792. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  2793. blob = repo.object_store[file_entry[1]]
  2794. mode = file_entry[0]
  2795. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
  2796. @replace_me(since="0.22.9", remove_in="0.24.0")
  2797. def checkout_branch(repo, target: Union[bytes, str], force: bool = False) -> None:
  2798. """Switch branches or restore working tree files.
  2799. This is now a wrapper around the general checkout() function.
  2800. Preserved for backward compatibility.
  2801. Args:
  2802. repo: dulwich Repo object
  2803. target: branch name or commit sha to checkout
  2804. force: true or not to force checkout
  2805. """
  2806. # Simply delegate to the new checkout function
  2807. return checkout(repo, target, force=force)
  2808. def sparse_checkout(
  2809. repo, patterns=None, force: bool = False, cone: Union[bool, None] = None
  2810. ):
  2811. """Perform a sparse checkout in the repository (either 'full' or 'cone mode').
  2812. Perform sparse checkout in either 'cone' (directory-based) mode or
  2813. 'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.
  2814. If ``cone`` is ``None``, the mode is inferred from the repository's
  2815. ``core.sparseCheckoutCone`` config setting.
  2816. Steps:
  2817. 1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
  2818. 2) Determine which paths in the index are included vs. excluded.
  2819. - If ``cone=True``, use "cone-compatible" directory-based logic.
  2820. - If ``cone=False``, use standard .gitignore-style matching.
  2821. 3) Update the index's skip-worktree bits and add/remove files in
  2822. the working tree accordingly.
  2823. 4) If ``force=False``, refuse to remove files that have local modifications.
  2824. Args:
  2825. repo: Path to the repository or a Repo object.
  2826. patterns: Optional list of sparse-checkout patterns to write.
  2827. force: Whether to force removal of locally modified files (default False).
  2828. cone: Boolean indicating cone mode (True/False). If None, read from config.
  2829. Returns:
  2830. None
  2831. """
  2832. with open_repo_closing(repo) as repo_obj:
  2833. # --- 0) Possibly infer 'cone' from config ---
  2834. if cone is None:
  2835. cone = repo_obj.infer_cone_mode()
  2836. # --- 1) Read or write patterns ---
  2837. if patterns is None:
  2838. lines = repo_obj.get_sparse_checkout_patterns()
  2839. if lines is None:
  2840. raise Error("No sparse checkout patterns found.")
  2841. else:
  2842. lines = patterns
  2843. repo_obj.set_sparse_checkout_patterns(patterns)
  2844. # --- 2) Determine the set of included paths ---
  2845. included_paths = determine_included_paths(repo_obj, lines, cone)
  2846. # --- 3) Apply those results to the index & working tree ---
  2847. try:
  2848. apply_included_paths(repo_obj, included_paths, force=force)
  2849. except SparseCheckoutConflictError as exc:
  2850. raise CheckoutError(*exc.args) from exc
  2851. def cone_mode_init(repo):
  2852. """Initialize a repository to use sparse checkout in 'cone' mode.
  2853. Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
  2854. Writes an initial ``.git/info/sparse-checkout`` file that includes only
  2855. top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
  2856. Then performs a sparse checkout to update the working tree accordingly.
  2857. If no directories are specified, then only top-level files are included:
  2858. https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
  2859. Args:
  2860. repo: Path to the repository or a Repo object.
  2861. Returns:
  2862. None
  2863. """
  2864. with open_repo_closing(repo) as repo_obj:
  2865. repo_obj.configure_for_cone_mode()
  2866. patterns = ["/*", "!/*/"] # root-level files only
  2867. sparse_checkout(repo_obj, patterns, force=True, cone=True)
  2868. def cone_mode_set(repo, dirs, force=False):
  2869. """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
  2870. Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
  2871. Writes new patterns so that only the specified directories (and top-level files)
  2872. remain in the working tree, and applies the sparse checkout update.
  2873. Args:
  2874. repo: Path to the repository or a Repo object.
  2875. dirs: List of directory names to include.
  2876. force: Whether to forcibly discard local modifications (default False).
  2877. Returns:
  2878. None
  2879. """
  2880. with open_repo_closing(repo) as repo_obj:
  2881. repo_obj.configure_for_cone_mode()
  2882. repo_obj.set_cone_mode_patterns(dirs=dirs)
  2883. new_patterns = repo_obj.get_sparse_checkout_patterns()
  2884. # Finally, apply the patterns and update the working tree
  2885. sparse_checkout(repo_obj, new_patterns, force=force, cone=True)
  2886. def cone_mode_add(repo, dirs, force=False):
  2887. """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
  2888. Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
  2889. lines to include the specified directories, and then performs a sparse
  2890. checkout to update the working tree accordingly.
  2891. Args:
  2892. repo: Path to the repository or a Repo object.
  2893. dirs: List of directory names to add to the sparse-checkout.
  2894. force: Whether to forcibly discard local modifications (default False).
  2895. Returns:
  2896. None
  2897. """
  2898. with open_repo_closing(repo) as repo_obj:
  2899. repo_obj.configure_for_cone_mode()
  2900. # Do not pass base patterns as dirs
  2901. base_patterns = ["/*", "!/*/"]
  2902. existing_dirs = [
  2903. pat.strip("/")
  2904. for pat in repo_obj.get_sparse_checkout_patterns()
  2905. if pat not in base_patterns
  2906. ]
  2907. added_dirs = existing_dirs + (dirs or [])
  2908. repo_obj.set_cone_mode_patterns(dirs=added_dirs)
  2909. new_patterns = repo_obj.get_sparse_checkout_patterns()
  2910. sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
  2911. def check_mailmap(repo, contact):
  2912. """Check canonical name and email of contact.
  2913. Args:
  2914. repo: Path to the repository
  2915. contact: Contact name and/or email
  2916. Returns: Canonical contact data
  2917. """
  2918. with open_repo_closing(repo) as r:
  2919. from .mailmap import Mailmap
  2920. try:
  2921. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  2922. except FileNotFoundError:
  2923. mailmap = Mailmap()
  2924. return mailmap.lookup(contact)
  2925. def fsck(repo):
  2926. """Check a repository.
  2927. Args:
  2928. repo: A path to the repository
  2929. Returns: Iterator over errors/warnings
  2930. """
  2931. with open_repo_closing(repo) as r:
  2932. # TODO(jelmer): check pack files
  2933. # TODO(jelmer): check graph
  2934. # TODO(jelmer): check refs
  2935. for sha in r.object_store:
  2936. o = r.object_store[sha]
  2937. try:
  2938. o.check()
  2939. except Exception as e:
  2940. yield (sha, e)
  2941. def stash_list(repo):
  2942. """List all stashes in a repository."""
  2943. with open_repo_closing(repo) as r:
  2944. from .stash import Stash
  2945. stash = Stash.from_repo(r)
  2946. return enumerate(list(stash.stashes()))
  2947. def stash_push(repo) -> None:
  2948. """Push a new stash onto the stack."""
  2949. with open_repo_closing(repo) as r:
  2950. from .stash import Stash
  2951. stash = Stash.from_repo(r)
  2952. stash.push()
  2953. def stash_pop(repo) -> None:
  2954. """Pop a stash from the stack."""
  2955. with open_repo_closing(repo) as r:
  2956. from .stash import Stash
  2957. stash = Stash.from_repo(r)
  2958. stash.pop(0)
  2959. def stash_drop(repo, index) -> None:
  2960. """Drop a stash from the stack."""
  2961. with open_repo_closing(repo) as r:
  2962. from .stash import Stash
  2963. stash = Stash.from_repo(r)
  2964. stash.drop(index)
  2965. def ls_files(repo):
  2966. """List all files in an index."""
  2967. with open_repo_closing(repo) as r:
  2968. return sorted(r.open_index())
  2969. def find_unique_abbrev(object_store, object_id, min_length=7):
  2970. """Find the shortest unique abbreviation for an object ID.
  2971. Args:
  2972. object_store: Object store to search in
  2973. object_id: The full object ID to abbreviate
  2974. min_length: Minimum length of abbreviation (default 7)
  2975. Returns:
  2976. The shortest unique prefix of the object ID (at least min_length chars)
  2977. """
  2978. if isinstance(object_id, bytes):
  2979. hex_id = object_id.decode("ascii")
  2980. else:
  2981. hex_id = object_id
  2982. # Start with minimum length
  2983. for length in range(min_length, len(hex_id) + 1):
  2984. prefix = hex_id[:length]
  2985. matches = 0
  2986. # Check if this prefix is unique
  2987. for obj_id in object_store:
  2988. if obj_id.decode("ascii").startswith(prefix):
  2989. matches += 1
  2990. if matches > 1:
  2991. # Not unique, need more characters
  2992. break
  2993. if matches == 1:
  2994. # Found unique prefix
  2995. return prefix
  2996. # If we get here, return the full ID
  2997. return hex_id
def describe(repo, abbrev=None):
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7
    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    # Slice used to shorten commit hashes when an explicit abbrev is given.
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key = key.decode()
            obj = r.get_object(value)
            # NOTE(review): this matches any ref whose name contains the
            # substring "tags", not just refs under refs/tags/ — confirm
            # that is intended.
            if "tags" not in key:
                continue

            _, tag = key.rsplit("/", 1)

            try:
                # Annotated tag case
                commit = obj.object
                commit = r.get_object(commit[1])
            except AttributeError:
                # Lightweight tag case - obj is already the commit
                commit = obj
            # Record (commit datetime, commit sha) per tag name so tags can
            # be ordered by the time of the commit they point at.
            tags[tag] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]

        # Newest-tagged commit first.
        sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)

        # Get the latest commit
        latest_commit = r[r.head()]

        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"

        # We're now 0 commits from the top
        commit_count = 0

        # Walk history from HEAD until we hit a tagged commit.
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag in sorted_tags:
                tag_name = tag[0]
                tag_commit = tag[1][1]
                if commit_id == tag_commit:
                    if commit_count == 0:
                        # HEAD is exactly at the tag.
                        return tag_name
                    else:
                        # HEAD is commit_count commits past the tag:
                        # "<tag>-<count>-g<hash>".
                        if abbrev is not None:
                            abbrev_hash = latest_commit.id.decode("ascii")[abbrev_slice]
                        else:
                            abbrev_hash = find_unique_abbrev(
                                r.object_store, latest_commit.id
                            )
                        return f"{tag_name}-{commit_count}-g{abbrev_hash}"

            commit_count += 1

        # Return plain commit if no parent tag can be found
        if abbrev is not None:
            return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
        return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
  3063. def get_object_by_path(
  3064. repo, path, committish: Optional[Union[str, bytes, Commit, Tag]] = None
  3065. ):
  3066. """Get an object by path.
  3067. Args:
  3068. repo: A path to the repository
  3069. path: Path to look up
  3070. committish: Commit to look up path in
  3071. Returns: A `ShaFile` object
  3072. """
  3073. if committish is None:
  3074. committish = "HEAD"
  3075. # Get the repository
  3076. with open_repo_closing(repo) as r:
  3077. commit = parse_commit(r, committish)
  3078. base_tree = commit.tree
  3079. if not isinstance(path, bytes):
  3080. path = commit_encode(commit, path)
  3081. (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  3082. return r[sha]
  3083. def write_tree(repo):
  3084. """Write a tree object from the index.
  3085. Args:
  3086. repo: Repository for which to write tree
  3087. Returns: tree id for the tree that was written
  3088. """
  3089. with open_repo_closing(repo) as r:
  3090. return r.open_index().commit(r.object_store)
def _do_merge(
    r,
    merge_commit_id,
    no_commit=False,
    no_ff=False,
    message=None,
    author=None,
    committer=None,
):
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_merge_base
    from .merge import three_way_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    merge_commit = r[merge_commit_id]

    # Find the common ancestor(s) of HEAD and the commit being merged.
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])
    if not merge_bases:
        raise Error("No common ancestor found")

    # Use the first merge base
    base_commit_id = merge_bases[0]

    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Fast-forward: HEAD is an ancestor of the merge commit, so simply
    # move the ref and working tree forward (unless no_ff forces a commit).
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[b"HEAD"] = merge_commit_id
        # Update the working directory
        update_working_tree(r, head_commit.tree, merge_commit.tree)
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # The merge commit is an ancestor of HEAD: already up to date.
        return (None, [])

    # Perform three-way merge between base, HEAD ("ours") and merge commit
    # ("theirs"); gitattributes/config drive merge drivers and filters.
    base_commit = r[base_commit_id]
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = three_way_merge(
        r.object_store, base_commit, head_commit, merge_commit, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory (even when conflicted, so markers
    # land in the working tree for the user to resolve).
    update_working_tree(r, head_commit.tree, merged_tree.id)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit with both parents.
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer, defaulting to the configured user identity.
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author

    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n"
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id

    return (merge_commit_obj.id, [])
  3183. def merge(
  3184. repo,
  3185. committish: Union[str, bytes, Commit, Tag],
  3186. no_commit=False,
  3187. no_ff=False,
  3188. message=None,
  3189. author=None,
  3190. committer=None,
  3191. ):
  3192. """Merge a commit into the current branch.
  3193. Args:
  3194. repo: Repository to merge into
  3195. committish: Commit to merge
  3196. no_commit: If True, do not create a merge commit
  3197. no_ff: If True, force creation of a merge commit
  3198. message: Optional merge commit message
  3199. author: Optional author for merge commit
  3200. committer: Optional committer for merge commit
  3201. Returns:
  3202. Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
  3203. if no_commit=True or there were conflicts
  3204. Raises:
  3205. Error: If there is no HEAD reference or commit cannot be found
  3206. """
  3207. with open_repo_closing(repo) as r:
  3208. # Parse the commit to merge
  3209. try:
  3210. merge_commit_id = parse_commit(r, committish).id
  3211. except KeyError:
  3212. raise Error(
  3213. f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
  3214. )
  3215. result = _do_merge(
  3216. r, merge_commit_id, no_commit, no_ff, message, author, committer
  3217. )
  3218. # Trigger auto GC if needed
  3219. from .gc import maybe_auto_gc
  3220. maybe_auto_gc(r)
  3221. return result
  3222. def unpack_objects(pack_path, target="."):
  3223. """Unpack objects from a pack file into the repository.
  3224. Args:
  3225. pack_path: Path to the pack file to unpack
  3226. target: Path to the repository to unpack into
  3227. Returns:
  3228. Number of objects unpacked
  3229. """
  3230. from .pack import Pack
  3231. with open_repo_closing(target) as r:
  3232. pack_basename = os.path.splitext(pack_path)[0]
  3233. with Pack(pack_basename) as pack:
  3234. count = 0
  3235. for unpacked in pack.iter_unpacked():
  3236. obj = unpacked.sha_file()
  3237. r.object_store.add_object(obj)
  3238. count += 1
  3239. return count
  3240. def merge_tree(
  3241. repo,
  3242. base_tree: Optional[Union[str, bytes, Tree, Commit, Tag]],
  3243. our_tree: Union[str, bytes, Tree, Commit, Tag],
  3244. their_tree: Union[str, bytes, Tree, Commit, Tag],
  3245. ):
  3246. """Perform a three-way tree merge without touching the working directory.
  3247. This is similar to git merge-tree, performing a merge at the tree level
  3248. without creating commits or updating any references.
  3249. Args:
  3250. repo: Repository containing the trees
  3251. base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
  3252. our_tree: Tree-ish of our side of the merge
  3253. their_tree: Tree-ish of their side of the merge
  3254. Returns:
  3255. tuple: A tuple of (merged_tree_id, conflicts) where:
  3256. - merged_tree_id is the SHA-1 of the merged tree
  3257. - conflicts is a list of paths (as bytes) that had conflicts
  3258. Raises:
  3259. KeyError: If any of the tree-ish arguments cannot be resolved
  3260. """
  3261. from .merge import Merger
  3262. with open_repo_closing(repo) as r:
  3263. # Resolve tree-ish arguments to actual trees
  3264. base = parse_tree(r, base_tree) if base_tree else None
  3265. ours = parse_tree(r, our_tree)
  3266. theirs = parse_tree(r, their_tree)
  3267. # Perform the merge
  3268. gitattributes = r.get_gitattributes()
  3269. config = r.get_config()
  3270. merger = Merger(r.object_store, gitattributes, config)
  3271. merged_tree, conflicts = merger.merge_trees(base, ours, theirs)
  3272. # Add the merged tree to the object store
  3273. r.object_store.add_object(merged_tree)
  3274. return merged_tree.id, conflicts
def cherry_pick(
    repo,
    committish: Union[str, bytes, Commit, Tag, None],
    no_commit=False,
    continue_=False,
    abort=False,
):
    r"""Cherry-pick a commit onto the current branch.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick (can be None only when ``continue_`` or abort is True)
      no_commit: If True, do not create a commit after applying changes
      ``continue_``: Continue an in-progress cherry-pick after resolving conflicts
      abort: Abort an in-progress cherry-pick

    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from .merge import three_way_merge

    # Validate that committish is provided when needed
    if not (continue_ or abort) and committish is None:
        raise ValueError("committish is required when not using --continue or --abort")

    with open_repo_closing(repo) as r:
        # Handle abort: remove the state files written by a conflicted
        # cherry-pick and restore the index to HEAD.
        if abort:
            # Clean up any cherry-pick state
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass
            # Reset index to HEAD
            r.reset_index(r[b"HEAD"].tree)
            return None

        # Handle continue: finish a cherry-pick whose conflicts have been
        # resolved by the user.
        if continue_:
            # CHERRY_PICK_HEAD records which commit was being picked.
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")

            # Check for unresolved conflicts
            conflicts = list(r.open_index().conflicts())
            if conflicts:
                raise Error("Unresolved conflicts remain")

            # Commit the resolved index as a tree.
            tree_id = r.open_index().commit(r.object_store)

            # Read saved message if any; fall back to the original commit's.
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                message = cherry_pick_commit.message

            # Preserve the original commit's authorship on the new commit.
            new_commit = r.do_commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )

            # Clean up state files
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass

            return new_commit

        # Normal cherry-pick operation
        # Get current HEAD
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        # Parse the commit to cherry-pick
        # committish cannot be None here due to validation above
        assert committish is not None
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )

        # Check if commit has parents
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")

        # Get parent of cherry-pick commit
        parent_commit = r[cherry_pick_commit.parents[0]]

        # Three-way merge: base = the picked commit's parent, ours = HEAD,
        # theirs = the picked commit. This replays just that commit's diff.
        try:
            merged_tree, conflicts = three_way_merge(
                r.object_store, parent_commit, head_commit, cherry_pick_commit
            )
        except Exception as e:
            raise Error(f"Cherry-pick failed: {e}")

        # Add merged tree to object store
        r.object_store.add_object(merged_tree)

        # Update working tree and index
        # Reset index to match merged tree
        r.reset_index(merged_tree.id)

        # Update working tree from the new index
        update_working_tree(r, head_commit.tree, merged_tree.id)

        if conflicts:
            # Save state so the user can resolve and run --continue later.
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")
            # Save commit message
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)
            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )

        if no_commit:
            return None

        # Create the commit, keeping the original authorship.
        new_commit = r.do_commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )

        return new_commit
def revert(
    repo,
    commits: Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]],
    no_commit=False,
    message=None,
    author=None,
    committer=None,
):
    """Revert one or more commits.

    This creates a new commit that undoes the changes introduced by the
    specified commits. Unlike reset, revert creates a new commit that
    preserves history.

    Args:
      repo: Path to repository or repository object
      commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
      no_commit: If True, apply changes to index/working tree but don't commit
      message: Optional commit message (default: "Revert <original subject>")
      author: Optional author for revert commit
      committer: Optional committer for revert commit

    Returns:
      SHA1 of the new revert commit, or None if no_commit=True

    Raises:
      Error: If revert fails due to conflicts or other issues
    """
    from .merge import three_way_merge

    # Normalize commits to a list
    if isinstance(commits, (str, bytes, Commit, Tag)):
        commits = [commits]

    with open_repo_closing(repo) as r:
        # Resolve every commit-ish up front so a bad argument fails early.
        commits_to_revert = []
        for commit_ref in commits:
            if isinstance(commit_ref, str):
                commit_ref = commit_ref.encode("utf-8")
            commit = parse_commit(r, commit_ref)
            commits_to_revert.append(commit)

        # Get current HEAD
        try:
            head_commit_id = r.refs[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        head_commit = r[head_commit_id]
        current_tree = head_commit.tree

        # Revert each commit in order; each iteration may advance HEAD
        # (unless no_commit) so later reverts stack on earlier ones.
        for commit_to_revert in commits_to_revert:
            # For revert, we want to apply the inverse of the commit
            # This means using the commit's tree as "base" and its parent as "theirs"
            if not commit_to_revert.parents:
                raise Error(
                    f"Cannot revert commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - it has no parents"
                )

            # For simplicity, we only handle commits with one parent (no merge commits)
            if len(commit_to_revert.parents) > 1:
                raise Error(
                    f"Cannot revert merge commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - not yet implemented"
                )

            parent_commit = r[commit_to_revert.parents[0]]

            # Perform three-way merge:
            # - base: the commit we're reverting (what we want to remove)
            # - ours: current HEAD (what we have now)
            # - theirs: parent of commit being reverted (what we want to go back to)
            merged_tree, conflicts = three_way_merge(
                r.object_store,
                commit_to_revert,  # base
                r[head_commit_id],  # ours
                parent_commit,  # theirs
            )

            if conflicts:
                # Update working tree with conflicts so the user can resolve.
                update_working_tree(r, current_tree, merged_tree.id)
                conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
                raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")

            # Add merged tree to object store
            r.object_store.add_object(merged_tree)

            # Update working tree
            update_working_tree(r, current_tree, merged_tree.id)
            current_tree = merged_tree.id

            if not no_commit:
                # Create revert commit
                revert_commit = Commit()
                revert_commit.tree = merged_tree.id
                revert_commit.parents = [head_commit_id]

                # Set author/committer, defaulting to the configured identity.
                if author is None:
                    author = get_user_identity(r.get_config_stack())
                if committer is None:
                    committer = author

                revert_commit.author = author
                revert_commit.committer = committer

                # Set timestamps
                timestamp = int(time.time())
                timezone = 0  # UTC
                revert_commit.author_time = timestamp
                revert_commit.author_timezone = timezone
                revert_commit.commit_time = timestamp
                revert_commit.commit_timezone = timezone

                # Set message; default mirrors C git's revert message format.
                if message is None:
                    # Extract original commit subject
                    original_message = commit_to_revert.message
                    if isinstance(original_message, bytes):
                        original_message = original_message.decode("utf-8", "replace")
                    subject = original_message.split("\n")[0]
                    message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
                elif isinstance(message, str):
                    message = message.encode("utf-8")

                revert_commit.message = message

                # Add commit to object store
                r.object_store.add_object(revert_commit)

                # Advance HEAD so the next revert builds on this commit.
                r.refs[b"HEAD"] = revert_commit.id
                head_commit_id = revert_commit.id

        return head_commit_id if not no_commit else None
  3522. def gc(
  3523. repo,
  3524. auto: bool = False,
  3525. aggressive: bool = False,
  3526. prune: bool = True,
  3527. grace_period: Optional[int] = 1209600, # 2 weeks default
  3528. dry_run: bool = False,
  3529. progress=None,
  3530. ):
  3531. """Run garbage collection on a repository.
  3532. Args:
  3533. repo: Path to the repository or a Repo object
  3534. auto: If True, only run gc if needed
  3535. aggressive: If True, use more aggressive settings
  3536. prune: If True, prune unreachable objects
  3537. grace_period: Grace period in seconds for pruning (default 2 weeks)
  3538. dry_run: If True, only report what would be done
  3539. progress: Optional progress callback
  3540. Returns:
  3541. GCStats object with garbage collection statistics
  3542. """
  3543. from .gc import garbage_collect
  3544. with open_repo_closing(repo) as r:
  3545. return garbage_collect(
  3546. r,
  3547. auto=auto,
  3548. aggressive=aggressive,
  3549. prune=prune,
  3550. grace_period=grace_period,
  3551. dry_run=dry_run,
  3552. progress=progress,
  3553. )
  3554. def prune(
  3555. repo,
  3556. grace_period: Optional[int] = None,
  3557. dry_run: bool = False,
  3558. progress=None,
  3559. ):
  3560. """Prune/clean up a repository's object store.
  3561. This removes temporary files that were left behind by interrupted
  3562. pack operations.
  3563. Args:
  3564. repo: Path to the repository or a Repo object
  3565. grace_period: Grace period in seconds for removing temporary files
  3566. (default 2 weeks)
  3567. dry_run: If True, only report what would be done
  3568. progress: Optional progress callback
  3569. """
  3570. with open_repo_closing(repo) as r:
  3571. if progress:
  3572. progress("Pruning temporary files")
  3573. if not dry_run:
  3574. r.object_store.prune(grace_period=grace_period)
  3575. def count_objects(repo=".", verbose=False) -> CountObjectsResult:
  3576. """Count unpacked objects and their disk usage.
  3577. Args:
  3578. repo: Path to repository or repository object
  3579. verbose: Whether to return verbose information
  3580. Returns:
  3581. CountObjectsResult object with detailed statistics
  3582. """
  3583. with open_repo_closing(repo) as r:
  3584. object_store = r.object_store
  3585. # Count loose objects
  3586. loose_count = 0
  3587. loose_size = 0
  3588. for sha in object_store._iter_loose_objects():
  3589. loose_count += 1
  3590. path = object_store._get_shafile_path(sha)
  3591. try:
  3592. stat_info = os.stat(path)
  3593. # Git uses disk usage, not file size. st_blocks is always in
  3594. # 512-byte blocks per POSIX standard
  3595. if hasattr(stat_info, "st_blocks"):
  3596. # Available on Linux and macOS
  3597. loose_size += stat_info.st_blocks * 512 # type: ignore
  3598. else:
  3599. # Fallback for Windows
  3600. loose_size += stat_info.st_size
  3601. except FileNotFoundError:
  3602. # Object may have been removed between iteration and stat
  3603. pass
  3604. if not verbose:
  3605. return CountObjectsResult(count=loose_count, size=loose_size)
  3606. # Count pack information
  3607. pack_count = len(object_store.packs)
  3608. in_pack_count = 0
  3609. pack_size = 0
  3610. for pack in object_store.packs:
  3611. in_pack_count += len(pack)
  3612. # Get pack file size
  3613. pack_path = pack._data_path
  3614. try:
  3615. pack_size += os.path.getsize(pack_path)
  3616. except FileNotFoundError:
  3617. pass
  3618. # Get index file size
  3619. idx_path = pack._idx_path
  3620. try:
  3621. pack_size += os.path.getsize(idx_path)
  3622. except FileNotFoundError:
  3623. pass
  3624. return CountObjectsResult(
  3625. count=loose_count,
  3626. size=loose_size,
  3627. in_pack=in_pack_count,
  3628. packs=pack_count,
  3629. size_pack=pack_size,
  3630. )
def rebase(
    repo: Union[Repo, str],
    upstream: Union[bytes, str],
    onto: Optional[Union[bytes, str]] = None,
    branch: Optional[Union[bytes, str]] = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
      repo: Repository to rebase in
      upstream: Upstream branch/commit to rebase onto
      onto: Specific commit to rebase onto (defaults to upstream)
      branch: Branch to rebase (defaults to current branch)
      abort: Abort an in-progress rebase
      continue_rebase: Continue an in-progress rebase
      skip: Skip current commit and continue rebase

    Returns:
      List of new commit SHAs created by rebase

    Raises:
      Error: If rebase fails or conflicts occur
    """
    from .rebase import RebaseConflict, RebaseError, Rebaser

    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)

        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))

        if continue_rebase:
            try:
                result = rebaser.continue_()
                if result is None:
                    # Rebase complete
                    return []
                elif isinstance(result, tuple) and result[1]:
                    # Still have conflicts
                    raise Error(
                        f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                    )
                # NOTE(review): if continue_() returns a non-None result
                # without conflicts, control falls through below and a NEW
                # rebase is started — confirm this is intended behavior.
            except RebaseError as e:
                raise Error(str(e))

        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None

        try:
            # Start rebase
            rebaser.start(upstream, onto, branch)

            # Continue rebase automatically
            result = rebaser.continue_()
            if result is not None:
                # Conflicts
                raise RebaseConflict(result[1])

            # Return the SHAs of the rebased commits
            return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))
  3697. def annotate(repo, path, committish: Optional[Union[str, bytes, Commit, Tag]] = None):
  3698. """Annotate the history of a file.
  3699. :param repo: Path to the repository
  3700. :param path: Path to annotate
  3701. :param committish: Commit id to find path in
  3702. :return: List of ((Commit, TreeChange), line) tuples
  3703. """
  3704. if committish is None:
  3705. committish = "HEAD"
  3706. from dulwich.annotate import annotate_lines
  3707. with open_repo_closing(repo) as r:
  3708. commit_id = parse_commit(r, committish).id
  3709. # Ensure path is bytes
  3710. if isinstance(path, str):
  3711. path = path.encode()
  3712. return annotate_lines(r.object_store, commit_id, path)
  3713. blame = annotate
def filter_branch(
    repo=".",
    branch="HEAD",
    *,
    filter_fn=None,
    filter_author=None,
    filter_committer=None,
    filter_message=None,
    tree_filter=None,
    index_filter=None,
    parent_filter=None,
    commit_filter=None,
    subdirectory_filter=None,
    prune_empty=False,
    tag_name_filter=None,
    force=False,
    keep_original=True,
    refs=None,
):
    """Rewrite branch history by creating new commits with filtered properties.

    This is similar to git filter-branch, allowing you to rewrite commit
    history by modifying trees, parents, author, committer, or commit messages.

    Args:
      repo: Path to repository
      branch: Branch to rewrite (defaults to HEAD)
      filter_fn: Optional callable that takes a Commit object and returns
        a dict of updated fields (author, committer, message, etc.)
      filter_author: Optional callable that takes author bytes and returns
        updated author bytes or None to keep unchanged
      filter_committer: Optional callable that takes committer bytes and returns
        updated committer bytes or None to keep unchanged
      filter_message: Optional callable that takes commit message bytes
        and returns updated message bytes
      tree_filter: Optional callable that takes (tree_sha, temp_dir) and returns
        new tree SHA after modifying working directory
      index_filter: Optional callable that takes (tree_sha, temp_index_path) and
        returns new tree SHA after modifying index
      parent_filter: Optional callable that takes parent list and returns
        modified parent list
      commit_filter: Optional callable that takes (Commit, tree_sha) and returns
        new commit SHA or None to skip commit
      subdirectory_filter: Optional subdirectory path to extract as new root
      prune_empty: Whether to prune commits that become empty
      tag_name_filter: Optional callable to rename tags
      force: Force operation even if branch has been filtered before
      keep_original: Keep original refs under refs/original/
      refs: List of refs to rewrite (defaults to [branch])

    Returns:
      Dict mapping old commit SHAs to new commit SHAs

    Raises:
      Error: If branch is already filtered and force is False
    """
    from .filter_branch import CommitFilter, filter_refs

    with open_repo_closing(repo) as r:
        # Parse branch/committish
        if isinstance(branch, str):
            branch = branch.encode()

        # Determine which refs to process
        if refs is None:
            if branch == b"HEAD":
                # Resolve HEAD to actual branch
                try:
                    resolved = r.refs.follow(b"HEAD")
                    if resolved and resolved[0]:
                        # resolved is a list of (refname, sha) tuples
                        resolved_ref = resolved[0][-1]
                        if resolved_ref and resolved_ref != b"HEAD":
                            refs = [resolved_ref]
                        else:
                            # HEAD points directly to a commit (detached)
                            refs = [b"HEAD"]
                    else:
                        refs = [b"HEAD"]
                except SymrefLoop:
                    # Broken symref chain; fall back to HEAD itself
                    refs = [b"HEAD"]
            else:
                # Convert branch name to full ref if needed
                if not branch.startswith(b"refs/"):
                    branch = b"refs/heads/" + branch
                refs = [branch]

        # Convert subdirectory filter to bytes if needed
        if subdirectory_filter and isinstance(subdirectory_filter, str):
            subdirectory_filter = subdirectory_filter.encode()

        # Create commit filter.
        # NOTE: this rebinds the `commit_filter` parameter to the CommitFilter
        # instance; the keyword argument on the right-hand side is evaluated
        # first, so the caller's original callable is passed through intact.
        commit_filter = CommitFilter(
            r.object_store,
            filter_fn=filter_fn,
            filter_author=filter_author,
            filter_committer=filter_committer,
            filter_message=filter_message,
            tree_filter=tree_filter,
            index_filter=index_filter,
            parent_filter=parent_filter,
            commit_filter=commit_filter,
            subdirectory_filter=subdirectory_filter,
            prune_empty=prune_empty,
            tag_name_filter=tag_name_filter,
        )

        # Tag callback for renaming tags
        def rename_tag(old_ref, new_ref):
            # Copy tag to new name
            r.refs[new_ref] = r.refs[old_ref]
            # Delete old tag
            del r.refs[old_ref]

        # Filter refs
        try:
            return filter_refs(
                r.refs,
                r.object_store,
                refs,
                commit_filter,
                keep_original=keep_original,
                force=force,
                tag_callback=rename_tag if tag_name_filter else None,
            )
        except ValueError as e:
            raise Error(str(e)) from e
  3831. def format_patch(
  3832. repo=".",
  3833. committish=None,
  3834. outstream=sys.stdout,
  3835. outdir=None,
  3836. n=1,
  3837. stdout=False,
  3838. version=None,
  3839. ) -> list[str]:
  3840. """Generate patches suitable for git am.
  3841. Args:
  3842. repo: Path to repository
  3843. committish: Commit-ish or commit range to generate patches for.
  3844. Can be a single commit id, or a tuple of (start, end) commit ids
  3845. for a range. If None, formats the last n commits from HEAD.
  3846. outstream: Stream to write to if stdout=True
  3847. outdir: Directory to write patch files to (default: current directory)
  3848. n: Number of patches to generate if committish is None
  3849. stdout: Write patches to stdout instead of files
  3850. version: Version string to include in patches (default: Dulwich version)
  3851. Returns:
  3852. List of patch filenames that were created (empty if stdout=True)
  3853. """
  3854. if outdir is None:
  3855. outdir = "."
  3856. filenames = []
  3857. with open_repo_closing(repo) as r:
  3858. # Determine which commits to format
  3859. commits_to_format = []
  3860. if committish is None:
  3861. # Get the last n commits from HEAD
  3862. try:
  3863. walker = r.get_walker()
  3864. for entry in walker:
  3865. commits_to_format.append(entry.commit)
  3866. if len(commits_to_format) >= n:
  3867. break
  3868. commits_to_format.reverse()
  3869. except KeyError:
  3870. # No HEAD or empty repository
  3871. pass
  3872. elif isinstance(committish, tuple):
  3873. # Handle commit range (start, end)
  3874. start_id, end_id = committish
  3875. # Walk from end back to start
  3876. walker = r.get_walker(include=[end_id], exclude=[start_id])
  3877. for entry in walker:
  3878. commits_to_format.append(entry.commit)
  3879. commits_to_format.reverse()
  3880. else:
  3881. # Single commit
  3882. commit = r.object_store[committish]
  3883. commits_to_format.append(commit)
  3884. # Generate patches
  3885. total = len(commits_to_format)
  3886. for i, commit in enumerate(commits_to_format, 1):
  3887. # Get the parent
  3888. if commit.parents:
  3889. parent_id = commit.parents[0]
  3890. parent = r.object_store[parent_id]
  3891. else:
  3892. parent = None
  3893. # Generate the diff
  3894. from io import BytesIO
  3895. diff_content = BytesIO()
  3896. if parent:
  3897. write_tree_diff(
  3898. diff_content,
  3899. r.object_store,
  3900. parent.tree,
  3901. commit.tree,
  3902. )
  3903. else:
  3904. # Initial commit - diff against empty tree
  3905. write_tree_diff(
  3906. diff_content,
  3907. r.object_store,
  3908. None,
  3909. commit.tree,
  3910. )
  3911. # Generate patch with commit metadata
  3912. if stdout:
  3913. write_commit_patch(
  3914. outstream.buffer if hasattr(outstream, "buffer") else outstream,
  3915. commit,
  3916. diff_content.getvalue(),
  3917. (i, total),
  3918. version=version,
  3919. )
  3920. else:
  3921. # Generate filename
  3922. summary = get_summary(commit)
  3923. filename = os.path.join(outdir, f"{i:04d}-{summary}.patch")
  3924. with open(filename, "wb") as f:
  3925. write_commit_patch(
  3926. f,
  3927. commit,
  3928. diff_content.getvalue(),
  3929. (i, total),
  3930. version=version,
  3931. )
  3932. filenames.append(filename)
  3933. return filenames
  3934. def bisect_start(
  3935. repo=".",
  3936. bad: Optional[Union[str, bytes, Commit, Tag]] = None,
  3937. good: Optional[
  3938. Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
  3939. ] = None,
  3940. paths=None,
  3941. no_checkout=False,
  3942. term_bad="bad",
  3943. term_good="good",
  3944. ):
  3945. """Start a new bisect session.
  3946. Args:
  3947. repo: Path to repository or a Repo object
  3948. bad: The bad commit (defaults to HEAD)
  3949. good: List of good commits or a single good commit
  3950. paths: Optional paths to limit bisect to
  3951. no_checkout: If True, don't checkout commits during bisect
  3952. term_bad: Term to use for bad commits (default: "bad")
  3953. term_good: Term to use for good commits (default: "good")
  3954. """
  3955. with open_repo_closing(repo) as r:
  3956. state = BisectState(r)
  3957. # Convert single good commit to list
  3958. if good is not None and not isinstance(good, list):
  3959. good = [good]
  3960. # Parse commits
  3961. bad_sha = parse_commit(r, bad).id if bad else None
  3962. good_shas = [parse_commit(r, g).id for g in good] if good else None
  3963. state.start(bad_sha, good_shas, paths, no_checkout, term_bad, term_good)
  3964. # Return the next commit to test if we have both good and bad
  3965. if bad_sha and good_shas:
  3966. next_sha = state._find_next_commit()
  3967. if next_sha and not no_checkout:
  3968. # Checkout the next commit
  3969. old_tree = r[r.head()].tree if r.head() else None
  3970. r.refs[b"HEAD"] = next_sha
  3971. commit = r[next_sha]
  3972. update_working_tree(r, old_tree, commit.tree)
  3973. return next_sha
  3974. def bisect_bad(repo=".", rev: Optional[Union[str, bytes, Commit, Tag]] = None):
  3975. """Mark a commit as bad.
  3976. Args:
  3977. repo: Path to repository or a Repo object
  3978. rev: Commit to mark as bad (defaults to HEAD)
  3979. Returns:
  3980. The SHA of the next commit to test, or None if bisect is complete
  3981. """
  3982. with open_repo_closing(repo) as r:
  3983. state = BisectState(r)
  3984. rev_sha = parse_commit(r, rev).id if rev else None
  3985. next_sha = state.mark_bad(rev_sha)
  3986. if next_sha:
  3987. # Checkout the next commit
  3988. old_tree = r[r.head()].tree if r.head() else None
  3989. r.refs[b"HEAD"] = next_sha
  3990. commit = r[next_sha]
  3991. update_working_tree(r, old_tree, commit.tree)
  3992. return next_sha
  3993. def bisect_good(repo=".", rev: Optional[Union[str, bytes, Commit, Tag]] = None):
  3994. """Mark a commit as good.
  3995. Args:
  3996. repo: Path to repository or a Repo object
  3997. rev: Commit to mark as good (defaults to HEAD)
  3998. Returns:
  3999. The SHA of the next commit to test, or None if bisect is complete
  4000. """
  4001. with open_repo_closing(repo) as r:
  4002. state = BisectState(r)
  4003. rev_sha = parse_commit(r, rev).id if rev else None
  4004. next_sha = state.mark_good(rev_sha)
  4005. if next_sha:
  4006. # Checkout the next commit
  4007. old_tree = r[r.head()].tree if r.head() else None
  4008. r.refs[b"HEAD"] = next_sha
  4009. commit = r[next_sha]
  4010. update_working_tree(r, old_tree, commit.tree)
  4011. return next_sha
  4012. def bisect_skip(
  4013. repo=".",
  4014. revs: Optional[
  4015. Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
  4016. ] = None,
  4017. ):
  4018. """Skip one or more commits.
  4019. Args:
  4020. repo: Path to repository or a Repo object
  4021. revs: List of commits to skip (defaults to [HEAD])
  4022. Returns:
  4023. The SHA of the next commit to test, or None if bisect is complete
  4024. """
  4025. with open_repo_closing(repo) as r:
  4026. state = BisectState(r)
  4027. if revs is None:
  4028. rev_shas = None
  4029. else:
  4030. # Convert single rev to list
  4031. if not isinstance(revs, list):
  4032. revs = [revs]
  4033. rev_shas = [parse_commit(r, rev).id for rev in revs]
  4034. next_sha = state.skip(rev_shas)
  4035. if next_sha:
  4036. # Checkout the next commit
  4037. old_tree = r[r.head()].tree if r.head() else None
  4038. r.refs[b"HEAD"] = next_sha
  4039. commit = r[next_sha]
  4040. update_working_tree(r, old_tree, commit.tree)
  4041. return next_sha
  4042. def bisect_reset(repo=".", commit: Optional[Union[str, bytes, Commit, Tag]] = None):
  4043. """Reset bisect state and return to original branch/commit.
  4044. Args:
  4045. repo: Path to repository or a Repo object
  4046. commit: Optional commit to reset to (defaults to original branch/commit)
  4047. """
  4048. with open_repo_closing(repo) as r:
  4049. state = BisectState(r)
  4050. # Get old tree before reset
  4051. try:
  4052. old_tree = r[r.head()].tree
  4053. except KeyError:
  4054. old_tree = None
  4055. commit_sha = parse_commit(r, commit).id if commit else None
  4056. state.reset(commit_sha)
  4057. # Update working tree to new HEAD
  4058. try:
  4059. new_head = r.head()
  4060. if new_head:
  4061. new_commit = r[new_head]
  4062. update_working_tree(r, old_tree, new_commit.tree)
  4063. except KeyError:
  4064. # No HEAD after reset
  4065. pass
  4066. def bisect_log(repo="."):
  4067. """Get the bisect log.
  4068. Args:
  4069. repo: Path to repository or a Repo object
  4070. Returns:
  4071. The bisect log as a string
  4072. """
  4073. with open_repo_closing(repo) as r:
  4074. state = BisectState(r)
  4075. return state.get_log()
  4076. def bisect_replay(repo, log_file):
  4077. """Replay a bisect log.
  4078. Args:
  4079. repo: Path to repository or a Repo object
  4080. log_file: Path to the log file or file-like object
  4081. """
  4082. with open_repo_closing(repo) as r:
  4083. state = BisectState(r)
  4084. if isinstance(log_file, str):
  4085. with open(log_file) as f:
  4086. log_content = f.read()
  4087. else:
  4088. log_content = log_file.read()
  4089. state.replay(log_content)
  4090. def reflog(repo=".", ref=b"HEAD", all=False):
  4091. """Show reflog entries for a reference or all references.
  4092. Args:
  4093. repo: Path to repository or a Repo object
  4094. ref: Reference name (defaults to HEAD)
  4095. all: If True, show reflogs for all refs (ignores ref parameter)
  4096. Yields:
  4097. If all=False: ReflogEntry objects
  4098. If all=True: Tuples of (ref_name, ReflogEntry) for all refs with reflogs
  4099. """
  4100. import os
  4101. from .reflog import iter_reflogs
  4102. if isinstance(ref, str):
  4103. ref = ref.encode("utf-8")
  4104. with open_repo_closing(repo) as r:
  4105. if not all:
  4106. yield from r.read_reflog(ref)
  4107. else:
  4108. logs_dir = os.path.join(r.controldir(), "logs")
  4109. # Use iter_reflogs to discover all reflogs
  4110. for ref_bytes in iter_reflogs(logs_dir):
  4111. # Read the reflog entries for this ref
  4112. for entry in r.read_reflog(ref_bytes):
  4113. yield (ref_bytes, entry)
  4114. def lfs_track(repo=".", patterns=None):
  4115. """Track file patterns with Git LFS.
  4116. Args:
  4117. repo: Path to repository
  4118. patterns: List of file patterns to track (e.g., ["*.bin", "*.pdf"])
  4119. If None, returns current tracked patterns
  4120. Returns:
  4121. List of tracked patterns
  4122. """
  4123. from .attrs import GitAttributes
  4124. with open_repo_closing(repo) as r:
  4125. gitattributes_path = os.path.join(r.path, ".gitattributes")
  4126. # Load existing GitAttributes
  4127. if os.path.exists(gitattributes_path):
  4128. gitattributes = GitAttributes.from_file(gitattributes_path)
  4129. else:
  4130. gitattributes = GitAttributes()
  4131. if patterns is None:
  4132. # Return current LFS tracked patterns
  4133. tracked = []
  4134. for pattern_obj, attrs in gitattributes:
  4135. if attrs.get(b"filter") == b"lfs":
  4136. tracked.append(pattern_obj.pattern.decode())
  4137. return tracked
  4138. # Add new patterns
  4139. for pattern in patterns:
  4140. # Ensure pattern is bytes
  4141. if isinstance(pattern, str):
  4142. pattern = pattern.encode()
  4143. # Set LFS attributes for the pattern
  4144. gitattributes.set_attribute(pattern, b"filter", b"lfs")
  4145. gitattributes.set_attribute(pattern, b"diff", b"lfs")
  4146. gitattributes.set_attribute(pattern, b"merge", b"lfs")
  4147. gitattributes.set_attribute(pattern, b"text", False)
  4148. # Write updated attributes
  4149. gitattributes.write_to_file(gitattributes_path)
  4150. # Stage the .gitattributes file
  4151. add(r, [".gitattributes"])
  4152. return lfs_track(r) # Return updated list
  4153. def lfs_untrack(repo=".", patterns=None):
  4154. """Untrack file patterns from Git LFS.
  4155. Args:
  4156. repo: Path to repository
  4157. patterns: List of file patterns to untrack
  4158. Returns:
  4159. List of remaining tracked patterns
  4160. """
  4161. from .attrs import GitAttributes
  4162. if not patterns:
  4163. return lfs_track(repo)
  4164. with open_repo_closing(repo) as r:
  4165. gitattributes_path = os.path.join(r.path, ".gitattributes")
  4166. if not os.path.exists(gitattributes_path):
  4167. return []
  4168. # Load existing GitAttributes
  4169. gitattributes = GitAttributes.from_file(gitattributes_path)
  4170. # Remove specified patterns
  4171. for pattern in patterns:
  4172. if isinstance(pattern, str):
  4173. pattern = pattern.encode()
  4174. # Check if pattern is tracked by LFS
  4175. for pattern_obj, attrs in list(gitattributes):
  4176. if pattern_obj.pattern == pattern and attrs.get(b"filter") == b"lfs":
  4177. gitattributes.remove_pattern(pattern)
  4178. break
  4179. # Write updated attributes
  4180. gitattributes.write_to_file(gitattributes_path)
  4181. # Stage the .gitattributes file
  4182. add(r, [".gitattributes"])
  4183. return lfs_track(r) # Return updated list
  4184. def lfs_init(repo="."):
  4185. """Initialize Git LFS in a repository.
  4186. Args:
  4187. repo: Path to repository
  4188. Returns:
  4189. None
  4190. """
  4191. from .lfs import LFSStore
  4192. with open_repo_closing(repo) as r:
  4193. # Create LFS store
  4194. LFSStore.from_repo(r, create=True)
  4195. # Set up Git config for LFS
  4196. config = r.get_config()
  4197. config.set((b"filter", b"lfs"), b"process", b"git-lfs filter-process")
  4198. config.set((b"filter", b"lfs"), b"required", b"true")
  4199. config.set((b"filter", b"lfs"), b"clean", b"git-lfs clean -- %f")
  4200. config.set((b"filter", b"lfs"), b"smudge", b"git-lfs smudge -- %f")
  4201. config.write_to_path()
  4202. def lfs_clean(repo=".", path=None):
  4203. """Clean a file by converting it to an LFS pointer.
  4204. Args:
  4205. repo: Path to repository
  4206. path: Path to file to clean (relative to repo root)
  4207. Returns:
  4208. LFS pointer content as bytes
  4209. """
  4210. from .lfs import LFSFilterDriver, LFSStore
  4211. with open_repo_closing(repo) as r:
  4212. if path is None:
  4213. raise ValueError("Path must be specified")
  4214. # Get LFS store
  4215. lfs_store = LFSStore.from_repo(r)
  4216. filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())
  4217. # Read file content
  4218. full_path = os.path.join(r.path, path)
  4219. with open(full_path, "rb") as f:
  4220. content = f.read()
  4221. # Clean the content (convert to LFS pointer)
  4222. return filter_driver.clean(content)
  4223. def lfs_smudge(repo=".", pointer_content=None):
  4224. """Smudge an LFS pointer by retrieving the actual content.
  4225. Args:
  4226. repo: Path to repository
  4227. pointer_content: LFS pointer content as bytes
  4228. Returns:
  4229. Actual file content as bytes
  4230. """
  4231. from .lfs import LFSFilterDriver, LFSStore
  4232. with open_repo_closing(repo) as r:
  4233. if pointer_content is None:
  4234. raise ValueError("Pointer content must be specified")
  4235. # Get LFS store
  4236. lfs_store = LFSStore.from_repo(r)
  4237. filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())
  4238. # Smudge the pointer (retrieve actual content)
  4239. return filter_driver.smudge(pointer_content)
  4240. def lfs_ls_files(repo=".", ref=None):
  4241. """List files tracked by Git LFS.
  4242. Args:
  4243. repo: Path to repository
  4244. ref: Git ref to check (defaults to HEAD)
  4245. Returns:
  4246. List of (path, oid, size) tuples for LFS files
  4247. """
  4248. from .lfs import LFSPointer
  4249. from .object_store import iter_tree_contents
  4250. with open_repo_closing(repo) as r:
  4251. if ref is None:
  4252. ref = b"HEAD"
  4253. elif isinstance(ref, str):
  4254. ref = ref.encode()
  4255. # Get the commit and tree
  4256. try:
  4257. commit = r[ref]
  4258. tree = r[commit.tree]
  4259. except KeyError:
  4260. return []
  4261. lfs_files = []
  4262. # Walk the tree
  4263. for path, mode, sha in iter_tree_contents(r.object_store, tree.id):
  4264. if not stat.S_ISREG(mode):
  4265. continue
  4266. # Check if it's an LFS pointer
  4267. obj = r.object_store[sha]
  4268. pointer = LFSPointer.from_bytes(obj.data)
  4269. if pointer is not None:
  4270. lfs_files.append((path.decode(), pointer.oid, pointer.size))
  4271. return lfs_files
def lfs_migrate(repo=".", include=None, exclude=None, everything=False):
    """Migrate files to Git LFS.

    Args:
      repo: Path to repository
      include: Patterns of files to include
      exclude: Patterns of files to exclude
      everything: Migrate all files above a certain size

    Returns:
      Number of migrated files
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        # Initialize LFS if needed
        lfs_store = LFSStore.from_repo(r, create=True)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Get current index
        index = r.open_index()

        migrated = 0

        # Determine files to migrate
        files_to_migrate = []

        if everything:
            # Migrate all files above 100MB
            for path, entry in index.items():
                full_path = os.path.join(r.path, path.decode())
                if os.path.exists(full_path):
                    size = os.path.getsize(full_path)
                    if size > 100 * 1024 * 1024:  # 100MB
                        files_to_migrate.append(path.decode())
        else:
            # Use include/exclude patterns
            for path, entry in index.items():
                path_str = path.decode()

                # Check include patterns (path must match at least one)
                if include:
                    matched = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in include
                    )
                    if not matched:
                        continue

                # Check exclude patterns (any match disqualifies the path)
                if exclude:
                    excluded = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in exclude
                    )
                    if excluded:
                        continue

                files_to_migrate.append(path_str)

        # Migrate files: replace each working-tree file with an LFS pointer
        # and record the pointer blob in the index.
        for path in files_to_migrate:
            full_path = os.path.join(r.path, path)
            if not os.path.exists(full_path):
                continue

            # Read file content
            with open(full_path, "rb") as f:
                content = f.read()

            # Convert to LFS pointer (stores the content in the LFS store)
            pointer_content = filter_driver.clean(content)

            # Write pointer back to file
            with open(full_path, "wb") as f:
                f.write(pointer_content)

            # Create blob for pointer content and update index
            blob = Blob()
            blob.data = pointer_content
            r.object_store.add_object(blob)

            # Stat AFTER rewriting so the index entry matches the pointer file
            st = os.stat(full_path)
            index_entry = index_entry_from_stat(st, blob.id, 0)
            index[path.encode()] = index_entry

            migrated += 1

        # Write updated index
        index.write()

        # Track patterns if include was specified
        if include:
            lfs_track(r, include)

        return migrated
  4346. def lfs_pointer_check(repo=".", paths=None):
  4347. """Check if files are valid LFS pointers.
  4348. Args:
  4349. repo: Path to repository
  4350. paths: List of file paths to check (if None, check all files)
  4351. Returns:
  4352. Dict mapping paths to LFSPointer objects (or None if not a pointer)
  4353. """
  4354. from .lfs import LFSPointer
  4355. with open_repo_closing(repo) as r:
  4356. results = {}
  4357. if paths is None:
  4358. # Check all files in index
  4359. index = r.open_index()
  4360. paths = [path.decode() for path in index]
  4361. for path in paths:
  4362. full_path = os.path.join(r.path, path)
  4363. if os.path.exists(full_path):
  4364. try:
  4365. with open(full_path, "rb") as f:
  4366. content = f.read()
  4367. pointer = LFSPointer.from_bytes(content)
  4368. results[path] = pointer
  4369. except OSError:
  4370. results[path] = None
  4371. else:
  4372. results[path] = None
  4373. return results
def lfs_fetch(repo=".", remote="origin", refs=None):
    """Fetch LFS objects from remote.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)
      refs: Specific refs to fetch LFS objects for (default: all refs)

    Returns:
      Number of objects fetched
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config.
        # NOTE(review): this assumes config.get returns a falsy value rather
        # than raising KeyError when the key is unset — confirm against the
        # Config implementation in use.
        config = r.get_config()
        lfs_url = config.get((b"lfs",), b"url")
        if not lfs_url:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL (Git LFS server convention)
                remote_url = remote_url.decode()
                if remote_url.endswith(".git"):
                    remote_url = remote_url[:-4]
                lfs_url = f"{remote_url}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url.decode()

        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods

        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)

        # Find all LFS pointers in the refs
        pointers_to_fetch = []

        if refs is None:
            # Get all refs
            refs = list(r.refs.keys())

        for ref in refs:
            if isinstance(ref, str):
                ref = ref.encode()
            try:
                commit = r[r.refs[ref]]
            except KeyError:
                # Ref doesn't resolve to a commit; skip it
                continue

            # Walk the commit tree looking for blobs that parse as pointers
            for entry in r.object_store.iter_tree_contents(commit.tree):
                try:
                    obj = r.object_store[entry.sha]
                    if obj.type_name == b"blob":
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            # Check if we already have it; KeyError from
                            # open_object means the object is missing locally
                            try:
                                store.open_object(pointer.oid)
                            except KeyError:
                                pointers_to_fetch.append((pointer.oid, pointer.size))
                except KeyError:
                    pass

        # Fetch missing objects
        fetched = 0
        for oid, size in pointers_to_fetch:
            try:
                content = client.download(oid, size)
                store.write_object([content])
                fetched += 1
            except Exception as e:
                # Best-effort: report the failure and keep fetching the rest
                print(f"Failed to fetch {oid}: {e}")

        return fetched
  4444. def lfs_pull(repo=".", remote="origin"):
  4445. """Pull LFS objects for current checkout.
  4446. Args:
  4447. repo: Path to repository
  4448. remote: Remote name (default: origin)
  4449. Returns:
  4450. Number of objects fetched
  4451. """
  4452. from .lfs import LFSPointer, LFSStore
  4453. with open_repo_closing(repo) as r:
  4454. # First do a fetch for HEAD
  4455. fetched = lfs_fetch(repo, remote, [b"HEAD"])
  4456. # Then checkout LFS files in working directory
  4457. store = LFSStore.from_repo(r)
  4458. index = r.open_index()
  4459. for path, entry in index.items():
  4460. full_path = os.path.join(r.path, path.decode())
  4461. if os.path.exists(full_path):
  4462. with open(full_path, "rb") as f:
  4463. content = f.read()
  4464. pointer = LFSPointer.from_bytes(content)
  4465. if pointer and pointer.is_valid_oid():
  4466. try:
  4467. # Replace pointer with actual content
  4468. with store.open_object(pointer.oid) as lfs_file:
  4469. lfs_content = lfs_file.read()
  4470. with open(full_path, "wb") as f:
  4471. f.write(lfs_content)
  4472. except KeyError:
  4473. # Object not available
  4474. pass
  4475. return fetched
  4476. def lfs_push(repo=".", remote="origin", refs=None):
  4477. """Push LFS objects to remote.
  4478. Args:
  4479. repo: Path to repository
  4480. remote: Remote name (default: origin)
  4481. refs: Specific refs to push LFS objects for (default: current branch)
  4482. Returns:
  4483. Number of objects pushed
  4484. """
  4485. from .lfs import LFSClient, LFSPointer, LFSStore
  4486. with open_repo_closing(repo) as r:
  4487. # Get LFS server URL from config
  4488. config = r.get_config()
  4489. lfs_url = config.get((b"lfs",), b"url")
  4490. if not lfs_url:
  4491. # Try remote URL
  4492. remote_url = config.get((b"remote", remote.encode()), b"url")
  4493. if remote_url:
  4494. # Append /info/lfs to remote URL
  4495. remote_url = remote_url.decode()
  4496. if remote_url.endswith(".git"):
  4497. remote_url = remote_url[:-4]
  4498. lfs_url = f"{remote_url}/info/lfs"
  4499. else:
  4500. raise ValueError(f"No LFS URL configured for remote {remote}")
  4501. else:
  4502. lfs_url = lfs_url.decode()
  4503. # Get authentication
  4504. auth = None
  4505. # TODO: Support credential helpers and other auth methods
  4506. # Create LFS client and store
  4507. client = LFSClient(lfs_url, auth)
  4508. store = LFSStore.from_repo(r)
  4509. # Find all LFS objects to push
  4510. if refs is None:
  4511. # Push current branch
  4512. refs = [r.refs.read_ref(b"HEAD")]
  4513. objects_to_push = set()
  4514. for ref in refs:
  4515. if isinstance(ref, str):
  4516. ref = ref.encode()
  4517. try:
  4518. if ref.startswith(b"refs/"):
  4519. commit = r[r.refs[ref]]
  4520. else:
  4521. commit = r[ref]
  4522. except KeyError:
  4523. continue
  4524. # Walk the commit tree
  4525. for entry in r.object_store.iter_tree_contents(commit.tree):
  4526. try:
  4527. obj = r.object_store[entry.sha]
  4528. if obj.type_name == b"blob":
  4529. pointer = LFSPointer.from_bytes(obj.data)
  4530. if pointer and pointer.is_valid_oid():
  4531. objects_to_push.add((pointer.oid, pointer.size))
  4532. except KeyError:
  4533. pass
  4534. # Push objects
  4535. pushed = 0
  4536. for oid, size in objects_to_push:
  4537. try:
  4538. with store.open_object(oid) as f:
  4539. content = f.read()
  4540. client.upload(oid, size, content)
  4541. pushed += 1
  4542. except KeyError:
  4543. # Object not in local store
  4544. print(f"Warning: LFS object {oid} not found locally")
  4545. except Exception as e:
  4546. # Log error but continue
  4547. print(f"Failed to push {oid}: {e}")
  4548. return pushed
  4549. def lfs_status(repo="."):
  4550. """Show status of LFS files.
  4551. Args:
  4552. repo: Path to repository
  4553. Returns:
  4554. Dict with status information
  4555. """
  4556. from .lfs import LFSPointer, LFSStore
  4557. with open_repo_closing(repo) as r:
  4558. store = LFSStore.from_repo(r)
  4559. index = r.open_index()
  4560. status = {
  4561. "tracked": [],
  4562. "not_staged": [],
  4563. "not_committed": [],
  4564. "not_pushed": [],
  4565. "missing": [],
  4566. }
  4567. # Check working directory files
  4568. for path, entry in index.items():
  4569. path_str = path.decode()
  4570. full_path = os.path.join(r.path, path_str)
  4571. if os.path.exists(full_path):
  4572. with open(full_path, "rb") as f:
  4573. content = f.read()
  4574. pointer = LFSPointer.from_bytes(content)
  4575. if pointer and pointer.is_valid_oid():
  4576. status["tracked"].append(path_str)
  4577. # Check if object exists locally
  4578. try:
  4579. store.open_object(pointer.oid)
  4580. except KeyError:
  4581. status["missing"].append(path_str)
  4582. # Check if file has been modified
  4583. try:
  4584. staged_obj = r.object_store[entry.binsha]
  4585. staged_pointer = LFSPointer.from_bytes(staged_obj.data)
  4586. if staged_pointer and staged_pointer.oid != pointer.oid:
  4587. status["not_staged"].append(path_str)
  4588. except KeyError:
  4589. pass
  4590. # TODO: Check for not committed and not pushed files
  4591. return status