porcelain.py 201 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
7577857795780578157825783578457855786578757885789579057915792579357945795579657975798579958005801580258035804580558065807580858095810581158125813581458155816581758185819582058215822582358245825582658275828582958305831583258335834583558365837583858395840584158425843584458455846584758485849585058515852585358545855585658575858585958605861586258635864586558665867586858695870587158725873587458755876587758785879588058815882588358845885588658875888588958905891589258935894589558965897589858995900590159025903590459055906590759085909591059115912591359145915591659175918591959205921592259235924592559265927592859295930593159325933593459355936593759385939594059415942594359445945594659475948594959505951595259535954595559565957595859595960596159625963596459655966596759685969597059715972597359745975597659775978597959805981598259835984598559865987598859895990599159925993599459955996599759985999600060016002600360046005600660076008600960106011601260136014601560166017601860196020602160226023602460256026602760286029603060316032603360346035603660376038603960406041604260436044604560466047604860496050605160526053605460556056605760586059606060616062606360646065606660676068606960706071607260736074607560766077607860796080608160826083608460856086608760886089609060916092609360946095609660976098609961006101610261036104610561066107610861096110611161126113611461156116611761186119612061216122612361246125612661276128
  1. # porcelain.py -- Porcelain-like layer on top of Dulwich
  2. # Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
  3. #
  4. # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
  5. # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
  6. # General Public License as published by the Free Software Foundation; version 2.0
  7. # or (at your option) any later version. You can redistribute it and/or
  8. # modify it under the terms of either of these two licenses.
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. # You should have received a copy of the licenses; if not, see
  17. # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
  18. # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
  19. # License, Version 2.0.
  20. #
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * bisect{_start,_bad,_good,_skip,_reset,_log,_replay}
  26. * branch{_create,_delete,_list}
  27. * check_ignore
  28. * checkout
  29. * checkout_branch
  30. * clone
  31. * cone mode{_init, _set, _add}
  32. * commit
  33. * commit_tree
  34. * daemon
  35. * describe
  36. * diff_tree
  37. * fetch
  38. * filter_branch
  39. * for_each_ref
  40. * init
  41. * ls_files
  42. * ls_remote
  43. * ls_tree
  44. * merge
  45. * merge_tree
  46. * mv/move
  47. * prune
  48. * pull
  49. * push
  50. * rm
  51. * remote{_add}
  52. * receive_pack
  53. * reset
  54. * revert
  55. * sparse_checkout
  56. * submodule_add
  57. * submodule_init
  58. * submodule_list
  59. * rev_list
  60. * tag{_create,_delete,_list}
  61. * upload_pack
  62. * update_server_info
  63. * write_commit_graph
  64. * status
  65. * symbolic_ref
  66. * worktree{_add,_list,_remove,_prune,_lock,_unlock,_move}
  67. These functions are meant to behave similarly to the git subcommands.
  68. Differences in behaviour are considered bugs.
  69. Note: one of the consequences of this is that paths tend to be
  70. interpreted relative to the current working directory rather than relative
  71. to the repository root.
  72. Functions should generally accept both unicode strings and bytestrings
  73. """
  74. import datetime
  75. import fnmatch
  76. import logging
  77. import os
  78. import posixpath
  79. import stat
  80. import sys
  81. import time
  82. from collections import namedtuple
  83. from collections.abc import Iterator
  84. from contextlib import AbstractContextManager, closing, contextmanager
  85. from dataclasses import dataclass
  86. from io import BytesIO, RawIOBase
  87. from pathlib import Path
  88. from typing import BinaryIO, Optional, TypeVar, Union, cast, overload
  89. from . import replace_me
  90. from .archive import tar_stream
  91. from .bisect import BisectState
  92. from .client import get_transport_and_path
  93. from .config import Config, ConfigFile, StackedConfig, read_submodules
  94. from .diff_tree import (
  95. CHANGE_ADD,
  96. CHANGE_COPY,
  97. CHANGE_DELETE,
  98. CHANGE_MODIFY,
  99. CHANGE_RENAME,
  100. RENAME_CHANGE_TYPES,
  101. TreeChange,
  102. tree_changes,
  103. )
  104. from .errors import SendPackError
  105. from .graph import can_fast_forward
  106. from .ignore import IgnoreFilterManager
  107. from .index import (
  108. ConflictedIndexEntry,
  109. IndexEntry,
  110. _fs_to_tree_path,
  111. blob_from_path_and_stat,
  112. build_file_from_blob,
  113. build_index_from_tree,
  114. get_unstaged_changes,
  115. index_entry_from_stat,
  116. symlink,
  117. update_working_tree,
  118. validate_path_element_default,
  119. validate_path_element_hfs,
  120. validate_path_element_ntfs,
  121. )
  122. from .object_store import tree_lookup_path
  123. from .objects import (
  124. Blob,
  125. Commit,
  126. Tag,
  127. Tree,
  128. format_timezone,
  129. parse_timezone,
  130. pretty_format_tree_entry,
  131. )
  132. from .objectspec import (
  133. parse_commit,
  134. parse_object,
  135. parse_ref,
  136. parse_reftuples,
  137. parse_tree,
  138. )
  139. from .pack import write_pack_from_container, write_pack_index
  140. from .patch import (
  141. get_summary,
  142. write_commit_patch,
  143. write_object_diff,
  144. write_tree_diff,
  145. )
  146. from .protocol import ZERO_SHA, Protocol
  147. from .refs import (
  148. LOCAL_BRANCH_PREFIX,
  149. LOCAL_NOTES_PREFIX,
  150. LOCAL_TAG_PREFIX,
  151. Ref,
  152. SymrefLoop,
  153. _import_remote_refs,
  154. )
  155. from .repo import BaseRepo, Repo, get_user_identity
  156. from .server import (
  157. FileSystemBackend,
  158. ReceivePackHandler,
  159. TCPGitServer,
  160. UploadPackHandler,
  161. )
  162. from .server import update_server_info as server_update_server_info
  163. from .sparse_patterns import (
  164. SparseCheckoutConflictError,
  165. apply_included_paths,
  166. determine_included_paths,
  167. )
  168. # Module level tuple definition for status output
  169. GitStatus = namedtuple("GitStatus", "staged unstaged untracked")
  170. # TypeVar for preserving BaseRepo subclass types
  171. T = TypeVar("T", bound="BaseRepo")
  172. # Type alias for common repository parameter pattern
  173. RepoPath = Union[str, os.PathLike, Repo]
  174. @dataclass
  175. class CountObjectsResult:
  176. """Result of counting objects in a repository.
  177. Attributes:
  178. count: Number of loose objects
  179. size: Total size of loose objects in bytes
  180. in_pack: Number of objects in pack files
  181. packs: Number of pack files
  182. size_pack: Total size of pack files in bytes
  183. """
  184. count: int
  185. size: int
  186. in_pack: Optional[int] = None
  187. packs: Optional[int] = None
  188. size_pack: Optional[int] = None
  189. class NoneStream(RawIOBase):
  190. """Fallback if stdout or stderr are unavailable, does nothing."""
  191. def read(self, size=-1) -> None:
  192. """Read bytes (no-op for NoneStream).
  193. Args:
  194. size: Number of bytes to read
  195. Returns:
  196. None
  197. """
  198. return None
  199. def readall(self) -> bytes:
  200. """Read all bytes (returns empty bytes).
  201. Returns:
  202. Empty bytes object
  203. """
  204. return b""
  205. def readinto(self, b) -> None:
  206. """Read bytes into buffer (no-op for NoneStream).
  207. Args:
  208. b: Buffer to read into
  209. Returns:
  210. None
  211. """
  212. return None
  213. def write(self, b) -> None:
  214. """Write bytes (no-op for NoneStream).
  215. Args:
  216. b: Bytes to write
  217. Returns:
  218. None
  219. """
  220. return None
  221. default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream()
  222. default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream()
  223. DEFAULT_ENCODING = "utf-8"
  224. class Error(Exception):
  225. """Porcelain-based error."""
  226. def __init__(self, msg) -> None:
  227. """Initialize an Error.
  228. Args:
  229. msg: Error message
  230. """
  231. super().__init__(msg)
  232. class RemoteExists(Error):
  233. """Raised when the remote already exists."""
  234. class TimezoneFormatError(Error):
  235. """Raised when the timezone cannot be determined from a given string."""
  236. class CheckoutError(Error):
  237. """Indicates that a checkout cannot be performed."""
  238. def parse_timezone_format(tz_str):
  239. """Parse given string and attempt to return a timezone offset.
  240. Different formats are considered in the following order:
  241. - Git internal format: <unix timestamp> <timezone offset>
  242. - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
  243. - ISO 8601: e.g. 1995-11-20T19:12:08-0500
  244. Args:
  245. tz_str: datetime string
  246. Returns: Timezone offset as integer
  247. Raises:
  248. TimezoneFormatError: if timezone information cannot be extracted
  249. """
  250. import re
  251. # Git internal format
  252. internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
  253. if re.match(internal_format_pattern, tz_str):
  254. try:
  255. tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
  256. return tz_internal[0]
  257. except ValueError:
  258. pass
  259. # RFC 2822
  260. import email.utils
  261. rfc_2822 = email.utils.parsedate_tz(tz_str)
  262. if rfc_2822:
  263. return rfc_2822[9]
  264. # ISO 8601
  265. # Supported offsets:
  266. # sHHMM, sHH:MM, sHH
  267. iso_8601_pattern = re.compile(
  268. "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
  269. )
  270. match = re.search(iso_8601_pattern, tz_str)
  271. total_secs = 0
  272. if match:
  273. sign, hours, minutes = match.groups()
  274. total_secs += int(hours) * 3600
  275. if minutes:
  276. total_secs += int(minutes) * 60
  277. total_secs = -total_secs if sign == "-" else total_secs
  278. return total_secs
  279. # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
  280. raise TimezoneFormatError(tz_str)
  281. def get_user_timezones():
  282. """Retrieve local timezone as described in
  283. https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
  284. Returns: A tuple containing author timezone, committer timezone.
  285. """
  286. local_timezone = time.localtime().tm_gmtoff
  287. if os.environ.get("GIT_AUTHOR_DATE"):
  288. author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
  289. else:
  290. author_timezone = local_timezone
  291. if os.environ.get("GIT_COMMITTER_DATE"):
  292. commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
  293. else:
  294. commit_timezone = local_timezone
  295. return author_timezone, commit_timezone
@overload
def open_repo(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo(
    path_or_repo: Union[str, os.PathLike],
) -> AbstractContextManager[Repo]: ...


def open_repo(
    path_or_repo: Union[str, os.PathLike, T],
) -> AbstractContextManager[Union[T, Repo]]:
    """Open an argument that can be a repository or a path for a repository.

    Args:
      path_or_repo: An already-open repo object, or a filesystem path.

    Returns:
      A context manager: an existing repo is yielded unchanged (and not
      closed on exit); a path is opened as a new ``Repo``.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return Repo(path_or_repo)
@contextmanager
def _noop_context_manager(obj):
    """Context manager that has the same api as closing but does nothing.

    Args:
      obj: Object to yield unchanged; it is not closed on exit.
    """
    yield obj
@overload
def open_repo_closing(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo_closing(
    path_or_repo: Union[str, os.PathLike],
) -> AbstractContextManager[Repo]: ...


def open_repo_closing(
    path_or_repo: Union[str, os.PathLike, T],
) -> AbstractContextManager[Union[T, Repo]]:
    """Open an argument that can be a repository or a path for a repository.

    returns a context manager that will close the repo on exit if the argument
    is a path, else does nothing if the argument is a repo.

    Args:
      path_or_repo: An already-open repo object, or a filesystem path.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
def path_to_tree_path(
    repopath: Union[str, os.PathLike], path, tree_encoding=DEFAULT_ENCODING
):
    """Convert a path to a path usable in an index, e.g. bytes and relative to
    the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding used for the returned tree path (Windows only)
    Returns: A path formatted for use in e.g. an index
    """
    # Resolve might returns a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    path = Path(path)
    resolved_path = path.resolve()

    # Resolve and abspath seems to behave differently regarding symlinks,
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise

    if sys.platform == "win32":
        # Index paths always use POSIX separators, encoded to bytes
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha, new_sha) -> None:
        """Initialize a DivergedBranches error.

        Args:
          current_sha: SHA of the current branch head
          new_sha: SHA of the new branch head
        """
        # NOTE(review): Error.__init__ is not invoked, so str(exc) is empty;
        # callers are expected to inspect current_sha/new_sha instead.
        # Confirm whether skipping super().__init__ is intentional.
        self.current_sha = current_sha
        self.new_sha = new_sha
  375. def check_diverged(repo, current_sha, new_sha) -> None:
  376. """Check if updating to a sha can be done with fast forwarding.
  377. Args:
  378. repo: Repository object
  379. current_sha: Current head sha
  380. new_sha: New head sha
  381. """
  382. try:
  383. can = can_fast_forward(repo, current_sha, new_sha)
  384. except KeyError:
  385. can = False
  386. if not can:
  387. raise DivergedBranches(current_sha, new_sha)
  388. def archive(
  389. repo,
  390. committish: Optional[Union[str, bytes, Commit, Tag]] = None,
  391. outstream=default_bytes_out_stream,
  392. errstream=default_bytes_err_stream,
  393. ) -> None:
  394. """Create an archive.
  395. Args:
  396. repo: Path of repository for which to generate an archive.
  397. committish: Commit SHA1 or ref to use
  398. outstream: Output stream (defaults to stdout)
  399. errstream: Error stream (defaults to stderr)
  400. """
  401. if committish is None:
  402. committish = "HEAD"
  403. with open_repo_closing(repo) as repo_obj:
  404. c = parse_commit(repo_obj, committish)
  405. for chunk in tar_stream(
  406. repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time
  407. ):
  408. outstream.write(chunk)
  409. def update_server_info(repo: RepoPath = ".") -> None:
  410. """Update server info files for a repository.
  411. Args:
  412. repo: path to the repository
  413. """
  414. with open_repo_closing(repo) as r:
  415. server_update_server_info(r)
  416. def write_commit_graph(repo: RepoPath = ".", reachable=True) -> None:
  417. """Write a commit graph file for a repository.
  418. Args:
  419. repo: path to the repository or a Repo object
  420. reachable: if True, include all commits reachable from refs.
  421. if False, only include direct ref targets.
  422. """
  423. with open_repo_closing(repo) as r:
  424. # Get all refs
  425. refs = list(r.refs.as_dict().values())
  426. if refs:
  427. r.object_store.write_commit_graph(refs, reachable=reachable)
  428. def symbolic_ref(repo: RepoPath, ref_name, force=False) -> None:
  429. """Set git symbolic ref into HEAD.
  430. Args:
  431. repo: path to the repository
  432. ref_name: short name of the new ref
  433. force: force settings without checking if it exists in refs/heads
  434. """
  435. with open_repo_closing(repo) as repo_obj:
  436. ref_path = _make_branch_ref(ref_name)
  437. if not force and ref_path not in repo_obj.refs.keys():
  438. raise Error(f"fatal: ref `{ref_name}` is not a ref")
  439. repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
  440. def pack_refs(repo: RepoPath, all=False) -> None:
  441. """Pack loose refs into a single file.
  442. Args:
  443. repo: Path to the repository
  444. all: If True, pack all refs; if False, only pack already-packed refs
  445. """
  446. with open_repo_closing(repo) as repo_obj:
  447. repo_obj.refs.pack_refs(all=all)
  448. def commit(
  449. repo=".",
  450. message=None,
  451. author=None,
  452. author_timezone=None,
  453. committer=None,
  454. commit_timezone=None,
  455. encoding=None,
  456. no_verify=False,
  457. signoff=False,
  458. all=False,
  459. amend=False,
  460. ):
  461. """Create a new commit.
  462. Args:
  463. repo: Path to repository
  464. message: Optional commit message (string/bytes or callable that takes
  465. (repo, commit) and returns bytes)
  466. author: Optional author name and email
  467. author_timezone: Author timestamp timezone
  468. committer: Optional committer name and email
  469. commit_timezone: Commit timestamp timezone
  470. no_verify: Skip pre-commit and commit-msg hooks
  471. signoff: GPG Sign the commit (bool, defaults to False,
  472. pass True to use default GPG key,
  473. pass a str containing Key ID to use a specific GPG key)
  474. all: Automatically stage all tracked files that have been modified
  475. amend: Replace the tip of the current branch by creating a new commit
  476. Returns: SHA1 of the new commit
  477. """
  478. if getattr(message, "encode", None):
  479. message = message.encode(encoding or DEFAULT_ENCODING)
  480. if getattr(author, "encode", None):
  481. author = author.encode(encoding or DEFAULT_ENCODING)
  482. if getattr(committer, "encode", None):
  483. committer = committer.encode(encoding or DEFAULT_ENCODING)
  484. local_timezone = get_user_timezones()
  485. if author_timezone is None:
  486. author_timezone = local_timezone[0]
  487. if commit_timezone is None:
  488. commit_timezone = local_timezone[1]
  489. with open_repo_closing(repo) as r:
  490. # Handle amend logic
  491. merge_heads = None
  492. if amend:
  493. try:
  494. head_commit = r[r.head()]
  495. except KeyError:
  496. raise ValueError("Cannot amend: no existing commit found")
  497. # If message not provided, use the message from the current HEAD
  498. if message is None:
  499. message = head_commit.message
  500. # If author not provided, use the author from the current HEAD
  501. if author is None:
  502. author = head_commit.author
  503. if author_timezone is None:
  504. author_timezone = head_commit.author_timezone
  505. # Use the parent(s) of the current HEAD as our parent(s)
  506. merge_heads = list(head_commit.parents)
  507. # If -a flag is used, stage all modified tracked files
  508. if all:
  509. index = r.open_index()
  510. normalizer = r.get_blob_normalizer()
  511. filter_callback = normalizer.checkin_normalize
  512. unstaged_changes = list(
  513. get_unstaged_changes(index, r.path, filter_callback)
  514. )
  515. if unstaged_changes:
  516. # Convert bytes paths to strings for add function
  517. modified_files = []
  518. for path in unstaged_changes:
  519. if isinstance(path, bytes):
  520. path = path.decode()
  521. modified_files.append(path)
  522. add(r, paths=modified_files)
  523. commit_kwargs = {
  524. "message": message,
  525. "author": author,
  526. "author_timezone": author_timezone,
  527. "committer": committer,
  528. "commit_timezone": commit_timezone,
  529. "encoding": encoding,
  530. "no_verify": no_verify,
  531. "sign": signoff if isinstance(signoff, (str, bool)) else None,
  532. "merge_heads": merge_heads,
  533. }
  534. # For amend, create dangling commit to avoid adding current HEAD as parent
  535. if amend:
  536. commit_kwargs["ref"] = None
  537. commit_sha = r.get_worktree().commit(**commit_kwargs)
  538. # Update HEAD to point to the new commit
  539. r.refs[b"HEAD"] = commit_sha
  540. return commit_sha
  541. else:
  542. return r.get_worktree().commit(**commit_kwargs)
  543. def commit_tree(
  544. repo: RepoPath,
  545. tree,
  546. message=None,
  547. author=None,
  548. committer=None,
  549. ):
  550. """Create a new commit object.
  551. Args:
  552. repo: Path to repository
  553. tree: An existing tree object
  554. author: Optional author name and email
  555. committer: Optional committer name and email
  556. """
  557. with open_repo_closing(repo) as r:
  558. return r.get_worktree().commit(
  559. message=message, tree=tree, committer=committer, author=author
  560. )
  561. def init(
  562. path: Union[str, os.PathLike] = ".", *, bare=False, symlinks: Optional[bool] = None
  563. ):
  564. """Create a new git repository.
  565. Args:
  566. path: Path to repository.
  567. bare: Whether to create a bare repository.
  568. symlinks: Whether to create actual symlinks (defaults to autodetect)
  569. Returns: A Repo instance
  570. """
  571. if not os.path.exists(path):
  572. os.mkdir(path)
  573. if bare:
  574. return Repo.init_bare(path)
  575. else:
  576. return Repo.init(path, symlinks=symlinks)
def clone(
    source,
    target: Optional[Union[str, os.PathLike]] = None,
    bare=False,
    checkout=None,
    errstream=default_bytes_err_stream,
    outstream=None,
    origin: Optional[str] = "origin",
    depth: Optional[int] = None,
    branch: Optional[Union[str, bytes]] = None,
    config: Optional[Config] = None,
    filter_spec=None,
    protocol_version: Optional[int] = None,
    recurse_submodules: bool = False,
    **kwargs,
):
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules

    Keyword Args:
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
        # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()

    if checkout is None:
        # Non-bare clones get a working tree by default.
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        # Derive the target directory name from the last URL/path component.
        target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    mkdir = not os.path.exists(target)

    (client, path) = get_transport_and_path(source, config=config, **kwargs)

    if filter_spec:
        filter_spec = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        target,
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch,
        progress=errstream.write,
        depth=depth,
        filter_spec=filter_spec,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing
            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
def add(repo: Union[str, os.PathLike, Repo] = ".", paths=None):
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files from the
        current working directory (mimicking 'git add .' behavior).
    Returns: Tuple with set of added files and ignored files

      If the repository contains ignored directories, the returned set will
      contain the path to an ignored directory (with trailing slash). Individual
      files within ignored directories will not be returned.

    Note: When paths=None, this function adds all untracked and modified files
      from the entire repository, mimicking 'git add -A' behavior.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        all_unstaged_paths = list(get_unstaged_changes(index, r.path, filter_callback))

        if not paths:
            # When no paths specified, add all untracked and modified files from repo root
            paths = [str(repo_path)]

        relpaths = []

        if not isinstance(paths, list):
            paths = [paths]

        for p in paths:
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path

            # Don't resolve symlinks completely - only resolve the parent directory
            # to avoid issues when symlinks point outside the repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()

            try:
                relpath = str(resolved_path.relative_to(repo_path)).replace(os.sep, "/")
            except ValueError as e:
                # Path is not within the repository
                raise ValueError(
                    f"Path {p} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue
                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)

        r.get_worktree().stage(relpaths)
    return (relpaths, ignored)
  766. def _is_subdir(subdir, parentdir):
  767. """Check whether subdir is parentdir or a subdir of parentdir.
  768. If parentdir or subdir is a relative path, it will be disamgibuated
  769. relative to the pwd.
  770. """
  771. parentdir_abs = os.path.realpath(parentdir) + os.path.sep
  772. subdir_abs = os.path.realpath(subdir) + os.path.sep
  773. return subdir_abs.startswith(parentdir_abs)
  774. # TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo: Union[str, os.PathLike, Repo] = ".", target_dir=None) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        # NOTE(review): the requireForce value is read but never used below —
        # confirm whether force handling is still pending (see TODO).
        config.get_boolean((b"clean",), b"requireForce", True)

        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                # Only delete files that are neither tracked nor ignored.
                if not is_tracked and not is_ignored:
                    os.remove(ap)
def remove(repo: Union[str, os.PathLike, Repo] = ".", paths=None, cached=False) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the repository root.
      cached: If True, only remove from the index and keep the working-tree
        file; if False, also delete the file from disk (after safety checks).
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        blob_normalizer = r.get_blob_normalizer()
        for p in paths:
            # If path is absolute, use it as-is. Otherwise, treat it as relative to repo
            if os.path.isabs(p):
                full_path = p
            else:
                # Treat relative paths as relative to the repository root
                full_path = os.path.join(r.path, p)
            tree_path = path_to_tree_path(r.path, full_path)
            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)
            try:
                entry = index[tree_path]
                if isinstance(entry, ConflictedIndexEntry):
                    raise Error(f"{p} has conflicts in the index")
                index_sha = entry.sha
            except KeyError as exc:
                raise Error(f"{p} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    # File already gone from disk; nothing to guard or delete.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                        # Apply checkin normalization to compare apples to apples
                        if blob_normalizer is not None:
                            blob = blob_normalizer.checkin_normalize(blob, tree_path)
                    except OSError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path
                            )[1]
                        except KeyError:
                            # Path not present in HEAD (e.g. newly added file).
                            committed_sha = None

                        # Refuse to delete when the staged content matches
                        # neither the working file nor HEAD.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()
# Alias matching the `git rm` subcommand name.
rm = remove
def mv(
    repo: Union[str, os.PathLike, Repo],
    source: Union[str, bytes, os.PathLike],
    destination: Union[str, bytes, os.PathLike],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination already exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)
        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)
        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)
        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index: re-key the source's entry under the new path
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()
# Alias matching the `git mv` verb "move".
move = mv
  948. def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
  949. """Decode commit message contents to unicode.
  950. Args:
  951. commit: Commit object
  952. contents: Raw commit message bytes
  953. default_encoding: Encoding to use if none specified in commit
  954. Returns:
  955. Decoded commit message as unicode string
  956. """
  957. if commit.encoding:
  958. encoding = commit.encoding.decode("ascii")
  959. else:
  960. encoding = default_encoding
  961. return contents.decode(encoding, "replace")
  962. def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
  963. """Encode commit message contents to bytes.
  964. Args:
  965. commit: Commit object
  966. contents: Commit message as unicode string
  967. default_encoding: Encoding to use if none specified in commit
  968. Returns:
  969. Encoded commit message as bytes
  970. """
  971. if commit.encoding:
  972. encoding = commit.encoding.decode("ascii")
  973. else:
  974. encoding = default_encoding
  975. return contents.encode(encoding)
  976. def print_commit(commit, decode, outstream=sys.stdout) -> None:
  977. """Write a human-readable commit log entry.
  978. Args:
  979. commit: A `Commit` object
  980. outstream: A stream file to write to
  981. """
  982. outstream.write("-" * 50 + "\n")
  983. outstream.write("commit: " + commit.id.decode("ascii") + "\n")
  984. if len(commit.parents) > 1:
  985. outstream.write(
  986. "merge: "
  987. + "...".join([c.decode("ascii") for c in commit.parents[1:]])
  988. + "\n"
  989. )
  990. outstream.write("Author: " + decode(commit.author) + "\n")
  991. if commit.author != commit.committer:
  992. outstream.write("Committer: " + decode(commit.committer) + "\n")
  993. time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
  994. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  995. timezone_str = format_timezone(commit.author_timezone).decode("ascii")
  996. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  997. if commit.message:
  998. outstream.write("\n")
  999. outstream.write(decode(commit.message) + "\n")
  1000. outstream.write("\n")
  1001. def print_tag(tag, decode, outstream=sys.stdout) -> None:
  1002. """Write a human-readable tag.
  1003. Args:
  1004. tag: A `Tag` object
  1005. decode: Function for decoding bytes to unicode string
  1006. outstream: A stream to write to
  1007. """
  1008. outstream.write("Tagger: " + decode(tag.tagger) + "\n")
  1009. time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
  1010. time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
  1011. timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
  1012. outstream.write("Date: " + time_str + " " + timezone_str + "\n")
  1013. outstream.write("\n")
  1014. outstream.write(decode(tag.message))
  1015. outstream.write("\n")
def show_blob(repo: RepoPath, blob, decode, outstream=sys.stdout) -> None:
    """Write a blob to a stream.

    Args:
      repo: A `Repo` object (unused; kept for a uniform show_* signature)
      blob: A `Blob` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write(decode(blob.data))
def show_commit(repo: RepoPath, commit, decode, outstream=sys.stdout) -> None:
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    from .diff import ColorizedDiffStream

    # Create a wrapper for ColorizedDiffStream to handle string/bytes conversion
    class _StreamWrapper:
        def __init__(self, stream):
            self.stream = stream

        def write(self, data):
            if isinstance(data, str):
                # Convert string to bytes for ColorizedDiffStream
                self.stream.write(data.encode("utf-8"))
            else:
                self.stream.write(data)

    with open_repo_closing(repo) as r:
        # Use wrapper for ColorizedDiffStream, direct stream for others
        if isinstance(outstream, ColorizedDiffStream):
            wrapped_stream = _StreamWrapper(outstream)
            print_commit(commit, decode=decode, outstream=wrapped_stream)
            # Write diff directly to the ColorizedDiffStream as bytes
            # Type cast since ColorizedDiffStream implements the BinaryIO interface we need
            # NOTE(review): this branch passes the parent *commit* id, while
            # the buffered branch below resolves it to the parent *tree* —
            # confirm write_tree_diff accepts both forms.
            write_tree_diff(
                cast(BinaryIO, outstream),
                r.object_store,
                commit.parents[0] if commit.parents else None,
                commit.tree,
            )
        else:
            print_commit(commit, decode=decode, outstream=outstream)
            if commit.parents:
                parent_commit = r[commit.parents[0]]
                base_tree = parent_commit.tree
            else:
                base_tree = None
            # Traditional path: buffer diff and write as decoded text
            diffstream = BytesIO()
            write_tree_diff(diffstream, r.object_store, base_tree, commit.tree)
            diffstream.seek(0)
            outstream.write(commit_decode(commit, diffstream.getvalue()))
  1069. def show_tree(repo: RepoPath, tree, decode, outstream=sys.stdout) -> None:
  1070. """Print a tree to a stream.
  1071. Args:
  1072. repo: A `Repo` object
  1073. tree: A `Tree` object
  1074. decode: Function for decoding bytes to unicode string
  1075. outstream: Stream to write to
  1076. """
  1077. for n in tree:
  1078. outstream.write(decode(n) + "\n")
  1079. def show_tag(repo: RepoPath, tag, decode, outstream=sys.stdout) -> None:
  1080. """Print a tag to a stream.
  1081. Args:
  1082. repo: A `Repo` object
  1083. tag: A `Tag` object
  1084. decode: Function for decoding bytes to unicode string
  1085. outstream: Stream to write to
  1086. """
  1087. with open_repo_closing(repo) as r:
  1088. print_tag(tag, decode, outstream)
  1089. show_object(repo, r[tag.object[1]], decode, outstream)
  1090. def show_object(repo: RepoPath, obj, decode, outstream):
  1091. """Display a git object.
  1092. Args:
  1093. repo: Path to the repository
  1094. obj: Git object to display (blob, tree, commit, or tag)
  1095. decode: Function for decoding bytes to unicode string
  1096. outstream: Stream to write output to
  1097. Returns:
  1098. Result of the appropriate show_* function
  1099. """
  1100. return {
  1101. b"tree": show_tree,
  1102. b"blob": show_blob,
  1103. b"commit": show_commit,
  1104. b"tag": show_tag,
  1105. }[obj.type_name](repo, obj, decode, outstream)
  1106. def print_name_status(changes):
  1107. """Print a simple status summary, listing changed files."""
  1108. for change in changes:
  1109. if not change:
  1110. continue
  1111. if isinstance(change, list):
  1112. change = change[0]
  1113. if change.type == CHANGE_ADD:
  1114. path1 = change.new.path
  1115. path2 = ""
  1116. kind = "A"
  1117. elif change.type == CHANGE_DELETE:
  1118. path1 = change.old.path
  1119. path2 = ""
  1120. kind = "D"
  1121. elif change.type == CHANGE_MODIFY:
  1122. path1 = change.new.path
  1123. path2 = ""
  1124. kind = "M"
  1125. elif change.type in RENAME_CHANGE_TYPES:
  1126. path1 = change.old.path
  1127. path2 = change.new.path
  1128. if change.type == CHANGE_RENAME:
  1129. kind = "R"
  1130. elif change.type == CHANGE_COPY:
  1131. kind = "C"
  1132. yield "%-8s%-20s%-20s" % (kind, path1, path2) # noqa: UP031
  1133. def log(
  1134. repo=".",
  1135. paths=None,
  1136. outstream=sys.stdout,
  1137. max_entries=None,
  1138. reverse=False,
  1139. name_status=False,
  1140. ) -> None:
  1141. """Write commit logs.
  1142. Args:
  1143. repo: Path to repository
  1144. paths: Optional set of specific paths to print entries for
  1145. outstream: Stream to write log output to
  1146. reverse: Reverse order in which entries are printed
  1147. name_status: Print name status
  1148. max_entries: Optional maximum number of entries to display
  1149. """
  1150. with open_repo_closing(repo) as r:
  1151. try:
  1152. include = [r.head()]
  1153. except KeyError:
  1154. include = []
  1155. walker = r.get_walker(
  1156. include=include, max_entries=max_entries, paths=paths, reverse=reverse
  1157. )
  1158. for entry in walker:
  1159. def decode(x):
  1160. return commit_decode(entry.commit, x)
  1161. print_commit(entry.commit, decode, outstream)
  1162. if name_status:
  1163. outstream.writelines(
  1164. [line + "\n" for line in print_name_status(entry.changes())]
  1165. )
  1166. # TODO(jelmer): better default for encoding?
  1167. def show(
  1168. repo=".",
  1169. objects=None,
  1170. outstream=sys.stdout,
  1171. default_encoding=DEFAULT_ENCODING,
  1172. ) -> None:
  1173. """Print the changes in a commit.
  1174. Args:
  1175. repo: Path to repository
  1176. objects: Objects to show (defaults to [HEAD])
  1177. outstream: Stream to write to
  1178. default_encoding: Default encoding to use if none is set in the
  1179. commit
  1180. """
  1181. if objects is None:
  1182. objects = ["HEAD"]
  1183. if not isinstance(objects, list):
  1184. objects = [objects]
  1185. with open_repo_closing(repo) as r:
  1186. for objectish in objects:
  1187. o = parse_object(r, objectish)
  1188. if isinstance(o, Commit):
  1189. def decode(x):
  1190. return commit_decode(o, x, default_encoding)
  1191. else:
  1192. def decode(x):
  1193. return x.decode(default_encoding)
  1194. show_object(r, o, decode, outstream)
  1195. def diff_tree(
  1196. repo: RepoPath,
  1197. old_tree,
  1198. new_tree,
  1199. outstream=default_bytes_out_stream,
  1200. ) -> None:
  1201. """Compares the content and mode of blobs found via two tree objects.
  1202. Args:
  1203. repo: Path to repository
  1204. old_tree: Id of old tree
  1205. new_tree: Id of new tree
  1206. outstream: Stream to write to
  1207. """
  1208. with open_repo_closing(repo) as r:
  1209. write_tree_diff(outstream, r.object_store, old_tree, new_tree)
def diff(
    repo=".",
    commit=None,
    commit2=None,
    staged=False,
    paths=None,
    outstream=default_bytes_out_stream,
) -> None:
    """Show diff.

    Args:
      repo: Path to repository
      commit: First commit to compare. If staged is True, compare
        index to this commit. If staged is False, compare working tree
        to this commit. If None, defaults to HEAD for staged and index
        for unstaged.
      commit2: Second commit to compare against first commit. If provided,
        show diff between commit and commit2 (ignoring staged flag).
      staged: If True, show staged changes (index vs commit).
        If False, show unstaged changes (working tree vs commit/index).
        Ignored if commit2 is provided.
      paths: Optional list of paths to limit diff
      outstream: Stream to write to
    """
    from . import diff as diff_module

    with open_repo_closing(repo) as r:
        # Normalize paths to bytes
        if paths is not None and paths:  # Check if paths is not empty
            byte_paths = []
            for p in paths:
                if isinstance(p, str):
                    byte_paths.append(p.encode("utf-8"))
                else:
                    byte_paths.append(p)
            paths = byte_paths
        elif paths == []:  # Convert empty list to None
            paths = None

        # Resolve commit refs to SHAs if provided
        if commit is not None:
            if isinstance(commit, Commit):
                # Already a Commit object
                commit_sha = commit.id
                commit_obj = commit
            else:
                # parse_commit handles both refs and SHAs, and always returns a Commit object
                commit_obj = parse_commit(r, commit)
                commit_sha = commit_obj.id
        else:
            commit_sha = None
            commit_obj = None

        if commit2 is not None:
            # Compare two commits
            if isinstance(commit2, Commit):
                commit2_obj = commit2
            else:
                commit2_obj = parse_commit(r, commit2)

            # Get trees from commits; None old tree means "diff from empty".
            old_tree = commit_obj.tree if commit_obj else None
            new_tree = commit2_obj.tree

            # Use tree_changes to get the changes and apply path filtering
            changes = r.object_store.tree_changes(old_tree, new_tree)
            for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
                # Skip if paths are specified and this change doesn't match
                # either exactly or as a directory prefix.
                if paths:
                    path_to_check = newpath or oldpath
                    if not any(
                        path_to_check == p or path_to_check.startswith(p + b"/")
                        for p in paths
                    ):
                        continue
                write_object_diff(
                    outstream,
                    r.object_store,
                    (oldpath, oldmode, oldsha),
                    (newpath, newmode, newsha),
                )
        elif staged:
            # Show staged changes (index vs commit)
            diff_module.diff_index_to_tree(r, outstream, commit_sha, paths)
        elif commit is not None:
            # Compare working tree to a specific commit
            assert (
                commit_sha is not None
            )  # mypy: commit_sha is set when commit is not None
            diff_module.diff_working_tree_to_tree(r, outstream, commit_sha, paths)
        else:
            # Compare working tree to index
            diff_module.diff_working_tree_to_index(r, outstream, paths)
  1297. def rev_list(repo: RepoPath, commits, outstream=sys.stdout) -> None:
  1298. """Lists commit objects in reverse chronological order.
  1299. Args:
  1300. repo: Path to repository
  1301. commits: Commits over which to iterate
  1302. outstream: Stream to write to
  1303. """
  1304. with open_repo_closing(repo) as r:
  1305. for entry in r.get_walker(include=[r[c].id for c in commits]):
  1306. outstream.write(entry.commit.id + b"\n")
  1307. def _canonical_part(url: str) -> str:
  1308. name = url.rsplit("/", 1)[-1]
  1309. if name.endswith(".git"):
  1310. name = name[:-4]
  1311. return name
  1312. def submodule_add(
  1313. repo: Union[str, os.PathLike, Repo], url, path=None, name=None
  1314. ) -> None:
  1315. """Add a new submodule.
  1316. Args:
  1317. repo: Path to repository
  1318. url: URL of repository to add as submodule
  1319. path: Path where submodule should live
  1320. name: Name for the submodule
  1321. """
  1322. with open_repo_closing(repo) as r:
  1323. if path is None:
  1324. path = os.path.relpath(_canonical_part(url), r.path)
  1325. if name is None:
  1326. name = path
  1327. # TODO(jelmer): Move this logic to dulwich.submodule
  1328. gitmodules_path = os.path.join(r.path, ".gitmodules")
  1329. try:
  1330. config = ConfigFile.from_path(gitmodules_path)
  1331. except FileNotFoundError:
  1332. config = ConfigFile()
  1333. config.path = gitmodules_path
  1334. config.set(("submodule", name), "url", url)
  1335. config.set(("submodule", name), "path", path)
  1336. config.write_to_path()
  1337. def submodule_init(repo: Union[str, os.PathLike, Repo]) -> None:
  1338. """Initialize submodules.
  1339. Args:
  1340. repo: Path to repository
  1341. """
  1342. with open_repo_closing(repo) as r:
  1343. config = r.get_config()
  1344. gitmodules_path = os.path.join(r.path, ".gitmodules")
  1345. for path, url, name in read_submodules(gitmodules_path):
  1346. config.set((b"submodule", name), b"active", True)
  1347. config.set((b"submodule", name), b"url", url)
  1348. config.write_to_path()
  1349. def submodule_list(repo: RepoPath):
  1350. """List submodules.
  1351. Args:
  1352. repo: Path to repository
  1353. """
  1354. from .submodule import iter_cached_submodules
  1355. with open_repo_closing(repo) as r:
  1356. for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
  1357. yield path, sha.decode(DEFAULT_ENCODING)
def submodule_update(
    repo: Union[str, os.PathLike, Repo],
    paths=None,
    init=False,
    force=False,
    errstream=None,
) -> None:
    """Update submodules.

    Args:
      repo: Path to repository
      paths: Optional list of specific submodule paths to update. If None, updates all.
      init: If True, initialize submodules first
      force: Force update even if local changes exist (NOTE(review): not
        currently consulted by this implementation)
      errstream: Error stream (currently unused)
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        if init:
            submodule_init(r)

        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")

        # Get list of submodules to update
        submodules_to_update = []
        for path, sha in iter_cached_submodules(r.object_store, r[r.head()].tree):
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )
            if paths is None or path_str in paths:
                submodules_to_update.append((path, sha))

        # Read submodule configuration
        for path, target_sha in submodules_to_update:
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )

            # Find the submodule name from .gitmodules
            submodule_name: Optional[bytes] = None
            for sm_path, sm_url, sm_name in read_submodules(gitmodules_path):
                if sm_path == path:
                    submodule_name = sm_name
                    break

            if not submodule_name:
                # Submodule present in the tree but not in .gitmodules; skip.
                continue

            # Get the URL from config
            section = (
                b"submodule",
                submodule_name
                if isinstance(submodule_name, bytes)
                else submodule_name.encode(),
            )
            try:
                url_value = config.get(section, b"url")
                if isinstance(url_value, bytes):
                    url = url_value.decode(DEFAULT_ENCODING)
                else:
                    url = url_value
            except KeyError:
                # URL not in config, skip this submodule
                continue

            # Get or create the submodule repository paths
            submodule_path = os.path.join(r.path, path_str)
            submodule_git_dir = os.path.join(r.path, ".git", "modules", path_str)

            # Clone or fetch the submodule
            if not os.path.exists(submodule_git_dir):
                # Clone the submodule as bare repository
                os.makedirs(os.path.dirname(submodule_git_dir), exist_ok=True)

                # Clone to the git directory
                sub_repo = clone(url, submodule_git_dir, bare=True, checkout=False)
                sub_repo.close()

                # Create the submodule directory if it doesn't exist
                if not os.path.exists(submodule_path):
                    os.makedirs(submodule_path)

                # Create .git file in the submodule directory, pointing back at
                # .git/modules/<path> via a relative path.
                depth = path_str.count("/") + 1
                relative_git_dir = "../" * depth + ".git/modules/" + path_str
                git_file_path = os.path.join(submodule_path, ".git")
                with open(git_file_path, "w") as f:
                    f.write(f"gitdir: {relative_git_dir}\n")

                # Set up working directory configuration
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    sub_config = sub_repo.get_config()
                    sub_config.set(
                        (b"core",),
                        b"worktree",
                        os.path.abspath(submodule_path).encode(),
                    )
                    sub_config.write_to_path()

                    # Checkout the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Build the index and checkout files
                    tree = sub_repo[target_sha]
                    if hasattr(tree, "tree"):  # If it's a commit, get the tree
                        tree_id = tree.tree
                    else:
                        tree_id = target_sha

                    build_index_from_tree(
                        submodule_path,
                        sub_repo.index_path(),
                        sub_repo.object_store,
                        tree_id,
                    )
            else:
                # Fetch and checkout in existing submodule
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    # Fetch from remote
                    client, path_segments = get_transport_and_path(url)
                    client.fetch(path_segments, sub_repo)

                    # Update to the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Reset the working directory
                    reset(sub_repo, "hard", target_sha)
def tag_create(
    repo,
    tag: Union[str, bytes],
    author: Optional[Union[str, bytes]] = None,
    message: Optional[Union[str, bytes]] = None,
    annotated=False,
    objectish: Union[str, bytes] = "HEAD",
    tag_time=None,
    tag_timezone=None,
    sign: bool = False,
    encoding: str = DEFAULT_ENCODING,
) -> None:
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG Sign the tag (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)
      encoding: Encoding used when converting str arguments to bytes
    """
    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)

        if isinstance(tag, str):
            tag = tag.encode(encoding)
        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                author = get_user_identity(r.get_config_stack())
            elif isinstance(author, str):
                author = author.encode(encoding)
            else:
                assert isinstance(author, bytes)
            tag_obj.tagger = author
            if isinstance(message, str):
                message = message.encode(encoding)
            elif isinstance(message, bytes):
                pass
            else:
                # No message supplied: use an empty one.
                message = b""
            tag_obj.message = message + "\n".encode(encoding)
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                tag_timezone = get_user_timezones()[1]
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone.encode())
            tag_obj.tag_timezone = tag_timezone

            # Check if we should sign the tag
            should_sign = sign
            if sign is None:
                # Check tag.gpgSign configuration when sign is not explicitly set
                # NOTE(review): the declared default is ``sign=False``, so this
                # branch only runs when a caller passes None explicitly —
                # confirm whether the default was meant to be None.
                config = r.get_config_stack()
                try:
                    should_sign = config.get_boolean((b"tag",), b"gpgSign")
                except KeyError:
                    should_sign = False  # Default to not signing if no config
            if should_sign:
                keyid = sign if isinstance(sign, str) else None
                # If sign is True but no keyid specified, check user.signingKey config
                if should_sign is True and keyid is None:
                    config = r.get_config_stack()
                    try:
                        keyid = config.get((b"user",), b"signingKey").decode("ascii")
                    except KeyError:
                        # No user.signingKey configured, will use default GPG key
                        pass
                tag_obj.sign(keyid)

            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            # Lightweight tag: just point the ref at the object itself.
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id
  1549. def tag_list(repo: RepoPath, outstream=sys.stdout):
  1550. """List all tags.
  1551. Args:
  1552. repo: Path to repository
  1553. outstream: Stream to write tags to
  1554. """
  1555. with open_repo_closing(repo) as r:
  1556. tags = sorted(r.refs.as_dict(b"refs/tags"))
  1557. return tags
  1558. def tag_delete(repo: RepoPath, name) -> None:
  1559. """Remove a tag.
  1560. Args:
  1561. repo: Path to repository
  1562. name: Name of tag to remove
  1563. """
  1564. with open_repo_closing(repo) as r:
  1565. if isinstance(name, bytes):
  1566. names = [name]
  1567. elif isinstance(name, list):
  1568. names = name
  1569. else:
  1570. raise Error(f"Unexpected tag name type {name!r}")
  1571. for name in names:
  1572. del r.refs[_make_tag_ref(name)]
  1573. def _make_notes_ref(name: bytes) -> bytes:
  1574. """Make a notes ref name."""
  1575. if name.startswith(b"refs/notes/"):
  1576. return name
  1577. return LOCAL_NOTES_PREFIX + name
  1578. def notes_add(
  1579. repo, object_sha, note, ref=b"commits", author=None, committer=None, message=None
  1580. ):
  1581. """Add or update a note for an object.
  1582. Args:
  1583. repo: Path to repository
  1584. object_sha: SHA of the object to annotate
  1585. note: Note content
  1586. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1587. author: Author identity (defaults to committer)
  1588. committer: Committer identity (defaults to config)
  1589. message: Commit message for the notes update
  1590. Returns:
  1591. SHA of the new notes commit
  1592. """
  1593. with open_repo_closing(repo) as r:
  1594. # Parse the object to get its SHA
  1595. obj = parse_object(r, object_sha)
  1596. object_sha = obj.id
  1597. if isinstance(note, str):
  1598. note = note.encode(DEFAULT_ENCODING)
  1599. if isinstance(ref, str):
  1600. ref = ref.encode(DEFAULT_ENCODING)
  1601. notes_ref = _make_notes_ref(ref)
  1602. config = r.get_config_stack()
  1603. return r.notes.set_note(
  1604. object_sha,
  1605. note,
  1606. notes_ref,
  1607. author=author,
  1608. committer=committer,
  1609. message=message,
  1610. config=config,
  1611. )
  1612. def notes_remove(
  1613. repo, object_sha, ref=b"commits", author=None, committer=None, message=None
  1614. ):
  1615. """Remove a note for an object.
  1616. Args:
  1617. repo: Path to repository
  1618. object_sha: SHA of the object to remove notes from
  1619. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1620. author: Author identity (defaults to committer)
  1621. committer: Committer identity (defaults to config)
  1622. message: Commit message for the notes removal
  1623. Returns:
  1624. SHA of the new notes commit, or None if no note existed
  1625. """
  1626. with open_repo_closing(repo) as r:
  1627. # Parse the object to get its SHA
  1628. obj = parse_object(r, object_sha)
  1629. object_sha = obj.id
  1630. if isinstance(ref, str):
  1631. ref = ref.encode(DEFAULT_ENCODING)
  1632. notes_ref = _make_notes_ref(ref)
  1633. config = r.get_config_stack()
  1634. return r.notes.remove_note(
  1635. object_sha,
  1636. notes_ref,
  1637. author=author,
  1638. committer=committer,
  1639. message=message,
  1640. config=config,
  1641. )
  1642. def notes_show(repo: Union[str, os.PathLike, Repo], object_sha, ref=b"commits"):
  1643. """Show the note for an object.
  1644. Args:
  1645. repo: Path to repository
  1646. object_sha: SHA of the object
  1647. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1648. Returns:
  1649. Note content as bytes, or None if no note exists
  1650. """
  1651. with open_repo_closing(repo) as r:
  1652. # Parse the object to get its SHA
  1653. obj = parse_object(r, object_sha)
  1654. object_sha = obj.id
  1655. if isinstance(ref, str):
  1656. ref = ref.encode(DEFAULT_ENCODING)
  1657. notes_ref = _make_notes_ref(ref)
  1658. config = r.get_config_stack()
  1659. return r.notes.get_note(object_sha, notes_ref, config=config)
  1660. def notes_list(repo: RepoPath, ref=b"commits"):
  1661. """List all notes in a notes ref.
  1662. Args:
  1663. repo: Path to repository
  1664. ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
  1665. Returns:
  1666. List of tuples of (object_sha, note_content)
  1667. """
  1668. with open_repo_closing(repo) as r:
  1669. if isinstance(ref, str):
  1670. ref = ref.encode(DEFAULT_ENCODING)
  1671. notes_ref = _make_notes_ref(ref)
  1672. config = r.get_config_stack()
  1673. return r.notes.list_notes(notes_ref, config=config)
def reset(
    repo: Union[str, os.PathLike, Repo],
    mode,
    treeish: Union[str, bytes, Commit, Tree, Tag] = "HEAD",
) -> None:
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to

    Raises:
      Error: If ``mode`` is not one of "hard", "soft" or "mixed"
    """
    with open_repo_closing(repo) as r:
        # Parse the target tree
        tree = parse_tree(r, treeish)

        # Only parse as commit if treeish is not a Tree object
        if isinstance(treeish, Tree):
            # For Tree objects, we can't determine the commit, skip updating HEAD
            target_commit = None
        else:
            target_commit = parse_commit(r, treeish)

        # Update HEAD to point to the target commit
        if target_commit is not None:
            r.refs[b"HEAD"] = target_commit.id

        if mode == "soft":
            # Soft reset: only update HEAD, leave index and working tree unchanged
            return

        elif mode == "mixed":
            # Mixed reset: update HEAD and index, but leave working tree unchanged
            from .object_store import iter_tree_contents

            # Open the index
            index = r.open_index()

            # Clear the current index
            index.clear()

            # Populate index from the target tree
            for entry in iter_tree_contents(r.object_store, tree.id):
                # Create an IndexEntry from the tree entry
                # Use zeros for filesystem-specific fields since we're not touching the working tree
                index_entry = IndexEntry(
                    ctime=(0, 0),
                    mtime=(0, 0),
                    dev=0,
                    ino=0,
                    mode=entry.mode,
                    uid=0,
                    gid=0,
                    size=0,  # Size will be 0 since we're not reading from disk
                    sha=entry.sha,
                    flags=0,
                )
                index[entry.path] = index_entry

            # Write the updated index
            index.write()

        elif mode == "hard":
            # Hard reset: update HEAD, index, and working tree
            # Get configuration for working directory update
            config = r.get_config()
            honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")

            # NOTE(review): these lookups pass "core.protectNTFS"/"core.protectHFS"
            # as the key within the "core" section — confirm the intended key
            # is just "protectNTFS"/"protectHFS".
            if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
                validate_path_element = validate_path_element_ntfs
            elif config.get_boolean(
                b"core", b"core.protectHFS", sys.platform == "darwin"
            ):
                validate_path_element = validate_path_element_hfs
            else:
                validate_path_element = validate_path_element_default

            if config.get_boolean(b"core", b"symlinks", True):
                symlink_fn = symlink
            else:
                # Symlinks disabled in config: write the link target's content
                # to a regular file instead.
                def symlink_fn(  # type: ignore
                    source, target, target_is_directory=False, *, dir_fd=None
                ) -> None:
                    mode = "w" + ("b" if isinstance(source, bytes) else "")
                    with open(target, mode) as f:
                        f.write(source)

            # Update working tree and index
            blob_normalizer = r.get_blob_normalizer()

            # For reset --hard, use current index tree as old tree to get proper deletions
            index = r.open_index()
            if len(index) > 0:
                index_tree_id = index.commit(r.object_store)
            else:
                # Empty index
                index_tree_id = None

            changes = tree_changes(
                r.object_store, index_tree_id, tree.id, want_unchanged=True
            )
            update_working_tree(
                r,
                index_tree_id,
                tree.id,
                change_iterator=changes,
                honor_filemode=honor_filemode,
                validate_path_element=validate_path_element,
                symlink_fn=symlink_fn,
                force_remove_untracked=True,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=True,  # Allow overwriting modified files
            )
        else:
            raise Error(f"Invalid reset mode: {mode}")
  1774. def get_remote_repo(
  1775. repo: Repo, remote_location: Optional[Union[str, bytes]] = None
  1776. ) -> tuple[Optional[str], str]:
  1777. """Get the remote repository information.
  1778. Args:
  1779. repo: Local repository object
  1780. remote_location: Optional remote name or URL; defaults to branch remote
  1781. Returns:
  1782. Tuple of (remote_name, remote_url) where remote_name may be None
  1783. if remote_location is a URL rather than a configured remote
  1784. """
  1785. config = repo.get_config()
  1786. if remote_location is None:
  1787. remote_location = get_branch_remote(repo)
  1788. if isinstance(remote_location, str):
  1789. encoded_location = remote_location.encode()
  1790. else:
  1791. encoded_location = remote_location
  1792. section = (b"remote", encoded_location)
  1793. remote_name: Optional[str] = None
  1794. if config.has_section(section):
  1795. remote_name = encoded_location.decode()
  1796. encoded_location = config.get(section, "url")
  1797. else:
  1798. remote_name = None
  1799. return (remote_name, encoded_location.decode())
  1800. def push(
  1801. repo,
  1802. remote_location=None,
  1803. refspecs=None,
  1804. outstream=default_bytes_out_stream,
  1805. errstream=default_bytes_err_stream,
  1806. force=False,
  1807. **kwargs,
  1808. ):
  1809. """Remote push with dulwich via dulwich.client.
  1810. Args:
  1811. repo: Path to repository
  1812. remote_location: Location of the remote
  1813. refspecs: Refs to push to remote
  1814. outstream: A stream file to write output
  1815. errstream: A stream file to write errors
  1816. force: Force overwriting refs
  1817. """
  1818. # Open the repo
  1819. with open_repo_closing(repo) as r:
  1820. (remote_name, remote_location) = get_remote_repo(r, remote_location)
  1821. # Check if mirror mode is enabled
  1822. mirror_mode = False
  1823. if remote_name:
  1824. try:
  1825. mirror_mode = r.get_config_stack().get_boolean(
  1826. (b"remote", remote_name.encode()), b"mirror"
  1827. )
  1828. except KeyError:
  1829. pass
  1830. if mirror_mode:
  1831. # Mirror mode: push all refs and delete non-existent ones
  1832. refspecs = []
  1833. for ref in r.refs.keys():
  1834. # Push all refs to the same name on remote
  1835. refspecs.append(ref + b":" + ref)
  1836. elif refspecs is None:
  1837. refspecs = [active_branch(r)]
  1838. # Get the client and path
  1839. client, path = get_transport_and_path(
  1840. remote_location, config=r.get_config_stack(), **kwargs
  1841. )
  1842. selected_refs = []
  1843. remote_changed_refs = {}
  1844. def update_refs(refs):
  1845. selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force))
  1846. new_refs = {}
  1847. # In mirror mode, delete remote refs that don't exist locally
  1848. if mirror_mode:
  1849. local_refs = set(r.refs.keys())
  1850. for remote_ref in refs.keys():
  1851. if remote_ref not in local_refs:
  1852. new_refs[remote_ref] = ZERO_SHA
  1853. remote_changed_refs[remote_ref] = None
  1854. # TODO: Handle selected_refs == {None: None}
  1855. for lh, rh, force_ref in selected_refs:
  1856. if lh is None:
  1857. new_refs[rh] = ZERO_SHA
  1858. remote_changed_refs[rh] = None
  1859. else:
  1860. try:
  1861. localsha = r.refs[lh]
  1862. except KeyError as exc:
  1863. raise Error(f"No valid ref {lh} in local repository") from exc
  1864. if not force_ref and rh in refs:
  1865. check_diverged(r, refs[rh], localsha)
  1866. new_refs[rh] = localsha
  1867. remote_changed_refs[rh] = localsha
  1868. return new_refs
  1869. err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
  1870. remote_location = client.get_url(path)
  1871. try:
  1872. result = client.send_pack(
  1873. path,
  1874. update_refs,
  1875. generate_pack_data=r.generate_pack_data,
  1876. progress=errstream.write,
  1877. )
  1878. except SendPackError as exc:
  1879. raise Error(
  1880. "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
  1881. ) from exc
  1882. else:
  1883. errstream.write(
  1884. b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
  1885. )
  1886. for ref, error in (result.ref_status or {}).items():
  1887. if error is not None:
  1888. errstream.write(
  1889. b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
  1890. )
  1891. else:
  1892. errstream.write(b"Ref %s updated\n" % ref)
  1893. if remote_name is not None:
  1894. _import_remote_refs(r.refs, remote_name, remote_changed_refs)
  1895. return result
  1896. # Trigger auto GC if needed
  1897. from .gc import maybe_auto_gc
  1898. with open_repo_closing(repo) as r:
  1899. maybe_auto_gc(r)
def pull(
    repo,
    remote_location=None,
    refspecs=None,
    outstream=default_bytes_out_stream,
    errstream=default_bytes_err_stream,
    fast_forward=True,
    ff_only=False,
    force=False,
    filter_spec=None,
    protocol_version=None,
    **kwargs,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: If True, allow overwriting local changes in the working tree.
        If False, pull will abort if it would overwrite uncommitted changes.
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []

        if refspecs is None:
            refspecs = [b"HEAD"]

        def determine_wants(remote_refs, *args, **kwargs):
            # Side effect: records the parsed (local, remote, force) ref
            # tuples so the update loop below can reuse them after the fetch.
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, refspecs, force=force)
            )
            # Only request objects we do not already have locally.
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        if filter_spec:
            filter_spec = filter_spec.encode("ascii")
        fetch_result = client.fetch(
            path,
            r,
            progress=errstream.write,
            determine_wants=determine_wants,
            filter_spec=filter_spec,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes, so the working tree
        # can be updated from old tree -> new tree afterwards.
        try:
            old_head = r.refs[b"HEAD"]
            old_tree_id = r[old_head].tree
        except KeyError:
            # Unborn HEAD (fresh repository): nothing to diff against.
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh in r.refs:
                try:
                    check_diverged(r, r.refs.follow(rh)[1], fetch_result.refs[lh])
                except DivergedBranches as exc:
                    # NOTE(review): with the default fast_forward=True this
                    # re-raises; the merge path below only runs when callers
                    # pass fast_forward=False and ff_only=False.
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        merge_result, conflicts = _do_merge(r, fetch_result.refs[lh])
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            r.refs[rh] = fetch_result.refs[lh]

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]]

        # Update working tree to match the new HEAD
        # Skip if merge was performed as merge already updates the working tree
        if not merged and old_tree_id is not None:
            new_tree_id = r[b"HEAD"].tree
            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(r.object_store, old_tree_id, new_tree_id)
            update_working_tree(
                r,
                old_tree_id,
                new_tree_id,
                change_iterator=changes,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=force,
            )
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
def status(
    repo: Union[str, os.PathLike, Repo] = ".",
    ignored=False,
    untracked_files="normal",
):
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
          "no": do not return untracked files
          "normal": return untracked directories, not their contents
          "all": include all files in untracked directories

        Using untracked_files="no" can be faster than "all" when the worktree
          contains many untracked files/directories.

        Using untracked_files="normal" provides a good balance, only showing
          directories that are entirely untracked without listing all their contents.

    Returns: GitStatus tuple,
        staged -  dict with lists of staged paths (diff index/HEAD)
        unstaged -  list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)
        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback))

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            # Normalize Windows separators so reported paths use git's
            # forward-slash convention.
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
def _walk_working_dir_paths(frompath, basepath, prune_dirnames=None):
    """Get path, is_dir for files in working dir from frompath.

    Yields (path, is_dir) tuples: directories (other than frompath itself)
    are yielded before the files they contain.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
      prune_dirnames: Optional callback to prune dirnames during os.walk
        dirnames will be set to result of prune_dirnames(dirpath, dirnames)
    """
    for dirpath, dirnames, filenames in os.walk(frompath):
        # Skip .git and below.
        # Mutating `dirnames` in place prunes os.walk's descent (topdown walk).
        if ".git" in dirnames:
            dirnames.remove(".git")
            # A .git entry below basepath marks a nested repository: skip its
            # contents entirely.
            if dirpath != basepath:
                continue

        if ".git" in filenames:
            filenames.remove(".git")
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            yield filepath, False

        # Apply caller-supplied pruning last so it sees the .git-filtered list.
        if prune_dirnames:
            dirnames[:] = prune_dirnames(dirpath, dirnames)
def get_untracked_paths(
    frompath, basepath, index, exclude_ignored=False, untracked_files="all"
):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    with open_repo_closing(basepath) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    # Ignored directories encountered while pruning (yielded at the end).
    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def directory_has_non_ignored_files(dir_path, base_rel_path):
        """Recursively check if directory contains any non-ignored files."""
        try:
            for entry in os.listdir(dir_path):
                entry_path = os.path.join(dir_path, entry)
                rel_entry = os.path.join(base_rel_path, entry)
                if os.path.isfile(entry_path):
                    # is_ignored may return None (no matching rule); treat
                    # anything other than an explicit True as "not ignored".
                    if ignore_manager.is_ignored(rel_entry) is not True:
                        return True
                elif os.path.isdir(entry_path):
                    if directory_has_non_ignored_files(entry_path, rel_entry):
                        return True
            return False
        except OSError:
            # If we can't read the directory, assume it has non-ignored files
            return True

    def prune_dirnames(dirpath, dirnames):
        # Iterate backwards so `del dirnames[i]` is safe while looping.
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            # Trailing separator so directory-only ignore patterns match.
            ip = os.path.join(os.path.relpath(path, basepath), "")

            # Check if directory is ignored
            if ignore_manager.is_ignored(ip) is True:
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert directory path to tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    rel_path_base = os.path.relpath(path, basepath)
                    rel_path_from = os.path.join(os.path.relpath(path, frompath), "")

                    # If excluding ignored, check if directory contains any non-ignored files
                    if exclude_ignored:
                        if not directory_has_non_ignored_files(path, rel_path_base):
                            # Directory only contains ignored files, skip it
                            del dirnames[i]
                            continue

                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(rel_path_base)
                    if not exclude_ignored or not is_ignored:
                        untracked_dir_list.append(rel_path_from)
                    # Prune: contents of fully-untracked dirs are not listed.
                    del dirnames[i]
        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath, basepath, prune_dirnames=prune_dirnames
        ):
            if not is_dir:
                ip = path_to_tree_path(basepath, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.relpath(ap, frompath)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath, basepath, prune_dirnames=prune_dirnames
        ):
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)
                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath)
                    ):
                        yield os.path.relpath(ap, frompath)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list
    yield from ignored_dirs
  2193. def get_tree_changes(repo: RepoPath):
  2194. """Return add/delete/modify changes to tree by comparing index to HEAD.
  2195. Args:
  2196. repo: repo path or object
  2197. Returns: dict with lists for each type of change
  2198. """
  2199. with open_repo_closing(repo) as r:
  2200. index = r.open_index()
  2201. # Compares the Index to the HEAD & determines changes
  2202. # Iterate through the changes and report add/delete/modify
  2203. # TODO: call out to dulwich.diff_tree somehow.
  2204. tracked_changes: dict[str, list[Union[str, bytes]]] = {
  2205. "add": [],
  2206. "delete": [],
  2207. "modify": [],
  2208. }
  2209. try:
  2210. tree_id = r[b"HEAD"].tree
  2211. except KeyError:
  2212. tree_id = None
  2213. for change in index.changes_from_tree(r.object_store, tree_id):
  2214. if not change[0][0]:
  2215. assert change[0][1] is not None
  2216. tracked_changes["add"].append(change[0][1])
  2217. elif not change[0][1]:
  2218. assert change[0][0] is not None
  2219. tracked_changes["delete"].append(change[0][0])
  2220. elif change[0][0] == change[0][1]:
  2221. assert change[0][0] is not None
  2222. tracked_changes["modify"].append(change[0][0])
  2223. else:
  2224. raise NotImplementedError("git mv ops not yet supported")
  2225. return tracked_changes
  2226. def daemon(path=".", address=None, port=None) -> None:
  2227. """Run a daemon serving Git requests over TCP/IP.
  2228. Args:
  2229. path: Path to the directory to serve.
  2230. address: Optional address to listen on (defaults to ::)
  2231. port: Optional port to listen on (defaults to TCP_GIT_PORT)
  2232. """
  2233. # TODO(jelmer): Support git-daemon-export-ok and --export-all.
  2234. backend = FileSystemBackend(path)
  2235. server = TCPGitServer(backend, address, port)
  2236. server.serve_forever()
  2237. def web_daemon(path=".", address=None, port=None) -> None:
  2238. """Run a daemon serving Git requests over HTTP.
  2239. Args:
  2240. path: Path to the directory to serve
  2241. address: Optional address to listen on (defaults to ::)
  2242. port: Optional port to listen on (defaults to 80)
  2243. """
  2244. from .web import (
  2245. WSGIRequestHandlerLogger,
  2246. WSGIServerLogger,
  2247. make_server,
  2248. make_wsgi_chain,
  2249. )
  2250. backend = FileSystemBackend(path)
  2251. app = make_wsgi_chain(backend)
  2252. server = make_server(
  2253. address,
  2254. port,
  2255. app,
  2256. handler_class=WSGIRequestHandlerLogger,
  2257. server_class=WSGIServerLogger,
  2258. )
  2259. server.serve_forever()
  2260. def upload_pack(path=".", inf=None, outf=None) -> int:
  2261. """Upload a pack file after negotiating its contents using smart protocol.
  2262. Args:
  2263. path: Path to the repository
  2264. inf: Input stream to communicate with client
  2265. outf: Output stream to communicate with client
  2266. """
  2267. if outf is None:
  2268. outf = getattr(sys.stdout, "buffer", sys.stdout)
  2269. if inf is None:
  2270. inf = getattr(sys.stdin, "buffer", sys.stdin)
  2271. path = os.path.expanduser(path)
  2272. backend = FileSystemBackend(path)
  2273. def send_fn(data) -> None:
  2274. outf.write(data)
  2275. outf.flush()
  2276. proto = Protocol(inf.read, send_fn)
  2277. handler = UploadPackHandler(backend, [path], proto)
  2278. # FIXME: Catch exceptions and write a single-line summary to outf.
  2279. handler.handle()
  2280. return 0
  2281. def receive_pack(path=".", inf=None, outf=None) -> int:
  2282. """Receive a pack file after negotiating its contents using smart protocol.
  2283. Args:
  2284. path: Path to the repository
  2285. inf: Input stream to communicate with client
  2286. outf: Output stream to communicate with client
  2287. """
  2288. if outf is None:
  2289. outf = getattr(sys.stdout, "buffer", sys.stdout)
  2290. if inf is None:
  2291. inf = getattr(sys.stdin, "buffer", sys.stdin)
  2292. path = os.path.expanduser(path)
  2293. backend = FileSystemBackend(path)
  2294. def send_fn(data) -> None:
  2295. outf.write(data)
  2296. outf.flush()
  2297. proto = Protocol(inf.read, send_fn)
  2298. handler = ReceivePackHandler(backend, [path], proto)
  2299. # FIXME: Catch exceptions and write a single-line summary to outf.
  2300. handler.handle()
  2301. return 0
  2302. def _make_branch_ref(name: Union[str, bytes]) -> Ref:
  2303. if isinstance(name, str):
  2304. name = name.encode(DEFAULT_ENCODING)
  2305. return LOCAL_BRANCH_PREFIX + name
  2306. def _make_tag_ref(name: Union[str, bytes]) -> Ref:
  2307. if isinstance(name, str):
  2308. name = name.encode(DEFAULT_ENCODING)
  2309. return LOCAL_TAG_PREFIX + name
  2310. def branch_delete(repo: RepoPath, name) -> None:
  2311. """Delete a branch.
  2312. Args:
  2313. repo: Path to the repository
  2314. name: Name of the branch
  2315. """
  2316. with open_repo_closing(repo) as r:
  2317. if isinstance(name, list):
  2318. names = name
  2319. else:
  2320. names = [name]
  2321. for name in names:
  2322. del r.refs[_make_branch_ref(name)]
def branch_create(
    repo: Union[str, os.PathLike, Repo], name, objectish=None, force=False
) -> None:
    """Create a branch.

    Also sets up remote tracking configuration when the start point is a
    remote-tracking branch and branch.autoSetupMerge permits it.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists

    Raises:
      Error: if the branch already exists and force is False
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"

        # Try to expand branch shorthand before parsing
        original_objectish = objectish
        objectish_bytes = (
            objectish.encode(DEFAULT_ENCODING)
            if isinstance(objectish, str)
            else objectish
        )
        # Prefer remote-tracking refs over local heads when expanding, so
        # "origin/foo" style shorthands resolve as expected.
        if b"refs/remotes/" + objectish_bytes in r.refs:
            objectish = b"refs/remotes/" + objectish_bytes
        elif b"refs/heads/" + objectish_bytes in r.refs:
            objectish = b"refs/heads/" + objectish_bytes

        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        # Reflog message records the (unexpanded) start point the user gave.
        ref_message = (
            b"branch: Created from " + original_objectish.encode(DEFAULT_ENCODING)
            if isinstance(original_objectish, str)
            else b"branch: Created from " + original_objectish
        )
        if force:
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            # add_if_new is atomic: returns False if the ref already exists.
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                raise Error(f"Branch with name {name} already exists.")

        # Check if we should set up tracking
        config = r.get_config_stack()
        try:
            auto_setup_merge = config.get((b"branch",), b"autoSetupMerge").decode()
        except KeyError:
            auto_setup_merge = "true"  # Default value

        # Determine if the objectish refers to a remote-tracking branch
        objectish_ref = None
        if original_objectish != "HEAD":
            # Try to resolve objectish as a ref
            objectish_bytes = (
                original_objectish.encode(DEFAULT_ENCODING)
                if isinstance(original_objectish, str)
                else original_objectish
            )
            if objectish_bytes in r.refs:
                objectish_ref = objectish_bytes
            elif b"refs/remotes/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/remotes/" + objectish_bytes
            elif b"refs/heads/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/heads/" + objectish_bytes
        else:
            # HEAD might point to a remote-tracking branch
            head_ref = r.refs.follow(b"HEAD")[0][1]
            if head_ref.startswith(b"refs/remotes/"):
                objectish_ref = head_ref

        # Set up tracking if appropriate:
        # "always" tracks any start-point ref; "true" only remote-tracking ones.
        if objectish_ref and (
            (auto_setup_merge == "always")
            or (
                auto_setup_merge == "true"
                and objectish_ref.startswith(b"refs/remotes/")
            )
        ):
            # Extract remote name and branch from the ref
            if objectish_ref.startswith(b"refs/remotes/"):
                parts = objectish_ref[len(b"refs/remotes/") :].split(b"/", 1)
                if len(parts) == 2:
                    remote_name = parts[0]
                    remote_branch = b"refs/heads/" + parts[1]

                    # Set up tracking
                    repo_config = r.get_config()
                    branch_name_bytes = (
                        name.encode(DEFAULT_ENCODING) if isinstance(name, str) else name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"remote", remote_name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"merge", remote_branch
                    )
                    repo_config.write_to_path()
def branch_list(repo: RepoPath):
    """List all branches.

    Honors the branch.sort configuration key ("refname", "committerdate",
    "authordate", optionally prefixed with "-" for descending order).

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        branches = list(r.refs.keys(base=LOCAL_BRANCH_PREFIX))

        # Check for branch.sort configuration
        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key: a leading "-" requests descending order.
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date

            def get_commit_date(branch_name):
                # Look up the commit each branch points at and return the
                # requested timestamp field.
                ref = LOCAL_BRANCH_PREFIX + branch_name
                sha = r.refs[ref]
                commit = r.object_store[sha]
                if sort_key == "committerdate":
                    return commit.commit_time
                else:  # authordate
                    return commit.author_time

            # Sort branches by date
            # Note: Python's sort naturally orders smaller values first (ascending)
            # For dates, this means oldest first by default
            # Use a stable sort with branch name as secondary key for consistent ordering
            if reverse:
                # For reverse sort, we want newest dates first but alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key, fall back to default
            branches.sort()

        return branches
  2459. def active_branch(repo: RepoPath):
  2460. """Return the active branch in the repository, if any.
  2461. Args:
  2462. repo: Repository to open
  2463. Returns:
  2464. branch name
  2465. Raises:
  2466. KeyError: if the repository does not have a working tree
  2467. IndexError: if HEAD is floating
  2468. """
  2469. with open_repo_closing(repo) as r:
  2470. active_ref = r.refs.follow(b"HEAD")[0][1]
  2471. if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
  2472. raise ValueError(active_ref)
  2473. return active_ref[len(LOCAL_BRANCH_PREFIX) :]
  2474. def get_branch_remote(repo: Union[str, os.PathLike, Repo]):
  2475. """Return the active branch's remote name, if any.
  2476. Args:
  2477. repo: Repository to open
  2478. Returns:
  2479. remote name
  2480. Raises:
  2481. KeyError: if the repository does not have a working tree
  2482. """
  2483. with open_repo_closing(repo) as r:
  2484. branch_name = active_branch(r.path)
  2485. config = r.get_config()
  2486. try:
  2487. remote_name = config.get((b"branch", branch_name), b"remote")
  2488. except KeyError:
  2489. remote_name = b"origin"
  2490. return remote_name
  2491. def get_branch_merge(repo: RepoPath, branch_name=None):
  2492. """Return the branch's merge reference (upstream branch), if any.
  2493. Args:
  2494. repo: Repository to open
  2495. branch_name: Name of the branch (defaults to active branch)
  2496. Returns:
  2497. merge reference name (e.g. b"refs/heads/main")
  2498. Raises:
  2499. KeyError: if the branch does not have a merge configuration
  2500. """
  2501. with open_repo_closing(repo) as r:
  2502. if branch_name is None:
  2503. branch_name = active_branch(r.path)
  2504. config = r.get_config()
  2505. return config.get((b"branch", branch_name), b"merge")
  2506. def set_branch_tracking(
  2507. repo: Union[str, os.PathLike, Repo], branch_name, remote_name, remote_ref
  2508. ):
  2509. """Set up branch tracking configuration.
  2510. Args:
  2511. repo: Repository to open
  2512. branch_name: Name of the local branch
  2513. remote_name: Name of the remote (e.g. b"origin")
  2514. remote_ref: Remote reference to track (e.g. b"refs/heads/main")
  2515. """
  2516. with open_repo_closing(repo) as r:
  2517. config = r.get_config()
  2518. config.set((b"branch", branch_name), b"remote", remote_name)
  2519. config.set((b"branch", branch_name), b"merge", remote_ref)
  2520. config.write_to_path()
def fetch(
    repo,
    remote_location=None,
    outstream=sys.stdout,
    errstream=default_bytes_err_stream,
    message=None,
    depth=None,
    prune=False,
    prune_tags=False,
    force=False,
    **kwargs,
):
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags

    Returns:
      Dictionary with refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        if message is None:
            message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs
        )
        fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth)
        # Only update refs/remotes/<name>/* when fetching from a named remote.
        if remote_name is not None:
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return fetch_result
def for_each_ref(
    repo: Union[Repo, str] = ".",
    pattern: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with

    Returns: List of bytes tuples with: (sha, object_type, ref_name)
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

    if pattern:
        matching_refs: dict[bytes, bytes] = {}
        pattern_parts = pattern.split(b"/")
        for ref, sha in refs.items():
            matches = False

            # git for-each-ref uses glob (7) style patterns, but fnmatch
            # is greedy and also matches slashes, unlike glob.glob.
            # We have to check parts of the pattern individually.
            # See https://github.com/python/cpython/issues/72904
            ref_parts = ref.split(b"/")
            # A ref with more components than the pattern cannot match.
            if len(ref_parts) > len(pattern_parts):
                continue
            # zip stops at the shorter sequence, so each ref component must
            # match its corresponding pattern component.
            for pat, ref_part in zip(pattern_parts, ref_parts):
                matches = fnmatch.fnmatchcase(ref_part, pat)
                if not matches:
                    break
            if matches:
                matching_refs[ref] = sha

        refs = matching_refs

    # Sort by ref name; HEAD itself is excluded from the listing.
    ret: list[tuple[bytes, bytes, bytes]] = [
        (sha, r.get_object(sha).type_name, ref)
        for ref, sha in sorted(
            refs.items(),
            key=lambda ref_sha: ref_sha[0],
        )
        if ref != b"HEAD"
    ]

    return ret
  2610. def ls_remote(remote, config: Optional[Config] = None, **kwargs):
  2611. """List the refs in a remote.
  2612. Args:
  2613. remote: Remote repository location
  2614. config: Configuration to use
  2615. Returns:
  2616. LsRemoteResult object with refs and symrefs
  2617. """
  2618. if config is None:
  2619. config = StackedConfig.default()
  2620. client, host_path = get_transport_and_path(remote, config=config, **kwargs)
  2621. return client.get_refs(host_path)
  2622. def repack(repo: RepoPath) -> None:
  2623. """Repack loose files in a repository.
  2624. Currently this only packs loose objects.
  2625. Args:
  2626. repo: Path to the repository
  2627. """
  2628. with open_repo_closing(repo) as r:
  2629. r.object_store.pack_loose_objects()
def pack_objects(
    repo,
    object_ids,
    packf,
    idxf,
    delta_window_size=None,
    deltify=None,
    reuse_deltas=True,
    pack_index_version=None,
) -> None:
    """Pack objects into a file.

    Args:
      repo: Path to the repository
      object_ids: List of object ids to write
      packf: File-like object to write to
      idxf: File-like object to write to (can be None)
      delta_window_size: Sliding window size for searching for deltas;
        Set to None for default window size.
      deltify: Whether to deltify objects
      reuse_deltas: Allow reuse of existing deltas while deltifying
      pack_index_version: Pack index version to use (1, 2, or 3). If None, uses default version.
    """
    with open_repo_closing(repo) as r:
        # (oid, None) pairs: no path hints are available for delta ordering.
        entries, data_sum = write_pack_from_container(
            packf.write,
            r.object_store,
            [(oid, None) for oid in object_ids],
            deltify=deltify,
            delta_window_size=delta_window_size,
            reuse_deltas=reuse_deltas,
        )
    if idxf is not None:
        # Index entries must be sorted by object id before writing the index.
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        write_pack_index(idxf, entries, data_sum, version=pack_index_version)
def ls_tree(
    repo,
    treeish: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
    outstream=sys.stdout,
    recursive=False,
    name_only=False,
) -> None:
    """List contents of a tree.

    Args:
      repo: Path to the repository
      treeish: Tree id to list
      outstream: Output stream (defaults to stdout)
      recursive: Whether to recursively list files
      name_only: Only print item name
    """

    def list_tree(store, treeid, base) -> None:
        # Depth-first listing: entries of this tree, recursing into
        # subtrees when requested.
        for name, mode, sha in store[treeid].iteritems():
            if base:
                name = posixpath.join(base, name)
            if name_only:
                outstream.write(name + b"\n")
            else:
                outstream.write(pretty_format_tree_entry(name, mode, sha))
            if stat.S_ISDIR(mode) and recursive:
                list_tree(store, sha, name)

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        list_tree(r.object_store, tree.id, "")
  2692. def remote_add(
  2693. repo: RepoPath,
  2694. name: Union[bytes, str],
  2695. url: Union[bytes, str],
  2696. ) -> None:
  2697. """Add a remote.
  2698. Args:
  2699. repo: Path to the repository
  2700. name: Remote name
  2701. url: Remote URL
  2702. """
  2703. if not isinstance(name, bytes):
  2704. name = name.encode(DEFAULT_ENCODING)
  2705. if not isinstance(url, bytes):
  2706. url = url.encode(DEFAULT_ENCODING)
  2707. with open_repo_closing(repo) as r:
  2708. c = r.get_config()
  2709. section = (b"remote", name)
  2710. if c.has_section(section):
  2711. raise RemoteExists(section)
  2712. c.set(section, b"url", url)
  2713. c.write_to_path()
  2714. def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
  2715. """Remove a remote.
  2716. Args:
  2717. repo: Path to the repository
  2718. name: Remote name
  2719. """
  2720. if not isinstance(name, bytes):
  2721. name = name.encode(DEFAULT_ENCODING)
  2722. with open_repo_closing(repo) as r:
  2723. c = r.get_config()
  2724. section = (b"remote", name)
  2725. del c[section]
  2726. c.write_to_path()
  2727. def _quote_path(path: str) -> str:
  2728. """Quote a path using C-style quoting similar to git's core.quotePath.
  2729. Args:
  2730. path: Path to quote
  2731. Returns:
  2732. Quoted path string
  2733. """
  2734. # Check if path needs quoting (non-ASCII or special characters)
  2735. needs_quoting = False
  2736. for char in path:
  2737. if ord(char) > 127 or char in '"\\':
  2738. needs_quoting = True
  2739. break
  2740. if not needs_quoting:
  2741. return path
  2742. # Apply C-style quoting
  2743. quoted = '"'
  2744. for char in path:
  2745. if ord(char) > 127:
  2746. # Non-ASCII character, encode as octal escape
  2747. utf8_bytes = char.encode("utf-8")
  2748. for byte in utf8_bytes:
  2749. quoted += f"\\{byte:03o}"
  2750. elif char == '"':
  2751. quoted += '\\"'
  2752. elif char == "\\":
  2753. quoted += "\\\\"
  2754. else:
  2755. quoted += char
  2756. quoted += '"'
  2757. return quoted
def check_ignore(repo: RepoPath, paths, no_index=False, quote_path=True):
    r"""Debug gitignore files.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
      quote_path: If True, quote non-ASCII characters in returned paths using
        C-style octal escapes (e.g. "тест.txt" becomes
        "\\321\\202\\320\\265\\321\\201\\321\\202.txt").
        If False, return raw unicode paths.

    Returns: Iterator over the paths that are ignored (this is a generator,
      not a list).
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for original_path in paths:
            # Paths that are already tracked in the index are never reported
            # as ignored (matching git check-ignore behavior), unless the
            # index check is explicitly disabled.
            if not no_index and path_to_tree_path(r.path, original_path) in index:
                continue
            # Preserve whether the original path had a trailing slash
            had_trailing_slash = original_path.endswith(("/", os.path.sep))
            if os.path.isabs(original_path):
                path = os.path.relpath(original_path, r.path)
                # Normalize Windows paths to use forward slashes
                if os.path.sep != "/":
                    path = path.replace(os.path.sep, "/")
            else:
                path = original_path
            # Restore trailing slash if it was in the original
            if had_trailing_slash and not path.endswith("/"):
                path = path + "/"
            # For directories, check with trailing slash to get correct ignore behavior
            test_path = path
            path_without_slash = path.rstrip("/")
            is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))
            # If this is a directory path, ensure we test it correctly
            if is_directory and not path.endswith("/"):
                test_path = path + "/"
            if ignore_manager.is_ignored(test_path):
                # Return relative path (like git does) when absolute path was provided
                if os.path.isabs(original_path):
                    output_path = path
                else:
                    output_path = original_path
                yield _quote_path(output_path) if quote_path else output_path
  2801. def update_head(repo: RepoPath, target, detached=False, new_branch=None) -> None:
  2802. """Update HEAD to point at a new branch/commit.
  2803. Note that this does not actually update the working tree.
  2804. Args:
  2805. repo: Path to the repository
  2806. detached: Create a detached head
  2807. target: Branch or committish to switch to
  2808. new_branch: New branch to create
  2809. """
  2810. with open_repo_closing(repo) as r:
  2811. if new_branch is not None:
  2812. to_set = _make_branch_ref(new_branch)
  2813. else:
  2814. to_set = b"HEAD"
  2815. if detached:
  2816. # TODO(jelmer): Provide some way so that the actual ref gets
  2817. # updated rather than what it points to, so the delete isn't
  2818. # necessary.
  2819. del r.refs[to_set]
  2820. r.refs[to_set] = parse_commit(r, target).id
  2821. else:
  2822. r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
  2823. if new_branch is not None:
  2824. r.refs.set_symbolic_ref(b"HEAD", to_set)
def checkout(
    repo: Union[str, os.PathLike, Repo],
    target: Optional[Union[str, bytes, Commit, Tag]] = None,
    force: bool = False,
    new_branch: Optional[Union[bytes, str]] = None,
    paths: Optional[list[Union[bytes, str]]] = None,
) -> None:
    """Switch to a branch or commit, updating both HEAD and the working tree.

    This is similar to 'git checkout', allowing you to switch to a branch,
    tag, or specific commit. Unlike update_head, this function also updates
    the working tree to match the target.

    Args:
      repo: Path to repository or repository object
      target: Branch name, tag, or commit SHA to checkout. If None and paths is specified,
        restores files from HEAD
      force: Force checkout even if there are local changes
      new_branch: Create a new branch at target (like git checkout -b)
      paths: List of specific paths to checkout. If specified, only these paths are updated
        and HEAD is not changed

    Raises:
      CheckoutError: If checkout cannot be performed due to conflicts
      KeyError: If the target reference cannot be found
    """
    with open_repo_closing(repo) as r:
        # Store the original target for later reference checks
        original_target = target
        worktree = r.get_worktree()
        # Handle path-specific checkout (like git checkout -- <paths>)
        if paths is not None:
            # Convert paths to bytes
            byte_paths = []
            for path in paths:
                if isinstance(path, str):
                    byte_paths.append(path.encode(DEFAULT_ENCODING))
                else:
                    byte_paths.append(path)
            # If no target specified, use HEAD
            if target is None:
                try:
                    target = r.refs[b"HEAD"]
                except KeyError:
                    raise CheckoutError("No HEAD reference found")
            else:
                if isinstance(target, str):
                    target = target.encode(DEFAULT_ENCODING)
            # Get the target commit and tree
            target_tree = parse_tree(r, target)
            # Get blob normalizer for line ending conversion
            blob_normalizer = r.get_blob_normalizer()
            # Restore specified paths from target tree
            for path in byte_paths:
                try:
                    # Look up the path in the target tree
                    mode, sha = target_tree.lookup_path(
                        r.object_store.__getitem__, path
                    )
                    obj = r[sha]
                    assert isinstance(obj, Blob), "Expected a Blob object"
                except KeyError:
                    # Path doesn't exist in target tree; silently skipped,
                    # matching git's behavior of leaving unknown paths alone.
                    pass
                else:
                    # Create directories if needed
                    # Handle path as string
                    if isinstance(path, bytes):
                        path_str = path.decode(DEFAULT_ENCODING)
                    else:
                        path_str = path
                    file_path = os.path.join(r.path, path_str)
                    os.makedirs(os.path.dirname(file_path), exist_ok=True)
                    # Write the file content
                    if stat.S_ISREG(mode):
                        # Apply checkout filters (smudge)
                        if blob_normalizer:
                            obj = blob_normalizer.checkout_normalize(obj, path)
                        flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
                        if sys.platform == "win32":
                            flags |= os.O_BINARY
                        # The tree mode is reused as the file-creation mode here.
                        with os.fdopen(os.open(file_path, flags, mode), "wb") as f:
                            f.write(obj.data)
                    # Update the index
                    worktree.stage(path)
            return
        # Normal checkout (switching branches/commits)
        if target is None:
            raise ValueError("Target must be specified for branch/commit checkout")
        if isinstance(target, str):
            target_bytes = target.encode(DEFAULT_ENCODING)
        elif isinstance(target, bytes):
            target_bytes = target
        else:
            # For Commit/Tag objects, we'll use their SHA
            target_bytes = target.id
        if isinstance(new_branch, str):
            new_branch = new_branch.encode(DEFAULT_ENCODING)
        # Parse the target to get the commit
        assert (
            original_target is not None
        )  # Guaranteed by earlier check for normal checkout
        target_commit = parse_commit(r, original_target)
        target_tree_id = target_commit.tree
        # Get current HEAD tree for comparison
        try:
            current_head = r.refs[b"HEAD"]
            current_commit = r[current_head]
            assert isinstance(current_commit, Commit), "Expected a Commit object"
            current_tree_id = current_commit.tree
        except KeyError:
            # No HEAD yet (empty repo)
            current_tree_id = None
        # Check for uncommitted changes if not forcing
        if not force and current_tree_id is not None:
            status_report = status(r)
            changes = []
            # staged is a dict with 'add', 'delete', 'modify' keys
            if isinstance(status_report.staged, dict):
                changes.extend(status_report.staged.get("add", []))
                changes.extend(status_report.staged.get("delete", []))
                changes.extend(status_report.staged.get("modify", []))
            # unstaged is a list
            changes.extend(status_report.unstaged)
            if changes:
                # Check if any changes would conflict with checkout
                target_tree = r[target_tree_id]
                assert isinstance(target_tree, Tree), "Expected a Tree object"
                for change in changes:
                    if isinstance(change, str):
                        change = change.encode(DEFAULT_ENCODING)
                    try:
                        target_tree.lookup_path(r.object_store.__getitem__, change)
                    except KeyError:
                        # File doesn't exist in target tree - change can be preserved
                        pass
                    else:
                        # File exists in target tree - would overwrite local changes
                        raise CheckoutError(
                            f"Your local changes to '{change.decode()}' would be "
                            "overwritten by checkout. Please commit or stash before switching."
                        )
        # Get configuration for working directory update
        config = r.get_config()
        honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
        # NOTE(review): the key is looked up as (b"core", b"core.protectNTFS"),
        # i.e. "core.core.protectNTFS" — looks like it was meant to be
        # (b"core", b"protectNTFS"); confirm against the config layer.
        if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"):
            validate_path_element = validate_path_element_ntfs
        else:
            validate_path_element = validate_path_element_default
        if config.get_boolean(b"core", b"symlinks", True):
            symlink_fn = symlink
        else:
            # core.symlinks=false: write the link target into a regular file
            # instead of creating a real symlink.
            def symlink_fn(source, target) -> None:  # type: ignore
                mode = "w" + ("b" if isinstance(source, bytes) else "")
                with open(target, mode) as f:
                    f.write(source)

        # Get blob normalizer for line ending conversion
        blob_normalizer = r.get_blob_normalizer()
        # Update working tree
        tree_change_iterator: Iterator[TreeChange] = tree_changes(
            r.object_store, current_tree_id, target_tree_id
        )
        update_working_tree(
            r,
            current_tree_id,
            target_tree_id,
            change_iterator=tree_change_iterator,
            honor_filemode=honor_filemode,
            validate_path_element=validate_path_element,
            symlink_fn=symlink_fn,
            force_remove_untracked=force,
            blob_normalizer=blob_normalizer,
            allow_overwrite_modified=force,
        )
        # Update HEAD
        if new_branch:
            # Create new branch and switch to it
            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
            update_head(r, new_branch)
            # Set up tracking if creating from a remote branch
            from .refs import LOCAL_REMOTE_PREFIX, parse_remote_ref

            # NOTE(review): tracking is only configured when the caller passed
            # the target as bytes; a str "refs/remotes/..." target skips this
            # branch — confirm whether that is intentional.
            if isinstance(original_target, bytes) and target_bytes.startswith(
                LOCAL_REMOTE_PREFIX
            ):
                try:
                    remote_name, branch_name = parse_remote_ref(target_bytes)
                    # Set tracking to refs/heads/<branch> on the remote
                    set_branch_tracking(
                        r, new_branch, remote_name, b"refs/heads/" + branch_name
                    )
                except ValueError:
                    # Invalid remote ref format, skip tracking setup
                    pass
        else:
            # Check if target is a branch name (with or without refs/heads/ prefix)
            branch_ref = None
            if (
                isinstance(original_target, (str, bytes))
                and target_bytes in r.refs.keys()
            ):
                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
                    branch_ref = target_bytes
            else:
                # Try adding refs/heads/ prefix
                potential_branch = (
                    _make_branch_ref(target_bytes)
                    if isinstance(original_target, (str, bytes))
                    else None
                )
                if potential_branch in r.refs.keys():
                    branch_ref = potential_branch
            if branch_ref:
                # It's a branch - update HEAD symbolically
                update_head(r, branch_ref)
            else:
                # It's a tag, other ref, or commit SHA - detached HEAD
                update_head(r, target_commit.id.decode("ascii"), detached=True)
  3039. def reset_file(
  3040. repo,
  3041. file_path: str,
  3042. target: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
  3043. symlink_fn=None,
  3044. ) -> None:
  3045. """Reset the file to specific commit or branch.
  3046. Args:
  3047. repo: dulwich Repo object
  3048. file_path: file to reset, relative to the repository path
  3049. target: branch or commit or b'HEAD' to reset
  3050. """
  3051. tree = parse_tree(repo, treeish=target)
  3052. tree_path = _fs_to_tree_path(file_path)
  3053. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  3054. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  3055. blob = repo.object_store[file_entry[1]]
  3056. mode = file_entry[0]
  3057. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
  3058. @replace_me(since="0.22.9", remove_in="0.24.0")
  3059. def checkout_branch(
  3060. repo: Union[str, os.PathLike, Repo],
  3061. target: Union[bytes, str],
  3062. force: bool = False,
  3063. ) -> None:
  3064. """Switch branches or restore working tree files.
  3065. This is now a wrapper around the general checkout() function.
  3066. Preserved for backward compatibility.
  3067. Args:
  3068. repo: dulwich Repo object
  3069. target: branch name or commit sha to checkout
  3070. force: true or not to force checkout
  3071. """
  3072. # Simply delegate to the new checkout function
  3073. return checkout(repo, target, force=force)
def sparse_checkout(
    repo: Union[str, os.PathLike, Repo],
    patterns=None,
    force: bool = False,
    cone: Union[bool, None] = None,
):
    """Perform a sparse checkout in the repository (either 'full' or 'cone mode').

    Perform sparse checkout in either 'cone' (directory-based) mode or
    'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.

    If ``cone`` is ``None``, the mode is inferred from the repository's
    ``core.sparseCheckoutCone`` config setting.

    Steps:
      1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
      2) Determine which paths in the index are included vs. excluded.
         - If ``cone=True``, use "cone-compatible" directory-based logic.
         - If ``cone=False``, use standard .gitignore-style matching.
      3) Update the index's skip-worktree bits and add/remove files in
         the working tree accordingly.
      4) If ``force=False``, refuse to remove files that have local modifications.

    Args:
      repo: Path to the repository or a Repo object.
      patterns: Optional list of sparse-checkout patterns to write.
      force: Whether to force removal of locally modified files (default False).
      cone: Boolean indicating cone mode (True/False). If None, read from config.

    Returns:
      None

    Raises:
      Error: If no patterns were given and none are stored in the repository.
      CheckoutError: If applying the paths hits a sparse-checkout conflict.
    """
    with open_repo_closing(repo) as repo_obj:
        # --- 0) Possibly infer 'cone' from config ---
        if cone is None:
            cone = repo_obj.get_worktree().infer_cone_mode()
        # --- 1) Read or write patterns ---
        if patterns is None:
            lines = repo_obj.get_worktree().get_sparse_checkout_patterns()
            if lines is None:
                raise Error("No sparse checkout patterns found.")
        else:
            lines = patterns
            # Persist the caller-supplied patterns before applying them.
            repo_obj.get_worktree().set_sparse_checkout_patterns(patterns)
        # --- 2) Determine the set of included paths ---
        index = repo_obj.open_index()
        included_paths = determine_included_paths(index, lines, cone)
        # --- 3) Apply those results to the index & working tree ---
        try:
            apply_included_paths(repo_obj, included_paths, force=force)
        except SparseCheckoutConflictError as exc:
            # Re-raise with the porcelain-level exception type, preserving
            # the original as the cause.
            raise CheckoutError(*exc.args) from exc
  3121. def cone_mode_init(repo: Union[str, os.PathLike, Repo]):
  3122. """Initialize a repository to use sparse checkout in 'cone' mode.
  3123. Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
  3124. Writes an initial ``.git/info/sparse-checkout`` file that includes only
  3125. top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
  3126. Then performs a sparse checkout to update the working tree accordingly.
  3127. If no directories are specified, then only top-level files are included:
  3128. https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
  3129. Args:
  3130. repo: Path to the repository or a Repo object.
  3131. Returns:
  3132. None
  3133. """
  3134. with open_repo_closing(repo) as repo_obj:
  3135. repo_obj.get_worktree().configure_for_cone_mode()
  3136. patterns = ["/*", "!/*/"] # root-level files only
  3137. sparse_checkout(repo_obj, patterns, force=True, cone=True)
  3138. def cone_mode_set(repo: Union[str, os.PathLike, Repo], dirs, force=False):
  3139. """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
  3140. Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
  3141. Writes new patterns so that only the specified directories (and top-level files)
  3142. remain in the working tree, and applies the sparse checkout update.
  3143. Args:
  3144. repo: Path to the repository or a Repo object.
  3145. dirs: List of directory names to include.
  3146. force: Whether to forcibly discard local modifications (default False).
  3147. Returns:
  3148. None
  3149. """
  3150. with open_repo_closing(repo) as repo_obj:
  3151. repo_obj.get_worktree().configure_for_cone_mode()
  3152. repo_obj.get_worktree().set_cone_mode_patterns(dirs=dirs)
  3153. new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
  3154. # Finally, apply the patterns and update the working tree
  3155. sparse_checkout(repo_obj, new_patterns, force=force, cone=True)
  3156. def cone_mode_add(repo: Union[str, os.PathLike, Repo], dirs, force=False):
  3157. """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
  3158. Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
  3159. lines to include the specified directories, and then performs a sparse
  3160. checkout to update the working tree accordingly.
  3161. Args:
  3162. repo: Path to the repository or a Repo object.
  3163. dirs: List of directory names to add to the sparse-checkout.
  3164. force: Whether to forcibly discard local modifications (default False).
  3165. Returns:
  3166. None
  3167. """
  3168. with open_repo_closing(repo) as repo_obj:
  3169. repo_obj.get_worktree().configure_for_cone_mode()
  3170. # Do not pass base patterns as dirs
  3171. base_patterns = ["/*", "!/*/"]
  3172. existing_dirs = [
  3173. pat.strip("/")
  3174. for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
  3175. if pat not in base_patterns
  3176. ]
  3177. added_dirs = existing_dirs + (dirs or [])
  3178. repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
  3179. new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
  3180. sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
  3181. def check_mailmap(repo: RepoPath, contact):
  3182. """Check canonical name and email of contact.
  3183. Args:
  3184. repo: Path to the repository
  3185. contact: Contact name and/or email
  3186. Returns: Canonical contact data
  3187. """
  3188. with open_repo_closing(repo) as r:
  3189. from .mailmap import Mailmap
  3190. try:
  3191. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  3192. except FileNotFoundError:
  3193. mailmap = Mailmap()
  3194. return mailmap.lookup(contact)
  3195. def fsck(repo: RepoPath):
  3196. """Check a repository.
  3197. Args:
  3198. repo: A path to the repository
  3199. Returns: Iterator over errors/warnings
  3200. """
  3201. with open_repo_closing(repo) as r:
  3202. # TODO(jelmer): check pack files
  3203. # TODO(jelmer): check graph
  3204. # TODO(jelmer): check refs
  3205. for sha in r.object_store:
  3206. o = r.object_store[sha]
  3207. try:
  3208. o.check()
  3209. except Exception as e:
  3210. yield (sha, e)
  3211. def stash_list(repo: Union[str, os.PathLike, Repo]):
  3212. """List all stashes in a repository."""
  3213. with open_repo_closing(repo) as r:
  3214. from .stash import Stash
  3215. stash = Stash.from_repo(r)
  3216. return enumerate(list(stash.stashes()))
  3217. def stash_push(repo: Union[str, os.PathLike, Repo]) -> None:
  3218. """Push a new stash onto the stack."""
  3219. with open_repo_closing(repo) as r:
  3220. from .stash import Stash
  3221. stash = Stash.from_repo(r)
  3222. stash.push()
  3223. def stash_pop(repo: Union[str, os.PathLike, Repo]) -> None:
  3224. """Pop a stash from the stack."""
  3225. with open_repo_closing(repo) as r:
  3226. from .stash import Stash
  3227. stash = Stash.from_repo(r)
  3228. stash.pop(0)
  3229. def stash_drop(repo: Union[str, os.PathLike, Repo], index) -> None:
  3230. """Drop a stash from the stack."""
  3231. with open_repo_closing(repo) as r:
  3232. from .stash import Stash
  3233. stash = Stash.from_repo(r)
  3234. stash.drop(index)
  3235. def ls_files(repo: RepoPath):
  3236. """List all files in an index."""
  3237. with open_repo_closing(repo) as r:
  3238. return sorted(r.open_index())
  3239. def find_unique_abbrev(object_store, object_id, min_length=7):
  3240. """Find the shortest unique abbreviation for an object ID.
  3241. Args:
  3242. object_store: Object store to search in
  3243. object_id: The full object ID to abbreviate
  3244. min_length: Minimum length of abbreviation (default 7)
  3245. Returns:
  3246. The shortest unique prefix of the object ID (at least min_length chars)
  3247. """
  3248. if isinstance(object_id, bytes):
  3249. hex_id = object_id.decode("ascii")
  3250. else:
  3251. hex_id = object_id
  3252. # Start with minimum length
  3253. for length in range(min_length, len(hex_id) + 1):
  3254. prefix = hex_id[:length]
  3255. matches = 0
  3256. # Check if this prefix is unique
  3257. for obj_id in object_store:
  3258. if obj_id.decode("ascii").startswith(prefix):
  3259. matches += 1
  3260. if matches > 1:
  3261. # Not unique, need more characters
  3262. break
  3263. if matches == 1:
  3264. # Found unique prefix
  3265. return prefix
  3266. # If we get here, return the full ID
  3267. return hex_id
def describe(repo: Union[str, os.PathLike, Repo], abbrev=None):
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7
    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    # Slice used to truncate the commit hash when an explicit abbrev is given.
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        # Maps tag name -> [commit datetime (UTC), commit hex sha].
        tags = {}
        for key, value in refs.items():
            key_str = key.decode()
            obj = r.get_object(value)
            # Only consider tag refs; skip heads, remotes, etc.
            if "tags" not in key_str:
                continue
            _, tag = key_str.rsplit("/", 1)
            if isinstance(obj, Tag):
                # Annotated tag case
                commit = r.get_object(obj.object[1])
            else:
                # Lightweight tag case - obj is already the commit
                commit = obj
            if not isinstance(commit, Commit):
                raise AssertionError(
                    f"Expected Commit object, got {type(commit).__name__}"
                )
            tags[tag] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]
        # Newest tag first, ordered by the tagged commit's timestamp.
        sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)  # type: ignore[arg-type, return-value]
        # Get the latest commit
        latest_commit = r[r.head()]
        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
        # We're now 0 commits from the top
        commit_count = 0
        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag_item in sorted_tags:
                tag_name = tag_item[0]
                tag_commit = tag_item[1][1]
                if commit_id == tag_commit:
                    # Exact tag on HEAD -> just the tag name; otherwise
                    # "<tag>-<count>-g<hash>" like git describe.
                    if commit_count == 0:
                        return tag_name
                    else:
                        if abbrev is not None:
                            abbrev_hash = latest_commit.id.decode("ascii")[abbrev_slice]
                        else:
                            abbrev_hash = find_unique_abbrev(
                                r.object_store, latest_commit.id
                            )
                        return f"{tag_name}-{commit_count}-g{abbrev_hash}"
            commit_count += 1
        # Return plain commit if no parent tag can be found
        if abbrev is not None:
            return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
        return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
  3336. def get_object_by_path(
  3337. repo, path, committish: Optional[Union[str, bytes, Commit, Tag]] = None
  3338. ):
  3339. """Get an object by path.
  3340. Args:
  3341. repo: A path to the repository
  3342. path: Path to look up
  3343. committish: Commit to look up path in
  3344. Returns: A `ShaFile` object
  3345. """
  3346. if committish is None:
  3347. committish = "HEAD"
  3348. # Get the repository
  3349. with open_repo_closing(repo) as r:
  3350. commit = parse_commit(r, committish)
  3351. base_tree = commit.tree
  3352. if not isinstance(path, bytes):
  3353. path = commit_encode(commit, path)
  3354. (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  3355. return r[sha]
  3356. def write_tree(repo: RepoPath):
  3357. """Write a tree object from the index.
  3358. Args:
  3359. repo: Repository for which to write tree
  3360. Returns: tree id for the tree that was written
  3361. """
  3362. with open_repo_closing(repo) as r:
  3363. return r.open_index().commit(r.object_store)
def _do_merge(
    r,
    merge_commit_id,
    no_commit=False,
    no_ff=False,
    message=None,
    author=None,
    committer=None,
):
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference or no common ancestor exists
    """
    from .graph import find_merge_base
    from .merge import three_way_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")
    head_commit = r[head_commit_id]
    merge_commit = r[merge_commit_id]
    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])
    if not merge_bases:
        raise Error("No common ancestor found")
    # Use the first merge base
    base_commit_id = merge_bases[0]
    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])
    # Check for fast-forward: HEAD is an ancestor of the merge commit, so
    # simply moving the ref forward is enough (unless no_ff forces a commit).
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[b"HEAD"] = merge_commit_id
        # Update the working directory
        changes = tree_changes(r.object_store, head_commit.tree, merge_commit.tree)
        update_working_tree(
            r, head_commit.tree, merge_commit.tree, change_iterator=changes
        )
        return (merge_commit_id, [])
    if base_commit_id == merge_commit_id:
        # The merge commit is an ancestor of HEAD: already up to date.
        return (None, [])
    # Perform three-way merge
    base_commit = r[base_commit_id]
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = three_way_merge(
        r.object_store, base_commit, head_commit, merge_commit, gitattributes, config
    )
    # Add merged tree to object store
    r.object_store.add_object(merged_tree)
    # Update index and working directory even when there are conflicts,
    # so the caller can inspect/resolve them in the working tree.
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)
    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)
    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]
    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer
    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone
    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n"
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message
    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)
    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id
    return (merge_commit_obj.id, [])
  3460. def merge(
  3461. repo: Union[str, os.PathLike, Repo],
  3462. committish: Union[str, bytes, Commit, Tag],
  3463. no_commit=False,
  3464. no_ff=False,
  3465. message=None,
  3466. author=None,
  3467. committer=None,
  3468. ):
  3469. """Merge a commit into the current branch.
  3470. Args:
  3471. repo: Repository to merge into
  3472. committish: Commit to merge
  3473. no_commit: If True, do not create a merge commit
  3474. no_ff: If True, force creation of a merge commit
  3475. message: Optional merge commit message
  3476. author: Optional author for merge commit
  3477. committer: Optional committer for merge commit
  3478. Returns:
  3479. Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
  3480. if no_commit=True or there were conflicts
  3481. Raises:
  3482. Error: If there is no HEAD reference or commit cannot be found
  3483. """
  3484. with open_repo_closing(repo) as r:
  3485. # Parse the commit to merge
  3486. try:
  3487. merge_commit_id = parse_commit(r, committish).id
  3488. except KeyError:
  3489. raise Error(
  3490. f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
  3491. )
  3492. result = _do_merge(
  3493. r, merge_commit_id, no_commit, no_ff, message, author, committer
  3494. )
  3495. # Trigger auto GC if needed
  3496. from .gc import maybe_auto_gc
  3497. maybe_auto_gc(r)
  3498. return result
  3499. def unpack_objects(pack_path, target="."):
  3500. """Unpack objects from a pack file into the repository.
  3501. Args:
  3502. pack_path: Path to the pack file to unpack
  3503. target: Path to the repository to unpack into
  3504. Returns:
  3505. Number of objects unpacked
  3506. """
  3507. from .pack import Pack
  3508. with open_repo_closing(target) as r:
  3509. pack_basename = os.path.splitext(pack_path)[0]
  3510. with Pack(pack_basename) as pack:
  3511. count = 0
  3512. for unpacked in pack.iter_unpacked():
  3513. obj = unpacked.sha_file()
  3514. r.object_store.add_object(obj)
  3515. count += 1
  3516. return count
  3517. def merge_tree(
  3518. repo,
  3519. base_tree: Optional[Union[str, bytes, Tree, Commit, Tag]],
  3520. our_tree: Union[str, bytes, Tree, Commit, Tag],
  3521. their_tree: Union[str, bytes, Tree, Commit, Tag],
  3522. ):
  3523. """Perform a three-way tree merge without touching the working directory.
  3524. This is similar to git merge-tree, performing a merge at the tree level
  3525. without creating commits or updating any references.
  3526. Args:
  3527. repo: Repository containing the trees
  3528. base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
  3529. our_tree: Tree-ish of our side of the merge
  3530. their_tree: Tree-ish of their side of the merge
  3531. Returns:
  3532. tuple: A tuple of (merged_tree_id, conflicts) where:
  3533. - merged_tree_id is the SHA-1 of the merged tree
  3534. - conflicts is a list of paths (as bytes) that had conflicts
  3535. Raises:
  3536. KeyError: If any of the tree-ish arguments cannot be resolved
  3537. """
  3538. from .merge import Merger
  3539. with open_repo_closing(repo) as r:
  3540. # Resolve tree-ish arguments to actual trees
  3541. base = parse_tree(r, base_tree) if base_tree else None
  3542. ours = parse_tree(r, our_tree)
  3543. theirs = parse_tree(r, their_tree)
  3544. # Perform the merge
  3545. gitattributes = r.get_gitattributes()
  3546. config = r.get_config()
  3547. merger = Merger(r.object_store, gitattributes, config)
  3548. merged_tree, conflicts = merger.merge_trees(base, ours, theirs)
  3549. # Add the merged tree to the object store
  3550. r.object_store.add_object(merged_tree)
  3551. return merged_tree.id, conflicts
def cherry_pick(
    repo: Union[str, os.PathLike, Repo],
    committish: Union[str, bytes, Commit, Tag, None],
    no_commit=False,
    continue_=False,
    abort=False,
):
    r"""Cherry-pick a commit onto the current branch.

    State between invocations is kept in two files under the control dir:
    ``CHERRY_PICK_HEAD`` (the commit being picked) and ``MERGE_MSG`` (the
    pending commit message), mirroring C git's layout.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick (can be None only when ``continue_`` or abort is True)
      no_commit: If True, do not create a commit after applying changes
      ``continue_``: Continue an in-progress cherry-pick after resolving conflicts
      abort: Abort an in-progress cherry-pick
    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there were conflicts
    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from .merge import three_way_merge

    # Validate that committish is provided when needed
    if not (continue_ or abort) and committish is None:
        raise ValueError("committish is required when not using --continue or --abort")
    with open_repo_closing(repo) as r:
        # --abort: remove state files and restore the index to HEAD.
        if abort:
            # Clean up any cherry-pick state; missing files are fine since
            # an aborted pick may have been partially cleaned up already.
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass
            # Reset index to HEAD
            r.get_worktree().reset_index(r[b"HEAD"].tree)
            return None
        # --continue: finish a pick that stopped on conflicts.
        if continue_:
            # Check if there's a cherry-pick in progress
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")
            # Check for unresolved conflicts
            if r.open_index().has_conflicts():
                raise Error("Unresolved conflicts remain")
            # Create the commit from whatever is staged in the index.
            tree_id = r.open_index().commit(r.object_store)
            # Read saved message if any; fall back to the original commit's.
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                message = cherry_pick_commit.message
            # Preserve original authorship; committer is taken from config.
            new_commit = r.get_worktree().commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )
            # Clean up state files
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass
            return new_commit
        # Normal cherry-pick operation
        # Get current HEAD
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")
        # Parse the commit to cherry-pick
        # committish cannot be None here due to validation above
        assert committish is not None
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )
        # Check if commit has parents; a root commit has no base to diff from.
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")
        # Get parent of cherry-pick commit — the merge base for the 3-way merge.
        parent_commit = r[cherry_pick_commit.parents[0]]
        # Perform three-way merge: base=parent of pick, ours=HEAD, theirs=pick.
        merged_tree, conflicts = three_way_merge(
            r.object_store, parent_commit, head_commit, cherry_pick_commit
        )
        # Add merged tree to object store
        r.object_store.add_object(merged_tree)
        # Update working tree and index
        # Reset index to match merged tree
        r.get_worktree().reset_index(merged_tree.id)
        # Update working tree from the new index
        # Allow overwriting because we're applying the merge result
        changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
        update_working_tree(
            r,
            head_commit.tree,
            merged_tree.id,
            change_iterator=changes,
            allow_overwrite_modified=True,
        )
        if conflicts:
            # Save state so --continue can finish the pick later.
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")
            # Save commit message
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)
            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )
        if no_commit:
            return None
        # Create the commit, preserving the original author identity/time.
        new_commit = r.get_worktree().commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )
        return new_commit
  3690. def revert(
  3691. repo: Union[str, os.PathLike, Repo],
  3692. commits: Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]],
  3693. no_commit=False,
  3694. message=None,
  3695. author=None,
  3696. committer=None,
  3697. ):
  3698. """Revert one or more commits.
  3699. This creates a new commit that undoes the changes introduced by the
  3700. specified commits. Unlike reset, revert creates a new commit that
  3701. preserves history.
  3702. Args:
  3703. repo: Path to repository or repository object
  3704. commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
  3705. no_commit: If True, apply changes to index/working tree but don't commit
  3706. message: Optional commit message (default: "Revert <original subject>")
  3707. author: Optional author for revert commit
  3708. committer: Optional committer for revert commit
  3709. Returns:
  3710. SHA1 of the new revert commit, or None if no_commit=True
  3711. Raises:
  3712. Error: If revert fails due to conflicts or other issues
  3713. """
  3714. from .merge import three_way_merge
  3715. # Normalize commits to a list
  3716. if isinstance(commits, (str, bytes, Commit, Tag)):
  3717. commits = [commits]
  3718. with open_repo_closing(repo) as r:
  3719. # Convert string refs to bytes
  3720. commits_to_revert = []
  3721. for commit_ref in commits:
  3722. if isinstance(commit_ref, str):
  3723. commit_ref = commit_ref.encode("utf-8")
  3724. commit = parse_commit(r, commit_ref)
  3725. commits_to_revert.append(commit)
  3726. # Get current HEAD
  3727. try:
  3728. head_commit_id = r.refs[b"HEAD"]
  3729. except KeyError:
  3730. raise Error("No HEAD reference found")
  3731. head_commit = r[head_commit_id]
  3732. current_tree = head_commit.tree
  3733. # Process commits in order
  3734. for commit_to_revert in commits_to_revert:
  3735. # For revert, we want to apply the inverse of the commit
  3736. # This means using the commit's tree as "base" and its parent as "theirs"
  3737. if not commit_to_revert.parents:
  3738. raise Error(
  3739. f"Cannot revert commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - it has no parents"
  3740. )
  3741. # For simplicity, we only handle commits with one parent (no merge commits)
  3742. if len(commit_to_revert.parents) > 1:
  3743. raise Error(
  3744. f"Cannot revert merge commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - not yet implemented"
  3745. )
  3746. parent_commit = r[commit_to_revert.parents[0]]
  3747. # Perform three-way merge:
  3748. # - base: the commit we're reverting (what we want to remove)
  3749. # - ours: current HEAD (what we have now)
  3750. # - theirs: parent of commit being reverted (what we want to go back to)
  3751. merged_tree, conflicts = three_way_merge(
  3752. r.object_store,
  3753. commit_to_revert, # base
  3754. r[head_commit_id], # ours
  3755. parent_commit, # theirs
  3756. )
  3757. if conflicts:
  3758. # Update working tree with conflicts
  3759. changes = tree_changes(r.object_store, current_tree, merged_tree.id)
  3760. update_working_tree(
  3761. r, current_tree, merged_tree.id, change_iterator=changes
  3762. )
  3763. conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
  3764. raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")
  3765. # Add merged tree to object store
  3766. r.object_store.add_object(merged_tree)
  3767. # Update working tree
  3768. changes = tree_changes(r.object_store, current_tree, merged_tree.id)
  3769. update_working_tree(
  3770. r, current_tree, merged_tree.id, change_iterator=changes
  3771. )
  3772. current_tree = merged_tree.id
  3773. if not no_commit:
  3774. # Create revert commit
  3775. revert_commit = Commit()
  3776. revert_commit.tree = merged_tree.id
  3777. revert_commit.parents = [head_commit_id]
  3778. # Set author/committer
  3779. if author is None:
  3780. author = get_user_identity(r.get_config_stack())
  3781. if committer is None:
  3782. committer = author
  3783. revert_commit.author = author
  3784. revert_commit.committer = committer
  3785. # Set timestamps
  3786. timestamp = int(time.time())
  3787. timezone = 0 # UTC
  3788. revert_commit.author_time = timestamp
  3789. revert_commit.author_timezone = timezone
  3790. revert_commit.commit_time = timestamp
  3791. revert_commit.commit_timezone = timezone
  3792. # Set message
  3793. if message is None:
  3794. # Extract original commit subject
  3795. original_message = commit_to_revert.message
  3796. if isinstance(original_message, bytes):
  3797. original_message = original_message.decode("utf-8", "replace")
  3798. subject = original_message.split("\n")[0]
  3799. message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
  3800. elif isinstance(message, str):
  3801. message = message.encode("utf-8")
  3802. revert_commit.message = message
  3803. # Add commit to object store
  3804. r.object_store.add_object(revert_commit)
  3805. # Update HEAD
  3806. r.refs[b"HEAD"] = revert_commit.id
  3807. head_commit_id = revert_commit.id
  3808. return head_commit_id if not no_commit else None
  3809. def gc(
  3810. repo,
  3811. auto: bool = False,
  3812. aggressive: bool = False,
  3813. prune: bool = True,
  3814. grace_period: Optional[int] = 1209600, # 2 weeks default
  3815. dry_run: bool = False,
  3816. progress=None,
  3817. ):
  3818. """Run garbage collection on a repository.
  3819. Args:
  3820. repo: Path to the repository or a Repo object
  3821. auto: If True, only run gc if needed
  3822. aggressive: If True, use more aggressive settings
  3823. prune: If True, prune unreachable objects
  3824. grace_period: Grace period in seconds for pruning (default 2 weeks)
  3825. dry_run: If True, only report what would be done
  3826. progress: Optional progress callback
  3827. Returns:
  3828. GCStats object with garbage collection statistics
  3829. """
  3830. from .gc import garbage_collect
  3831. with open_repo_closing(repo) as r:
  3832. return garbage_collect(
  3833. r,
  3834. auto=auto,
  3835. aggressive=aggressive,
  3836. prune=prune,
  3837. grace_period=grace_period,
  3838. dry_run=dry_run,
  3839. progress=progress,
  3840. )
  3841. def prune(
  3842. repo,
  3843. grace_period: Optional[int] = None,
  3844. dry_run: bool = False,
  3845. progress=None,
  3846. ):
  3847. """Prune/clean up a repository's object store.
  3848. This removes temporary files that were left behind by interrupted
  3849. pack operations.
  3850. Args:
  3851. repo: Path to the repository or a Repo object
  3852. grace_period: Grace period in seconds for removing temporary files
  3853. (default 2 weeks)
  3854. dry_run: If True, only report what would be done
  3855. progress: Optional progress callback
  3856. """
  3857. with open_repo_closing(repo) as r:
  3858. if progress:
  3859. progress("Pruning temporary files")
  3860. if not dry_run:
  3861. r.object_store.prune(grace_period=grace_period)
  3862. def count_objects(repo: RepoPath = ".", verbose=False) -> CountObjectsResult:
  3863. """Count unpacked objects and their disk usage.
  3864. Args:
  3865. repo: Path to repository or repository object
  3866. verbose: Whether to return verbose information
  3867. Returns:
  3868. CountObjectsResult object with detailed statistics
  3869. """
  3870. with open_repo_closing(repo) as r:
  3871. object_store = r.object_store
  3872. # Count loose objects
  3873. loose_count = 0
  3874. loose_size = 0
  3875. for sha in object_store._iter_loose_objects():
  3876. loose_count += 1
  3877. from .object_store import DiskObjectStore
  3878. assert isinstance(object_store, DiskObjectStore)
  3879. path = object_store._get_shafile_path(sha)
  3880. try:
  3881. stat_info = os.stat(path)
  3882. # Git uses disk usage, not file size. st_blocks is always in
  3883. # 512-byte blocks per POSIX standard
  3884. if hasattr(stat_info, "st_blocks"):
  3885. # Available on Linux and macOS
  3886. loose_size += stat_info.st_blocks * 512 # type: ignore
  3887. else:
  3888. # Fallback for Windows
  3889. loose_size += stat_info.st_size
  3890. except FileNotFoundError:
  3891. # Object may have been removed between iteration and stat
  3892. pass
  3893. if not verbose:
  3894. return CountObjectsResult(count=loose_count, size=loose_size)
  3895. # Count pack information
  3896. pack_count = len(object_store.packs)
  3897. in_pack_count = 0
  3898. pack_size = 0
  3899. for pack in object_store.packs:
  3900. in_pack_count += len(pack)
  3901. # Get pack file size
  3902. pack_path = pack._data_path
  3903. try:
  3904. pack_size += os.path.getsize(pack_path)
  3905. except FileNotFoundError:
  3906. pass
  3907. # Get index file size
  3908. idx_path = pack._idx_path
  3909. try:
  3910. pack_size += os.path.getsize(idx_path)
  3911. except FileNotFoundError:
  3912. pass
  3913. return CountObjectsResult(
  3914. count=loose_count,
  3915. size=loose_size,
  3916. in_pack=in_pack_count,
  3917. packs=pack_count,
  3918. size_pack=pack_size,
  3919. )
  3920. def is_interactive_rebase(repo: Union[Repo, str]) -> bool:
  3921. """Check if an interactive rebase is in progress.
  3922. Args:
  3923. repo: Repository to check
  3924. Returns:
  3925. True if interactive rebase is in progress, False otherwise
  3926. """
  3927. with open_repo_closing(repo) as r:
  3928. state_manager = r.get_rebase_state_manager()
  3929. if not state_manager.exists():
  3930. return False
  3931. # Check if todo file exists
  3932. todo = state_manager.load_todo()
  3933. return todo is not None
def rebase(
    repo: Union[Repo, str],
    upstream: Union[bytes, str],
    onto: Optional[Union[bytes, str]] = None,
    branch: Optional[Union[bytes, str]] = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
    interactive: bool = False,
    edit_todo: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
      repo: Repository to rebase in
      upstream: Upstream branch/commit to rebase onto
      onto: Specific commit to rebase onto (defaults to upstream)
      branch: Branch to rebase (defaults to current branch)
      abort: Abort an in-progress rebase
      continue_rebase: Continue an in-progress rebase
      skip: Skip current commit and continue rebase
        (NOTE(review): this flag is never read in the body — confirm
        whether skip support is unimplemented or handled elsewhere)
      interactive: Start an interactive rebase
      edit_todo: Edit the todo list of an interactive rebase

    Returns:
      List of new commit SHAs created by rebase

    Raises:
      Error: If rebase fails or conflicts occur
    """
    from .cli import launch_editor
    from .rebase import (
        RebaseConflict,
        RebaseError,
        Rebaser,
        process_interactive_rebase,
        start_interactive,
    )

    # Imported separately so the local name doesn't clash with the
    # ``edit_todo`` parameter.
    from .rebase import (
        edit_todo as edit_todo_func,
    )

    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)
        # --abort: restore pre-rebase state and stop.
        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))
        if edit_todo:
            # Edit the todo list of an interactive rebase
            try:
                edit_todo_func(r, launch_editor)
                print("Todo list updated. Continue with 'rebase --continue'")
                return []
            except RebaseError as e:
                raise Error(str(e))
        # --continue: resume an in-progress rebase (interactive or regular).
        if continue_rebase:
            try:
                if interactive:
                    # Continue interactive rebase
                    is_complete, pause_reason = process_interactive_rebase(
                        r, editor_callback=launch_editor
                    )
                    if is_complete:
                        return [c.id for c in rebaser._done]
                    else:
                        # Paused again: report why and return without SHAs.
                        if pause_reason == "conflict":
                            raise Error("Conflicts detected. Resolve and continue.")
                        elif pause_reason == "edit":
                            print("Stopped for editing. Make changes and continue.")
                        elif pause_reason == "break":
                            print("Rebase paused at break. Continue when ready.")
                        else:
                            print(f"Rebase paused: {pause_reason}")
                        return []
                else:
                    # Continue regular rebase
                    result = rebaser.continue_()
                    if result is None:
                        # Rebase complete
                        return [c.id for c in rebaser._done]
                    elif isinstance(result, tuple) and result[1]:
                        # Still have conflicts
                        raise Error(
                            f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                        )
                    # NOTE(review): a non-None result without conflicts falls
                    # through to start a fresh rebase below — confirm this is
                    # intentional rather than a missing return.
            except RebaseError as e:
                raise Error(str(e))
        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None
        try:
            if interactive:
                # Start interactive rebase
                todo = start_interactive(r, upstream, onto, branch, launch_editor)
                # Process the todo list
                is_complete, pause_reason = process_interactive_rebase(
                    r, todo, editor_callback=launch_editor
                )
                if is_complete:
                    return [c.id for c in rebaser._done]
                else:
                    # Paused mid-todo: report the reason, nothing completed yet.
                    if pause_reason == "conflict":
                        raise Error("Conflicts detected. Resolve and continue.")
                    elif pause_reason == "edit":
                        print("Stopped for editing. Make changes and continue.")
                    elif pause_reason == "break":
                        print("Rebase paused at break. Continue when ready.")
                    else:
                        print(f"Rebase paused: {pause_reason}")
                    return []
            else:
                # Regular rebase
                rebaser.start(upstream, onto, branch)
                # Continue rebase automatically
                result = rebaser.continue_()
                if result is not None:
                    # Conflicts
                    raise RebaseConflict(result[1])
                # Return the SHAs of the rebased commits
                return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))
  4061. def annotate(
  4062. repo: RepoPath,
  4063. path,
  4064. committish: Optional[Union[str, bytes, Commit, Tag]] = None,
  4065. ):
  4066. """Annotate the history of a file.
  4067. :param repo: Path to the repository
  4068. :param path: Path to annotate
  4069. :param committish: Commit id to find path in
  4070. :return: List of ((Commit, TreeChange), line) tuples
  4071. """
  4072. if committish is None:
  4073. committish = "HEAD"
  4074. from dulwich.annotate import annotate_lines
  4075. with open_repo_closing(repo) as r:
  4076. commit_id = parse_commit(r, committish).id
  4077. # Ensure path is bytes
  4078. if isinstance(path, str):
  4079. path = path.encode()
  4080. return annotate_lines(r.object_store, commit_id, path)
  4081. blame = annotate
def filter_branch(
    repo=".",
    branch="HEAD",
    *,
    filter_fn=None,
    filter_author=None,
    filter_committer=None,
    filter_message=None,
    tree_filter=None,
    index_filter=None,
    parent_filter=None,
    commit_filter=None,
    subdirectory_filter=None,
    prune_empty=False,
    tag_name_filter=None,
    force=False,
    keep_original=True,
    refs=None,
):
    """Rewrite branch history by creating new commits with filtered properties.

    This is similar to git filter-branch, allowing you to rewrite commit
    history by modifying trees, parents, author, committer, or commit messages.

    Args:
      repo: Path to repository
      branch: Branch to rewrite (defaults to HEAD)
      filter_fn: Optional callable that takes a Commit object and returns
        a dict of updated fields (author, committer, message, etc.)
      filter_author: Optional callable that takes author bytes and returns
        updated author bytes or None to keep unchanged
      filter_committer: Optional callable that takes committer bytes and returns
        updated committer bytes or None to keep unchanged
      filter_message: Optional callable that takes commit message bytes
        and returns updated message bytes
      tree_filter: Optional callable that takes (tree_sha, temp_dir) and returns
        new tree SHA after modifying working directory
      index_filter: Optional callable that takes (tree_sha, temp_index_path) and
        returns new tree SHA after modifying index
      parent_filter: Optional callable that takes parent list and returns
        modified parent list
      commit_filter: Optional callable that takes (Commit, tree_sha) and returns
        new commit SHA or None to skip commit
      subdirectory_filter: Optional subdirectory path to extract as new root
      prune_empty: Whether to prune commits that become empty
      tag_name_filter: Optional callable to rename tags
      force: Force operation even if branch has been filtered before
      keep_original: Keep original refs under refs/original/
      refs: List of refs to rewrite (defaults to [branch])

    Returns:
      Dict mapping old commit SHAs to new commit SHAs

    Raises:
      Error: If branch is already filtered and force is False
    """
    from .filter_branch import CommitFilter, filter_refs

    with open_repo_closing(repo) as r:
        # Parse branch/committish
        if isinstance(branch, str):
            branch = branch.encode()
        # Determine which refs to process
        if refs is None:
            if branch == b"HEAD":
                # Resolve HEAD to the branch it points at, so the rewrite
                # updates the real ref rather than the symref.
                try:
                    resolved = r.refs.follow(b"HEAD")
                    if resolved and resolved[0]:
                        # resolved is a list of (refname, sha) tuples
                        resolved_ref = resolved[0][-1]
                        if resolved_ref and resolved_ref != b"HEAD":
                            refs = [resolved_ref]
                        else:
                            # HEAD points directly to a commit (detached)
                            refs = [b"HEAD"]
                    else:
                        refs = [b"HEAD"]
                except SymrefLoop:
                    # Broken symref cycle; fall back to HEAD itself.
                    refs = [b"HEAD"]
            else:
                # Convert branch name to full ref if needed
                if not branch.startswith(b"refs/"):
                    branch = b"refs/heads/" + branch
                refs = [branch]
        # Convert subdirectory filter to bytes if needed
        if subdirectory_filter and isinstance(subdirectory_filter, str):
            subdirectory_filter = subdirectory_filter.encode()
        # Create commit filter. Note: this rebinds the local name
        # ``commit_filter`` from the callable parameter (passed through as
        # the CommitFilter's commit_filter kwarg) to the CommitFilter object.
        commit_filter = CommitFilter(
            r.object_store,
            filter_fn=filter_fn,
            filter_author=filter_author,
            filter_committer=filter_committer,
            filter_message=filter_message,
            tree_filter=tree_filter,
            index_filter=index_filter,
            parent_filter=parent_filter,
            commit_filter=commit_filter,
            subdirectory_filter=subdirectory_filter,
            prune_empty=prune_empty,
            tag_name_filter=tag_name_filter,
        )

        # Tag callback for renaming tags
        def rename_tag(old_ref, new_ref):
            # Copy tag to new name
            r.refs[new_ref] = r.refs[old_ref]
            # Delete old tag
            del r.refs[old_ref]

        # Filter refs
        try:
            return filter_refs(
                r.refs,
                r.object_store,
                refs,
                commit_filter,
                keep_original=keep_original,
                force=force,
                tag_callback=rename_tag if tag_name_filter else None,
            )
        except ValueError as e:
            raise Error(str(e)) from e
def format_patch(
    repo=".",
    committish=None,
    outstream=sys.stdout,
    outdir=None,
    n=1,
    stdout=False,
    version=None,
) -> list[str]:
    """Generate patches suitable for git am.

    Args:
      repo: Path to repository
      committish: Commit-ish or commit range to generate patches for.
        Can be a single commit id, or a tuple of (start, end) commit ids
        for a range. If None, formats the last n commits from HEAD.
      outstream: Stream to write to if stdout=True
      outdir: Directory to write patch files to (default: current directory)
      n: Number of patches to generate if committish is None
      stdout: Write patches to stdout instead of files
      version: Version string to include in patches (default: Dulwich version)

    Returns:
      List of patch filenames that were created (empty if stdout=True)
    """
    if outdir is None:
        outdir = "."
    filenames = []
    with open_repo_closing(repo) as r:
        # Determine which commits to format
        commits_to_format = []
        if committish is None:
            # Get the last n commits from HEAD
            try:
                walker = r.get_walker()
                for entry in walker:
                    commits_to_format.append(entry.commit)
                    if len(commits_to_format) >= n:
                        break
                # Walker yields newest-first; patches are numbered oldest-first.
                commits_to_format.reverse()
            except KeyError:
                # No HEAD or empty repository
                pass
        elif isinstance(committish, tuple):
            # Handle commit range (start, end)
            start_commit, end_commit = committish
            # Extract commit IDs from commit objects if needed
            from .objects import Commit

            start_id = (
                start_commit.id if isinstance(start_commit, Commit) else start_commit
            )
            end_id = end_commit.id if isinstance(end_commit, Commit) else end_commit
            # Walk from end back to start (start itself is excluded)
            walker = r.get_walker(include=[end_id], exclude=[start_id])
            for entry in walker:
                commits_to_format.append(entry.commit)
            commits_to_format.reverse()
        else:
            # Single commit
            commit = r.object_store[committish]
            commits_to_format.append(commit)
        # Generate patches
        total = len(commits_to_format)
        for i, commit in enumerate(commits_to_format, 1):
            # Diff against the first parent (None for a root commit)
            if commit.parents:
                parent_id = commit.parents[0]
                parent = r.object_store[parent_id]
            else:
                parent = None
            # Generate the diff
            from io import BytesIO

            diff_content = BytesIO()
            if parent:
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    parent.tree,
                    commit.tree,
                )
            else:
                # Initial commit - diff against empty tree
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    None,
                    commit.tree,
                )
            # Generate patch with commit metadata
            if stdout:
                # write_commit_patch needs a binary stream; unwrap text streams.
                write_commit_patch(
                    outstream.buffer if hasattr(outstream, "buffer") else outstream,
                    commit,
                    diff_content.getvalue(),
                    (i, total),
                    version=version,
                )
            else:
                # Generate filename like git: NNNN-<summary>.patch
                summary = get_summary(commit)
                filename = os.path.join(outdir, f"{i:04d}-{summary}.patch")
                with open(filename, "wb") as f:
                    write_commit_patch(
                        f,
                        commit,
                        diff_content.getvalue(),
                        (i, total),
                        version=version,
                    )
                filenames.append(filename)
    return filenames
  4308. def bisect_start(
  4309. repo: Union[str, os.PathLike, Repo] = ".",
  4310. bad: Optional[Union[str, bytes, Commit, Tag]] = None,
  4311. good: Optional[
  4312. Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
  4313. ] = None,
  4314. paths=None,
  4315. no_checkout=False,
  4316. term_bad="bad",
  4317. term_good="good",
  4318. ):
  4319. """Start a new bisect session.
  4320. Args:
  4321. repo: Path to repository or a Repo object
  4322. bad: The bad commit (defaults to HEAD)
  4323. good: List of good commits or a single good commit
  4324. paths: Optional paths to limit bisect to
  4325. no_checkout: If True, don't checkout commits during bisect
  4326. term_bad: Term to use for bad commits (default: "bad")
  4327. term_good: Term to use for good commits (default: "good")
  4328. """
  4329. with open_repo_closing(repo) as r:
  4330. state = BisectState(r)
  4331. # Convert single good commit to list
  4332. if good is not None and not isinstance(good, list):
  4333. good = [good]
  4334. # Parse commits
  4335. bad_sha = parse_commit(r, bad).id if bad else None
  4336. good_shas = [parse_commit(r, g).id for g in good] if good else None
  4337. state.start(bad_sha, good_shas, paths, no_checkout, term_bad, term_good)
  4338. # Return the next commit to test if we have both good and bad
  4339. if bad_sha and good_shas:
  4340. next_sha = state._find_next_commit()
  4341. if next_sha and not no_checkout:
  4342. # Checkout the next commit
  4343. old_tree = r[r.head()].tree if r.head() else None
  4344. r.refs[b"HEAD"] = next_sha
  4345. commit = r[next_sha]
  4346. changes = tree_changes(r.object_store, old_tree, commit.tree)
  4347. update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
  4348. return next_sha
  4349. def bisect_bad(
  4350. repo: Union[str, os.PathLike, Repo] = ".",
  4351. rev: Optional[Union[str, bytes, Commit, Tag]] = None,
  4352. ):
  4353. """Mark a commit as bad.
  4354. Args:
  4355. repo: Path to repository or a Repo object
  4356. rev: Commit to mark as bad (defaults to HEAD)
  4357. Returns:
  4358. The SHA of the next commit to test, or None if bisect is complete
  4359. """
  4360. with open_repo_closing(repo) as r:
  4361. state = BisectState(r)
  4362. rev_sha = parse_commit(r, rev).id if rev else None
  4363. next_sha = state.mark_bad(rev_sha)
  4364. if next_sha:
  4365. # Checkout the next commit
  4366. old_tree = r[r.head()].tree if r.head() else None
  4367. r.refs[b"HEAD"] = next_sha
  4368. commit = r[next_sha]
  4369. changes = tree_changes(r.object_store, old_tree, commit.tree)
  4370. update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
  4371. return next_sha
  4372. def bisect_good(
  4373. repo: Union[str, os.PathLike, Repo] = ".",
  4374. rev: Optional[Union[str, bytes, Commit, Tag]] = None,
  4375. ):
  4376. """Mark a commit as good.
  4377. Args:
  4378. repo: Path to repository or a Repo object
  4379. rev: Commit to mark as good (defaults to HEAD)
  4380. Returns:
  4381. The SHA of the next commit to test, or None if bisect is complete
  4382. """
  4383. with open_repo_closing(repo) as r:
  4384. state = BisectState(r)
  4385. rev_sha = parse_commit(r, rev).id if rev else None
  4386. next_sha = state.mark_good(rev_sha)
  4387. if next_sha:
  4388. # Checkout the next commit
  4389. old_tree = r[r.head()].tree if r.head() else None
  4390. r.refs[b"HEAD"] = next_sha
  4391. commit = r[next_sha]
  4392. changes = tree_changes(r.object_store, old_tree, commit.tree)
  4393. update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
  4394. return next_sha
  4395. def bisect_skip(
  4396. repo: Union[str, os.PathLike, Repo] = ".",
  4397. revs: Optional[
  4398. Union[str, bytes, Commit, Tag, list[Union[str, bytes, Commit, Tag]]]
  4399. ] = None,
  4400. ):
  4401. """Skip one or more commits.
  4402. Args:
  4403. repo: Path to repository or a Repo object
  4404. revs: List of commits to skip (defaults to [HEAD])
  4405. Returns:
  4406. The SHA of the next commit to test, or None if bisect is complete
  4407. """
  4408. with open_repo_closing(repo) as r:
  4409. state = BisectState(r)
  4410. if revs is None:
  4411. rev_shas = None
  4412. else:
  4413. # Convert single rev to list
  4414. if not isinstance(revs, list):
  4415. revs = [revs]
  4416. rev_shas = [parse_commit(r, rev).id for rev in revs]
  4417. next_sha = state.skip(rev_shas)
  4418. if next_sha:
  4419. # Checkout the next commit
  4420. old_tree = r[r.head()].tree if r.head() else None
  4421. r.refs[b"HEAD"] = next_sha
  4422. commit = r[next_sha]
  4423. changes = tree_changes(r.object_store, old_tree, commit.tree)
  4424. update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
  4425. return next_sha
  4426. def bisect_reset(
  4427. repo: Union[str, os.PathLike, Repo] = ".",
  4428. commit: Optional[Union[str, bytes, Commit, Tag]] = None,
  4429. ):
  4430. """Reset bisect state and return to original branch/commit.
  4431. Args:
  4432. repo: Path to repository or a Repo object
  4433. commit: Optional commit to reset to (defaults to original branch/commit)
  4434. """
  4435. with open_repo_closing(repo) as r:
  4436. state = BisectState(r)
  4437. # Get old tree before reset
  4438. try:
  4439. old_tree = r[r.head()].tree
  4440. except KeyError:
  4441. old_tree = None
  4442. commit_sha = parse_commit(r, commit).id if commit else None
  4443. state.reset(commit_sha)
  4444. # Update working tree to new HEAD
  4445. try:
  4446. new_head = r.head()
  4447. if new_head:
  4448. new_commit = r[new_head]
  4449. changes = tree_changes(r.object_store, old_tree, new_commit.tree)
  4450. update_working_tree(
  4451. r, old_tree, new_commit.tree, change_iterator=changes
  4452. )
  4453. except KeyError:
  4454. # No HEAD after reset
  4455. pass
  4456. def bisect_log(repo: Union[str, os.PathLike, Repo] = "."):
  4457. """Get the bisect log.
  4458. Args:
  4459. repo: Path to repository or a Repo object
  4460. Returns:
  4461. The bisect log as a string
  4462. """
  4463. with open_repo_closing(repo) as r:
  4464. state = BisectState(r)
  4465. return state.get_log()
  4466. def bisect_replay(repo: Union[str, os.PathLike, Repo], log_file):
  4467. """Replay a bisect log.
  4468. Args:
  4469. repo: Path to repository or a Repo object
  4470. log_file: Path to the log file or file-like object
  4471. """
  4472. with open_repo_closing(repo) as r:
  4473. state = BisectState(r)
  4474. if isinstance(log_file, str):
  4475. with open(log_file) as f:
  4476. log_content = f.read()
  4477. else:
  4478. log_content = log_file.read()
  4479. state.replay(log_content)
  4480. def reflog(repo: RepoPath = ".", ref=b"HEAD", all=False):
  4481. """Show reflog entries for a reference or all references.
  4482. Args:
  4483. repo: Path to repository or a Repo object
  4484. ref: Reference name (defaults to HEAD)
  4485. all: If True, show reflogs for all refs (ignores ref parameter)
  4486. Yields:
  4487. If all=False: ReflogEntry objects
  4488. If all=True: Tuples of (ref_name, ReflogEntry) for all refs with reflogs
  4489. """
  4490. import os
  4491. from .reflog import iter_reflogs
  4492. if isinstance(ref, str):
  4493. ref = ref.encode("utf-8")
  4494. with open_repo_closing(repo) as r:
  4495. if not all:
  4496. yield from r.read_reflog(ref)
  4497. else:
  4498. logs_dir = os.path.join(r.controldir(), "logs")
  4499. # Use iter_reflogs to discover all reflogs
  4500. for ref_bytes in iter_reflogs(logs_dir):
  4501. # Read the reflog entries for this ref
  4502. for entry in r.read_reflog(ref_bytes):
  4503. yield (ref_bytes, entry)
  4504. def lfs_track(repo: Union[str, os.PathLike, Repo] = ".", patterns=None):
  4505. """Track file patterns with Git LFS.
  4506. Args:
  4507. repo: Path to repository
  4508. patterns: List of file patterns to track (e.g., ["*.bin", "*.pdf"])
  4509. If None, returns current tracked patterns
  4510. Returns:
  4511. List of tracked patterns
  4512. """
  4513. from .attrs import GitAttributes
  4514. with open_repo_closing(repo) as r:
  4515. gitattributes_path = os.path.join(r.path, ".gitattributes")
  4516. # Load existing GitAttributes
  4517. if os.path.exists(gitattributes_path):
  4518. gitattributes = GitAttributes.from_file(gitattributes_path)
  4519. else:
  4520. gitattributes = GitAttributes()
  4521. if patterns is None:
  4522. # Return current LFS tracked patterns
  4523. tracked = []
  4524. for pattern_obj, attrs in gitattributes:
  4525. if attrs.get(b"filter") == b"lfs":
  4526. tracked.append(pattern_obj.pattern.decode())
  4527. return tracked
  4528. # Add new patterns
  4529. for pattern in patterns:
  4530. # Ensure pattern is bytes
  4531. if isinstance(pattern, str):
  4532. pattern = pattern.encode()
  4533. # Set LFS attributes for the pattern
  4534. gitattributes.set_attribute(pattern, b"filter", b"lfs")
  4535. gitattributes.set_attribute(pattern, b"diff", b"lfs")
  4536. gitattributes.set_attribute(pattern, b"merge", b"lfs")
  4537. gitattributes.set_attribute(pattern, b"text", False)
  4538. # Write updated attributes
  4539. gitattributes.write_to_file(gitattributes_path)
  4540. # Stage the .gitattributes file
  4541. add(r, [".gitattributes"])
  4542. return lfs_track(r) # Return updated list
  4543. def lfs_untrack(repo: Union[str, os.PathLike, Repo] = ".", patterns=None):
  4544. """Untrack file patterns from Git LFS.
  4545. Args:
  4546. repo: Path to repository
  4547. patterns: List of file patterns to untrack
  4548. Returns:
  4549. List of remaining tracked patterns
  4550. """
  4551. from .attrs import GitAttributes
  4552. if not patterns:
  4553. return lfs_track(repo)
  4554. with open_repo_closing(repo) as r:
  4555. gitattributes_path = os.path.join(r.path, ".gitattributes")
  4556. if not os.path.exists(gitattributes_path):
  4557. return []
  4558. # Load existing GitAttributes
  4559. gitattributes = GitAttributes.from_file(gitattributes_path)
  4560. # Remove specified patterns
  4561. for pattern in patterns:
  4562. if isinstance(pattern, str):
  4563. pattern = pattern.encode()
  4564. # Check if pattern is tracked by LFS
  4565. for pattern_obj, attrs in list(gitattributes):
  4566. if pattern_obj.pattern == pattern and attrs.get(b"filter") == b"lfs":
  4567. gitattributes.remove_pattern(pattern)
  4568. break
  4569. # Write updated attributes
  4570. gitattributes.write_to_file(gitattributes_path)
  4571. # Stage the .gitattributes file
  4572. add(r, [".gitattributes"])
  4573. return lfs_track(r) # Return updated list
  4574. def lfs_init(repo: Union[str, os.PathLike, Repo] = "."):
  4575. """Initialize Git LFS in a repository.
  4576. Args:
  4577. repo: Path to repository
  4578. Returns:
  4579. None
  4580. """
  4581. from .lfs import LFSStore
  4582. with open_repo_closing(repo) as r:
  4583. # Create LFS store
  4584. LFSStore.from_repo(r, create=True)
  4585. # Set up Git config for LFS
  4586. config = r.get_config()
  4587. config.set((b"filter", b"lfs"), b"process", b"git-lfs filter-process")
  4588. config.set((b"filter", b"lfs"), b"required", b"true")
  4589. config.set((b"filter", b"lfs"), b"clean", b"git-lfs clean -- %f")
  4590. config.set((b"filter", b"lfs"), b"smudge", b"git-lfs smudge -- %f")
  4591. config.write_to_path()
  4592. def lfs_clean(repo: Union[str, os.PathLike, Repo] = ".", path=None):
  4593. """Clean a file by converting it to an LFS pointer.
  4594. Args:
  4595. repo: Path to repository
  4596. path: Path to file to clean (relative to repo root)
  4597. Returns:
  4598. LFS pointer content as bytes
  4599. """
  4600. from .lfs import LFSFilterDriver, LFSStore
  4601. with open_repo_closing(repo) as r:
  4602. if path is None:
  4603. raise ValueError("Path must be specified")
  4604. # Get LFS store
  4605. lfs_store = LFSStore.from_repo(r)
  4606. filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())
  4607. # Read file content
  4608. full_path = os.path.join(r.path, path)
  4609. with open(full_path, "rb") as f:
  4610. content = f.read()
  4611. # Clean the content (convert to LFS pointer)
  4612. return filter_driver.clean(content)
  4613. def lfs_smudge(repo: Union[str, os.PathLike, Repo] = ".", pointer_content=None):
  4614. """Smudge an LFS pointer by retrieving the actual content.
  4615. Args:
  4616. repo: Path to repository
  4617. pointer_content: LFS pointer content as bytes
  4618. Returns:
  4619. Actual file content as bytes
  4620. """
  4621. from .lfs import LFSFilterDriver, LFSStore
  4622. with open_repo_closing(repo) as r:
  4623. if pointer_content is None:
  4624. raise ValueError("Pointer content must be specified")
  4625. # Get LFS store
  4626. lfs_store = LFSStore.from_repo(r)
  4627. filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())
  4628. # Smudge the pointer (retrieve actual content)
  4629. return filter_driver.smudge(pointer_content)
  4630. def lfs_ls_files(repo: Union[str, os.PathLike, Repo] = ".", ref=None):
  4631. """List files tracked by Git LFS.
  4632. Args:
  4633. repo: Path to repository
  4634. ref: Git ref to check (defaults to HEAD)
  4635. Returns:
  4636. List of (path, oid, size) tuples for LFS files
  4637. """
  4638. from .lfs import LFSPointer
  4639. from .object_store import iter_tree_contents
  4640. with open_repo_closing(repo) as r:
  4641. if ref is None:
  4642. ref = b"HEAD"
  4643. elif isinstance(ref, str):
  4644. ref = ref.encode()
  4645. # Get the commit and tree
  4646. try:
  4647. commit = r[ref]
  4648. tree = r[commit.tree]
  4649. except KeyError:
  4650. return []
  4651. lfs_files = []
  4652. # Walk the tree
  4653. for path, mode, sha in iter_tree_contents(r.object_store, tree.id):
  4654. if not stat.S_ISREG(mode):
  4655. continue
  4656. # Check if it's an LFS pointer
  4657. obj = r.object_store[sha]
  4658. if not isinstance(obj, Blob):
  4659. raise AssertionError(f"Expected Blob object, got {type(obj).__name__}")
  4660. pointer = LFSPointer.from_bytes(obj.data)
  4661. if pointer is not None:
  4662. lfs_files.append((path.decode(), pointer.oid, pointer.size))
  4663. return lfs_files
def lfs_migrate(
    repo: Union[str, os.PathLike, Repo] = ".",
    include=None,
    exclude=None,
    everything=False,
):
    """Migrate files to Git LFS.

    Rewrites matching working-tree files in place as LFS pointers and
    updates the index to point at the new pointer blobs.

    Args:
      repo: Path to repository
      include: Patterns of files to include (fnmatch-style)
      exclude: Patterns of files to exclude (fnmatch-style)
      everything: Migrate all files above 100MB, ignoring include/exclude

    Returns:
      Number of migrated files
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        # Initialize LFS if needed
        lfs_store = LFSStore.from_repo(r, create=True)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())
        # Get current index
        index = r.open_index()
        migrated = 0
        # Determine files to migrate
        files_to_migrate = []
        if everything:
            # Migrate all files above 100MB
            for path, entry in index.items():
                full_path = os.path.join(r.path, path.decode())
                if os.path.exists(full_path):
                    size = os.path.getsize(full_path)
                    if size > 100 * 1024 * 1024:  # 100MB
                        files_to_migrate.append(path.decode())
        else:
            # Use include/exclude patterns; a file must match some include
            # pattern (if given) and no exclude pattern (if given).
            for path, entry in index.items():
                path_str = path.decode()
                # Check include patterns
                if include:
                    matched = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in include
                    )
                    if not matched:
                        continue
                # Check exclude patterns
                if exclude:
                    excluded = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in exclude
                    )
                    if excluded:
                        continue
                files_to_migrate.append(path_str)
        # Migrate files
        for path_str in files_to_migrate:
            full_path = os.path.join(r.path, path_str)
            if not os.path.exists(full_path):
                # File listed in index but missing on disk: skip silently
                continue
            # Read file content
            with open(full_path, "rb") as f:
                content = f.read()
            # Convert to LFS pointer via the clean filter
            pointer_content = filter_driver.clean(content)
            # Write pointer back to file (working tree now holds the pointer)
            with open(full_path, "wb") as f:
                f.write(pointer_content)
            # Create blob for pointer content and update index
            blob = Blob()
            blob.data = pointer_content
            r.object_store.add_object(blob)
            st = os.stat(full_path)
            # NOTE(review): third argument 0 is presumably the entry's
            # mode/flags default — confirm against index_entry_from_stat
            index_entry = index_entry_from_stat(st, blob.id, 0)
            path_bytes = path_str.encode() if isinstance(path_str, str) else path_str
            index[path_bytes] = index_entry
            migrated += 1
        # Write updated index
        index.write()
        # Track patterns if include was specified
        if include:
            lfs_track(r, include)
        return migrated
  4744. def lfs_pointer_check(repo: Union[str, os.PathLike, Repo] = ".", paths=None):
  4745. """Check if files are valid LFS pointers.
  4746. Args:
  4747. repo: Path to repository
  4748. paths: List of file paths to check (if None, check all files)
  4749. Returns:
  4750. Dict mapping paths to LFSPointer objects (or None if not a pointer)
  4751. """
  4752. from .lfs import LFSPointer
  4753. with open_repo_closing(repo) as r:
  4754. results = {}
  4755. if paths is None:
  4756. # Check all files in index
  4757. index = r.open_index()
  4758. paths = [path.decode() for path in index]
  4759. for path in paths:
  4760. full_path = os.path.join(r.path, path)
  4761. if os.path.exists(full_path):
  4762. try:
  4763. with open(full_path, "rb") as f:
  4764. content = f.read()
  4765. pointer = LFSPointer.from_bytes(content)
  4766. results[path] = pointer
  4767. except OSError:
  4768. results[path] = None
  4769. else:
  4770. results[path] = None
  4771. return results
def lfs_fetch(repo: Union[str, os.PathLike, Repo] = ".", remote="origin", refs=None):
    """Fetch LFS objects from remote.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)
      refs: Specific refs to fetch LFS objects for (default: all refs)

    Returns:
      Number of objects fetched
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config
        # NOTE(review): this assumes config.get returns a falsy value (rather
        # than raising) when lfs.url is unset — confirm against the Config API
        config = r.get_config()
        lfs_url_bytes = config.get((b"lfs",), b"url")
        if not lfs_url_bytes:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL (conventional LFS endpoint)
                remote_url_str = remote_url.decode()
                if remote_url_str.endswith(".git"):
                    remote_url_str = remote_url_str[:-4]
                lfs_url = f"{remote_url_str}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url_bytes.decode()
        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods
        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)
        # Find all LFS pointers in the refs
        pointers_to_fetch = []
        if refs is None:
            # Get all refs
            refs = list(r.refs.keys())
        for ref in refs:
            if isinstance(ref, str):
                ref = ref.encode()
            try:
                commit = r[r.refs[ref]]
            except KeyError:
                # Unresolvable ref: skip rather than fail the whole fetch
                continue
            # Walk the commit tree looking for blobs that parse as LFS pointers
            for entry in r.object_store.iter_tree_contents(commit.tree):
                try:
                    obj = r.object_store[entry.sha]
                except KeyError:
                    pass
                else:
                    if isinstance(obj, Blob):
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            # Check if we already have it
                            try:
                                store.open_object(pointer.oid)
                            except KeyError:
                                pointers_to_fetch.append((pointer.oid, pointer.size))
        # Fetch missing objects
        fetched = 0
        for oid, size in pointers_to_fetch:
            content = client.download(oid, size)
            store.write_object([content])
            fetched += 1
        return fetched
  4839. def lfs_pull(repo: Union[str, os.PathLike, Repo] = ".", remote="origin"):
  4840. """Pull LFS objects for current checkout.
  4841. Args:
  4842. repo: Path to repository
  4843. remote: Remote name (default: origin)
  4844. Returns:
  4845. Number of objects fetched
  4846. """
  4847. from .lfs import LFSPointer, LFSStore
  4848. with open_repo_closing(repo) as r:
  4849. # First do a fetch for HEAD
  4850. fetched = lfs_fetch(repo, remote, [b"HEAD"])
  4851. # Then checkout LFS files in working directory
  4852. store = LFSStore.from_repo(r)
  4853. index = r.open_index()
  4854. for path, entry in index.items():
  4855. full_path = os.path.join(r.path, path.decode())
  4856. if os.path.exists(full_path):
  4857. with open(full_path, "rb") as f:
  4858. content = f.read()
  4859. pointer = LFSPointer.from_bytes(content)
  4860. if pointer and pointer.is_valid_oid():
  4861. try:
  4862. # Replace pointer with actual content
  4863. with store.open_object(pointer.oid) as lfs_file:
  4864. lfs_content = lfs_file.read()
  4865. with open(full_path, "wb") as f:
  4866. f.write(lfs_content)
  4867. except KeyError:
  4868. # Object not available
  4869. pass
  4870. return fetched
  4871. def lfs_push(repo: Union[str, os.PathLike, Repo] = ".", remote="origin", refs=None):
  4872. """Push LFS objects to remote.
  4873. Args:
  4874. repo: Path to repository
  4875. remote: Remote name (default: origin)
  4876. refs: Specific refs to push LFS objects for (default: current branch)
  4877. Returns:
  4878. Number of objects pushed
  4879. """
  4880. from .lfs import LFSClient, LFSPointer, LFSStore
  4881. with open_repo_closing(repo) as r:
  4882. # Get LFS server URL from config
  4883. config = r.get_config()
  4884. lfs_url_bytes = config.get((b"lfs",), b"url")
  4885. if not lfs_url_bytes:
  4886. # Try remote URL
  4887. remote_url = config.get((b"remote", remote.encode()), b"url")
  4888. if remote_url:
  4889. # Append /info/lfs to remote URL
  4890. remote_url_str = remote_url.decode()
  4891. if remote_url_str.endswith(".git"):
  4892. remote_url_str = remote_url_str[:-4]
  4893. lfs_url = f"{remote_url_str}/info/lfs"
  4894. else:
  4895. raise ValueError(f"No LFS URL configured for remote {remote}")
  4896. else:
  4897. lfs_url = lfs_url_bytes.decode()
  4898. # Get authentication
  4899. auth = None
  4900. # TODO: Support credential helpers and other auth methods
  4901. # Create LFS client and store
  4902. client = LFSClient(lfs_url, auth)
  4903. store = LFSStore.from_repo(r)
  4904. # Find all LFS objects to push
  4905. if refs is None:
  4906. # Push current branch
  4907. refs = [r.refs.read_ref(b"HEAD")]
  4908. objects_to_push = set()
  4909. for ref in refs:
  4910. if isinstance(ref, str):
  4911. ref = ref.encode()
  4912. try:
  4913. if ref.startswith(b"refs/"):
  4914. commit = r[r.refs[ref]]
  4915. else:
  4916. commit = r[ref]
  4917. except KeyError:
  4918. continue
  4919. # Walk the commit tree
  4920. for entry in r.object_store.iter_tree_contents(commit.tree):
  4921. try:
  4922. obj = r.object_store[entry.sha]
  4923. except KeyError:
  4924. pass
  4925. else:
  4926. if isinstance(obj, Blob):
  4927. pointer = LFSPointer.from_bytes(obj.data)
  4928. if pointer and pointer.is_valid_oid():
  4929. objects_to_push.add((pointer.oid, pointer.size))
  4930. # Push objects
  4931. pushed = 0
  4932. for oid, size in objects_to_push:
  4933. try:
  4934. with store.open_object(oid) as f:
  4935. content = f.read()
  4936. except KeyError:
  4937. # Object not in local store
  4938. logging.warn("LFS object %s not found locally", oid)
  4939. else:
  4940. client.upload(oid, size, content)
  4941. pushed += 1
  4942. return pushed
def lfs_status(repo: Union[str, os.PathLike, Repo] = "."):
    """Show status of LFS files.

    Args:
      repo: Path to repository

    Returns:
      Dict with status information; keys are "tracked", "not_staged",
      "not_committed", "not_pushed" and "missing", each mapping to a list
      of path strings (the last two are not yet populated, see TODO below)
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        store = LFSStore.from_repo(r)
        index = r.open_index()
        status: dict[str, list[str]] = {
            "tracked": [],
            "not_staged": [],
            "not_committed": [],
            "not_pushed": [],
            "missing": [],
        }
        # Check working directory files
        for path, entry in index.items():
            path_str = path.decode()
            full_path = os.path.join(r.path, path_str)
            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()
                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    status["tracked"].append(path_str)
                    # Check if object exists locally
                    try:
                        store.open_object(pointer.oid)
                    except KeyError:
                        status["missing"].append(path_str)
                    # Check if file has been modified: compare the pointer in
                    # the working tree with the one staged in the index
                    if isinstance(entry, ConflictedIndexEntry):
                        continue  # Skip conflicted entries
                    try:
                        staged_obj = r.object_store[entry.sha]
                    except KeyError:
                        pass
                    else:
                        if not isinstance(staged_obj, Blob):
                            raise AssertionError(
                                f"Expected Blob object, got {type(staged_obj).__name__}"
                            )
                        staged_pointer = LFSPointer.from_bytes(staged_obj.data)
                        if staged_pointer and staged_pointer.oid != pointer.oid:
                            status["not_staged"].append(path_str)
        # TODO: Check for not committed and not pushed files
        return status
  4993. def worktree_list(repo="."):
  4994. """List all worktrees for a repository.
  4995. Args:
  4996. repo: Path to repository
  4997. Returns:
  4998. List of WorkTreeInfo objects
  4999. """
  5000. from .worktree import list_worktrees
  5001. with open_repo_closing(repo) as r:
  5002. return list_worktrees(r)
  5003. def worktree_add(
  5004. repo=".", path=None, branch=None, commit=None, detach=False, force=False
  5005. ):
  5006. """Add a new worktree.
  5007. Args:
  5008. repo: Path to repository
  5009. path: Path for new worktree
  5010. branch: Branch to checkout (creates if doesn't exist)
  5011. commit: Specific commit to checkout
  5012. detach: Create with detached HEAD
  5013. force: Force creation even if branch is already checked out
  5014. Returns:
  5015. Path to the newly created worktree
  5016. """
  5017. from .worktree import add_worktree
  5018. if path is None:
  5019. raise ValueError("Path is required for worktree add")
  5020. with open_repo_closing(repo) as r:
  5021. wt_repo = add_worktree(
  5022. r, path, branch=branch, commit=commit, detach=detach, force=force
  5023. )
  5024. return wt_repo.path
  5025. def worktree_remove(repo=".", path=None, force=False):
  5026. """Remove a worktree.
  5027. Args:
  5028. repo: Path to repository
  5029. path: Path to worktree to remove
  5030. force: Force removal even if there are local changes
  5031. """
  5032. from .worktree import remove_worktree
  5033. if path is None:
  5034. raise ValueError("Path is required for worktree remove")
  5035. with open_repo_closing(repo) as r:
  5036. remove_worktree(r, path, force=force)
  5037. def worktree_prune(repo=".", dry_run=False, expire=None):
  5038. """Prune worktree administrative files.
  5039. Args:
  5040. repo: Path to repository
  5041. dry_run: Only show what would be removed
  5042. expire: Only prune worktrees older than this many seconds
  5043. Returns:
  5044. List of pruned worktree names
  5045. """
  5046. from .worktree import prune_worktrees
  5047. with open_repo_closing(repo) as r:
  5048. return prune_worktrees(r, expire=expire, dry_run=dry_run)
  5049. def worktree_lock(repo=".", path=None, reason=None):
  5050. """Lock a worktree to prevent it from being pruned.
  5051. Args:
  5052. repo: Path to repository
  5053. path: Path to worktree to lock
  5054. reason: Optional reason for locking
  5055. """
  5056. from .worktree import lock_worktree
  5057. if path is None:
  5058. raise ValueError("Path is required for worktree lock")
  5059. with open_repo_closing(repo) as r:
  5060. lock_worktree(r, path, reason=reason)
  5061. def worktree_unlock(repo=".", path=None):
  5062. """Unlock a worktree.
  5063. Args:
  5064. repo: Path to repository
  5065. path: Path to worktree to unlock
  5066. """
  5067. from .worktree import unlock_worktree
  5068. if path is None:
  5069. raise ValueError("Path is required for worktree unlock")
  5070. with open_repo_closing(repo) as r:
  5071. unlock_worktree(r, path)
  5072. def worktree_move(repo=".", old_path=None, new_path=None):
  5073. """Move a worktree to a new location.
  5074. Args:
  5075. repo: Path to repository
  5076. old_path: Current path of worktree
  5077. new_path: New path for worktree
  5078. """
  5079. from .worktree import move_worktree
  5080. if old_path is None or new_path is None:
  5081. raise ValueError("Both old_path and new_path are required for worktree move")
  5082. with open_repo_closing(repo) as r:
  5083. move_worktree(r, old_path, new_path)