# porcelain.py -- Porcelain-like layer on top of Dulwich
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * bisect{_start,_bad,_good,_skip,_reset,_log,_replay}
  26. * branch{_create,_delete,_list}
  27. * check_ignore
  28. * checkout
  29. * checkout_branch
  30. * clone
  31. * cone mode{_init, _set, _add}
  32. * commit
  33. * commit_tree
  34. * daemon
  35. * describe
  36. * diff_tree
  37. * fetch
  38. * filter_branch
  39. * for_each_ref
  40. * grep
  41. * init
  42. * interpret_trailers
  43. * ls_files
  44. * ls_remote
  45. * ls_tree
  46. * mailsplit
  47. * merge
  48. * merge_tree
  49. * mv/move
  50. * prune
  51. * pull
  52. * push
  53. * rm
  54. * remote{_add}
  55. * receive_pack
  56. * replace{_create,_delete,_list}
  57. * reset
  58. * revert
  59. * sparse_checkout
  60. * submodule_add
  61. * submodule_init
  62. * submodule_list
  63. * rev_list
  64. * tag{_create,_delete,_list}
  65. * upload_pack
  66. * update_server_info
  67. * var
  68. * write_commit_graph
  69. * status
  70. * shortlog
  71. * symbolic_ref
  72. * worktree{_add,_list,_remove,_prune,_lock,_unlock,_move}
  73. These functions are meant to behave similarly to the git subcommands.
  74. Differences in behaviour are considered bugs.
  75. Note: one of the consequences of this is that paths tend to be
  76. interpreted relative to the current working directory rather than relative
  77. to the repository root.
  78. Functions should generally accept both unicode strings and bytestrings
  79. """

import datetime
import fnmatch
import logging
import os
import posixpath
import re
import stat
import sys
import time
from collections import namedtuple
from collections.abc import Iterable, Iterator, Sequence
from collections.abc import Set as AbstractSet
from contextlib import AbstractContextManager, closing, contextmanager
from dataclasses import dataclass
from io import BytesIO, RawIOBase
from pathlib import Path
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    BinaryIO,
    Callable,
    Optional,
    TextIO,
    TypeVar,
    Union,
    cast,
    overload,
)

if sys.version_info >= (3, 12):
    from collections.abc import Buffer
    from typing import override
else:
    from typing_extensions import Buffer, override

if TYPE_CHECKING:
    from .filter_branch import CommitData
    from .gc import GCStats
    from .maintenance import MaintenanceResult

from . import replace_me
from .archive import tar_stream
from .bisect import BisectState
from .client import (
    FetchPackResult,
    LsRemoteResult,
    SendPackResult,
    get_transport_and_path,
)
from .config import Config, ConfigFile, StackedConfig, read_submodules
from .diff_tree import (
    CHANGE_ADD,
    CHANGE_COPY,
    CHANGE_DELETE,
    CHANGE_MODIFY,
    CHANGE_RENAME,
    RENAME_CHANGE_TYPES,
    TreeChange,
    tree_changes,
)
from .errors import SendPackError
from .graph import can_fast_forward
from .ignore import IgnoreFilterManager
from .index import (
    ConflictedIndexEntry,
    Index,
    IndexEntry,
    _fs_to_tree_path,
    blob_from_path_and_stat,
    build_file_from_blob,
    build_index_from_tree,
    get_unstaged_changes,
    index_entry_from_stat,
    symlink,
    update_working_tree,
    validate_path_element_default,
    validate_path_element_hfs,
    validate_path_element_ntfs,
)
from .object_store import BaseObjectStore, tree_lookup_path
from .objects import (
    Blob,
    Commit,
    Tag,
    Tree,
    TreeEntry,
    format_timezone,
    parse_timezone,
    pretty_format_tree_entry,
)
from .objectspec import (
    parse_commit,
    parse_object,
    parse_ref,
    parse_reftuples,
    parse_tree,
)
from .pack import UnpackedObject, write_pack_from_container, write_pack_index
from .patch import (
    get_summary,
    write_commit_patch,
    write_object_diff,
    write_tree_diff,
)
from .protocol import ZERO_SHA, Protocol
from .refs import (
    LOCAL_BRANCH_PREFIX,
    LOCAL_NOTES_PREFIX,
    LOCAL_REMOTE_PREFIX,
    LOCAL_REPLACE_PREFIX,
    LOCAL_TAG_PREFIX,
    Ref,
    SymrefLoop,
    _import_remote_refs,
    filter_ref_prefix,
    local_branch_name,
    local_replace_name,
    local_tag_name,
    shorten_ref_name,
)
from .repo import BaseRepo, Repo, get_user_identity
from .server import (
    FileSystemBackend,
    ReceivePackHandler,
    TCPGitServer,
    UploadPackHandler,
)
from .server import update_server_info as server_update_server_info
from .sparse_patterns import (
    SparseCheckoutConflictError,
    apply_included_paths,
    determine_included_paths,
)
from .trailers import add_trailer_to_message, format_trailers, parse_trailers

# Module level tuple definition for status output
GitStatus = namedtuple("GitStatus", "staged unstaged untracked")

# TypeVar for preserving BaseRepo subclass types
T = TypeVar("T", bound="BaseRepo")

# Type alias for common repository parameter pattern
RepoPath = Union[str, os.PathLike[str], Repo]


@dataclass
class CountObjectsResult:
    """Result of counting objects in a repository.

    Attributes:
      count: Number of loose objects
      size: Total size of loose objects in bytes
      in_pack: Number of objects in pack files
      packs: Number of pack files
      size_pack: Total size of pack files in bytes
    """

    count: int
    size: int
    in_pack: Optional[int] = None
    packs: Optional[int] = None
    size_pack: Optional[int] = None


class NoneStream(RawIOBase):
    """Fallback if stdout or stderr are unavailable, does nothing."""

    def read(self, size: int = -1) -> None:
        """Read from stream (returns None as this is a null stream)."""
        return None

    def readall(self) -> bytes:
        """Read all bytes (returns empty bytes).

        Returns:
          Empty bytes object
        """
        return b""

    @override
    def readinto(self, b: Buffer) -> Optional[int]:
        """Read into a buffer; always reports 0 bytes read."""
        return 0

    @override
    def write(self, b: Buffer) -> Optional[int]:
        """Discard the buffer, reporting its full length as written."""
        # All Buffer implementations (bytes, bytearray, memoryview) support len()
        return len(b) if b else 0  # type: ignore[arg-type]


default_bytes_out_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stdout, "buffer", None) or NoneStream()
)
default_bytes_err_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stderr, "buffer", None) or NoneStream()
)

DEFAULT_ENCODING = "utf-8"


class Error(Exception):
    """Porcelain-based error."""

    def __init__(self, msg: str) -> None:
        """Initialize Error with message."""
        super().__init__(msg)


class RemoteExists(Error):
    """Raised when the remote already exists."""


class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""


class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""


def parse_timezone_format(tz_str: str) -> int:
    """Parse given string and attempt to return a timezone offset.

    Different formats are considered in the following order:

     - Git internal format: <unix timestamp> <timezone offset>
     - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
     - ISO 8601: e.g. 1995-11-20T19:12:08-0500

    Args:
      tz_str: datetime string
    Returns: Timezone offset as integer
    Raises:
      TimezoneFormatError: if timezone information cannot be extracted
    """
    import re

    # Git internal format
    internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
    if re.match(internal_format_pattern, tz_str):
        try:
            tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
            return tz_internal[0]
        except ValueError:
            pass

    # RFC 2822
    import email.utils

    rfc_2822 = email.utils.parsedate_tz(tz_str)
    if rfc_2822 and rfc_2822[9] is not None:
        return rfc_2822[9]

    # ISO 8601
    # Supported offsets:
    # sHHMM, sHH:MM, sHH
    iso_8601_pattern = re.compile(
        "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
    )
    match = re.search(iso_8601_pattern, tz_str)
    total_secs = 0
    if match:
        sign, hours, minutes = match.groups()
        total_secs += int(hours) * 3600
        if minutes:
            total_secs += int(minutes) * 60
        total_secs = -total_secs if sign == "-" else total_secs
        return total_secs

    # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
    raise TimezoneFormatError(tz_str)
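

# Illustrative usage (not part of the original module): the three accepted
# formats, with hypothetical date strings; results are offsets in seconds.
# >>> parse_timezone_format("1461645600 +0800")                 # Git internal
# 28800
# >>> parse_timezone_format("Mon, 20 Nov 1995 19:12:08 -0500")  # RFC 2822
# -18000
# >>> parse_timezone_format("1995-11-20T19:12:08-0500")         # ISO 8601
# -18000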


def get_user_timezones() -> tuple[int, int]:
    """Retrieve local timezone as described in git documentation.

    https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
    Returns: A tuple containing author timezone, committer timezone.
    """
    local_timezone = time.localtime().tm_gmtoff
    if os.environ.get("GIT_AUTHOR_DATE"):
        author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
    else:
        author_timezone = local_timezone
    if os.environ.get("GIT_COMMITTER_DATE"):
        commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
    else:
        commit_timezone = local_timezone
    return author_timezone, commit_timezone
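

# Illustrative usage (not part of the original module): offsets are seconds
# east of UTC; GIT_AUTHOR_DATE/GIT_COMMITTER_DATE override the local zone.
# >>> os.environ["GIT_AUTHOR_DATE"] = "1461645600 +0800"
# >>> get_user_timezones()  # committer offset stays local, so it varies
# (28800, ...)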


@overload
def open_repo(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo(
    path_or_repo: Union[str, os.PathLike[str]],
) -> AbstractContextManager[Repo]: ...


def open_repo(
    path_or_repo: Union[str, os.PathLike[str], T],
) -> AbstractContextManager[Union[T, Repo]]:
    """Open an argument that can be a repository or a path for a repository."""
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return Repo(path_or_repo)


@contextmanager
def _noop_context_manager(obj: T) -> Iterator[T]:
    """Context manager that has the same api as closing but does nothing."""
    yield obj


@overload
def open_repo_closing(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo_closing(
    path_or_repo: Union[str, bytes, os.PathLike[str]],
) -> AbstractContextManager[Repo]: ...


def open_repo_closing(
    path_or_repo: Union[str, bytes, os.PathLike[str], T],
) -> AbstractContextManager[Union[T, Repo]]:
    """Open an argument that can be a repository or a path for a repository.

    Returns a context manager that will close the repo on exit if the argument
    is a path, and does nothing if the argument is a repo.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
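

# Illustrative usage (not part of the original module): pass a path and the
# repo is closed on exit; pass an already-open Repo (hypothetical `r` below)
# and ownership stays with the caller.
# with open_repo_closing("/path/to/repo") as repo_obj:
#     print(repo_obj.head())
# with open_repo_closing(r) as repo_obj:  # no-op wrapper, r stays open
#     print(repo_obj.head())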


def path_to_tree_path(
    repopath: Union[str, bytes, os.PathLike[str]],
    path: Union[str, bytes, os.PathLike[str]],
    tree_encoding: str = DEFAULT_ENCODING,
) -> bytes:
    """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding to use for tree paths
    Returns: A path formatted for use in e.g. an index
    """
    # Resolve might return a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    # Convert bytes paths to str for Path
    if isinstance(path, bytes):
        path = os.fsdecode(path)
    path = Path(path)
    resolved_path = path.resolve()

    # Resolve and abspath seem to behave differently regarding symlinks;
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    # Convert bytes paths to str for Path
    if isinstance(repopath, bytes):
        repopath = os.fsdecode(repopath)
    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise

    if sys.platform == "win32":
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
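

# Illustrative usage (not part of the original module), with hypothetical
# paths; the result is a slash-separated bytes path relative to the repo root.
# >>> path_to_tree_path("/srv/repo", "/srv/repo/src/main.py")
# b'src/main.py'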


class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha: bytes, new_sha: bytes) -> None:
        """Initialize DivergedBranches error with current and new SHA values."""
        self.current_sha = current_sha
        self.new_sha = new_sha


def check_diverged(repo: BaseRepo, current_sha: bytes, new_sha: bytes) -> None:
    """Check if updating to a sha can be done with fast forwarding.

    Args:
      repo: Repository object
      current_sha: Current head sha
      new_sha: New head sha
    """
    try:
        can = can_fast_forward(repo, current_sha, new_sha)
    except KeyError:
        can = False
    if not can:
        raise DivergedBranches(current_sha, new_sha)
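

# Illustrative usage (not part of the original module): sketch of guarding a
# ref update; `r`, `old_sha` and `new_sha` are hypothetical.
# try:
#     check_diverged(r, old_sha, new_sha)
# except DivergedBranches:
#     pass  # refuse the non-fast-forward update, or require an explicit force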


def archive(
    repo: Union[str, BaseRepo],
    committish: Optional[Union[str, bytes, Commit, Tag]] = None,
    outstream: Union[BinaryIO, RawIOBase] = default_bytes_out_stream,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
) -> None:
    """Create an archive.

    Args:
      repo: Path of repository for which to generate an archive.
      committish: Commit SHA1 or ref to use
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
    """
    if committish is None:
        committish = "HEAD"
    with open_repo_closing(repo) as repo_obj:
        c = parse_commit(repo_obj, committish)
        tree = repo_obj.object_store[c.tree]
        assert isinstance(tree, Tree)
        for chunk in tar_stream(repo_obj.object_store, tree, c.commit_time):
            outstream.write(chunk)
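

# Illustrative usage (not part of the original module): stream a tar archive
# of HEAD into a local file.
# with open("head.tar", "wb") as f:
#     archive(".", outstream=f)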


def update_server_info(repo: RepoPath = ".") -> None:
    """Update server info files for a repository.

    Args:
      repo: path to the repository
    """
    with open_repo_closing(repo) as r:
        server_update_server_info(r)


def write_commit_graph(repo: RepoPath = ".", reachable: bool = True) -> None:
    """Write a commit graph file for a repository.

    Args:
      repo: path to the repository or a Repo object
      reachable: if True, include all commits reachable from refs.
        if False, only include direct ref targets.
    """
    with open_repo_closing(repo) as r:
        # Get all refs
        refs = list(r.refs.as_dict().values())
        if refs:
            r.object_store.write_commit_graph(refs, reachable=reachable)
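

# Illustrative usage (not part of the original module): regenerate auxiliary
# data after adding commits; both calls take a path or an open Repo.
# update_server_info(".")   # refreshes info/refs and objects/info/packs
# write_commit_graph(".")   # indexes all commits reachable from refs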


def symbolic_ref(
    repo: RepoPath, ref_name: Union[str, bytes], force: bool = False
) -> None:
    """Set git symbolic ref into HEAD.

    Args:
      repo: path to the repository
      ref_name: short name of the new ref
      force: force setting without checking whether the ref exists in refs/heads
    """
    with open_repo_closing(repo) as repo_obj:
        ref_path = _make_branch_ref(ref_name)
        if not force and ref_path not in repo_obj.refs.keys():
            ref_name_str = (
                ref_name.decode("utf-8", "replace")
                if isinstance(ref_name, bytes)
                else ref_name
            )
            raise Error(f"fatal: ref `{ref_name_str}` is not a ref")
        repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
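

# Illustrative usage (not part of the original module): point HEAD at a local
# branch (hypothetical name); without force=True the branch must already
# exist under refs/heads.
# symbolic_ref(".", "main")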


def pack_refs(repo: RepoPath, all: bool = False) -> None:
    """Pack loose references into packed-refs file."""
    with open_repo_closing(repo) as repo_obj:
        repo_obj.refs.pack_refs(all=all)


def _get_variables(repo: RepoPath = ".") -> dict[str, str]:
    """Internal function to get all Git logical variables.

    Args:
      repo: Path to the repository
    Returns:
      A dictionary of all logical variables with values
    """
    from .repo import get_user_identity

    with open_repo_closing(repo) as repo_obj:
        config = repo_obj.get_config_stack()

        # Define callbacks for each logical variable
        def get_author_ident() -> Optional[str]:
            """Get GIT_AUTHOR_IDENT."""
            try:
                author_identity = get_user_identity(config, kind="AUTHOR")
                author_tz, _ = get_user_timezones()
                timestamp = int(time.time())
                return f"{author_identity.decode('utf-8', 'replace')} {timestamp} {author_tz:+05d}"
            except Exception:
                return None

        def get_committer_ident() -> Optional[str]:
            """Get GIT_COMMITTER_IDENT."""
            try:
                committer_identity = get_user_identity(config, kind="COMMITTER")
                _, committer_tz = get_user_timezones()
                timestamp = int(time.time())
                return f"{committer_identity.decode('utf-8', 'replace')} {timestamp} {committer_tz:+05d}"
            except Exception:
                return None

        def get_editor() -> Optional[str]:
            """Get GIT_EDITOR."""
            editor = os.environ.get("GIT_EDITOR")
            if editor is None:
                try:
                    editor_bytes = config.get(("core",), "editor")
                    editor = editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    editor = os.environ.get("VISUAL") or os.environ.get("EDITOR")
            return editor

        def get_sequence_editor() -> Optional[str]:
            """Get GIT_SEQUENCE_EDITOR."""
            sequence_editor = os.environ.get("GIT_SEQUENCE_EDITOR")
            if sequence_editor is None:
                try:
                    seq_editor_bytes = config.get(("sequence",), "editor")
                    sequence_editor = seq_editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    # Falls back to GIT_EDITOR if not set
                    sequence_editor = get_editor()
            return sequence_editor

        def get_pager() -> Optional[str]:
            """Get GIT_PAGER."""
            pager = os.environ.get("GIT_PAGER")
            if pager is None:
                try:
                    pager_bytes = config.get(("core",), "pager")
                    pager = pager_bytes.decode("utf-8", "replace")
                except KeyError:
                    pager = os.environ.get("PAGER")
            return pager

        def get_default_branch() -> str:
            """Get GIT_DEFAULT_BRANCH."""
            try:
                default_branch_bytes = config.get(("init",), "defaultBranch")
                return default_branch_bytes.decode("utf-8", "replace")
            except KeyError:
                # Git's default is "master"
                return "master"

        # Dictionary mapping variable names to their getter callbacks
        variable_callbacks: dict[str, Callable[[], Optional[str]]] = {
            "GIT_AUTHOR_IDENT": get_author_ident,
            "GIT_COMMITTER_IDENT": get_committer_ident,
            "GIT_EDITOR": get_editor,
            "GIT_SEQUENCE_EDITOR": get_sequence_editor,
            "GIT_PAGER": get_pager,
            "GIT_DEFAULT_BRANCH": get_default_branch,
        }

        # Build the variables dictionary by calling callbacks
        variables: dict[str, str] = {}
        for var_name, callback in variable_callbacks.items():
            value = callback()
            if value is not None:
                variables[var_name] = value
        return variables


def var_list(repo: RepoPath = ".") -> dict[str, str]:
    """List all Git logical variables.

    Args:
      repo: Path to the repository
    Returns:
      A dictionary of all logical variables with their values
    """
    return _get_variables(repo)


def var(repo: RepoPath = ".", variable: str = "GIT_AUTHOR_IDENT") -> str:
    """Get the value of a specific Git logical variable.

    Args:
      repo: Path to the repository
      variable: The variable to query (e.g., 'GIT_AUTHOR_IDENT')
    Returns:
      The value of the requested variable as a string
    Raises:
      KeyError: If the requested variable has no value
    """
    variables = _get_variables(repo)
    if variable in variables:
        return variables[variable]
    else:
        raise KeyError(f"Variable {variable} has no value")
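

# Illustrative usage (not part of the original module), assuming no
# init.defaultBranch is configured:
# >>> var(".", "GIT_DEFAULT_BRANCH")
# 'master'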


def commit(
    repo: RepoPath = ".",
    message: Optional[Union[str, bytes, Callable[[Any, Commit], bytes]]] = None,
    author: Optional[bytes] = None,
    author_timezone: Optional[int] = None,
    committer: Optional[bytes] = None,
    commit_timezone: Optional[int] = None,
    encoding: Optional[bytes] = None,
    no_verify: bool = False,
    signoff: Optional[bool] = None,
    all: bool = False,
    amend: bool = False,
    sign: Optional[bool] = None,
) -> bytes:
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message (string/bytes or callable that takes
        (repo, commit) and returns bytes)
      author: Optional author name and email
      author_timezone: Author timestamp timezone
      committer: Optional committer name and email
      commit_timezone: Commit timestamp timezone
      encoding: Encoding to use for commit message
      no_verify: Skip pre-commit and commit-msg hooks
      signoff: Add Signed-off-by line to commit message. If None, uses
        format.signoff config.
      all: Automatically stage all tracked files that have been modified
      amend: Replace the tip of the current branch by creating a new commit
      sign: GPG sign the commit. If None, uses commit.gpgsign config.
        If True, signs with default GPG key. If False, does not sign.

    Returns: SHA1 of the new commit
    """
    encoding_str = encoding.decode("ascii") if encoding else DEFAULT_ENCODING
    if isinstance(message, str):
        message = message.encode(encoding_str)
    if isinstance(author, str):
        author = author.encode(encoding_str)
    if isinstance(committer, str):
        committer = committer.encode(encoding_str)
    local_timezone = get_user_timezones()
    if author_timezone is None:
        author_timezone = local_timezone[0]
    if commit_timezone is None:
        commit_timezone = local_timezone[1]

    with open_repo_closing(repo) as r:
        # Handle amend logic
        merge_heads = None
        if amend:
            try:
                head_commit = r[r.head()]
                assert isinstance(head_commit, Commit)
            except KeyError:
                raise ValueError("Cannot amend: no existing commit found")

            # If message not provided, use the message from the current HEAD
            if message is None:
                message = head_commit.message
            # If author not provided, use the author from the current HEAD
            if author is None:
                author = head_commit.author
            if author_timezone is None:
                author_timezone = head_commit.author_timezone
            # Use the parent(s) of the current HEAD as our parent(s)
            merge_heads = list(head_commit.parents)

        # If -a flag is used, stage all modified tracked files
        if all:
            index = r.open_index()
            normalizer = r.get_blob_normalizer()

            # Create a wrapper that handles the bytes -> Blob conversion
            if normalizer is not None:

                def filter_callback(data: bytes, path: bytes) -> bytes:
                    from dulwich.objects import Blob

                    blob = Blob()
                    blob.data = data
                    normalized_blob = normalizer.checkin_normalize(blob, path)
                    data_bytes: bytes = normalized_blob.data
                    return data_bytes

            else:
                filter_callback = None

            unstaged_changes = list(
                get_unstaged_changes(index, r.path, filter_callback)
            )
            if unstaged_changes:
                # Convert bytes paths to strings for add function
                modified_files: list[Union[str, bytes, os.PathLike[str]]] = []
                for path in unstaged_changes:
                    if isinstance(path, bytes):
                        modified_files.append(path.decode())
                    else:
                        modified_files.append(path)
                add(r, paths=modified_files)

        # For amend, create dangling commit to avoid adding current HEAD as parent
        if amend:
            commit_sha = r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                signoff=signoff,
                merge_heads=merge_heads,
                ref=None,
            )
            # Update HEAD to point to the new commit
            r.refs[b"HEAD"] = commit_sha
            return commit_sha
        else:
            return r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                signoff=signoff,
                merge_heads=merge_heads,
            )
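

# Illustrative usage sketch (placeholder message/identity, assumes a work
# tree at "."): commit() accepts str or bytes, stages modified tracked files
# with all=True, and returns the new commit SHA as bytes.
def _example_commit_usage() -> None:
    sha = commit(
        ".",
        message="Fix parser edge case",  # placeholder message
        author=b"Alice <alice@example.com>",  # placeholder identity
        all=True,  # like `git commit -a`
        no_verify=True,  # skip pre-commit and commit-msg hooks
    )
    print(sha.decode("ascii"))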


def commit_tree(
    repo: RepoPath,
    tree: bytes,
    message: Optional[Union[str, bytes]] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> bytes:
    """Create a new commit object.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Commit message
      author: Optional author name and email
      committer: Optional committer name and email
    """
    with open_repo_closing(repo) as r:
        if isinstance(message, str):
            message = message.encode(DEFAULT_ENCODING)
        return r.get_worktree().commit(
            message=message, tree=tree, committer=committer, author=author
        )
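

# Illustrative sketch: commit_tree() wraps an existing tree id in a commit
# object without touching the index. The tree SHA is supplied by the caller.
def _example_commit_tree_usage(tree_sha: bytes) -> bytes:
    return commit_tree(
        ".",
        tree_sha,
        message="Snapshot of a prebuilt tree",  # placeholder message
        committer=b"CI Bot <ci@example.com>",  # placeholder identity
    )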


def interpret_trailers(
    message: Union[str, bytes],
    *,
    trailers: Optional[list[tuple[str, str]]] = None,
    trim_empty: bool = False,
    only_trailers: bool = False,
    only_input: bool = False,
    unfold: bool = False,
    parse: bool = False,
    where: str = "end",
    if_exists: str = "addIfDifferentNeighbor",
    if_missing: str = "add",
    separators: str = ":",
) -> bytes:
    r"""Parse and manipulate trailers in a commit message.

    This function implements the functionality of `git interpret-trailers`,
    allowing parsing and manipulation of structured metadata (trailers) in
    commit messages.

    Trailers are key-value pairs at the end of commit messages, formatted like:

        Signed-off-by: Alice <alice@example.com>
        Reviewed-by: Bob <bob@example.com>

    Args:
      message: The commit message (string or bytes)
      trailers: List of (key, value) tuples to add as new trailers
      trim_empty: Remove trailers with empty values
      only_trailers: Output only the trailers, not the message body
      only_input: Don't add new trailers, only parse existing ones
      unfold: Join multiline trailer values into a single line
      parse: Shorthand for --only-trailers --only-input --unfold
      where: Where to add new trailers ('end', 'start', 'after', 'before')
      if_exists: How to handle duplicate keys
        - 'add': Always add
        - 'replace': Replace all existing
        - 'addIfDifferent': Add only if value differs from all existing
        - 'addIfDifferentNeighbor': Add only if value differs from neighbors
        - 'doNothing': Don't add if key exists
      if_missing: What to do if key doesn't exist ('add' or 'doNothing')
      separators: Valid separator characters (default ':')

    Returns:
      The processed message as bytes

    Examples:
      >>> msg = b"Subject\\n\\nBody text\\n"
      >>> interpret_trailers(msg, trailers=[("Signed-off-by", "Alice <alice@example.com>")])
      b'Subject\\n\\nBody text\\n\\nSigned-off-by: Alice <alice@example.com>\\n'

      >>> msg = b"Subject\\n\\nSigned-off-by: Alice\\n"
      >>> interpret_trailers(msg, only_trailers=True)
      b'Signed-off-by: Alice\\n'
    """
    # Handle --parse shorthand
    if parse:
        only_trailers = True
        only_input = True
        unfold = True

    # Convert message to bytes
    if isinstance(message, str):
        message_bytes = message.encode("utf-8")
    else:
        message_bytes = message

    # Parse existing trailers
    _message_body, parsed_trailers = parse_trailers(message_bytes, separators)

    # Apply unfold if requested
    if unfold:
        for trailer in parsed_trailers:
            # Replace newlines and multiple spaces with single space
            trailer.value = " ".join(trailer.value.split())

    # Apply trim_empty if requested
    if trim_empty:
        parsed_trailers = [t for t in parsed_trailers if t.value.strip()]

    # Add new trailers if requested and not only_input
    if not only_input and trailers:
        for key, value in trailers:
            message_bytes = add_trailer_to_message(
                message_bytes,
                key,
                value,
                separators[0],  # Use first separator as default
                where=where,
                if_exists=if_exists,
                if_missing=if_missing,
            )
        # Re-parse to get updated trailers for output
        if only_trailers:
            _message_body, parsed_trailers = parse_trailers(
                message_bytes, separators
            )

    # Return based on only_trailers flag
    if only_trailers:
        return format_trailers(parsed_trailers)
    else:
        return message_bytes
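

# Illustrative sketch building on the doctest examples above: if_exists
# controls duplicate handling, here replacing an existing trailer value
# instead of appending a second Signed-off-by line.
def _example_interpret_trailers_usage() -> None:
    msg = b"Subject\n\nSigned-off-by: Alice <alice@example.com>\n"
    updated = interpret_trailers(
        msg,
        trailers=[("Signed-off-by", "Bob <bob@example.com>")],
        if_exists="replace",
    )
    print(updated.decode("utf-8"))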


def init(
    path: Union[str, os.PathLike[str]] = ".",
    *,
    bare: bool = False,
    symlinks: Optional[bool] = None,
) -> Repo:
    """Create a new git repository.

    Args:
      path: Path to repository.
      bare: Whether to create a bare repository.
      symlinks: Whether to create actual symlinks (defaults to autodetect)

    Returns: A Repo instance
    """
    if not os.path.exists(path):
        os.mkdir(path)

    if bare:
        return Repo.init_bare(path)
    else:
        return Repo.init(path, symlinks=symlinks)
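

# Minimal sketch: init() creates the directory if missing; the returned Repo
# should be closed when no longer needed.
def _example_init_usage(path: str) -> None:
    repo = init(path, bare=False)
    repo.close()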


def clone(
    source: Union[str, bytes, Repo],
    target: Optional[Union[str, os.PathLike[str]]] = None,
    bare: bool = False,
    checkout: Optional[bool] = None,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    outstream: Optional[BinaryIO] = None,
    origin: Optional[str] = "origin",
    depth: Optional[int] = None,
    branch: Optional[Union[str, bytes]] = None,
    config: Optional[Config] = None,
    filter_spec: Optional[str] = None,
    protocol_version: Optional[int] = None,
    recurse_submodules: bool = False,
    **kwargs: Union[Union[str, bytes], Sequence[Union[str, bytes]]],
) -> Repo:
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII
        string. Only used if the server supports the Git protocol-v2
        'filter' feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules
      **kwargs: Additional keyword arguments including refspecs to fetch.
        Can be a bytestring, a string, or a list of bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
        # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()

    if checkout is None:
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        if isinstance(source, Repo):
            raise ValueError("target must be specified when cloning from a Repo object")
        elif isinstance(source, bytes):
            target = source.split(b"/")[-1].decode()
        else:
            target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    mkdir = not os.path.exists(target)

    if isinstance(source, Repo):
        # For direct repo cloning, use LocalGitClient
        from .client import GitClient, LocalGitClient

        client: GitClient = LocalGitClient(config=config)
        path = source.path
    else:
        source_str = source.decode() if isinstance(source, bytes) else source
        (client, path) = get_transport_and_path(source_str, config=config, **kwargs)  # type: ignore[arg-type]

    filter_spec_bytes: Optional[bytes] = None
    if filter_spec:
        filter_spec_bytes = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        str(target),  # Convert PathLike to str
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch.decode() if branch else None,  # Convert bytes to str
        progress=lambda data: (errstream.write(data), None)[1],
        depth=depth,
        filter_spec=filter_spec_bytes,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True, recursive=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing
            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
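

# Illustrative sketch (URL and target are placeholders): a shallow,
# single-branch clone. filter_spec is only honoured when the server supports
# protocol-v2 filtering, as noted in the docstring above.
def _example_clone_usage() -> None:
    repo = clone(
        "https://example.com/project.git",  # placeholder URL
        "project",
        depth=1,
        branch="main",
    )
    repo.close()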


def add(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    paths: Optional[
        Union[
            Sequence[Union[str, bytes, os.PathLike[str]]], str, bytes, os.PathLike[str]
        ]
    ] = None,
) -> tuple[list[str], set[str]]:
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files
        from the repository root.

    Returns: Tuple of (list of added relative paths, set of ignored paths)

    If the repository contains ignored directories, the returned set will
    contain the path to an ignored directory (with trailing slash). Individual
    files within ignored directories will not be returned.

    Note: When paths=None, this function adds all untracked and modified files
    from the entire repository, mimicking 'git add -A' behavior.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        if normalizer is not None:

            def filter_callback(data: bytes, path: bytes) -> bytes:
                from dulwich.objects import Blob

                blob = Blob()
                blob.data = data
                normalized_blob = normalizer.checkin_normalize(blob, path)
                data_bytes: bytes = normalized_blob.data
                return data_bytes

        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)
        all_unstaged_paths = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        if not paths:
            # When no paths specified, add all untracked and modified files
            # from the repo root
            paths = [str(repo_path)]

        relpaths = []
        if isinstance(paths, (str, bytes, os.PathLike)):
            paths = [paths]
        for p in paths:
            # Handle bytes paths by decoding them
            if isinstance(p, bytes):
                p = p.decode("utf-8")
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path

            # Don't resolve symlinks completely - only resolve the parent
            # directory to avoid issues when symlinks point outside the
            # repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()

            try:
                relpath = str(resolved_path.relative_to(repo_path)).replace(
                    os.sep, "/"
                )
            except ValueError as e:
                # Path is not within the repository
                p_str = p.decode() if isinstance(p, bytes) else str(p)
                raise ValueError(
                    f"Path {p_str} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory
                    # we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue
                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)

        r.get_worktree().stage(relpaths)
    return (relpaths, ignored)
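

# Illustrative sketch (placeholder path): add() returns the staged relative
# paths and the set of paths skipped because they are ignored.
def _example_add_usage() -> None:
    added, ignored = add(".", paths=["src/new_file.py"])  # placeholder path
    print("staged:", added)
    print("ignored:", sorted(ignored))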


def _is_subdir(
    subdir: Union[str, os.PathLike[str]], parentdir: Union[str, os.PathLike[str]]
) -> bool:
    """Check whether subdir is parentdir or a subdir of parentdir.

    If parentdir or subdir is a relative path, it will be disambiguated
    relative to the pwd.
    """
    parentdir_abs = os.path.realpath(parentdir) + os.path.sep
    subdir_abs = os.path.realpath(subdir) + os.path.sep
    return subdir_abs.startswith(parentdir_abs)


# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    target_dir: Optional[Union[str, os.PathLike[str]]] = None,
) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        config.get_boolean((b"clean",), b"requireForce", True)

        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            # target_dir and r.path are both str, so ap must be str
            assert isinstance(ap, str)
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                if not is_tracked and not is_ignored:
                    os.remove(ap)
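

# Minimal sketch (placeholder directory): remove untracked files beneath a
# subdirectory of the work tree, like running `git clean -fd` there.
def _example_clean_usage(repo_path: str) -> None:
    clean(repo_path, target_dir=os.path.join(repo_path, "build"))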


def remove(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    paths: Sequence[Union[str, bytes, os.PathLike[str]]] = [],
    cached: bool = False,
) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the
        repository root.
      cached: Only remove from index, not from working directory
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        blob_normalizer = r.get_blob_normalizer()
        for p in paths:
            # If path is absolute, use it as-is. Otherwise, treat it as
            # relative to the repository root.
            p_str = os.fsdecode(p) if isinstance(p, bytes) else str(p)
            if os.path.isabs(p_str):
                full_path = p_str
            else:
                # Treat relative paths as relative to the repository root
                full_path = os.path.join(r.path, p_str)
            tree_path = path_to_tree_path(r.path, full_path)
            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)
            try:
                entry = index[tree_path]
                if isinstance(entry, ConflictedIndexEntry):
                    raise Error(f"{p_str} has conflicts in the index")
                index_sha = entry.sha
            except KeyError as exc:
                raise Error(f"{p_str} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                        # Apply checkin normalization to compare apples to apples
                        if blob_normalizer is not None:
                            blob = blob_normalizer.checkin_normalize(blob, tree_path)
                    except OSError:
                        pass
                    else:
                        try:
                            head_commit = r[r.head()]
                            assert isinstance(head_commit, Commit)
                            committed_sha = tree_lookup_path(
                                r.__getitem__, head_commit.tree, tree_path
                            )[1]
                        except KeyError:
                            committed_sha = None

                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p_str}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p_str}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()


rm = remove
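

# Minimal sketch (placeholder path): cached=True drops the entry from the
# index but leaves the file on disk, mirroring `git rm --cached`.
def _example_remove_usage() -> None:
    remove(".", paths=["obsolete.txt"], cached=True)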


def mv(
    repo: Union[str, os.PathLike[str], Repo],
    source: Union[str, bytes, os.PathLike[str]],
    destination: Union[str, bytes, os.PathLike[str]],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination
        already exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)
        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)
        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)
        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()


move = mv
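

# Minimal sketch (placeholder paths): mv() renames in the filesystem and the
# index in one step; force=True overwrites an existing destination.
def _example_mv_usage() -> None:
    mv(".", "docs/old.md", "docs/new.md")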


def commit_decode(
    commit: Commit, contents: bytes, default_encoding: str = DEFAULT_ENCODING
) -> str:
    """Decode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.decode(encoding, "replace")


def commit_encode(
    commit: Commit, contents: str, default_encoding: str = DEFAULT_ENCODING
) -> bytes:
    """Encode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.encode(encoding)


def print_commit(
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a human-readable commit log entry.

    Args:
      commit: A `Commit` object
      decode: Function to decode commit data
      outstream: A stream file to write to
    """
    outstream.write("-" * 50 + "\n")
    outstream.write("commit: " + commit.id.decode("ascii") + "\n")
    if len(commit.parents) > 1:
        outstream.write(
            "merge: "
            + "...".join([c.decode("ascii") for c in commit.parents[1:]])
            + "\n"
        )
    outstream.write("Author: " + decode(commit.author) + "\n")
    if commit.author != commit.committer:
        outstream.write("Committer: " + decode(commit.committer) + "\n")

    time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(commit.author_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    if commit.message:
        outstream.write("\n")
        outstream.write(decode(commit.message) + "\n")
        outstream.write("\n")


def print_tag(
    tag: Tag, decode: Callable[[bytes], str], outstream: TextIO = sys.stdout
) -> None:
    """Write a human-readable tag.

    Args:
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream to write to
    """
    outstream.write("Tagger: " + decode(tag.tagger) + "\n")

    time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(tag.message))
    outstream.write("\n")


def show_blob(
    repo: RepoPath,
    blob: Blob,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a blob to a stream.

    Args:
      repo: A `Repo` object
      blob: A `Blob` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write(decode(blob.data))


def show_commit(
    repo: RepoPath,
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    from .diff import ColorizedDiffStream

    # Create a wrapper for ColorizedDiffStream to handle string/bytes conversion
    class _StreamWrapper:
        def __init__(self, stream: "ColorizedDiffStream") -> None:
            self.stream = stream

        def write(self, data: Union[str, bytes]) -> None:
            if isinstance(data, str):
                # Convert string to bytes for ColorizedDiffStream
                self.stream.write(data.encode("utf-8"))
            else:
                self.stream.write(data)

    with open_repo_closing(repo) as r:
        # Resolve the first parent's tree (not its commit id) as the diff base
        if commit.parents:
            parent_commit = r[commit.parents[0]]
            assert isinstance(parent_commit, Commit)
            base_tree = parent_commit.tree
        else:
            base_tree = None
        # Use wrapper for ColorizedDiffStream, direct stream for others
        if isinstance(outstream, ColorizedDiffStream):
            wrapped_stream = _StreamWrapper(outstream)
            print_commit(commit, decode=decode, outstream=wrapped_stream)
            # Write diff directly to the ColorizedDiffStream as bytes
            write_tree_diff(
                outstream,
                r.object_store,
                base_tree,
                commit.tree,
            )
        else:
            print_commit(commit, decode=decode, outstream=outstream)
            # Traditional path: buffer diff and write as decoded text
            diffstream = BytesIO()
            write_tree_diff(diffstream, r.object_store, base_tree, commit.tree)
            diffstream.seek(0)
            outstream.write(commit_decode(commit, diffstream.getvalue()))


def show_tree(
    repo: RepoPath,
    tree: Tree,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tree to a stream.

    Args:
      repo: A `Repo` object
      tree: A `Tree` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    for n in tree:
        outstream.write(decode(n) + "\n")


def show_tag(
    repo: RepoPath,
    tag: Tag,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        print_tag(tag, decode, outstream)
        obj = r[tag.object[1]]
        assert isinstance(obj, (Tree, Blob, Commit, Tag))
        show_object(repo, obj, decode, outstream)


def show_object(
    repo: RepoPath,
    obj: Union[Tree, Blob, Commit, Tag],
    decode: Callable[[bytes], str],
    outstream: TextIO,
) -> None:
    """Show details of a git object."""
    handlers: dict[bytes, Callable[[RepoPath, Any, Any, TextIO], None]] = {
        b"tree": show_tree,
        b"blob": show_blob,
        b"commit": show_commit,
        b"tag": show_tag,
    }
    handler = handlers.get(obj.type_name)
    if handler is None:
        raise ValueError(f"Unknown object type: {obj.type_name.decode()}")
    handler(repo, obj, decode, outstream)
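

# Illustrative sketch: because show_object() dispatches on type_name, any of
# the four object kinds can be printed through one call site.
def _example_show_object_usage(repo_path: str, sha: bytes) -> None:
    with open_repo_closing(repo_path) as r:
        obj = r[sha]
        assert isinstance(obj, (Tree, Blob, Commit, Tag))
        show_object(r, obj, lambda b: b.decode("utf-8", "replace"), sys.stdout)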


def print_name_status(changes: Iterator[TreeChange]) -> Iterator[str]:
    """Print a simple status summary, listing changed files."""
    for change in changes:
        if not change:
            continue
        if isinstance(change, list):
            change = change[0]
        if change.type == CHANGE_ADD:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "A"
        elif change.type == CHANGE_DELETE:
            assert change.old is not None
            path1 = change.old.path
            assert path1 is not None
            path2 = b""
            kind = "D"
        elif change.type == CHANGE_MODIFY:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "M"
        elif change.type in RENAME_CHANGE_TYPES:
            assert change.old is not None and change.new is not None
            path1 = change.old.path
            assert path1 is not None
            path2_opt = change.new.path
            assert path2_opt is not None
            path2 = path2_opt
            if change.type == CHANGE_RENAME:
                kind = "R"
            elif change.type == CHANGE_COPY:
                kind = "C"
        path1_str = (
            path1.decode("utf-8", errors="replace")
            if isinstance(path1, bytes)
            else path1
        )
        path2_str = (
            path2.decode("utf-8", errors="replace")
            if isinstance(path2, bytes)
            else path2
        )
        yield f"{kind:<8}{path1_str:<20}{path2_str:<20}"


def log(
    repo: RepoPath = ".",
    paths: Optional[Sequence[Union[str, bytes]]] = None,
    outstream: TextIO = sys.stdout,
    max_entries: Optional[int] = None,
    reverse: bool = False,
    name_status: bool = False,
) -> None:
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      reverse: Reverse order in which entries are printed
      name_status: Print name status
      max_entries: Optional maximum number of entries to display
    """
    with open_repo_closing(repo) as r:
        try:
            include = [r.head()]
        except KeyError:
            include = []
        # Convert paths to bytes if needed
        paths_bytes = None
        if paths:
            paths_bytes = [p.encode() if isinstance(p, str) else p for p in paths]
        walker = r.get_walker(
            include=include, max_entries=max_entries, paths=paths_bytes, reverse=reverse
        )
        for entry in walker:

            def decode_wrapper(x: bytes) -> str:
                return commit_decode(entry.commit, x)

            print_commit(entry.commit, decode_wrapper, outstream)
            if name_status:
                outstream.writelines(
                    [
                        line + "\n"
                        for line in print_name_status(
                            cast(Iterator[TreeChange], entry.changes())
                        )
                    ]
                )
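

# Minimal sketch: log() writes to any text stream, so output can be captured
# in memory instead of going to stdout.
def _example_log_usage() -> str:
    from io import StringIO

    buf = StringIO()
    log(".", max_entries=5, outstream=buf)
    return buf.getvalue()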


# TODO(jelmer): better default for encoding?
def show(
    repo: RepoPath = ".",
    objects: Optional[Sequence[Union[str, bytes]]] = None,
    outstream: TextIO = sys.stdout,
    default_encoding: str = DEFAULT_ENCODING,
) -> None:
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the commit
    """
    if objects is None:
        objects = ["HEAD"]
    if isinstance(objects, (str, bytes)):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):

                def decode(x: bytes) -> str:
                    return commit_decode(o, x, default_encoding)

            else:

                def decode(x: bytes) -> str:
                    return x.decode(default_encoding)

            assert isinstance(o, (Tree, Blob, Commit, Tag))
            show_object(r, o, decode, outstream)


def diff_tree(
    repo: RepoPath,
    old_tree: Union[str, bytes, Tree],
    new_tree: Union[str, bytes, Tree],
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Compares the content and mode of blobs found via two tree objects.

    Args:
      repo: Path to repository
      old_tree: Id of old tree
      new_tree: Id of new tree
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        if isinstance(old_tree, Tree):
            old_tree_id: Optional[bytes] = old_tree.id
        elif isinstance(old_tree, str):
            old_tree_id = old_tree.encode()
        else:
            old_tree_id = old_tree
        if isinstance(new_tree, Tree):
            new_tree_id: Optional[bytes] = new_tree.id
        elif isinstance(new_tree, str):
            new_tree_id = new_tree.encode()
        else:
            new_tree_id = new_tree
        write_tree_diff(outstream, r.object_store, old_tree_id, new_tree_id)


def diff(
    repo: RepoPath = ".",
    commit: Optional[Union[str, bytes, Commit]] = None,
    commit2: Optional[Union[str, bytes, Commit]] = None,
    staged: bool = False,
    paths: Optional[Sequence[Union[str, bytes]]] = None,
    outstream: BinaryIO = default_bytes_out_stream,
    diff_algorithm: Optional[str] = None,
) -> None:
    """Show diff.

    Args:
      repo: Path to repository
      commit: First commit to compare. If staged is True, compare
        index to this commit. If staged is False, compare working tree
        to this commit. If None, defaults to HEAD for staged and index
        for unstaged.
      commit2: Second commit to compare against first commit. If provided,
        show diff between commit and commit2 (ignoring staged flag).
      staged: If True, show staged changes (index vs commit).
        If False, show unstaged changes (working tree vs commit/index).
        Ignored if commit2 is provided.
      paths: Optional list of paths to limit diff
      outstream: Stream to write to
      diff_algorithm: Algorithm to use for diffing ("myers" or "patience"),
        defaults to the underlying function's default if None
    """
    from . import diff as diff_module

    with open_repo_closing(repo) as r:
        # Normalize paths to bytes, treating an empty list like None
        byte_paths: Optional[list[bytes]] = None
        if paths:
            byte_paths = []
            for p in paths:
                if isinstance(p, str):
                    byte_paths.append(p.encode("utf-8"))
                else:
                    byte_paths.append(p)

        # Resolve commit refs to SHAs if provided
        if commit is not None:
            if isinstance(commit, Commit):
                # Already a Commit object
                commit_sha = commit.id
                commit_obj = commit
            else:
                # parse_commit handles both refs and SHAs, and always
                # returns a Commit object
                commit_obj = parse_commit(r, commit)
                commit_sha = commit_obj.id
        else:
            commit_sha = None
            commit_obj = None

        if commit2 is not None:
            # Compare two commits
            if isinstance(commit2, Commit):
                commit2_obj = commit2
            else:
                commit2_obj = parse_commit(r, commit2)

            # Get trees from commits
            old_tree = commit_obj.tree if commit_obj else None
            new_tree = commit2_obj.tree

            # Use tree_changes to get the changes and apply path filtering
            changes = r.object_store.tree_changes(old_tree, new_tree)
            for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
                # Skip if paths are specified and this change doesn't match
                if byte_paths:
                    path_to_check = newpath or oldpath
                    assert path_to_check is not None
                    if not any(
                        path_to_check == p or path_to_check.startswith(p + b"/")
                        for p in byte_paths
                    ):
                        continue
                write_object_diff(
                    outstream,
                    r.object_store,
                    (oldpath, oldmode, oldsha),
                    (newpath, newmode, newsha),
                    diff_algorithm=diff_algorithm,
                )
        elif staged:
            # Show staged changes (index vs commit)
            diff_module.diff_index_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        elif commit is not None:
            # Compare working tree to a specific commit
            assert (
                commit_sha is not None
            )  # mypy: commit_sha is set when commit is not None
            diff_module.diff_working_tree_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        else:
            # Compare working tree to index
            diff_module.diff_working_tree_to_index(
                r, outstream, byte_paths, diff_algorithm=diff_algorithm
            )
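

# Illustrative sketch of the modes described above; diff output is bytes, so
# it is collected in a BytesIO buffer.
def _example_diff_usage() -> bytes:
    buf = BytesIO()
    diff(".", staged=True, outstream=buf)  # index vs HEAD
    diff(".", commit="HEAD", outstream=buf)  # working tree vs HEAD
    diff(".", paths=["src"], outstream=buf)  # unstaged, limited to one path
    return buf.getvalue()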


def rev_list(
    repo: RepoPath,
    commits: Sequence[Union[str, bytes]],
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Lists commit objects in reverse chronological order.

    Args:
      repo: Path to repository
      commits: Commits over which to iterate
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        for entry in r.get_walker(
            include=[r[c if isinstance(c, bytes) else c.encode()].id for c in commits]
        ):
            outstream.write(entry.commit.id + b"\n")


def _canonical_part(url: str) -> str:
    name = url.rsplit("/", 1)[-1]
    if name.endswith(".git"):
        name = name[:-4]
    return name


def submodule_add(
    repo: Union[str, os.PathLike[str], Repo],
    url: str,
    path: Optional[Union[str, os.PathLike[str]]] = None,
    name: Optional[str] = None,
) -> None:
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live
      name: Name for the submodule
    """
    with open_repo_closing(repo) as r:
        if path is None:
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = os.fsdecode(path) if path is not None else None
        if name is None:
            raise Error("Submodule name must be specified or derivable from path")

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", os.fsdecode(path))
        config.write_to_path()


def submodule_init(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Initialize submodules.

    Args:
      repo: Path to repository
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        for path, url, name in read_submodules(gitmodules_path):
            config.set((b"submodule", name), b"active", True)
            config.set((b"submodule", name), b"url", url)
        config.write_to_path()


def submodule_list(repo: RepoPath) -> Iterator[tuple[str, str]]:
    """List submodules.

    Args:
      repo: Path to repository
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        head_commit = r[r.head()]
        assert isinstance(head_commit, Commit)
        for path, sha in iter_cached_submodules(r.object_store, head_commit.tree):
            yield path.decode(DEFAULT_ENCODING), sha.decode(DEFAULT_ENCODING)


def submodule_update(
    repo: Union[str, os.PathLike[str], Repo],
    paths: Optional[Sequence[Union[str, bytes, os.PathLike[str]]]] = None,
    init: bool = False,
    force: bool = False,
    recursive: bool = False,
    errstream: Optional[BinaryIO] = None,
) -> None:
    """Update submodules.

    Args:
      repo: Path to repository
      paths: Optional list of specific submodule paths to update.
        If None, updates all.
      init: If True, initialize submodules first
      force: Force update even if local changes exist
      recursive: If True, recursively update nested submodules
      errstream: Error stream for error messages
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        if init:
            submodule_init(r)

        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")

        # Get list of submodules to update
        submodules_to_update = []
        head_commit = r[r.head()]
        assert isinstance(head_commit, Commit)
        for path, sha in iter_cached_submodules(r.object_store, head_commit.tree):
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )
            if paths is None or path_str in paths:
                submodules_to_update.append((path, sha))

        # Read submodule configuration
        for path, target_sha in submodules_to_update:
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )

            # Find the submodule name from .gitmodules
            submodule_name: Optional[bytes] = None
            for sm_path, sm_url, sm_name in read_submodules(gitmodules_path):
                if sm_path == path:
                    submodule_name = sm_name
                    break

            if not submodule_name:
                continue

            # Get the URL from config
            section = (
                b"submodule",
                submodule_name
                if isinstance(submodule_name, bytes)
                else submodule_name.encode(),
            )
            try:
                url_value = config.get(section, b"url")
                if isinstance(url_value, bytes):
                    url = url_value.decode(DEFAULT_ENCODING)
                else:
                    url = url_value
            except KeyError:
                # URL not in config, skip this submodule
                continue

            # Get or create the submodule repository paths
            submodule_path = os.path.join(r.path, path_str)
            submodule_git_dir = os.path.join(r.controldir(), "modules", path_str)

            # Clone or fetch the submodule
            if not os.path.exists(submodule_git_dir):
                # Clone the submodule as bare repository
                os.makedirs(os.path.dirname(submodule_git_dir), exist_ok=True)

                # Clone to the git directory
                sub_repo = clone(url, submodule_git_dir, bare=True, checkout=False)
                sub_repo.close()

                # Create the submodule directory if it doesn't exist
                if not os.path.exists(submodule_path):
                    os.makedirs(submodule_path)

                # Create .git file in the submodule directory
                relative_git_dir = os.path.relpath(submodule_git_dir, submodule_path)
                git_file_path = os.path.join(submodule_path, ".git")
                with open(git_file_path, "w") as f:
                    f.write(f"gitdir: {relative_git_dir}\n")

                # Set up working directory configuration
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    sub_config = sub_repo.get_config()
                    sub_config.set(
                        (b"core",),
                        b"worktree",
                        os.path.abspath(submodule_path).encode(),
                    )
                    sub_config.write_to_path()

                    # Checkout the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Build the index and checkout files
                    tree = sub_repo[target_sha]
                    if hasattr(tree, "tree"):  # If it's a commit, get the tree
                        tree_id = tree.tree
                    else:
                        tree_id = target_sha

                    build_index_from_tree(
                        submodule_path,
                        sub_repo.index_path(),
                        sub_repo.object_store,
                        tree_id,
                    )
            else:
                # Fetch and checkout in existing submodule
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    # Fetch from remote
                    client, path_segments = get_transport_and_path(url)
                    client.fetch(path_segments.encode(), sub_repo)

                    # Update to the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Reset the working directory
                    reset(sub_repo, "hard", target_sha)

            # Recursively update nested submodules if requested
            if recursive:
                submodule_gitmodules = os.path.join(submodule_path, ".gitmodules")
                if os.path.exists(submodule_gitmodules):
                    submodule_update(
                        submodule_path,
                        paths=None,
                        init=True,  # Always initialize nested submodules
                        force=force,
                        recursive=True,
                        errstream=errstream,
                    )
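

# Illustrative sketch (placeholder URL/path): registering a submodule and
# materialising it, then listing what the current HEAD records.
def _example_submodule_usage() -> None:
    submodule_add(".", "https://example.com/lib.git", path="vendor/lib")
    submodule_init(".")
    submodule_update(".", init=True)
    for path, sha in submodule_list("."):
        print(path, sha)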


def tag_create(
    repo: RepoPath,
    tag: Union[str, bytes],
    author: Optional[Union[str, bytes]] = None,
    message: Optional[Union[str, bytes]] = None,
    annotated: bool = False,
    objectish: Union[str, bytes] = "HEAD",
    tag_time: Optional[int] = None,
    tag_timezone: Optional[int] = None,
    sign: Optional[bool] = None,
    encoding: str = DEFAULT_ENCODING,
) -> None:
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG Sign the tag (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)
      encoding: Encoding to use for tag messages
    """
    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)

        if isinstance(tag, str):
            tag = tag.encode(encoding)
        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                author = get_user_identity(r.get_config_stack())
            elif isinstance(author, str):
                author = author.encode(encoding)
            else:
                assert isinstance(author, bytes)
            tag_obj.tagger = author
            if isinstance(message, str):
                message = message.encode(encoding)
            elif isinstance(message, bytes):
                pass
            else:
                message = b""
            tag_obj.message = message + "\n".encode(encoding)
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                tag_timezone = get_user_timezones()[1]
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone.encode())
            tag_obj.tag_timezone = tag_timezone

            # Check if we should sign the tag
            config = r.get_config_stack()
            if sign is None:
                # Check tag.gpgSign configuration when sign is not explicitly set
                try:
                    should_sign = config.get_boolean(
                        (b"tag",), b"gpgsign", default=False
                    )
                except KeyError:
                    should_sign = False  # Default to not signing if no config
            else:
                should_sign = sign

            # Get the signing key from config if signing is enabled
            keyid = None
            if should_sign:
                try:
                    keyid_bytes = config.get((b"user",), b"signingkey")
                    keyid = keyid_bytes.decode() if keyid_bytes else None
                except KeyError:
                    keyid = None
                tag_obj.sign(keyid)

            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id
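

# Minimal sketch: an annotated, unsigned tag on HEAD; tag_list() returns the
# sorted tag names as bytes.
def _example_tag_usage() -> None:
    tag_create(
        ".",
        "v1.0.0",  # placeholder tag name
        message="Release 1.0.0",
        annotated=True,
        sign=False,
    )
    print(tag_list("."))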


def verify_commit(
    repo: RepoPath,
    committish: Union[str, bytes] = "HEAD",
    keyids: Optional[list[str]] = None,
) -> None:
    """Verify GPG signature on a commit.

    Args:
      repo: Path to repository
      committish: Commit to verify (defaults to HEAD)
      keyids: Optional list of trusted key IDs. If provided, the commit
        must be signed by one of these keys. If not provided, just verifies
        that the commit has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if commit was not signed by a key
        specified in keyids
    """
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        commit.verify(keyids)


def verify_tag(
    repo: RepoPath,
    tagname: Union[str, bytes],
    keyids: Optional[list[str]] = None,
) -> None:
    """Verify GPG signature on a tag.

    Args:
      repo: Path to repository
      tagname: Name of tag to verify
      keyids: Optional list of trusted key IDs. If provided, the tag
        must be signed by one of these keys. If not provided, just verifies
        that the tag has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if tag was not signed by a key
        specified in keyids
    """
    with open_repo_closing(repo) as r:
        if isinstance(tagname, str):
            tagname = tagname.encode()
        tag_ref = _make_tag_ref(tagname)
        tag_id = r.refs[tag_ref]
        tag_obj = r[tag_id]
        if not isinstance(tag_obj, Tag):
            raise Error(f"{tagname!r} does not point to a tag object")
        tag_obj.verify(keyids)


def tag_list(repo: RepoPath, outstream: TextIO = sys.stdout) -> list[bytes]:
    """List all tags.

    Args:
      repo: Path to repository
      outstream: Stream to write tags to (currently unused; tag names are
        returned instead)

    Returns:
      Sorted list of tag names as bytes
    """
    with open_repo_closing(repo) as r:
        tags = sorted(r.refs.as_dict(b"refs/tags"))
        return tags


def tag_delete(repo: RepoPath, name: Union[str, bytes]) -> None:
    """Remove a tag.

    Args:
      repo: Path to repository
      name: Name of tag to remove
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, str):
            # Encode str names so they match the signature's contract
            names = [name.encode(DEFAULT_ENCODING)]
        elif isinstance(name, bytes):
            names = [name]
        elif isinstance(name, list):
            names = name
        else:
            raise Error(f"Unexpected tag name type {name!r}")
        for name in names:
            del r.refs[_make_tag_ref(name)]


def _make_notes_ref(name: bytes) -> bytes:
    """Make a notes ref name."""
    if name.startswith(b"refs/notes/"):
        return name
    return LOCAL_NOTES_PREFIX + name


def notes_add(
    repo: RepoPath,
    object_sha: bytes,
    note: bytes,
    ref: bytes = b"commits",
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
    message: Optional[bytes] = None,
) -> bytes:
    """Add or update a note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to annotate
      note: Note content
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
      author: Author identity (defaults to committer)
      committer: Committer identity (defaults to config)
      message: Commit message for the notes update

    Returns:
      SHA of the new notes commit
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(note, str):
            note = note.encode(DEFAULT_ENCODING)
        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.set_note(
            object_sha,
            note,
            notes_ref,
            author=author,
            committer=committer,
            message=message,
            config=config,
        )


def notes_remove(
    repo: RepoPath,
    object_sha: bytes,
    ref: bytes = b"commits",
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
    message: Optional[bytes] = None,
) -> Optional[bytes]:
    """Remove a note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to remove notes from
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
      author: Author identity (defaults to committer)
      committer: Committer identity (defaults to config)
      message: Commit message for the notes removal

    Returns:
      SHA of the new notes commit, or None if no note existed
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.remove_note(
            object_sha,
            notes_ref,
            author=author,
            committer=committer,
            message=message,
            config=config,
        )


def notes_show(
    repo: Union[str, os.PathLike[str], Repo], object_sha: bytes, ref: bytes = b"commits"
) -> Optional[bytes]:
    """Show the note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)

    Returns:
      Note content as bytes, or None if no note exists
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.get_note(object_sha, notes_ref, config=config)


def notes_list(repo: RepoPath, ref: bytes = b"commits") -> list[tuple[bytes, bytes]]:
    """List all notes in a notes ref.

    Args:
      repo: Path to repository
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)

    Returns:
      List of tuples of (object_sha, note_content)
    """
    with open_repo_closing(repo) as r:
        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.list_notes(notes_ref, config=config)
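
# Usage sketch (illustrative only) of the notes API above; the repository
# path is a hypothetical placeholder. parse_object resolves committish
# names, so b"HEAD" works in place of a raw SHA.
#
#   notes_add("/path/to/repo", b"HEAD", b"reviewed by alice")
#   assert notes_show("/path/to/repo", b"HEAD") == b"reviewed by alice"
#   for object_sha, note in notes_list("/path/to/repo"):
#       print(object_sha, note)
#   notes_remove("/path/to/repo", b"HEAD")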


def replace_list(repo: RepoPath) -> list[tuple[bytes, bytes]]:
    """List all replacement refs.

    Args:
      repo: Path to repository

    Returns:
      List of tuples of (object_sha, replacement_sha) where object_sha is the
      object being replaced and replacement_sha is what it's replaced with
    """
    with open_repo_closing(repo) as r:
        replacements = []
        for ref in r.refs.keys():
            if ref.startswith(LOCAL_REPLACE_PREFIX):
                object_sha = ref[len(LOCAL_REPLACE_PREFIX) :]
                replacement_sha = r.refs[ref]
                replacements.append((object_sha, replacement_sha))
        return replacements


def replace_delete(repo: RepoPath, object_sha: Union[str, bytes]) -> None:
    """Delete a replacement ref.

    Args:
      repo: Path to repository
      object_sha: SHA of the object whose replacement should be removed
    """
    with open_repo_closing(repo) as r:
        # Convert to bytes if string
        if isinstance(object_sha, str):
            object_sha_hex = object_sha.encode("ascii")
        else:
            object_sha_hex = object_sha

        replace_ref = _make_replace_ref(object_sha_hex)
        if replace_ref not in r.refs:
            raise KeyError(
                f"No replacement ref found for {object_sha_hex.decode('ascii')}"
            )
        del r.refs[replace_ref]


def replace_create(
    repo: RepoPath,
    object_sha: Union[str, bytes],
    replacement_sha: Union[str, bytes],
) -> None:
    """Create a replacement ref to replace one object with another.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to replace
      replacement_sha: SHA of the replacement object
    """
    with open_repo_closing(repo) as r:
        # Convert to bytes if string
        if isinstance(object_sha, str):
            object_sha_hex = object_sha.encode("ascii")
        else:
            object_sha_hex = object_sha
        if isinstance(replacement_sha, str):
            replacement_sha_hex = replacement_sha.encode("ascii")
        else:
            replacement_sha_hex = replacement_sha

        # Create the replacement ref
        replace_ref = _make_replace_ref(object_sha_hex)
        r.refs[replace_ref] = replacement_sha_hex
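
# Usage sketch (illustrative only): replacing one object with another, as
# `git replace` does. object_sha and replacement_sha stand for hypothetical
# 40-character hex SHAs.
#
#   replace_create("/path/to/repo", object_sha, replacement_sha)
#   for original, replacement in replace_list("/path/to/repo"):
#       print(original, b"->", replacement)
#   replace_delete("/path/to/repo", object_sha)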


def reset(
    repo: Union[str, os.PathLike[str], Repo],
    mode: str,
    treeish: Union[str, bytes, Commit, Tree, Tag] = "HEAD",
) -> None:
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to
    """
    with open_repo_closing(repo) as r:
        # Parse the target tree
        tree = parse_tree(r, treeish)

        # Only parse as commit if treeish is not a Tree object
        if isinstance(treeish, Tree):
            # For Tree objects, we can't determine the commit, so skip updating HEAD
            target_commit = None
        else:
            target_commit = parse_commit(r, treeish)

        # Update HEAD to point to the target commit
        if target_commit is not None:
            r.refs[b"HEAD"] = target_commit.id

        if mode == "soft":
            # Soft reset: only update HEAD, leave index and working tree unchanged
            return

        elif mode == "mixed":
            # Mixed reset: update HEAD and index, but leave working tree unchanged
            from .object_store import iter_tree_contents

            # Open the index
            index = r.open_index()

            # Clear the current index
            index.clear()

            # Populate index from the target tree
            for entry in iter_tree_contents(r.object_store, tree.id):
                # Create an IndexEntry from the tree entry; use zeros for
                # filesystem-specific fields since we're not touching the
                # working tree
                assert (
                    entry.mode is not None
                    and entry.sha is not None
                    and entry.path is not None
                )
                index_entry = IndexEntry(
                    ctime=(0, 0),
                    mtime=(0, 0),
                    dev=0,
                    ino=0,
                    mode=entry.mode,
                    uid=0,
                    gid=0,
                    size=0,  # Size will be 0 since we're not reading from disk
                    sha=entry.sha,
                    flags=0,
                )
                index[entry.path] = index_entry

            # Write the updated index
            index.write()

        elif mode == "hard":
            # Hard reset: update HEAD, index, and working tree

            # Get configuration for working directory update.  Note the config
            # keys are "protectNTFS"/"protectHFS" in the [core] section; the
            # section name must not be repeated in the key.
            config = r.get_config()
            honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")

            if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
                validate_path_element = validate_path_element_ntfs
            elif config.get_boolean(b"core", b"protectHFS", sys.platform == "darwin"):
                validate_path_element = validate_path_element_hfs
            else:
                validate_path_element = validate_path_element_default

            if config.get_boolean(b"core", b"symlinks", True):

                def symlink_wrapper(
                    source: Union[str, bytes, os.PathLike[str]],
                    target: Union[str, bytes, os.PathLike[str]],
                ) -> None:
                    symlink(source, target)  # type: ignore[arg-type,unused-ignore]

                symlink_fn = symlink_wrapper
            else:

                def symlink_fallback(
                    source: Union[str, bytes, os.PathLike[str]],
                    target: Union[str, bytes, os.PathLike[str]],
                ) -> None:
                    mode = "w" + ("b" if isinstance(source, bytes) else "")
                    with open(target, mode) as f:
                        f.write(source)

                symlink_fn = symlink_fallback

            # Update working tree and index.  For reset --hard, use the
            # current index tree as the old tree to get proper deletions.
            blob_normalizer = r.get_blob_normalizer()
            index = r.open_index()
            if len(index) > 0:
                index_tree_id = index.commit(r.object_store)
            else:
                # Empty index
                index_tree_id = None
            changes = tree_changes(
                r.object_store, index_tree_id, tree.id, want_unchanged=True
            )
            update_working_tree(
                r,
                index_tree_id,
                tree.id,
                change_iterator=changes,
                honor_filemode=honor_filemode,
                validate_path_element=validate_path_element,
                symlink_fn=symlink_fn,
                force_remove_untracked=True,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=True,  # Allow overwriting modified files
            )
        else:
            raise Error(f"Invalid reset mode: {mode}")
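
# Usage sketch (illustrative only): the three reset modes against a
# hypothetical repository path.
#
#   reset("/path/to/repo", "soft", "HEAD~1")   # move HEAD only
#   reset("/path/to/repo", "mixed", "HEAD~1")  # move HEAD and rewrite the index
#   reset("/path/to/repo", "hard", "HEAD~1")   # also rewrite the working tree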


def get_remote_repo(
    repo: Repo, remote_location: Optional[Union[str, bytes]] = None
) -> tuple[Optional[str], str]:
    """Get the remote repository information.

    Args:
      repo: Local repository object
      remote_location: Optional remote name or URL; defaults to branch remote

    Returns:
      Tuple of (remote_name, remote_url) where remote_name may be None
      if remote_location is a URL rather than a configured remote
    """
    config = repo.get_config()
    if remote_location is None:
        remote_location = get_branch_remote(repo)
    if isinstance(remote_location, str):
        encoded_location = remote_location.encode()
    else:
        encoded_location = remote_location

    section = (b"remote", encoded_location)

    remote_name: Optional[str] = None
    if config.has_section(section):
        remote_name = encoded_location.decode()
        encoded_location = config.get(section, "url")
    return (remote_name, encoded_location.decode())


def push(
    repo: RepoPath,
    remote_location: Optional[Union[str, bytes]] = None,
    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    force: bool = False,
    **kwargs: object,
) -> SendPackResult:
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
      **kwargs: Additional keyword arguments for the client

    Returns:
      SendPackResult with the result of the push
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Check if mirror mode is enabled
        mirror_mode = False
        if remote_name:
            try:
                mirror_mode_val = r.get_config_stack().get_boolean(
                    (b"remote", remote_name.encode()), b"mirror"
                )
                if mirror_mode_val is not None:
                    mirror_mode = mirror_mode_val
            except KeyError:
                pass

        if mirror_mode:
            # Mirror mode: push all refs and delete non-existent ones
            refspecs = []
            for ref in r.refs.keys():
                # Push all refs to the same name on remote
                refspecs.append(ref + b":" + ref)
        elif refspecs is None:
            refspecs = [active_branch(r)]

        # Normalize refspecs to bytes
        if isinstance(refspecs, str):
            refspecs_bytes: Union[bytes, list[bytes]] = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_bytes = refspecs
        else:
            refspecs_bytes = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_bytes.append(spec.encode())
                else:
                    refspecs_bytes.append(spec)

        # Get the client and path
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **kwargs,  # type: ignore[arg-type]
        )

        selected_refs = []
        remote_changed_refs: dict[bytes, Optional[bytes]] = {}

        def update_refs(refs: dict[bytes, bytes]) -> dict[bytes, bytes]:
            from .refs import DictRefsContainer

            remote_refs = DictRefsContainer(refs)
            selected_refs.extend(
                parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
            )
            new_refs = {}

            # In mirror mode, delete remote refs that don't exist locally
            if mirror_mode:
                local_refs = set(r.refs.keys())
                for remote_ref in refs.keys():
                    if remote_ref not in local_refs:
                        new_refs[remote_ref] = ZERO_SHA
                        remote_changed_refs[remote_ref] = None

            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    assert rh is not None
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error(
                            f"No valid ref {lh.decode() if isinstance(lh, bytes) else lh} in local repository"
                        ) from exc
                    assert rh is not None
                    if not force_ref and rh in refs:
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:

            def generate_pack_data_wrapper(
                have: AbstractSet[bytes],
                want: AbstractSet[bytes],
                ofs_delta: bool = False,
            ) -> tuple[int, Iterator[UnpackedObject]]:
                # Wrap to match the expected signature; convert AbstractSet
                # to set since generate_pack_data expects set
                return r.generate_pack_data(
                    set(have), set(want), progress=None, ofs_delta=ofs_delta
                )

            result = client.send_pack(
                path.encode(),
                update_refs,
                generate_pack_data=generate_pack_data_wrapper,
                progress=lambda data: (errstream.write(data), None)[1],
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        for ref, error in (result.ref_status or {}).items():
            if error is not None:
                errstream.write(
                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
                )
            else:
                errstream.write(b"Ref %s updated\n" % ref)

        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)

    # Trigger auto GC if needed; this must run before returning, otherwise
    # the GC block is unreachable dead code.
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return result
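
# Usage sketch (illustrative only): pushing a single branch, then a forced
# refspec. The remote and branch names are hypothetical placeholders.
#
#   push("/path/to/repo", "origin", "refs/heads/main")
#   push("/path/to/repo", "origin", "+refs/heads/main:refs/heads/main")
#
# With refspecs=None the active branch is pushed; with remote.<name>.mirror
# set, all local refs are pushed and stale remote refs are deleted.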


def pull(
    repo: RepoPath,
    remote_location: Optional[Union[str, bytes]] = None,
    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    fast_forward: bool = True,
    ff_only: bool = False,
    force: bool = False,
    filter_spec: Optional[str] = None,
    protocol_version: Optional[int] = None,
    **kwargs: object,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestrings/strings.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: If True, allow overwriting local changes in the working tree.
        If False, pull will abort if it would overwrite uncommitted changes.
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
      **kwargs: Additional keyword arguments for the client
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []

        if refspecs is None:
            refspecs_normalized: Union[bytes, list[bytes]] = [b"HEAD"]
        elif isinstance(refspecs, str):
            refspecs_normalized = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_normalized = refspecs
        else:
            refspecs_normalized = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_normalized.append(spec.encode())
                else:
                    refspecs_normalized.append(spec)

        def determine_wants(
            remote_refs: dict[bytes, bytes], depth: Optional[int] = None
        ) -> list[bytes]:
            from .refs import DictRefsContainer

            remote_refs_container = DictRefsContainer(remote_refs)
            selected_refs.extend(
                parse_reftuples(
                    remote_refs_container, r.refs, refspecs_normalized, force=force
                )
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if lh is not None
                and lh in remote_refs
                and remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **kwargs,  # type: ignore[arg-type]
        )
        if filter_spec:
            filter_spec_bytes: Optional[bytes] = filter_spec.encode("ascii")
        else:
            filter_spec_bytes = None

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            determine_wants=determine_wants,  # type: ignore[arg-type] # Function matches protocol but mypy can't verify
            filter_spec=filter_spec_bytes,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[b"HEAD"]
            old_commit = r[old_head]
            assert isinstance(old_commit, Commit)
            old_tree_id = old_commit.tree
        except KeyError:
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh is not None and rh in r.refs:
                try:
                    assert lh is not None
                    followed_ref = r.refs.follow(rh)[1]
                    assert followed_ref is not None
                    lh_ref = fetch_result.refs[lh]
                    assert lh_ref is not None
                    check_diverged(r, followed_ref, lh_ref)
                except DivergedBranches as exc:
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        assert lh is not None
                        merge_ref = fetch_result.refs[lh]
                        assert merge_ref is not None
                        _merge_result, conflicts = _do_merge(r, merge_ref)
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            if rh is not None and lh is not None:
                lh_value = fetch_result.refs[lh]
                if lh_value is not None:
                    r.refs[rh] = lh_value

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            lh, rh, _ = selected_refs[0]
            if lh is not None:
                ref_value = fetch_result.refs[lh]
                if ref_value is not None:
                    r[b"HEAD"] = ref_value

        # Update working tree to match the new HEAD; skip if a merge was
        # performed, as the merge already updates the working tree
        if not merged and old_tree_id is not None:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            new_tree_id = head_commit.tree
            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(r.object_store, old_tree_id, new_tree_id)
            update_working_tree(
                r,
                old_tree_id,
                new_tree_id,
                change_iterator=changes,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=force,
            )
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
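
# Usage sketch (illustrative only): fast-forward-only pull from a
# hypothetical remote. DivergedBranches propagates when ff_only=True and
# the branches have diverged.
#
#   pull("/path/to/repo", "origin", b"refs/heads/main", ff_only=True)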


def status(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    ignored: bool = False,
    untracked_files: str = "normal",
) -> GitStatus:
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
        "no": do not return untracked files
        "normal": return untracked directories, not their contents
        "all": include all files in untracked directories

        Using untracked_files="no" can be faster than "all" when the worktree
        contains many untracked files/directories.

        Using untracked_files="normal" provides a good balance, only showing
        directories that are entirely untracked without listing all their
        contents.

    Returns: GitStatus tuple,
        staged -    dict with lists of staged paths (diff index/HEAD)
        unstaged -  list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)

        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()

        # Create a wrapper that handles the bytes -> Blob conversion
        if normalizer is not None:

            def filter_callback(data: bytes, path: bytes) -> bytes:
                from dulwich.objects import Blob

                blob = Blob()
                blob.data = data
                normalized_blob = normalizer.checkin_normalize(blob, path)
                result_data: bytes = normalized_blob.data
                return result_data

        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)

        unstaged_changes = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
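
# Usage sketch (illustrative only): reading the GitStatus tuple returned
# above for a hypothetical repository.
#
#   st = status("/path/to/repo", untracked_files="normal")
#   print(st.staged["add"], st.staged["modify"], st.staged["delete"])
#   print(st.unstaged)    # paths changed but not staged
#   print(st.untracked)   # untracked files and/or directories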


def shortlog(
    repo: Union[str, os.PathLike[str], Repo],
    summary_only: bool = False,
    sort_by_commits: bool = False,
) -> list[dict[str, str]]:
    """Summarize commits by author, like git shortlog.

    Args:
      repo: Path to repository or Repo object.
      summary_only: If True, only show counts per author.
      sort_by_commits: If True, sort authors by number of commits.

    Returns:
      A list of dictionaries, each containing:
        - "author": the author's name as a string
        - "messages": all commit messages concatenated into a single string
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker()
        authors: dict[str, list[str]] = {}

        for entry in walker:
            commit = entry.commit
            # commit.encoding is bytes when set; decode it to get a codec name
            # usable by bytes.decode()
            encoding = commit.encoding.decode("ascii") if commit.encoding else "utf-8"
            author = commit.author.decode(encoding)
            message = commit.message.decode(encoding).strip()
            authors.setdefault(author, []).append(message)

        # Convert messages to single string per author
        items: list[dict[str, str]] = [
            {"author": author, "messages": "\n".join(msgs)}
            for author, msgs in authors.items()
        ]

        if sort_by_commits:
            # Sort by number of commits (lines in messages)
            items.sort(key=lambda x: len(x["messages"].splitlines()), reverse=True)

        return items
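
# Usage sketch (illustrative only): a minimal shortlog report sorted by
# commit count, for a hypothetical repository. The count mirrors the
# line-based approximation the sort key above uses.
#
#   for entry in shortlog("/path/to/repo", sort_by_commits=True):
#       count = len(entry["messages"].splitlines())
#       print(f"{count:6}  {entry['author']}")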


def _walk_working_dir_paths(
    frompath: Union[str, bytes, os.PathLike[str]],
    basepath: Union[str, bytes, os.PathLike[str]],
    prune_dirnames: Optional[Callable[[str, list[str]], list[str]]] = None,
) -> Iterator[tuple[Union[str, bytes], bool]]:
    """Get path, is_dir for files in working dir from frompath.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
      prune_dirnames: Optional callback to prune dirnames during os.walk;
        dirnames will be set to result of prune_dirnames(dirpath, dirnames)
    """
    # Convert paths to strings for os.walk compatibility
    for dirpath, dirnames, filenames in os.walk(frompath):  # type: ignore[type-var]
        # Skip .git and below.
        if ".git" in dirnames:
            dirnames.remove(".git")
            if dirpath != basepath:
                continue

        if ".git" in filenames:
            filenames.remove(".git")
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True  # type: ignore[misc]

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)  # type: ignore[call-overload]
            yield filepath, False

        if prune_dirnames:
            dirnames[:] = prune_dirnames(dirpath, dirnames)  # type: ignore[arg-type]


def get_untracked_paths(
    frompath: Union[str, bytes, os.PathLike[str]],
    basepath: Union[str, bytes, os.PathLike[str]],
    index: Index,
    exclude_ignored: bool = False,
    untracked_files: str = "all",
) -> Iterator[str]:
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    # Normalize paths to str
    frompath_str = os.fsdecode(os.fspath(frompath))
    basepath_str = os.fsdecode(os.fspath(basepath))

    with open_repo_closing(basepath_str) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def directory_has_non_ignored_files(dir_path: str, base_rel_path: str) -> bool:
        """Recursively check if directory contains any non-ignored files."""
        try:
            for entry in os.listdir(dir_path):
                entry_path = os.path.join(dir_path, entry)
                rel_entry = os.path.join(base_rel_path, entry)
                if os.path.isfile(entry_path):
                    if ignore_manager.is_ignored(rel_entry) is not True:
                        return True
                elif os.path.isdir(entry_path):
                    if directory_has_non_ignored_files(entry_path, rel_entry):
                        return True
            return False
        except OSError:
            # If we can't read the directory, assume it has non-ignored files
            return True

    def prune_dirnames(dirpath: str, dirnames: list[str]) -> list[str]:
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            ip = os.path.join(os.path.relpath(path, basepath_str), "")

            # Check if directory is ignored
            if ignore_manager.is_ignored(ip) is True:
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath_str), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert directory path to tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath_str, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    rel_path_base = os.path.relpath(path, basepath_str)
                    rel_path_from = os.path.join(
                        os.path.relpath(path, frompath_str), ""
                    )

                    # If excluding ignored, check if directory contains any
                    # non-ignored files
                    if exclude_ignored:
                        if not directory_has_non_ignored_files(path, rel_path_base):
                            # Directory only contains ignored files, skip it
                            del dirnames[i]
                            continue

                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(rel_path_base)
                    if not exclude_ignored or not is_ignored:
                        untracked_dir_list.append(rel_path_from)
                    del dirnames[i]
        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            if not is_dir:
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath_str, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)
                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath_str), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list
    yield from ignored_dirs


def grep(
    repo: RepoPath,
    pattern: Union[str, bytes],
    *,
    outstream: TextIO = sys.stdout,
    rev: Optional[Union[str, bytes]] = None,
    pathspecs: Optional[Sequence[Union[str, bytes]]] = None,
    ignore_case: bool = False,
    line_number: bool = False,
    max_depth: Optional[int] = None,
    respect_ignores: bool = True,
) -> None:
    """Search for a pattern in tracked files.

    Args:
      repo: Path to repository or Repo object
      pattern: Regular expression pattern to search for
      outstream: Stream to write results to
      rev: Revision to search in (defaults to HEAD)
      pathspecs: Optional list of path patterns to limit search
      ignore_case: Whether to perform case-insensitive matching
      line_number: Whether to output line numbers
      max_depth: Maximum directory depth to search
      respect_ignores: Whether to respect .gitignore patterns
    """
    from .object_store import iter_tree_contents

    # Compile the pattern
    flags = re.IGNORECASE if ignore_case else 0
    try:
        if isinstance(pattern, bytes):
            compiled_pattern = re.compile(pattern, flags)
        else:
            compiled_pattern = re.compile(pattern.encode("utf-8"), flags)
    except re.error as e:
        raise ValueError(f"Invalid regular expression: {e}") from e

    with open_repo_closing(repo) as r:
        # Get the tree to search
        if rev is None:
            try:
                commit = r[b"HEAD"]
                assert isinstance(commit, Commit)
            except KeyError as e:
                raise ValueError("No HEAD commit found") from e
        else:
            rev_bytes = rev if isinstance(rev, bytes) else rev.encode("utf-8")
            commit_obj = parse_commit(r, rev_bytes)
            if commit_obj is None:
                raise ValueError(f"Invalid revision: {rev}")
            commit = commit_obj

        tree = r[commit.tree]
        assert isinstance(tree, Tree)

        # Set up ignore filter if requested
        ignore_manager = None
        if respect_ignores:
            ignore_manager = IgnoreFilterManager.from_repo(r)

        # Convert pathspecs to bytes
        pathspecs_bytes: Optional[list[bytes]] = None
        if pathspecs:
            pathspecs_bytes = [
                p if isinstance(p, bytes) else p.encode("utf-8") for p in pathspecs
            ]

        # Iterate through all files in the tree
        for entry in iter_tree_contents(r.object_store, tree.id):
            path, mode, sha = entry.path, entry.mode, entry.sha
            assert path is not None
            assert mode is not None
            assert sha is not None

            # Skip directories
            if stat.S_ISDIR(mode):
                continue

            # Check max depth
            if max_depth is not None:
                depth = path.count(b"/")
                if depth > max_depth:
                    continue

            # Check pathspecs
            if pathspecs_bytes:
                matches_pathspec = False
                for pathspec in pathspecs_bytes:
                    # Simple prefix matching (could be enhanced with full
                    # pathspec support)
                    if path.startswith(pathspec) or fnmatch.fnmatch(
                        path.decode("utf-8", errors="replace"),
                        pathspec.decode("utf-8", errors="replace"),
                    ):
                        matches_pathspec = True
                        break
                if not matches_pathspec:
                    continue

            # Check ignore patterns
            if ignore_manager:
                path_str = path.decode("utf-8", errors="replace")
                if ignore_manager.is_ignored(path_str) is True:
                    continue

            # Get the blob content
            blob = r[sha]
            assert isinstance(blob, Blob)

            # Search for pattern in the blob
            content = blob.data
            lines = content.split(b"\n")
            for line_num, line in enumerate(lines, 1):
                if compiled_pattern.search(line):
                    path_str = path.decode("utf-8", errors="replace")
                    line_str = line.decode("utf-8", errors="replace")
                    if line_number:
                        outstream.write(f"{path_str}:{line_num}:{line_str}\n")
                    else:
                        outstream.write(f"{path_str}:{line_str}\n")
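
# Usage sketch (illustrative only): case-insensitive search with line
# numbers, limited to a hypothetical subdirectory via prefix matching.
#
#   grep(
#       "/path/to/repo",
#       r"TODO|FIXME",
#       pathspecs=[b"src/"],
#       ignore_case=True,
#       line_number=True,
#   )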


def get_tree_changes(repo: RepoPath) -> dict[str, list[Union[str, bytes]]]:
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object

    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes: dict[str, list[Union[str, bytes]]] = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            tree_id = head_commit.tree
        except KeyError:
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            if not change[0][0]:
                assert change[0][1] is not None
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                assert change[0][0] is not None
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                assert change[0][0] is not None
                tracked_changes["modify"].append(change[0][0])
            else:
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes


def daemon(
    path: Union[str, os.PathLike[str]] = ".",
    address: Optional[str] = None,
    port: Optional[int] = None,
) -> None:
    """Run a daemon serving Git requests over TCP/IP.

    Args:
      path: Path to the directory to serve.
      address: Optional address to listen on (defaults to localhost)
      port: Optional port to listen on (defaults to 9418, the standard git port)
    """
    # TODO(jelmer): Support git-daemon-export-ok and --export-all.
    backend = FileSystemBackend(os.fspath(path))
    server = TCPGitServer(backend, address or "localhost", port or 9418)
    server.serve_forever()


def web_daemon(
    path: Union[str, os.PathLike[str]] = ".",
    address: Optional[str] = None,
    port: Optional[int] = None,
) -> None:
    """Run a daemon serving Git requests over HTTP.

    Args:
      path: Path to the directory to serve
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to 80)
    """
    from .web import (
        WSGIRequestHandlerLogger,
        WSGIServerLogger,
        make_server,
        make_wsgi_chain,
    )

    backend = FileSystemBackend(os.fspath(path))
    app = make_wsgi_chain(backend)
    server = make_server(
        address or "::",
        port or 80,
        app,
        handler_class=WSGIRequestHandlerLogger,
        server_class=WSGIServerLogger,
    )
    server.serve_forever()


def upload_pack(
    path: Union[str, os.PathLike[str]] = ".",
    inf: Optional[BinaryIO] = None,
    outf: Optional[BinaryIO] = None,
) -> int:
    """Upload a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = UploadPackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def receive_pack(
    path: Union[str, os.PathLike[str]] = ".",
    inf: Optional[BinaryIO] = None,
    outf: Optional[BinaryIO] = None,
) -> int:
    """Receive a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = ReceivePackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def _make_branch_ref(name: Union[str, bytes]) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_branch_name(name)


def _make_tag_ref(name: Union[str, bytes]) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_tag_name(name)


def _make_replace_ref(name: Union[str, bytes]) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_replace_name(name)


def branch_delete(
    repo: RepoPath, name: Union[str, bytes, Sequence[Union[str, bytes]]]
) -> None:
    """Delete a branch.

    Args:
      repo: Path to the repository
      name: Name of the branch
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, (list, tuple)):
            names = name
        else:
            names = [name]
        for branch_name in names:
            del r.refs[_make_branch_ref(branch_name)]


def branch_create(
    repo: Union[str, os.PathLike[str], Repo],
    name: Union[str, bytes],
    objectish: Optional[Union[str, bytes]] = None,
    force: bool = False,
) -> None:
    """Create a branch.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"

        # Try to expand branch shorthand before parsing
        original_objectish = objectish
        objectish_bytes = (
            objectish.encode(DEFAULT_ENCODING)
            if isinstance(objectish, str)
            else objectish
        )
        if b"refs/remotes/" + objectish_bytes in r.refs:
            objectish = b"refs/remotes/" + objectish_bytes
        elif local_branch_name(objectish_bytes) in r.refs:
            objectish = local_branch_name(objectish_bytes)

        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        ref_message = (
            b"branch: Created from " + original_objectish.encode(DEFAULT_ENCODING)
            if isinstance(original_objectish, str)
            else b"branch: Created from " + original_objectish
        )
        if force:
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                name_str = name.decode() if isinstance(name, bytes) else name
                raise Error(f"Branch with name {name_str} already exists.")

        # Check if we should set up tracking
        config = r.get_config_stack()
        try:
            auto_setup_merge = config.get((b"branch",), b"autoSetupMerge").decode()
        except KeyError:
            auto_setup_merge = "true"  # Default value

        # Determine if the objectish refers to a remote-tracking branch
        objectish_ref = None
        if original_objectish != "HEAD":
            # Try to resolve objectish as a ref
            objectish_bytes = (
                original_objectish.encode(DEFAULT_ENCODING)
                if isinstance(original_objectish, str)
                else original_objectish
            )
            if objectish_bytes in r.refs:
                objectish_ref = objectish_bytes
            elif b"refs/remotes/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/remotes/" + objectish_bytes
            elif local_branch_name(objectish_bytes) in r.refs:
                objectish_ref = local_branch_name(objectish_bytes)
        else:
            # HEAD might point to a remote-tracking branch
            head_ref = r.refs.follow(b"HEAD")[0][1]
            if head_ref.startswith(b"refs/remotes/"):
                objectish_ref = head_ref

        # Set up tracking if appropriate
        if objectish_ref and (
            (auto_setup_merge == "always")
            or (
                auto_setup_merge == "true"
                and objectish_ref.startswith(b"refs/remotes/")
            )
        ):
            # Extract remote name and branch from the ref
            if objectish_ref.startswith(b"refs/remotes/"):
                parts = objectish_ref[len(b"refs/remotes/") :].split(b"/", 1)
                if len(parts) == 2:
                    remote_name = parts[0]
                    remote_branch = local_branch_name(parts[1])

                    # Set up tracking
                    repo_config = r.get_config()
                    branch_name_bytes = (
                        name.encode(DEFAULT_ENCODING) if isinstance(name, str) else name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"remote", remote_name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"merge", remote_branch
                    )
                    repo_config.write_to_path()


def filter_branches_by_pattern(branches: Iterable[bytes], pattern: str) -> list[bytes]:
    """Filter branches by fnmatch pattern.

    Args:
      branches: Iterable of branch names as bytes
      pattern: Pattern to match against

    Returns:
      List of filtered branch names
    """
    return [
        branch for branch in branches if fnmatch.fnmatchcase(branch.decode(), pattern)
    ]


def branch_list(repo: RepoPath) -> list[bytes]:
    """List all branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        branches = list(r.refs.keys(base=LOCAL_BRANCH_PREFIX))

        # Check for branch.sort configuration
        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = local_branch_name(branch_name)
                sha = r.refs[ref]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date.  Python's sort naturally orders smaller
            # values first (ascending), i.e. oldest first by default.  Use a
            # stable sort with branch name as secondary key for consistent
            # ordering.
            if reverse:
                # For reverse sort, we want newest dates first but
                # alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key, fall back to default
            branches.sort()

        return branches
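
# Usage sketch (illustrative only): a simple branch workflow; the names and
# repository path are hypothetical placeholders.
#
#   branch_create("/path/to/repo", "feature-x")
#   names = branch_list("/path/to/repo")          # e.g. [b"feature-x", b"main"]
#   feature = filter_branches_by_pattern(names, "feature-*")
#   branch_delete("/path/to/repo", b"feature-x")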


def branch_remotes_list(repo: RepoPath) -> list[bytes]:
    """List the short names of all remote-tracking branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names without the refs/remotes/ prefix, but including
      the remote name (e.g. b'origin/main')
    """
    with open_repo_closing(repo) as r:
        branches = list(r.refs.keys(base=LOCAL_REMOTE_PREFIX))

        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = LOCAL_REMOTE_PREFIX + branch_name
                sha = r.refs[ref]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date.  Python's sort naturally orders smaller
            # values first (ascending), i.e. oldest first by default.  Use a
            # stable sort with branch name as secondary key for consistent
            # ordering.
            if reverse:
                # For reverse sort, we want newest dates first but
                # alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key
            raise ValueError(f"Unknown sort key: {sort_key}")

        return branches


def _get_branch_merge_status(repo: RepoPath) -> Iterator[tuple[bytes, bool]]:
    """Get merge status for all branches relative to current HEAD.

    Args:
      repo: Path to the repository

    Yields:
      Tuple of (``branch_name``, ``is_merged``) where:
        - ``branch_name``: Branch name without refs/heads/ prefix
        - ``is_merged``: True if branch is merged into HEAD, False otherwise
    """
    with open_repo_closing(repo) as r:
        current_sha = r.refs[b"HEAD"]

        for branch_ref, branch_sha in r.refs.as_dict(base=b"refs/heads/").items():
            # Check if branch is an ancestor of HEAD (fully merged)
            is_merged = can_fast_forward(r, branch_sha, current_sha)
            yield branch_ref, is_merged


def merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without refs/heads/ prefix) that are merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if is_merged:
            yield branch_name


def no_merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have not been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without refs/heads/ prefix) that are not merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if not is_merged:
            yield branch_name


def branches_containing(repo: RepoPath, commit: str) -> Iterator[bytes]:
    """List branches that contain the specified commit.

    Args:
      repo: Path to the repository
      commit: Commit-ish string (SHA, branch name, tag, etc.)

    Yields:
      Branch names (without refs/heads/ prefix) that contain the commit

    Raises:
      ValueError: If the commit reference is malformed
      KeyError: If the commit reference does not exist
    """
    with open_repo_closing(repo) as r:
        commit_obj = parse_commit(r, commit)
        commit_sha = commit_obj.id

        for branch_ref, branch_sha in r.refs.as_dict(base=LOCAL_BRANCH_PREFIX).items():
            if can_fast_forward(r, commit_sha, branch_sha):
                yield branch_ref


def active_branch(repo: RepoPath) -> bytes:
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open

    Returns:
      branch name

    Raises:
      KeyError: if the repository does not have a working tree
      IndexError: if HEAD is floating (detached)
      ValueError: if HEAD does not point at a branch under refs/heads/
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(b"HEAD")[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        return active_ref[len(LOCAL_BRANCH_PREFIX) :]
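
# Usage sketch (illustrative only): resolving the checked-out branch of a
# hypothetical repository.
#
#   name = active_branch("/path/to/repo")  # e.g. b"main"
#
# Callers that tolerate detached HEADs should catch the IndexError or
# ValueError documented above.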


def get_branch_remote(repo: Union[str, os.PathLike[str], Repo]) -> bytes:
    """Return the active branch's remote name, if any.

    Args:
      repo: Repository to open

    Returns:
      remote name

    Raises:
      KeyError: if the repository does not have a working tree
    """
    with open_repo_closing(repo) as r:
        branch_name = active_branch(r.path)
        config = r.get_config()
        try:
            remote_name = config.get((b"branch", branch_name), b"remote")
        except KeyError:
            remote_name = b"origin"
    return remote_name


def get_branch_merge(repo: RepoPath, branch_name: Optional[bytes] = None) -> bytes:
    """Return the branch's merge reference (upstream branch), if any.

    Args:
      repo: Repository to open
      branch_name: Name of the branch (defaults to active branch)

    Returns:
      merge reference name (e.g. b"refs/heads/main")

    Raises:
      KeyError: if the branch does not have a merge configuration
    """
    with open_repo_closing(repo) as r:
        if branch_name is None:
            branch_name = active_branch(r.path)
        config = r.get_config()
        return config.get((b"branch", branch_name), b"merge")


def set_branch_tracking(
    repo: Union[str, os.PathLike[str], Repo],
    branch_name: bytes,
    remote_name: bytes,
    remote_ref: bytes,
) -> None:
    """Set up branch tracking configuration.

    Args:
      repo: Repository to open
      branch_name: Name of the local branch
      remote_name: Name of the remote (e.g. b"origin")
      remote_ref: Remote reference to track (e.g. b"refs/heads/main")
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        config.set((b"branch", branch_name), b"remote", remote_name)
        config.set((b"branch", branch_name), b"merge", remote_ref)
        config.write_to_path()
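
# Usage sketch (illustrative only): making a local branch track a remote
# branch, mirroring `git branch --set-upstream-to`.  All names are
# hypothetical placeholders.
#
#   set_branch_tracking(
#       "/path/to/repo", b"feature-x", b"origin", b"refs/heads/feature-x"
#   )
#   assert get_branch_merge("/path/to/repo", b"feature-x") == b"refs/heads/feature-x"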


def fetch(
    repo: RepoPath,
    remote_location: Optional[Union[str, bytes]] = None,
    outstream: TextIO = sys.stdout,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    message: Optional[bytes] = None,
    depth: Optional[int] = None,
    prune: bool = False,
    prune_tags: bool = False,
    force: bool = False,
    operation: Optional[str] = None,
    thin_packs: bool = True,
    report_activity: Optional[Callable[[int, str], None]] = None,
    quiet: bool = False,
    include_tags: bool = False,
    username: Optional[str] = None,
    password: Optional[str] = None,
    key_filename: Optional[str] = None,
    ssh_command: Optional[str] = None,
    shallow_since: Optional[str] = None,
    shallow_exclude: Optional[list[str]] = None,
) -> FetchPackResult:
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags
      force: Force fetching even if it would overwrite local changes
      operation: Git operation for authentication (e.g., "fetch")
      thin_packs: Whether to use thin packs
      report_activity: Optional callback for reporting transport activity
      quiet: Whether to suppress progress output
      include_tags: Whether to include tags
      username: Username for authentication
      password: Password for authentication
      key_filename: SSH key filename
      ssh_command: SSH command to use
      shallow_since: Deepen or shorten the history to include commits after this date
      shallow_exclude: Deepen or shorten the history to exclude commits reachable from these refs

    Returns:
      A FetchPackResult object containing the refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        if message is None:
            message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            operation=operation,
            thin_packs=thin_packs,
            report_activity=report_activity,
            quiet=quiet,
            include_tags=include_tags,
            username=username,
            password=password,
            key_filename=key_filename,
            ssh_command=ssh_command,
        )

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            depth=depth,
            shallow_since=shallow_since,
            shallow_exclude=shallow_exclude,
        )
        if remote_name is not None:
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return fetch_result
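
# Illustrative fetch call (a sketch; the remote name and depth below are
# assumptions for the example):
#
#     result = fetch(".", "origin", depth=1)
#     for ref, sha in result.refs.items():
#         print(ref, sha)
#
# When a named remote is used, the fetched refs are also recorded locally via
# _import_remote_refs, with optional pruning of removed refs and tags.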


def for_each_ref(
    repo: Union[Repo, str] = ".",
    pattern: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with

    Returns: List of bytes tuples with: (sha, object_type, ref_name)
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        if pattern:
            matching_refs: dict[bytes, bytes] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False
                # git for-each-ref uses glob (7) style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                if len(ref_parts) > len(pattern_parts):
                    continue
                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break
                if matches:
                    matching_refs[ref] = sha
            refs = matching_refs

        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]
        return ret
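
# Illustrative for_each_ref call (a sketch; the pattern is an assumption for
# the example):
#
#     for sha, obj_type, ref in for_each_ref(".", "refs/heads/*"):
#         print(sha.decode(), obj_type.decode(), ref.decode())
#
# Patterns match component-by-component, so "refs/heads/*" matches
# refs/heads/main but not refs/heads/feature/x (which has more components).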


def show_ref(
    repo: Union[Repo, str] = ".",
    patterns: Optional[list[Union[str, bytes]]] = None,
    head: bool = False,
    branches: bool = False,
    tags: bool = False,
    dereference: bool = False,
    verify: bool = False,
) -> list[tuple[bytes, bytes]]:
    """List references in a local repository.

    Args:
      repo: Path to the repository
      patterns: Optional list of patterns to filter refs (matched from the end)
      head: Show the HEAD reference
      branches: Limit to local branches (refs/heads/)
      tags: Limit to local tags (refs/tags/)
      dereference: Dereference tags into object IDs
      verify: Enable stricter reference checking (exact path match)

    Returns: List of tuples with (sha, ref_name) or (sha, ref_name^{}) for dereferenced tags
    """
    # Convert string patterns to bytes
    byte_patterns: Optional[list[bytes]] = None
    if patterns:
        byte_patterns = [os.fsencode(p) if isinstance(p, str) else p for p in patterns]

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Filter by branches/tags if specified
        if branches or tags:
            prefixes = []
            if branches:
                prefixes.append(LOCAL_BRANCH_PREFIX)
            if tags:
                prefixes.append(LOCAL_TAG_PREFIX)
            filtered_refs = filter_ref_prefix(refs, prefixes)
        else:
            # By default, show tags, heads, and remote refs (but not HEAD)
            filtered_refs = filter_ref_prefix(refs, [b"refs/"])

        # Add HEAD if requested
        if head and b"HEAD" in refs:
            filtered_refs[b"HEAD"] = refs[b"HEAD"]

        # Filter by patterns if specified
        if byte_patterns:
            matching_refs: dict[bytes, bytes] = {}
            for ref, sha in filtered_refs.items():
                for pattern in byte_patterns:
                    if verify:
                        # Verify mode requires exact match
                        if ref == pattern:
                            matching_refs[ref] = sha
                            break
                    else:
                        # Pattern matching from the end of the full name
                        # Only complete parts are matched
                        # E.g., "master" matches "refs/heads/master" but not "refs/heads/mymaster"
                        pattern_parts = pattern.split(b"/")
                        ref_parts = ref.split(b"/")

                        # Try to match from the end
                        if len(pattern_parts) <= len(ref_parts):
                            # Check if the end of ref matches the pattern
                            matches = True
                            for i in range(len(pattern_parts)):
                                if (
                                    ref_parts[-(len(pattern_parts) - i)]
                                    != pattern_parts[i]
                                ):
                                    matches = False
                                    break
                            if matches:
                                matching_refs[ref] = sha
                                break
            filtered_refs = matching_refs

        # Sort by ref name
        sorted_refs = sorted(filtered_refs.items(), key=lambda x: x[0])

        # Build result list
        result: list[tuple[bytes, bytes]] = []
        for ref, sha in sorted_refs:
            result.append((sha, ref))
            # Dereference tags if requested
            if dereference and ref.startswith(LOCAL_TAG_PREFIX):
                try:
                    obj = r.get_object(sha)
                    # Peel tag objects to get the underlying commit/object
                    from .objects import Tag

                    while obj.type_name == b"tag":
                        assert isinstance(obj, Tag)
                        _obj_class, sha = obj.object
                        obj = r.get_object(sha)
                    result.append((sha, ref + b"^{}"))
                except KeyError:
                    # Object not found, skip dereferencing
                    pass

        return result
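
# Illustrative show_ref usage (a sketch; the tag name is an assumption for
# the example):
#
#     for sha, ref in show_ref(".", patterns=["v1.0"], dereference=True):
#         print(sha.decode(), ref.decode())
#
# With dereference=True an annotated tag yields two rows: the tag object as
# refs/tags/v1.0 and the peeled commit as refs/tags/v1.0^{}.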


def show_branch(
    repo: Union[Repo, str] = ".",
    branches: Optional[list[Union[str, bytes]]] = None,
    all_branches: bool = False,
    remotes: bool = False,
    current: bool = False,
    topo_order: bool = False,
    more: Optional[int] = None,
    list_branches: bool = False,
    independent_branches: bool = False,
    merge_base: bool = False,
) -> list[str]:
    """Display branches and their commits.

    Args:
      repo: Path to the repository
      branches: List of specific branches to show (default: all local branches)
      all_branches: Show both local and remote branches
      remotes: Show only remote branches
      current: Include current branch if not specified
      topo_order: Show in topological order instead of chronological
      more: Show N more commits beyond common ancestor (negative to show only headers)
      list_branches: Synonym for more=-1 (show only branch headers)
      independent_branches: Show only branches not reachable from others
      merge_base: Show merge bases instead of commit list

    Returns:
      List of output lines
    """
    from .graph import find_octopus_base, independent

    output_lines: list[str] = []

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Determine which branches to show
        branch_refs: dict[bytes, bytes] = {}
        if branches:
            # Specific branches requested
            for branch in branches:
                branch_bytes = (
                    os.fsencode(branch) if isinstance(branch, str) else branch
                )
                # Try as full ref name first
                if branch_bytes in refs:
                    branch_refs[branch_bytes] = refs[branch_bytes]
                else:
                    # Try as branch name
                    branch_ref = local_branch_name(branch_bytes)
                    if branch_ref in refs:
                        branch_refs[branch_ref] = refs[branch_ref]
                    # Try as remote branch
                    elif LOCAL_REMOTE_PREFIX + branch_bytes in refs:
                        branch_refs[LOCAL_REMOTE_PREFIX + branch_bytes] = refs[
                            LOCAL_REMOTE_PREFIX + branch_bytes
                        ]
        else:
            # Default behavior: show local branches
            if all_branches:
                # Show both local and remote branches
                branch_refs = filter_ref_prefix(
                    refs, [LOCAL_BRANCH_PREFIX, LOCAL_REMOTE_PREFIX]
                )
            elif remotes:
                # Show only remote branches
                branch_refs = filter_ref_prefix(refs, [LOCAL_REMOTE_PREFIX])
            else:
                # Show only local branches
                branch_refs = filter_ref_prefix(refs, [LOCAL_BRANCH_PREFIX])

        # Add current branch if requested and not already included
        if current:
            try:
                head_refs, _ = r.refs.follow(b"HEAD")
                if head_refs:
                    head_ref = head_refs[0]
                    if head_ref not in branch_refs and head_ref in refs:
                        branch_refs[head_ref] = refs[head_ref]
            except (KeyError, TypeError):
                # HEAD doesn't point to a branch or doesn't exist
                pass

        if not branch_refs:
            return output_lines

        # Sort branches for consistent output
        sorted_branches = sorted(branch_refs.items(), key=lambda x: x[0])
        branch_sha_list = [sha for _, sha in sorted_branches]

        # Handle --independent flag
        if independent_branches:
            independent_shas = independent(r, branch_sha_list)
            for ref_name, sha in sorted_branches:
                if sha in independent_shas:
                    ref_str = os.fsdecode(shorten_ref_name(ref_name))
                    output_lines.append(ref_str)
            return output_lines

        # Handle --merge-base flag
        if merge_base:
            if len(branch_sha_list) < 2:
                # Need at least 2 branches for merge base
                return output_lines
            merge_bases = find_octopus_base(r, branch_sha_list)
            for sha in merge_bases:
                output_lines.append(sha.decode("ascii"))
            return output_lines

        # Get current branch for marking
        current_branch: Optional[bytes] = None
        try:
            head_refs, _ = r.refs.follow(b"HEAD")
            if head_refs:
                current_branch = head_refs[0]
        except (KeyError, TypeError):
            pass

        # Collect commit information for each branch
        branch_commits: list[tuple[bytes, str]] = []  # (sha, message)
        for ref_name, sha in sorted_branches:
            try:
                commit = r[sha]
                if hasattr(commit, "message"):
                    message = commit.message.decode("utf-8", errors="replace").split(
                        "\n"
                    )[0]
                else:
                    message = ""
                branch_commits.append((sha, message))
            except KeyError:
                branch_commits.append((sha, ""))

        # Handle --list flag (show only branch headers)
        if list_branches or (more is not None and more < 0):
            # Just show the branch headers
            for i, (ref_name, sha) in enumerate(sorted_branches):
                is_current = ref_name == current_branch
                marker = "*" if is_current else "!"
                # Create spacing for alignment
                prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
                ref_str = os.fsdecode(shorten_ref_name(ref_name))
                _, message = branch_commits[i]
                output_lines.append(f"{prefix}[{ref_str}] {message}")
            return output_lines

        # Build commit history for visualization
        # Collect all commits reachable from any branch
        all_commits: dict[
            bytes, tuple[int, list[bytes], str]
        ] = {}  # sha -> (timestamp, parents, message)

        def collect_commits(sha: bytes, branch_idx: int, visited: set[bytes]) -> None:
            """Recursively collect commits."""
            if sha in visited:
                return
            visited.add(sha)
            try:
                commit = r[sha]
                if not hasattr(commit, "commit_time"):
                    return
                timestamp = commit.commit_time
                parents = commit.parents if hasattr(commit, "parents") else []
                message = (
                    commit.message.decode("utf-8", errors="replace").split("\n")[0]
                    if hasattr(commit, "message")
                    else ""
                )
                if sha not in all_commits:
                    all_commits[sha] = (timestamp, parents, message)
                # Recurse to parents
                for parent in parents:
                    collect_commits(parent, branch_idx, visited)
            except KeyError:
                # Commit not found, stop traversal
                pass

        # Collect commits from all branches
        for i, (_, sha) in enumerate(sorted_branches):
            collect_commits(sha, i, set())

        # Find common ancestor
        common_ancestor_sha = None
        if len(branch_sha_list) >= 2:
            try:
                merge_bases = find_octopus_base(r, branch_sha_list)
                if merge_bases:
                    common_ancestor_sha = merge_bases[0]
            except (KeyError, IndexError):
                pass

        # Sort commits (chronological by default, or topological if requested)
        if topo_order:
            # Topological sort is more complex, for now use chronological
            # TODO: Implement proper topological ordering
            sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])
        else:
            # Reverse chronological order (newest first)
            sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])

        # Determine how many commits to show
        if more is not None:
            # Find index of common ancestor
            if common_ancestor_sha and common_ancestor_sha in all_commits:
                ancestor_idx = next(
                    (
                        i
                        for i, (sha, _) in enumerate(sorted_commits)
                        if sha == common_ancestor_sha
                    ),
                    None,
                )
                if ancestor_idx is not None:
                    # Show commits up to ancestor + more
                    sorted_commits = sorted_commits[: ancestor_idx + 1 + more]

        # Determine which branches contain which commits
        branch_contains: list[set[bytes]] = []
        for ref_name, sha in sorted_branches:
            reachable = set()

            def mark_reachable(commit_sha: bytes) -> None:
                if commit_sha in reachable:
                    return
                reachable.add(commit_sha)
                if commit_sha in all_commits:
                    _, parents, _ = all_commits[commit_sha]
                    for parent in parents:
                        mark_reachable(parent)

            mark_reachable(sha)
            branch_contains.append(reachable)

        # Output branch headers
        for i, (ref_name, sha) in enumerate(sorted_branches):
            is_current = ref_name == current_branch
            marker = "*" if is_current else "!"
            # Create spacing for alignment
            prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
            ref_str = os.fsdecode(shorten_ref_name(ref_name))
            _, message = branch_commits[i]
            output_lines.append(f"{prefix}[{ref_str}] {message}")

        # Output separator
        output_lines.append("-" * (len(sorted_branches) + 2))

        # Output commits
        for commit_sha, (_, _, message) in sorted_commits:
            # Build marker string
            markers = []
            for i, (ref_name, branch_sha) in enumerate(sorted_branches):
                if commit_sha == branch_sha:
                    # This is the tip of the branch
                    markers.append("*")
                elif commit_sha in branch_contains[i]:
                    # This commit is in the branch
                    markers.append("+")
                else:
                    # This commit is not in the branch
                    markers.append(" ")
            marker_str = "".join(markers)
            output_lines.append(f"{marker_str} [{message}]")

            # Limit output to 26 branches (git show-branch limitation)
            if len(sorted_branches) > 26:
                break

    return output_lines
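
# Sketch of show_branch output for two local branches, derived from the
# formatting logic above (branch names and commit messages are assumptions
# for the example):
#
#     ! [feature] add parser
#      * [main] bump version
#     ----
#      * [bump version]
#     *  [add parser]
#     ++ [initial commit]
#
# "*" marks the current branch header and branch tips, "!" the other branch
# headers, and "+" commits contained in (but not at the tip of) a branch.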


def ls_remote(
    remote: Union[str, bytes],
    config: Optional[Config] = None,
    operation: Optional[str] = None,
    thin_packs: bool = True,
    report_activity: Optional[Callable[[int, str], None]] = None,
    quiet: bool = False,
    include_tags: bool = False,
    username: Optional[str] = None,
    password: Optional[str] = None,
    key_filename: Optional[str] = None,
    ssh_command: Optional[str] = None,
) -> LsRemoteResult:
    """List the refs in a remote.

    Args:
      remote: Remote repository location
      config: Configuration to use
      operation: Operation type
      thin_packs: Whether to use thin packs
      report_activity: Function to report activity
      quiet: Whether to suppress output
      include_tags: Whether to include tags
      username: Username for authentication
      password: Password for authentication
      key_filename: SSH key filename
      ssh_command: SSH command to use

    Returns:
      LsRemoteResult object with refs and symrefs
    """
    if config is None:
        config = StackedConfig.default()
    remote_str = remote.decode() if isinstance(remote, bytes) else remote
    client, host_path = get_transport_and_path(
        remote_str,
        config=config,
        operation=operation,
        thin_packs=thin_packs,
        report_activity=report_activity,
        quiet=quiet,
        include_tags=include_tags,
        username=username,
        password=password,
        key_filename=key_filename,
        ssh_command=ssh_command,
    )
    return client.get_refs(
        host_path.encode() if isinstance(host_path, str) else host_path
    )
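
# Illustrative ls_remote call (a sketch; the URL is an assumption for the
# example):
#
#     result = ls_remote("https://github.com/jelmer/dulwich")
#     print(result.refs.get(b"HEAD"))
#     print(result.symrefs.get(b"HEAD"))  # e.g. b"refs/heads/master"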


def repack(repo: RepoPath) -> None:
    """Repack loose files in a repository.

    Currently this only packs loose objects.

    Args:
      repo: Path to the repository
    """
    with open_repo_closing(repo) as r:
        r.object_store.pack_loose_objects()


def pack_objects(
    repo: RepoPath,
    object_ids: Sequence[bytes],
    packf: BinaryIO,
    idxf: Optional[BinaryIO],
    delta_window_size: Optional[int] = None,
    deltify: Optional[bool] = None,
    reuse_deltas: bool = True,
    pack_index_version: Optional[int] = None,
) -> None:
    """Pack objects into a file.

    Args:
      repo: Path to the repository
      object_ids: List of object ids to write
      packf: File-like object to write to
      idxf: File-like object to write to (can be None)
      delta_window_size: Sliding window size for searching for deltas;
        Set to None for default window size.
      deltify: Whether to deltify objects
      reuse_deltas: Allow reuse of existing deltas while deltifying
      pack_index_version: Pack index version to use (1, 2, or 3). If None, uses default version.
    """
    with open_repo_closing(repo) as r:
        entries, data_sum = write_pack_from_container(
            packf.write,
            r.object_store,
            [(oid, None) for oid in object_ids],
            deltify=deltify,
            delta_window_size=delta_window_size,
            reuse_deltas=reuse_deltas,
        )
        if idxf is not None:
            index_entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
            write_pack_index(idxf, index_entries, data_sum, version=pack_index_version)
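
# Illustrative pack_objects usage (a sketch; sha1 and sha2 are assumed to be
# hex object IDs that exist in the repository):
#
#     with open("out.pack", "wb") as packf, open("out.idx", "wb") as idxf:
#         pack_objects(".", [sha1, sha2], packf, idxf, pack_index_version=2)
#
# Passing idxf=None writes only the pack data; the index can be regenerated
# from the pack later.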


def ls_tree(
    repo: RepoPath,
    treeish: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
    outstream: Union[TextIO, BinaryIO] = sys.stdout,
    recursive: bool = False,
    name_only: bool = False,
) -> None:
    """List contents of a tree.

    Args:
      repo: Path to the repository
      treeish: Tree id to list
      outstream: Output stream (defaults to stdout)
      recursive: Whether to recursively list files
      name_only: Only print item name
    """

    def list_tree(store: BaseObjectStore, treeid: bytes, base: bytes) -> None:
        tree = store[treeid]
        assert isinstance(tree, Tree)
        for name, mode, sha in tree.iteritems():
            assert name is not None
            assert mode is not None
            assert sha is not None
            if base:
                name = posixpath.join(base, name)
            if name_only:
                if isinstance(outstream, BinaryIO):
                    outstream.write(name + b"\n")
                else:
                    outstream.write(name.decode("utf-8", "replace") + "\n")
            else:
                formatted = pretty_format_tree_entry(name, mode, sha)
                if isinstance(outstream, BinaryIO):
                    outstream.write(formatted.encode("utf-8"))
                else:
                    outstream.write(formatted)
            if stat.S_ISDIR(mode) and recursive:
                list_tree(store, sha, name)

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        list_tree(r.object_store, tree.id, b"")


def remote_add(
    repo: RepoPath,
    name: Union[bytes, str],
    url: Union[bytes, str],
) -> None:
    """Add a remote.

    Args:
      repo: Path to the repository
      name: Remote name
      url: Remote URL
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    if not isinstance(url, bytes):
        url = url.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b"remote", name)
        if c.has_section(section):
            raise RemoteExists(f"Remote {name.decode()} already exists")
        c.set(section, b"url", url)
        c.write_to_path()


def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
    """Remove a remote.

    Args:
      repo: Path to the repository
      name: Remote name
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b"remote", name)
        del c[section]
        c.write_to_path()


def _quote_path(path: str) -> str:
    """Quote a path using C-style quoting similar to git's core.quotePath.

    Args:
      path: Path to quote

    Returns:
      Quoted path string
    """
    # Check if path needs quoting (non-ASCII or special characters)
    needs_quoting = False
    for char in path:
        if ord(char) > 127 or char in '"\\':
            needs_quoting = True
            break

    if not needs_quoting:
        return path

    # Apply C-style quoting
    quoted = '"'
    for char in path:
        if ord(char) > 127:
            # Non-ASCII character, encode as octal escape
            utf8_bytes = char.encode("utf-8")
            for byte in utf8_bytes:
                quoted += f"\\{byte:03o}"
        elif char == '"':
            quoted += '\\"'
        elif char == "\\":
            quoted += "\\\\"
        else:
            quoted += char
    quoted += '"'
    return quoted
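
# _quote_path examples, derived from the rules above (the "тест.txt" octal
# form matches the example in the check_ignore docstring below):
#
#     _quote_path("plain.txt")  returns  plain.txt       (no quoting needed)
#     _quote_path('a"b')        returns  "a\"b"          (quote escaped)
#     _quote_path("тест.txt")   returns  "\321\202\320\265\321\201\321\202.txt"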


def check_ignore(
    repo: RepoPath,
    paths: Sequence[Union[str, bytes, os.PathLike[str]]],
    no_index: bool = False,
    quote_path: bool = True,
) -> Iterator[str]:
    r"""Debug gitignore files.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
      quote_path: If True, quote non-ASCII characters in returned paths using
        C-style octal escapes (e.g. "тест.txt" becomes "\\321\\202\\320\\265\\321\\201\\321\\202.txt").
        If False, return raw unicode paths.

    Returns: List of ignored files
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for original_path in paths:
            # Convert path to string for consistent handling
            original_path_fspath = os.fspath(original_path)
            # Normalize to str
            original_path_str = os.fsdecode(original_path_fspath)
            if not no_index and path_to_tree_path(r.path, original_path_str) in index:
                continue

            # Preserve whether the original path had a trailing slash
            had_trailing_slash = original_path_str.endswith(("/", os.path.sep))

            if os.path.isabs(original_path_str):
                path = os.path.relpath(original_path_str, r.path)
                # Normalize Windows paths to use forward slashes
                if os.path.sep != "/":
                    path = path.replace(os.path.sep, "/")
            else:
                path = original_path_str

            # Restore trailing slash if it was in the original
            if had_trailing_slash and not path.endswith("/"):
                path = path + "/"

            # For directories, check with trailing slash to get correct ignore behavior
            test_path = path
            path_without_slash = path.rstrip("/")
            is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))

            # If this is a directory path, ensure we test it correctly
            if is_directory and not path.endswith("/"):
                test_path = path + "/"

            if ignore_manager.is_ignored(test_path):
                # Return relative path (like git does) when absolute path was provided
                if os.path.isabs(original_path):
                    output_path = path
                else:
                    output_path = original_path  # type: ignore[assignment]
                yield _quote_path(output_path) if quote_path else output_path
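
# Illustrative check_ignore usage (a sketch; the paths are assumptions for
# the example):
#
#     for ignored in check_ignore(".", ["build/", "docs/index.html"]):
#         print(ignored)
#
# Only paths that are actually ignored are yielded; paths present in the
# index are skipped unless no_index=True.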


def update_head(
    repo: RepoPath,
    target: Union[str, bytes],
    detached: bool = False,
    new_branch: Optional[Union[str, bytes]] = None,
) -> None:
    """Update HEAD to point at a new branch/commit.

    Note that this does not actually update the working tree.

    Args:
      repo: Path to the repository
      detached: Create a detached head
      target: Branch or committish to switch to
      new_branch: New branch to create
    """
    with open_repo_closing(repo) as r:
        if new_branch is not None:
            to_set = _make_branch_ref(new_branch)
        else:
            to_set = b"HEAD"
        if detached:
            # TODO(jelmer): Provide some way so that the actual ref gets
            # updated rather than what it points to, so the delete isn't
            # necessary.
            del r.refs[to_set]
            r.refs[to_set] = parse_commit(r, target).id
        else:
            r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
        if new_branch is not None:
            r.refs.set_symbolic_ref(b"HEAD", to_set)


def checkout(
    repo: Union[str, os.PathLike[str], Repo],
    target: Optional[Union[str, bytes, Commit, Tag]] = None,
    force: bool = False,
    new_branch: Optional[Union[bytes, str]] = None,
    paths: Optional[list[Union[bytes, str]]] = None,
) -> None:
    """Switch to a branch or commit, updating both HEAD and the working tree.

    This is similar to 'git checkout', allowing you to switch to a branch,
    tag, or specific commit. Unlike update_head, this function also updates
    the working tree to match the target.

    Args:
      repo: Path to repository or repository object
      target: Branch name, tag, or commit SHA to checkout. If None and paths is specified,
        restores files from HEAD
      force: Force checkout even if there are local changes
      new_branch: Create a new branch at target (like git checkout -b)
      paths: List of specific paths to checkout. If specified, only these paths are updated
        and HEAD is not changed

    Raises:
      CheckoutError: If checkout cannot be performed due to conflicts
      KeyError: If the target reference cannot be found
    """
    with open_repo_closing(repo) as r:
        # Store the original target for later reference checks
        original_target = target
        worktree = r.get_worktree()

        # Handle path-specific checkout (like git checkout -- <paths>)
        if paths is not None:
            # Convert paths to bytes
            byte_paths = []
            for path in paths:
                if isinstance(path, str):
                    byte_paths.append(path.encode(DEFAULT_ENCODING))
                else:
                    byte_paths.append(path)

            # If no target specified, use HEAD
            if target is None:
                try:
                    target = r.refs[b"HEAD"]
                except KeyError:
                    raise CheckoutError("No HEAD reference found")
            else:
                if isinstance(target, str):
                    target = target.encode(DEFAULT_ENCODING)

            # Get the target commit and tree
            target_tree = parse_tree(r, target)

            # Get blob normalizer for line ending conversion
            blob_normalizer = r.get_blob_normalizer()

            # Restore specified paths from target tree
            for path in byte_paths:
                try:
                    # Look up the path in the target tree
                    mode, sha = target_tree.lookup_path(
                        r.object_store.__getitem__, path
                    )
                    obj = r[sha]
                    assert isinstance(obj, Blob), "Expected a Blob object"
                except KeyError:
                    # Path doesn't exist in target tree
                    pass
                else:
                    # Create directories if needed
                    # Handle path as string
                    if isinstance(path, bytes):
                        path_str = path.decode(DEFAULT_ENCODING)
                    else:
                        path_str = path
                    file_path = os.path.join(r.path, path_str)
                    os.makedirs(os.path.dirname(file_path), exist_ok=True)

                    # Write the file content
                    if stat.S_ISREG(mode):
                        # Apply checkout filters (smudge)
                        if blob_normalizer:
                            obj = blob_normalizer.checkout_normalize(obj, path)
                        flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
                        if sys.platform == "win32":
                            flags |= os.O_BINARY
                        with os.fdopen(os.open(file_path, flags, mode), "wb") as f:
                            f.write(obj.data)

                    # Update the index
                    worktree.stage(path)
            return

        # Normal checkout (switching branches/commits)
        if target is None:
            raise ValueError("Target must be specified for branch/commit checkout")

        if isinstance(target, str):
            target_bytes = target.encode(DEFAULT_ENCODING)
        elif isinstance(target, bytes):
            target_bytes = target
        else:
            # For Commit/Tag objects, we'll use their SHA
            target_bytes = target.id

        if isinstance(new_branch, str):
            new_branch = new_branch.encode(DEFAULT_ENCODING)

        # Parse the target to get the commit
        assert (
            original_target is not None
        )  # Guaranteed by earlier check for normal checkout
        target_commit = parse_commit(r, original_target)
        target_tree_id = target_commit.tree

        # Get current HEAD tree for comparison
        try:
            current_head = r.refs[b"HEAD"]
            current_commit = r[current_head]
            assert isinstance(current_commit, Commit), "Expected a Commit object"
            current_tree_id = current_commit.tree
        except KeyError:
            # No HEAD yet (empty repo)
            current_tree_id = None

        # Check for uncommitted changes if not forcing
        if not force and current_tree_id is not None:
            status_report = status(r)
            changes = []
            # staged is a dict with 'add', 'delete', 'modify' keys
            if isinstance(status_report.staged, dict):
                changes.extend(status_report.staged.get("add", []))
                changes.extend(status_report.staged.get("delete", []))
                changes.extend(status_report.staged.get("modify", []))
            # unstaged is a list
            changes.extend(status_report.unstaged)
            if changes:
                # Check if any changes would conflict with checkout
                target_tree_obj = r[target_tree_id]
                assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
                target_tree = target_tree_obj
                for change in changes:
                    if isinstance(change, str):
                        change = change.encode(DEFAULT_ENCODING)
                    try:
                        target_tree.lookup_path(r.object_store.__getitem__, change)
                    except KeyError:
                        # File doesn't exist in target tree - change can be preserved
                        pass
                    else:
                        # File exists in target tree - would overwrite local changes
                        raise CheckoutError(
                            f"Your local changes to '{change.decode()}' would be "
                            "overwritten by checkout. Please commit or stash before switching."
                        )

        # Get configuration for working directory update
        config = r.get_config()
        honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
        if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
            validate_path_element = validate_path_element_ntfs
        else:
            validate_path_element = validate_path_element_default
        if config.get_boolean(b"core", b"symlinks", True):

            def symlink_wrapper(
                source: Union[str, bytes, os.PathLike[str]],
                target: Union[str, bytes, os.PathLike[str]],
            ) -> None:
                symlink(source, target)  # type: ignore[arg-type,unused-ignore]

            symlink_fn = symlink_wrapper
        else:

            def symlink_fallback(
                source: Union[str, bytes, os.PathLike[str]],
                target: Union[str, bytes, os.PathLike[str]],
            ) -> None:
                mode = "w" + ("b" if isinstance(source, bytes) else "")
                with open(target, mode) as f:
                    f.write(source)

            symlink_fn = symlink_fallback

        # Get blob normalizer for line ending conversion
        blob_normalizer = r.get_blob_normalizer()

        # Update working tree
        tree_change_iterator: Iterator[TreeChange] = tree_changes(
            r.object_store, current_tree_id, target_tree_id
        )
        update_working_tree(
            r,
            current_tree_id,
            target_tree_id,
            change_iterator=tree_change_iterator,
            honor_filemode=honor_filemode,
            validate_path_element=validate_path_element,
            symlink_fn=symlink_fn,
            force_remove_untracked=force,
            blob_normalizer=blob_normalizer,
            allow_overwrite_modified=force,
        )

        # Update HEAD
        if new_branch:
            # Create new branch and switch to it
            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
            update_head(r, new_branch)

            # Set up tracking if creating from a remote branch
            from .refs import LOCAL_REMOTE_PREFIX, local_branch_name, parse_remote_ref

            if isinstance(original_target, bytes) and target_bytes.startswith(
                LOCAL_REMOTE_PREFIX
            ):
                try:
                    remote_name, branch_name = parse_remote_ref(target_bytes)
                    # Set tracking to refs/heads/<branch> on the remote
                    set_branch_tracking(
                        r, new_branch, remote_name, local_branch_name(branch_name)
                    )
                except ValueError:
                    # Invalid remote ref format, skip tracking setup
                    pass
        else:
            # Check if target is a branch name (with or without refs/heads/ prefix)
            branch_ref = None
            if (
                isinstance(original_target, (str, bytes))
                and target_bytes in r.refs.keys()
            ):
                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
                    branch_ref = target_bytes
            else:
                # Try adding refs/heads/ prefix
                potential_branch = (
                    _make_branch_ref(target_bytes)
                    if isinstance(original_target, (str, bytes))
                    else None
                )
                if potential_branch in r.refs.keys():
                    branch_ref = potential_branch

            if branch_ref:
                # It's a branch - update HEAD symbolically
                update_head(r, branch_ref)
            else:
                # It's a tag, other ref, or commit SHA - detached HEAD
                update_head(r, target_commit.id.decode("ascii"), detached=True)
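
# Illustrative checkout calls (a sketch; the branch and path names are
# assumptions for the example):
#
#     checkout(".", "feature-x")            # switch to an existing branch
#     checkout(".", paths=["README.md"])    # restore one file from HEAD
#
#     # Create a local branch tracking a remote one; per the logic above,
#     # tracking is only set up when the target is a bytes refs/remotes/ ref:
#     checkout(".", b"refs/remotes/origin/feature-x", new_branch=b"feature-x")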


def reset_file(
    repo: Repo,
    file_path: str,
    target: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
    symlink_fn: Optional[
        Callable[
            [Union[str, bytes, os.PathLike[str]], Union[str, bytes, os.PathLike[str]]],
            None,
        ]
    ] = None,
) -> None:
    """Reset the file to specific commit or branch.

    Args:
      repo: dulwich Repo object
      file_path: file to reset, relative to the repository path
      target: branch or commit or b'HEAD' to reset
      symlink_fn: Function to use for creating symlinks
    """
    tree = parse_tree(repo, treeish=target)
    tree_path = _fs_to_tree_path(file_path)

    file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
    full_path = os.path.join(os.fsencode(repo.path), tree_path)
    blob = repo.object_store[file_entry[1]]
    assert isinstance(blob, Blob)
    mode = file_entry[0]
    build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)


@replace_me(since="0.22.9", remove_in="0.24.0")
def checkout_branch(
    repo: Union[str, os.PathLike[str], Repo],
    target: Union[bytes, str],
    force: bool = False,
) -> None:
    """Switch branches or restore working tree files.

    This is now a wrapper around the general checkout() function.
    Preserved for backward compatibility.

    Args:
      repo: dulwich Repo object
      target: branch name or commit sha to checkout
      force: Whether to force the checkout, discarding local changes
    """
    # Simply delegate to the new checkout function
    return checkout(repo, target, force=force)


def sparse_checkout(
    repo: Union[str, os.PathLike[str], Repo],
    patterns: Optional[list[str]] = None,
    force: bool = False,
    cone: Optional[bool] = None,
) -> None:
    """Perform a sparse checkout in the repository (either 'full' or 'cone mode').

    Perform sparse checkout in either 'cone' (directory-based) mode or
    'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.
    If ``cone`` is ``None``, the mode is inferred from the repository's
    ``core.sparseCheckoutCone`` config setting.

    Steps:
      1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
      2) Determine which paths in the index are included vs. excluded.
         - If ``cone=True``, use "cone-compatible" directory-based logic.
         - If ``cone=False``, use standard .gitignore-style matching.
      3) Update the index's skip-worktree bits and add/remove files in
         the working tree accordingly.
      4) If ``force=False``, refuse to remove files that have local modifications.

    Args:
      repo: Path to the repository or a Repo object.
      patterns: Optional list of sparse-checkout patterns to write.
      force: Whether to force removal of locally modified files (default False).
      cone: Boolean indicating cone mode (True/False). If None, read from config.

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        # --- 0) Possibly infer 'cone' from config ---
        if cone is None:
            cone = repo_obj.get_worktree().infer_cone_mode()

        # --- 1) Read or write patterns ---
        if patterns is None:
            lines = repo_obj.get_worktree().get_sparse_checkout_patterns()
            if lines is None:
                raise Error("No sparse checkout patterns found.")
        else:
            lines = patterns
            repo_obj.get_worktree().set_sparse_checkout_patterns(patterns)

        # --- 2) Determine the set of included paths ---
        index = repo_obj.open_index()
        included_paths = determine_included_paths(index, lines, cone)

        # --- 3) Apply those results to the index & working tree ---
        try:
            apply_included_paths(repo_obj, included_paths, force=force)
        except SparseCheckoutConflictError as exc:
            raise CheckoutError(*exc.args) from exc


def cone_mode_init(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Initialize a repository to use sparse checkout in 'cone' mode.

    Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
    Writes an initial ``.git/info/sparse-checkout`` file that includes only
    top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
    Then performs a sparse checkout to update the working tree accordingly.

    If no directories are specified, then only top-level files are included:
    https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling

    Args:
      repo: Path to the repository or a Repo object.

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        patterns = ["/*", "!/*/"]  # root-level files only
        sparse_checkout(repo_obj, patterns, force=True, cone=True)
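
# Sketch of the .git/info/sparse-checkout contents cone_mode_init writes
# (taken directly from the patterns above):
#
#     /*
#     !/*/
#
# i.e. include all top-level files and exclude every subdirectory until
# directories are added with cone_mode_set() or cone_mode_add().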


def cone_mode_set(
    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
) -> None:
    """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.

    Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
    Writes new patterns so that only the specified directories (and top-level files)
    remain in the working tree, and applies the sparse checkout update.

    Args:
      repo: Path to the repository or a Repo object.
      dirs: List of directory names to include.
      force: Whether to forcibly discard local modifications (default False).

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        repo_obj.get_worktree().set_cone_mode_patterns(dirs=dirs)
        new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
        # Finally, apply the patterns and update the working tree
        sparse_checkout(repo_obj, new_patterns, force=force, cone=True)


def cone_mode_add(
    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
) -> None:
    """Add new directories to the existing 'cone-mode' sparse-checkout patterns.

    Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
    lines to include the specified directories, and then performs a sparse
    checkout to update the working tree accordingly.

    Args:
      repo: Path to the repository or a Repo object.
      dirs: List of directory names to add to the sparse-checkout.
      force: Whether to forcibly discard local modifications (default False).

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        # Do not pass base patterns as dirs
        base_patterns = ["/*", "!/*/"]
        existing_dirs = [
            pat.strip("/")
            for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
            if pat not in base_patterns
        ]
        added_dirs = existing_dirs + list(dirs or [])
        repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
        new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
        sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)


def check_mailmap(repo: RepoPath, contact: Union[str, bytes]) -> bytes:
    """Check canonical name and email of contact.

    Args:
      repo: Path to the repository
      contact: Contact name and/or email

    Returns: Canonical contact data
    """
    with open_repo_closing(repo) as r:
        from .mailmap import Mailmap

        try:
            mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
        except FileNotFoundError:
            mailmap = Mailmap()
        contact_bytes = (
            contact.encode(DEFAULT_ENCODING) if isinstance(contact, str) else contact
        )
        result = mailmap.lookup(contact_bytes)
        if isinstance(result, bytes):
            return result
        else:
            # Convert tuple back to bytes format
            name, email = result
            if name is None:
                name = b""
            if email is None:
                email = b""
            return name + b" <" + email + b">"


def fsck(repo: RepoPath) -> Iterator[tuple[bytes, Exception]]:
    """Check a repository.

    Args:
      repo: A path to the repository

    Returns: Iterator over errors/warnings
    """
    with open_repo_closing(repo) as r:
        # TODO(jelmer): check pack files
        # TODO(jelmer): check graph
        # TODO(jelmer): check refs
        for sha in r.object_store:
            o = r.object_store[sha]
            try:
                o.check()
            except Exception as e:
                yield (sha, e)


def stash_list(
    repo: Union[str, os.PathLike[str], Repo],
) -> Iterator[tuple[int, tuple[bytes, bytes]]]:
    """List all stashes in a repository."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        entries = stash.stashes()
        # Convert Entry objects to (old_sha, new_sha) tuples
        return enumerate([(entry.old_sha, entry.new_sha) for entry in entries])


def stash_push(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Push a new stash onto the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.push()


def stash_pop(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Pop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.pop(0)


def stash_drop(repo: Union[str, os.PathLike[str], Repo], index: int) -> None:
    """Drop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.drop(index)


def ls_files(repo: RepoPath) -> list[bytes]:
    """List all files in an index."""
    with open_repo_closing(repo) as r:
        return sorted(r.open_index())


def find_unique_abbrev(
    object_store: BaseObjectStore, object_id: Union[str, bytes], min_length: int = 7
) -> str:
    """Find the shortest unique abbreviation for an object ID.

    Args:
      object_store: Object store to search in
      object_id: The full object ID to abbreviate
      min_length: Minimum length of abbreviation (default 7)

    Returns:
      The shortest unique prefix of the object ID (at least min_length chars)
    """
    if isinstance(object_id, bytes):
        hex_id = object_id.decode("ascii")
    else:
        hex_id = object_id

    # Start with minimum length
    for length in range(min_length, len(hex_id) + 1):
        prefix = hex_id[:length]
        matches = 0

        # Check if this prefix is unique
        for obj_id in object_store:
            if obj_id.decode("ascii").startswith(prefix):
                matches += 1
                if matches > 1:
                    # Not unique, need more characters
                    break

        if matches == 1:
            # Found unique prefix
            return prefix

    # If we get here, return the full ID
    return hex_id
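
# Illustrative find_unique_abbrev behaviour (a sketch; the SHA is an
# assumption for the example):
#
#     find_unique_abbrev(r.object_store, b"1" * 40)
#
# returns "1111111" when no other object shares that 7-character prefix, and
# otherwise grows the prefix one character at a time until it is unique.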


def describe(
    repo: Union[str, os.PathLike[str], Repo], abbrev: Optional[int] = None
) -> str:
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7

    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key_str = key.decode()
            obj = r.get_object(value)
            if "tags" not in key_str:
                continue

            _, tag = key_str.rsplit("/", 1)

            if isinstance(obj, Tag):
                # Annotated tag case
                commit = r.get_object(obj.object[1])
            else:
                # Lightweight tag case - obj is already the commit
                commit = obj
            if not isinstance(commit, Commit):
                raise AssertionError(
                    f"Expected Commit object, got {type(commit).__name__}"
                )
            tag_info: list[Any] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]
            tags[tag] = tag_info

        # Sort tags by datetime (first element of the value list)
        sorted_tags = sorted(
            tags.items(), key=lambda tag_item: tag_item[1][0], reverse=True
        )

        # Get the latest commit
        latest_commit = r[r.head()]

        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"

        # We're now 0 commits from the top
        commit_count = 0

        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag_item in sorted_tags:
                tag_name = tag_item[0]
                tag_commit = tag_item[1][1]
                if commit_id == tag_commit:
                    if commit_count == 0:
                        return tag_name
                    else:
                        if abbrev is not None:
                            abbrev_hash = latest_commit.id.decode("ascii")[abbrev_slice]
                        else:
                            abbrev_hash = find_unique_abbrev(
                                r.object_store, latest_commit.id
                            )
                        return f"{tag_name}-{commit_count}-g{abbrev_hash}"
            commit_count += 1

        # Return plain commit if no parent tag can be found
        if abbrev is not None:
            return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
        return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"


def get_object_by_path(
    repo: RepoPath,
    path: Union[str, bytes],
    committish: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> Union[Blob, Tree, Commit, Tag]:
    """Get an object by path.

    Args:
      repo: A path to the repository
      path: Path to look up
      committish: Commit to look up path in

    Returns: A `ShaFile` object
    """
    if committish is None:
        committish = "HEAD"
    # Get the repository
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        base_tree = commit.tree
        if not isinstance(path, bytes):
            path = commit_encode(commit, path)
        (_mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
        obj = r[sha]
        assert isinstance(obj, (Blob, Tree, Commit, Tag))
        return obj


def write_tree(repo: RepoPath) -> bytes:
    """Write a tree object from the index.

    Args:
      repo: Repository for which to write tree

    Returns: tree id for the tree that was written
    """
    with open_repo_closing(repo) as r:
        return r.open_index().commit(r.object_store)


def _do_merge(
    r: Repo,
    merge_commit_id: bytes,
    no_commit: bool = False,
    no_ff: bool = False,
    message: Optional[bytes] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> tuple[Optional[bytes], list[bytes]]:
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_merge_base
    from .merge import recursive_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"
    merge_commit = r[merge_commit_id]
    assert isinstance(merge_commit, Commit), "Expected a Commit object"

    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])

    if not merge_bases:
        raise Error("No common ancestor found")

    # Use the first merge base for fast-forward checks
    base_commit_id = merge_bases[0]

    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Check for fast-forward
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[b"HEAD"] = merge_commit_id

        # Update the working directory
        changes = tree_changes(r.object_store, head_commit.tree, merge_commit.tree)
        update_working_tree(
            r, head_commit.tree, merge_commit.tree, change_iterator=changes
        )
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Perform recursive merge (handles multiple merge bases automatically)
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = recursive_merge(
        r.object_store, merge_bases, head_commit, merge_commit, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author

    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n".encode()
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id

    return (merge_commit_obj.id, [])


def _do_octopus_merge(
    r: Repo,
    merge_commit_ids: list[bytes],
    no_commit: bool = False,
    no_ff: bool = False,
    message: Optional[bytes] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> tuple[Optional[bytes], list[bytes]]:
    """Internal octopus merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_ids: List of commit SHAs to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit (ignored for octopus)
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_octopus_base
    from .merge import octopus_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"

    # Get all commits to merge
    other_commits = []
    for merge_commit_id in merge_commit_ids:
        merge_commit = r[merge_commit_id]
        assert isinstance(merge_commit, Commit), "Expected a Commit object"
        # Check if we're trying to merge the same commit as HEAD
        if head_commit_id == merge_commit_id:
            # Skip this commit, it's already merged
            continue
        other_commits.append(merge_commit)

    # If no commits to merge after filtering, we're already up to date
    if not other_commits:
        return (None, [])

    # If only one commit to merge, use regular merge
    if len(other_commits) == 1:
        return _do_merge(
            r, other_commits[0].id, no_commit, no_ff, message, author, committer
        )

    # Find the octopus merge base
    all_commit_ids = [head_commit_id] + [c.id for c in other_commits]
    merge_bases = find_octopus_base(r, all_commit_ids)
    if not merge_bases:
        raise Error("No common ancestor found")

    # Fast-forward doesn't really apply to octopus merges, so we always
    # create a merge commit.

    # Perform octopus merge
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = octopus_merge(
        r.object_store, merge_bases, head_commit, other_commits, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts:
        # Octopus merge refuses to proceed with conflicts
        return (None, conflicts)

    if no_commit:
        # Don't create a commit if no_commit is True
        return (None, [])

    # Create merge commit with multiple parents
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id] + [c.id for c in other_commits]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        # Generate default message for octopus merge
        branch_names = []
        for commit_id in merge_commit_ids:
            branch_names.append(commit_id.decode()[:7])
        message = f"Merge commits {', '.join(branch_names)}\n".encode()
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id
    return (merge_commit_obj.id, [])


def merge(
    repo: Union[str, os.PathLike[str], Repo],
    committish: Union[
        str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]
    ],
    no_commit: bool = False,
    no_ff: bool = False,
    message: Optional[bytes] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> tuple[Optional[bytes], list[bytes]]:
    """Merge one or more commits into the current branch.

    Args:
      repo: Repository to merge into
      committish: Commit(s) to merge. Can be a single commit or a sequence of
        commits. When merging more than two heads, the octopus merge strategy
        is used.
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference or commit cannot be found
    """
    with open_repo_closing(repo) as r:
        # Handle both single commit and multiple commits
        if isinstance(committish, (list, tuple)):
            # Multiple commits - use octopus merge
            merge_commit_ids = []
            for c in committish:
                try:
                    merge_commit_ids.append(parse_commit(r, c).id)
                except KeyError:
                    raise Error(
                        f"Cannot find commit '{c.decode() if isinstance(c, bytes) else c}'"
                    )
            if len(merge_commit_ids) == 1:
                # Only one commit, use regular merge
                result = _do_merge(
                    r, merge_commit_ids[0], no_commit, no_ff, message, author, committer
                )
            else:
                # Multiple commits, use octopus merge
                result = _do_octopus_merge(
                    r, merge_commit_ids, no_commit, no_ff, message, author, committer
                )
        else:
            # Single commit - use regular merge
            # Type narrowing: committish is not a sequence in this branch
            single_committish = cast(Union[str, bytes, Commit, Tag], committish)
            try:
                merge_commit_id = parse_commit(r, single_committish).id
            except KeyError:
                raise Error(
                    f"Cannot find commit '{single_committish.decode() if isinstance(single_committish, bytes) else single_committish}'"
                )
            result = _do_merge(
                r, merge_commit_id, no_commit, no_ff, message, author, committer
            )

        # Trigger auto GC if needed
        from .gc import maybe_auto_gc

        maybe_auto_gc(r)

        return result
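

# Illustrative usage sketch for merge(); the repository path and branch name
# below are hypothetical:
#
#   sha, conflicts = merge("/path/to/repo", "feature")
#   if conflicts:
#       # Conflict markers were written to the working tree; resolve them,
#       # then commit manually.
#       print([p.decode("utf-8", "replace") for p in conflicts])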


def unpack_objects(
    pack_path: Union[str, os.PathLike[str]], target: Union[str, os.PathLike[str]] = "."
) -> int:
    """Unpack objects from a pack file into the repository.

    Args:
      pack_path: Path to the pack file to unpack
      target: Path to the repository to unpack into

    Returns:
      Number of objects unpacked
    """
    from .pack import Pack

    with open_repo_closing(target) as r:
        pack_basename = os.path.splitext(pack_path)[0]
        with Pack(pack_basename) as pack:
            count = 0
            for unpacked in pack.iter_unpacked():
                obj = unpacked.sha_file()
                r.object_store.add_object(obj)
                count += 1
            return count
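

# Illustrative usage sketch for unpack_objects(); the pack path is hypothetical:
#
#   n = unpack_objects("objects/pack/pack-1234.pack", target="/path/to/repo")
#   print(f"unpacked {n} objects")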


def merge_tree(
    repo: RepoPath,
    base_tree: Optional[Union[str, bytes, Tree, Commit, Tag]],
    our_tree: Union[str, bytes, Tree, Commit, Tag],
    their_tree: Union[str, bytes, Tree, Commit, Tag],
) -> tuple[bytes, list[bytes]]:
    """Perform a three-way tree merge without touching the working directory.

    This is similar to git merge-tree, performing a merge at the tree level
    without creating commits or updating any references.

    Args:
      repo: Repository containing the trees
      base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
      our_tree: Tree-ish of our side of the merge
      their_tree: Tree-ish of their side of the merge

    Returns:
      tuple: A tuple of (merged_tree_id, conflicts) where:
        - merged_tree_id is the SHA-1 of the merged tree
        - conflicts is a list of paths (as bytes) that had conflicts

    Raises:
      KeyError: If any of the tree-ish arguments cannot be resolved
    """
    from .merge import Merger

    with open_repo_closing(repo) as r:
        # Resolve tree-ish arguments to actual trees
        base = parse_tree(r, base_tree) if base_tree else None
        ours = parse_tree(r, our_tree)
        theirs = parse_tree(r, their_tree)

        # Perform the merge
        gitattributes = r.get_gitattributes()
        config = r.get_config()
        merger = Merger(r.object_store, gitattributes, config)
        merged_tree, conflicts = merger.merge_trees(base, ours, theirs)

        # Add the merged tree to the object store
        r.object_store.add_object(merged_tree)

        return merged_tree.id, conflicts
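

# Illustrative usage sketch for merge_tree(); any tree-ish (ref, sha, Tree,
# Commit, or Tag) works for the three arguments; the refs here are hypothetical:
#
#   tree_id, conflicts = merge_tree("/path/to/repo", b"base-branch", b"main", b"topic")
#   # The merged tree is now in the object store; no refs or working tree changed.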


def cherry(
    repo: Union[str, os.PathLike[str], Repo],
    upstream: Optional[Union[str, bytes]] = None,
    head: Optional[Union[str, bytes]] = None,
    limit: Optional[Union[str, bytes]] = None,
    verbose: bool = False,
) -> list[tuple[str, bytes, Optional[bytes]]]:
    """Find commits not merged upstream.

    Args:
      repo: Repository path or object
      upstream: Upstream branch (default: the tracking branch, falling back
        to HEAD^ if none is configured)
      head: Head branch (default: HEAD)
      limit: Limit commits to those after this ref
      verbose: Include commit messages in output

    Returns:
      List of tuples (status, commit_sha, message) where status is '+' or '-':
      '+' means the commit is not in upstream, '-' means an equivalent patch
      exists upstream. message is None unless verbose=True.
    """
    from .patch import commit_patch_id

    with open_repo_closing(repo) as r:
        # Resolve upstream
        if upstream is None:
            # Try to find the tracking branch
            upstream_found = False
            head_refs, _ = r.refs.follow(b"HEAD")
            if head_refs:
                head_ref = head_refs[0]
                if head_ref.startswith(b"refs/heads/"):
                    config = r.get_config()
                    branch_name = head_ref[len(b"refs/heads/") :]
                    try:
                        upstream_ref = config.get((b"branch", branch_name), b"merge")
                    except KeyError:
                        upstream_ref = None
                    if upstream_ref:
                        try:
                            remote_name = config.get(
                                (b"branch", branch_name), b"remote"
                            )
                        except KeyError:
                            remote_name = None
                        if remote_name:
                            # Build the tracking branch ref
                            upstream_refname = (
                                b"refs/remotes/"
                                + remote_name
                                + b"/"
                                + upstream_ref.split(b"/")[-1]
                            )
                            if upstream_refname in r.refs:
                                upstream = upstream_refname
                                upstream_found = True
            if not upstream_found:
                # Default to HEAD^ if no tracking branch found
                head_commit = r[b"HEAD"]
                if isinstance(head_commit, Commit) and head_commit.parents:
                    upstream = head_commit.parents[0]
                else:
                    raise ValueError("Could not determine upstream branch")

        # Resolve head
        if head is None:
            head = b"HEAD"

        # Convert strings to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(head, str):
            head = head.encode("utf-8")
        if limit is not None and isinstance(limit, str):
            limit = limit.encode("utf-8")

        # Resolve refs to commit IDs
        assert upstream is not None
        upstream_obj = r[upstream]
        head_obj = r[head]
        upstream_id = upstream_obj.id
        head_id = head_obj.id

        # Get limit commit ID if specified
        limit_id = None
        if limit is not None:
            limit_id = r[limit].id

        # Find all commits reachable from head but not from upstream.
        # This is equivalent to: git rev-list ^upstream head
        walker = r.get_walker([head_id], exclude=[upstream_id])
        head_commits = []
        for entry in walker:
            commit = entry.commit
            # Apply limit if specified
            if limit_id is not None:
                # Stop when we reach the limit commit
                if commit.id == limit_id:
                    break
            head_commits.append(commit.id)

        # Compute patch IDs for upstream commits
        upstream_walker = r.get_walker([upstream_id])
        upstream_patch_ids = {}  # Maps patch_id -> commit_id for debugging
        for entry in upstream_walker:
            commit = entry.commit
            pid = commit_patch_id(r.object_store, commit.id)
            upstream_patch_ids[pid] = commit.id

        # For each head commit, check if an equivalent patch exists upstream
        results: list[tuple[str, bytes, Optional[bytes]]] = []
        for commit_id in reversed(head_commits):  # Show oldest first
            obj = r.object_store[commit_id]
            assert isinstance(obj, Commit)
            commit = obj
            pid = commit_patch_id(r.object_store, commit_id)
            if pid in upstream_patch_ids:
                status = "-"
            else:
                status = "+"
            message = None
            if verbose:
                message = commit.message.split(b"\n")[0]  # First line only
            results.append((status, commit_id, message))

        return results
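

# Illustrative usage sketch for cherry(); the refs are hypothetical:
#
#   for status, sha, msg in cherry(
#       "/path/to/repo", upstream=b"refs/remotes/origin/main", verbose=True
#   ):
#       print(status, sha.decode()[:7], msg)
#   # '+': no equivalent patch upstream; '-': an equivalent patch already exists.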


def cherry_pick(  # noqa: D417
    repo: Union[str, os.PathLike[str], Repo],
    committish: Union[str, bytes, Commit, Tag, None],
    no_commit: bool = False,
    continue_: bool = False,
    abort: bool = False,
) -> Optional[bytes]:
    r"""Cherry-pick a commit onto the current branch.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick (can be None only when resuming or aborting)
      no_commit: If True, do not create a commit after applying changes
      ``continue_``: If True, resume an in-progress cherry-pick after resolving conflicts
      abort: Abort an in-progress cherry-pick

    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there
      were conflicts

    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from .merge import three_way_merge

    # Validate that committish is provided when needed
    if not (continue_ or abort) and committish is None:
        raise ValueError("committish is required when not using --continue or --abort")

    with open_repo_closing(repo) as r:
        # Handle abort
        if abort:
            # Clean up any cherry-pick state
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass
            # Reset index to HEAD
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            r.get_worktree().reset_index(head_commit.tree)
            return None

        # Handle continue
        if continue_:
            # Check if there's a cherry-pick in progress
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")

            # Check for unresolved conflicts
            if r.open_index().has_conflicts():
                raise Error("Unresolved conflicts remain")

            # Create the commit
            tree_id = r.open_index().commit(r.object_store)

            # Read saved message if any
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                assert isinstance(cherry_pick_commit, Commit)
                message = cherry_pick_commit.message

            assert isinstance(cherry_pick_commit, Commit)
            new_commit = r.get_worktree().commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )

            # Clean up state files
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass

            return new_commit

        # Normal cherry-pick operation

        # Get current HEAD
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        # Parse the commit to cherry-pick;
        # committish cannot be None here due to the validation above
        assert committish is not None
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )

        # Check if commit has parents
        assert isinstance(cherry_pick_commit, Commit)
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")

        # Get parent of cherry-pick commit
        parent_commit = r[cherry_pick_commit.parents[0]]
        assert isinstance(parent_commit, Commit)

        # Perform three-way merge
        assert isinstance(head_commit, Commit)
        merged_tree, conflicts = three_way_merge(
            r.object_store, parent_commit, head_commit, cherry_pick_commit
        )

        # Add merged tree to object store
        r.object_store.add_object(merged_tree)

        # Reset index to match the merged tree
        r.get_worktree().reset_index(merged_tree.id)

        # Update the working tree from the new index; allow overwriting
        # modified files because we're applying the merge result
        changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
        update_working_tree(
            r,
            head_commit.tree,
            merged_tree.id,
            change_iterator=changes,
            allow_overwrite_modified=True,
        )

        if conflicts:
            # Save state for later continuation
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")
            # Save commit message
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)
            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )

        if no_commit:
            return None

        # Create the commit
        new_commit = r.get_worktree().commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )
        return new_commit
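

# Illustrative usage sketch for cherry_pick(), including the conflict flow;
# the repository path and commit id are hypothetical:
#
#   try:
#       new_sha = cherry_pick("/path/to/repo", "a1b2c3d")
#   except Error:
#       # Resolve the conflicted files in the working tree, stage them, then:
#       new_sha = cherry_pick("/path/to/repo", None, continue_=True)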


def revert(
    repo: Union[str, os.PathLike[str], Repo],
    commits: Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]],
    no_commit: bool = False,
    message: Optional[Union[str, bytes]] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> Optional[bytes]:
    """Revert one or more commits.

    This creates a new commit that undoes the changes introduced by the
    specified commits. Unlike reset, revert creates a new commit that
    preserves history.

    Args:
      repo: Path to repository or repository object
      commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
      no_commit: If True, apply changes to index/working tree but don't commit
      message: Optional commit message (default: "Revert <original subject>")
      author: Optional author for revert commit
      committer: Optional committer for revert commit

    Returns:
      SHA1 of the new revert commit, or None if no_commit=True

    Raises:
      Error: If revert fails due to conflicts or other issues
    """
    from .merge import three_way_merge

    # Normalize commits to a list
    if isinstance(commits, (str, bytes, Commit, Tag)):
        commits = [commits]

    with open_repo_closing(repo) as r:
        # Convert string refs to bytes
        commits_to_revert = []
        for commit_ref in commits:
            if isinstance(commit_ref, str):
                commit_ref = commit_ref.encode("utf-8")
            commit = parse_commit(r, commit_ref)
            commits_to_revert.append(commit)

        # Get current HEAD
        try:
            head_commit_id = r.refs[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        head_commit = r[head_commit_id]
        assert isinstance(head_commit, Commit)
        current_tree = head_commit.tree

        # Process commits in order
        for commit_to_revert in commits_to_revert:
            # For revert, we want to apply the inverse of the commit. This
            # means using the commit's tree as "base" and its parent as "theirs".
            if not commit_to_revert.parents:
                raise Error(
                    f"Cannot revert commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - it has no parents"
                )

            # For simplicity, we only handle commits with one parent (no merge commits)
            if len(commit_to_revert.parents) > 1:
                raise Error(
                    f"Cannot revert merge commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - not yet implemented"
                )

            parent_commit = r[commit_to_revert.parents[0]]
            assert isinstance(parent_commit, Commit)

            # Perform three-way merge:
            # - base: the commit we're reverting (what we want to remove)
            # - ours: current HEAD (what we have now)
            # - theirs: parent of commit being reverted (what we want to go back to)
            assert isinstance(commit_to_revert, Commit)
            head_for_merge = r[head_commit_id]
            assert isinstance(head_for_merge, Commit)
            merged_tree, conflicts = three_way_merge(
                r.object_store,
                commit_to_revert,  # base
                head_for_merge,  # ours
                parent_commit,  # theirs
            )

            if conflicts:
                # Update working tree with conflicts
                changes = tree_changes(r.object_store, current_tree, merged_tree.id)
                update_working_tree(
                    r, current_tree, merged_tree.id, change_iterator=changes
                )
                conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
                raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")

            # Add merged tree to object store
            r.object_store.add_object(merged_tree)

            # Update working tree
            changes = tree_changes(r.object_store, current_tree, merged_tree.id)
            update_working_tree(
                r, current_tree, merged_tree.id, change_iterator=changes
            )
            current_tree = merged_tree.id

            if not no_commit:
                # Create revert commit
                revert_commit = Commit()
                revert_commit.tree = merged_tree.id
                revert_commit.parents = [head_commit_id]

                # Set author/committer
                if author is None:
                    author = get_user_identity(r.get_config_stack())
                if committer is None:
                    committer = author
                revert_commit.author = author
                revert_commit.committer = committer

                # Set timestamps
                timestamp = int(time.time())
                timezone = 0  # UTC
                revert_commit.author_time = timestamp
                revert_commit.author_timezone = timezone
                revert_commit.commit_time = timestamp
                revert_commit.commit_timezone = timezone

                # Set message
                if message is None:
                    # Extract original commit subject
                    original_message = commit_to_revert.message
                    if isinstance(original_message, bytes):
                        original_message = original_message.decode("utf-8", "replace")
                    subject = original_message.split("\n")[0]
                    message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
                elif isinstance(message, str):
                    message = message.encode("utf-8")
                revert_commit.message = message

                # Add commit to object store
                r.object_store.add_object(revert_commit)

                # Update HEAD
                r.refs[b"HEAD"] = revert_commit.id
                head_commit_id = revert_commit.id

        return head_commit_id if not no_commit else None
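

# Illustrative usage sketch for revert(); the commit id is hypothetical:
#
#   new_head = revert("/path/to/repo", "a1b2c3d")
#   # Or stage the inverse changes without committing:
#   revert("/path/to/repo", "a1b2c3d", no_commit=True)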


def gc(
    repo: RepoPath,
    auto: bool = False,
    aggressive: bool = False,
    prune: bool = True,
    grace_period: Optional[int] = 1209600,  # 2 weeks default
    dry_run: bool = False,
    progress: Optional[Callable[[str], None]] = None,
) -> "GCStats":
    """Run garbage collection on a repository.

    Args:
      repo: Path to the repository or a Repo object
      auto: If True, only run gc if needed
      aggressive: If True, use more aggressive settings
      prune: If True, prune unreachable objects
      grace_period: Grace period in seconds for pruning (default 2 weeks)
      dry_run: If True, only report what would be done
      progress: Optional progress callback

    Returns:
      GCStats object with garbage collection statistics
    """
    from .gc import garbage_collect

    with open_repo_closing(repo) as r:
        return garbage_collect(
            r,
            auto=auto,
            aggressive=aggressive,
            prune=prune,
            grace_period=grace_period,
            dry_run=dry_run,
            progress=progress,
        )
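

# Illustrative usage sketch for gc(); a dry run reports what would be pruned
# without deleting anything:
#
#   stats = gc("/path/to/repo", prune=True, grace_period=86400, dry_run=True)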


def prune(
    repo: RepoPath,
    grace_period: Optional[int] = None,
    dry_run: bool = False,
    progress: Optional[Callable[[str], None]] = None,
) -> None:
    """Prune/clean up a repository's object store.

    This removes temporary files that were left behind by interrupted
    pack operations.

    Args:
      repo: Path to the repository or a Repo object
      grace_period: Grace period in seconds for removing temporary files
        (default 2 weeks)
      dry_run: If True, only report what would be done
      progress: Optional progress callback
    """
    with open_repo_closing(repo) as r:
        if progress:
            progress("Pruning temporary files")
        if not dry_run:
            r.object_store.prune(grace_period=grace_period)


def maintenance_run(
    repo: RepoPath,
    tasks: Optional[list[str]] = None,
    auto: bool = False,
    progress: Optional[Callable[[str], None]] = None,
) -> "MaintenanceResult":
    """Run maintenance tasks on a repository.

    Args:
      repo: Path to the repository or a Repo object
      tasks: Optional list of specific task names to run
        (e.g., ['gc', 'commit-graph', 'pack-refs'])
      auto: If True, only run tasks if needed
      progress: Optional progress callback

    Returns:
      MaintenanceResult object with task execution results
    """
    from .maintenance import run_maintenance

    with open_repo_closing(repo) as r:
        return run_maintenance(r, tasks=tasks, auto=auto, progress=progress)
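

# Illustrative usage sketch for maintenance_run(); task names as documented
# in the docstring above:
#
#   result = maintenance_run("/path/to/repo", tasks=["gc", "pack-refs"])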


def maintenance_register(repo: RepoPath) -> None:
    """Register a repository for background maintenance.

    This adds the repository to the global maintenance.repo config and sets
    up recommended configuration for scheduled maintenance.

    Args:
      repo: Path to the repository or repository object
    """
    from .maintenance import register_repository

    with open_repo_closing(repo) as r:
        register_repository(r)


def maintenance_unregister(repo: RepoPath, force: bool = False) -> None:
    """Unregister a repository from background maintenance.

    This removes the repository from the global maintenance.repo config.

    Args:
      repo: Path to the repository or repository object
      force: If True, don't error if the repository is not registered
    """
    from .maintenance import unregister_repository

    with open_repo_closing(repo) as r:
        unregister_repository(r, force=force)


def count_objects(repo: RepoPath = ".", verbose: bool = False) -> CountObjectsResult:
    """Count unpacked objects and their disk usage.

    Args:
      repo: Path to repository or repository object
      verbose: Whether to return verbose information

    Returns:
      CountObjectsResult object with detailed statistics
    """
    from .object_store import DiskObjectStore

    with open_repo_closing(repo) as r:
        object_store = r.object_store
        assert isinstance(object_store, DiskObjectStore)

        # Count loose objects
        loose_count = 0
        loose_size = 0
        for sha in object_store._iter_loose_objects():
            loose_count += 1
            path = object_store._get_shafile_path(sha)
            try:
                stat_info = os.stat(path)
                # Git reports disk usage, not file size. st_blocks is always
                # in 512-byte blocks per the POSIX standard.
                st_blocks = getattr(stat_info, "st_blocks", None)
                if st_blocks is not None:
                    # Available on Linux and macOS
                    loose_size += st_blocks * 512
                else:
                    # Fallback for Windows
                    loose_size += stat_info.st_size
            except FileNotFoundError:
                # Object may have been removed between iteration and stat
                pass

        if not verbose:
            return CountObjectsResult(count=loose_count, size=loose_size)

        # Count pack information
        pack_count = len(object_store.packs)
        in_pack_count = 0
        pack_size = 0
        for pack in object_store.packs:
            in_pack_count += len(pack)
            # Get pack file size
            pack_path = pack._data_path
            try:
                pack_size += os.path.getsize(pack_path)
            except FileNotFoundError:
                pass
            # Get index file size
            idx_path = pack._idx_path
            try:
                pack_size += os.path.getsize(idx_path)
            except FileNotFoundError:
                pass

        return CountObjectsResult(
            count=loose_count,
            size=loose_size,
            in_pack=in_pack_count,
            packs=pack_count,
            size_pack=pack_size,
        )
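

# Illustrative usage sketch for count_objects(); the fields mirror
# `git count-objects -v`:
#
#   res = count_objects("/path/to/repo", verbose=True)
#   print(res.count, res.size, res.in_pack, res.packs, res.size_pack)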


def is_interactive_rebase(repo: Union[Repo, str]) -> bool:
    """Check if an interactive rebase is in progress.

    Args:
      repo: Repository to check

    Returns:
      True if an interactive rebase is in progress, False otherwise
    """
    with open_repo_closing(repo) as r:
        state_manager = r.get_rebase_state_manager()
        if not state_manager.exists():
            return False
        # Check if a todo file exists
        todo = state_manager.load_todo()
        return todo is not None


def rebase(
    repo: Union[Repo, str],
    upstream: Union[bytes, str],
    onto: Optional[Union[bytes, str]] = None,
    branch: Optional[Union[bytes, str]] = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
    interactive: bool = False,
    edit_todo: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
      repo: Repository to rebase in
      upstream: Upstream branch/commit to rebase onto
      onto: Specific commit to rebase onto (defaults to upstream)
      branch: Branch to rebase (defaults to current branch)
      abort: Abort an in-progress rebase
      continue_rebase: Continue an in-progress rebase
      skip: Skip the current commit and continue the rebase
      interactive: Start an interactive rebase
      edit_todo: Edit the todo list of an interactive rebase

    Returns:
      List of new commit SHAs created by the rebase

    Raises:
      Error: If the rebase fails or conflicts occur
    """
    from .cli import launch_editor
    from .rebase import (
        RebaseConflict,
        RebaseError,
        Rebaser,
        process_interactive_rebase,
        start_interactive,
    )
    from .rebase import (
        edit_todo as edit_todo_func,
    )

    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)

        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))

        if edit_todo:
            # Edit the todo list of an interactive rebase
            try:
                edit_todo_func(r, launch_editor)
                print("Todo list updated. Continue with 'rebase --continue'")
                return []
            except RebaseError as e:
                raise Error(str(e))

        if continue_rebase:
            try:
                if interactive:
                    # Continue interactive rebase
                    is_complete, pause_reason = process_interactive_rebase(
                        r, editor_callback=launch_editor
                    )
                    if is_complete:
                        return [c.id for c in rebaser._done]
                    else:
                        if pause_reason == "conflict":
                            raise Error("Conflicts detected. Resolve and continue.")
                        elif pause_reason == "edit":
                            print("Stopped for editing. Make changes and continue.")
                        elif pause_reason == "break":
                            print("Rebase paused at break. Continue when ready.")
                        else:
                            print(f"Rebase paused: {pause_reason}")
                        return []
                else:
                    # Continue regular rebase
                    result = rebaser.continue_()
                    if result is None:
                        # Rebase complete
                        return [c.id for c in rebaser._done]
                    elif isinstance(result, tuple) and result[1]:
                        # Still have conflicts
                        raise Error(
                            f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                        )
            except RebaseError as e:
                raise Error(str(e))

        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None

        try:
            if interactive:
                # Start interactive rebase
                todo = start_interactive(r, upstream, onto, branch, launch_editor)
                # Process the todo list
                is_complete, pause_reason = process_interactive_rebase(
                    r, todo, editor_callback=launch_editor
                )
                if is_complete:
                    return [c.id for c in rebaser._done]
                else:
                    if pause_reason == "conflict":
                        raise Error("Conflicts detected. Resolve and continue.")
                    elif pause_reason == "edit":
                        print("Stopped for editing. Make changes and continue.")
                    elif pause_reason == "break":
                        print("Rebase paused at break. Continue when ready.")
                    else:
                        print(f"Rebase paused: {pause_reason}")
                    return []
            else:
                # Regular rebase
                rebaser.start(upstream, onto, branch)
                # Continue rebase automatically
                result = rebaser.continue_()
                if result is not None:
                    # Conflicts
                    raise RebaseConflict(result[1])
                # Return the SHAs of the rebased commits
                return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))
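

# Illustrative usage sketch for rebase(); branch names are hypothetical:
#
#   try:
#       new_shas = rebase("/path/to/repo", upstream=b"main")
#   except Error:
#       # After fixing conflicts, either continue or abort:
#       rebase("/path/to/repo", upstream=b"main", continue_rebase=True)
#       # rebase("/path/to/repo", upstream=b"main", abort=True)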


def annotate(
    repo: RepoPath,
    path: Union[str, bytes],
    committish: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> list[tuple[tuple[Commit, TreeEntry], bytes]]:
    """Annotate the history of a file.

    Args:
      repo: Path to the repository
      path: Path to annotate
      committish: Commit id to find path in

    Returns:
      List of ((Commit, TreeEntry), line) tuples
    """
    from dulwich.annotate import annotate_lines

    if committish is None:
        committish = "HEAD"
    with open_repo_closing(repo) as r:
        commit_id = parse_commit(r, committish).id
        # Ensure path is bytes
        if isinstance(path, str):
            path = path.encode()
        return annotate_lines(r.object_store, commit_id, path)


blame = annotate
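

# Illustrative usage sketch for annotate()/blame; the file path is hypothetical:
#
#   for (commit, entry), line in annotate("/path/to/repo", "README.md"):
#       print(commit.id.decode()[:7], line.decode("utf-8", "replace"), end="")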


def filter_branch(
    repo: RepoPath = ".",
    branch: Union[str, bytes] = "HEAD",
    *,
    filter_fn: Optional[Callable[[Commit], Optional["CommitData"]]] = None,
    filter_author: Optional[Callable[[bytes], Optional[bytes]]] = None,
    filter_committer: Optional[Callable[[bytes], Optional[bytes]]] = None,
    filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
    tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
    index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
    parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
    commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
    subdirectory_filter: Optional[Union[str, bytes]] = None,
    prune_empty: bool = False,
    tag_name_filter: Optional[Callable[[bytes], Optional[bytes]]] = None,
    force: bool = False,
    keep_original: bool = True,
    refs: Optional[list[bytes]] = None,
) -> dict[bytes, bytes]:
    """Rewrite branch history by creating new commits with filtered properties.

    This is similar to git filter-branch, allowing you to rewrite commit
    history by modifying trees, parents, author, committer, or commit messages.

    Args:
      repo: Path to repository
      branch: Branch to rewrite (defaults to HEAD)
      filter_fn: Optional callable that takes a Commit object and returns
        a dict of updated fields (author, committer, message, etc.)
      filter_author: Optional callable that takes author bytes and returns
        updated author bytes or None to keep unchanged
      filter_committer: Optional callable that takes committer bytes and
        returns updated committer bytes or None to keep unchanged
      filter_message: Optional callable that takes commit message bytes
        and returns updated message bytes
      tree_filter: Optional callable that takes (tree_sha, temp_dir) and
        returns new tree SHA after modifying working directory
      index_filter: Optional callable that takes (tree_sha, temp_index_path)
        and returns new tree SHA after modifying index
      parent_filter: Optional callable that takes parent list and returns
        modified parent list
      commit_filter: Optional callable that takes (Commit, tree_sha) and
        returns new commit SHA or None to skip commit
      subdirectory_filter: Optional subdirectory path to extract as new root
      prune_empty: Whether to prune commits that become empty
      tag_name_filter: Optional callable to rename tags
      force: Force operation even if branch has been filtered before
      keep_original: Keep original refs under refs/original/
      refs: List of refs to rewrite (defaults to [branch])

    Returns:
      Dict mapping old commit SHAs to new commit SHAs

    Raises:
      Error: If branch is already filtered and force is False
    """
    from .filter_branch import CommitFilter, filter_refs

    with open_repo_closing(repo) as r:
        # Parse branch/committish
        if isinstance(branch, str):
            branch = branch.encode()

        # Determine which refs to process
        if refs is None:
            if branch == b"HEAD":
                # Resolve HEAD to the actual branch
                try:
                    resolved = r.refs.follow(b"HEAD")
                    if resolved and resolved[0]:
                        # follow() returns a (refnames, sha) tuple;
                        # take the final refname in the chain
                        resolved_ref = resolved[0][-1]
                        if resolved_ref and resolved_ref != b"HEAD":
                            refs = [resolved_ref]
                        else:
                            # HEAD points directly to a commit
                            refs = [b"HEAD"]
                    else:
                        refs = [b"HEAD"]
                except SymrefLoop:
                    refs = [b"HEAD"]
            else:
                # Convert branch name to full ref if needed
                if not branch.startswith(b"refs/"):
                    branch = local_branch_name(branch)
                refs = [branch]

        # Convert subdirectory filter to bytes if needed
        if subdirectory_filter:
            if isinstance(subdirectory_filter, str):
                subdirectory_filter = subdirectory_filter.encode()
        else:
            subdirectory_filter = None

        # Create commit filter
        filter_obj = CommitFilter(
            r.object_store,
            filter_fn=filter_fn,
            filter_author=filter_author,
            filter_committer=filter_committer,
            filter_message=filter_message,
            tree_filter=tree_filter,
            index_filter=index_filter,
            parent_filter=parent_filter,
            commit_filter=commit_filter,
            subdirectory_filter=subdirectory_filter,
            prune_empty=prune_empty,
            tag_name_filter=tag_name_filter,
        )

        # Tag callback for renaming tags
        def rename_tag(old_ref: bytes, new_ref: bytes) -> None:
            # Copy tag to new name
            r.refs[new_ref] = r.refs[old_ref]
            # Delete old tag
            del r.refs[old_ref]

        # Filter refs
        try:
            return filter_refs(
                r.refs,
                r.object_store,
                refs,
                filter_obj,
                keep_original=keep_original,
                force=force,
                tag_callback=rename_tag if tag_name_filter else None,
            )
        except ValueError as e:
            raise Error(str(e)) from e
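

# Illustrative usage sketch for filter_branch(): rewrite every author on a
# hypothetical branch. Old refs are kept under refs/original/ by default.
#
#   def set_author(author: bytes) -> bytes:
#       return b"Jane Doe <jane@example.com>"
#
#   mapping = filter_branch("/path/to/repo", "main", filter_author=set_author)
#   # mapping is {old_commit_sha: new_commit_sha, ...}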


def format_patch(
    repo: RepoPath = ".",
    committish: Optional[Union[bytes, tuple[bytes, bytes]]] = None,
    outstream: TextIO = sys.stdout,
    outdir: Optional[Union[str, os.PathLike[str]]] = None,
    n: int = 1,
    stdout: bool = False,
    version: Optional[str] = None,
) -> list[str]:
    """Generate patches suitable for git am.

    Args:
      repo: Path to repository
      committish: Commit-ish or commit range to generate patches for.
        Can be a single commit id, or a tuple of (start, end) commit ids
        for a range. If None, formats the last n commits from HEAD.
      outstream: Stream to write to if stdout=True
      outdir: Directory to write patch files to (default: current directory)
      n: Number of patches to generate if committish is None
      stdout: Write patches to stdout instead of files
      version: Version string to include in patches (default: Dulwich version)

    Returns:
      List of patch filenames that were created (empty if stdout=True)
    """
    from io import BytesIO

    if outdir is None:
        outdir = "."

    filenames = []
    with open_repo_closing(repo) as r:
        # Determine which commits to format
        commits_to_format = []
        if committish is None:
            # Get the last n commits from HEAD
            try:
                walker = r.get_walker()
                for entry in walker:
                    commits_to_format.append(entry.commit)
                    if len(commits_to_format) >= n:
                        break
                commits_to_format.reverse()
            except KeyError:
                # No HEAD or empty repository
                pass
        elif isinstance(committish, tuple):
            # Handle commit range (start, end)
            start_commit, end_commit = committish
            # Extract commit IDs from commit objects if needed
            start_id = (
                start_commit.id if isinstance(start_commit, Commit) else start_commit
            )
            end_id = end_commit.id if isinstance(end_commit, Commit) else end_commit
            # Walk from end back to start
            walker = r.get_walker(include=[end_id], exclude=[start_id])
            for entry in walker:
                commits_to_format.append(entry.commit)
            commits_to_format.reverse()
        else:
            # Single commit
            commit = r.object_store[committish]
            assert isinstance(commit, Commit)
            commits_to_format.append(commit)

        # Generate patches
        total = len(commits_to_format)
        for i, commit in enumerate(commits_to_format, 1):
            assert isinstance(commit, Commit)
            # Get the parent
            if commit.parents:
                parent_id = commit.parents[0]
                parent = r.object_store[parent_id]
                assert isinstance(parent, Commit)
            else:
                parent = None

            # Generate the diff
            diff_content = BytesIO()
            if parent:
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    parent.tree,
                    commit.tree,
                )
            else:
                # Initial commit - diff against the empty tree
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    None,
                    commit.tree,
                )

            # Generate patch with commit metadata
            if stdout:
                # Get binary stream from TextIO
                if hasattr(outstream, "buffer"):
                    binary_out: IO[bytes] = outstream.buffer
                else:
                    # Fallback for non-text streams
                    binary_out = outstream  # type: ignore[assignment]
                write_commit_patch(
                    binary_out,
                    commit,
                    diff_content.getvalue(),
                    (i, total),
                    version=version,
                )
            else:
                # Generate filename
                summary = get_summary(commit)
                filename = os.path.join(outdir, f"{i:04d}-{summary}.patch")
                with open(filename, "wb") as f:
                    write_commit_patch(
                        f,
                        commit,
                        diff_content.getvalue(),
                        (i, total),
                        version=version,
                    )
                filenames.append(filename)

    return filenames
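

# Illustrative usage sketch for format_patch(); start_sha and end_sha stand
# for full hex commit ids (hypothetical):
#
#   files = format_patch("/path/to/repo", n=3, outdir="/tmp/patches")
#   files = format_patch("/path/to/repo", committish=(start_sha, end_sha))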


def bisect_start(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    bad: Optional[Union[str, bytes, Commit, Tag]] = None,
    good: Optional[
        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
    ] = None,
    paths: Optional[Sequence[bytes]] = None,
    no_checkout: bool = False,
    term_bad: str = "bad",
    term_good: str = "good",
) -> Optional[bytes]:
    """Start a new bisect session.

    Args:
      repo: Path to repository or a Repo object
      bad: The bad commit (defaults to HEAD)
      good: List of good commits or a single good commit
      paths: Optional paths to limit bisect to
      no_checkout: If True, don't checkout commits during bisect
      term_bad: Term to use for bad commits (default: "bad")
      term_good: Term to use for good commits (default: "good")

    Returns:
      The SHA of the next commit to test, or None if both good and bad
      commits were not yet provided
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Convert single good commit to a sequence
        if good is not None and isinstance(good, (str, bytes, Commit, Tag)):
            good = [good]

        # Parse commits
        bad_sha = parse_commit(r, bad).id if bad else None
        good_shas = [parse_commit(r, g).id for g in good] if good else None

        state.start(bad_sha, good_shas, paths, no_checkout, term_bad, term_good)

        # Return the next commit to test if we have both good and bad
        if bad_sha and good_shas:
            next_sha = state._find_next_commit()
            if next_sha and not no_checkout:
                # Checkout the next commit
                old_commit = r[r.head()]
                assert isinstance(old_commit, Commit)
                old_tree = old_commit.tree if r.head() else None
                r.refs[b"HEAD"] = next_sha
                commit = r[next_sha]
                assert isinstance(commit, Commit)
                changes = tree_changes(r.object_store, old_tree, commit.tree)
                update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
            return next_sha
        return None


def bisect_bad(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    rev: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> Optional[bytes]:
    """Mark a commit as bad.

    Args:
      repo: Path to repository or a Repo object
      rev: Commit to mark as bad (defaults to HEAD)

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_bad(rev_sha)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[b"HEAD"] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_good(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    rev: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> Optional[bytes]:
    """Mark a commit as good.

    Args:
      repo: Path to repository or a Repo object
      rev: Commit to mark as good (defaults to HEAD)

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_good(rev_sha)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[b"HEAD"] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_skip(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    revs: Optional[
        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
    ] = None,
) -> Optional[bytes]:
    """Skip one or more commits.

    Args:
      repo: Path to repository or a Repo object
      revs: List of commits to skip (defaults to [HEAD])

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        if revs is None:
            rev_shas = None
        else:
            # Convert single rev to a sequence
            if isinstance(revs, (str, bytes, Commit, Tag)):
                revs = [revs]
            rev_shas = [parse_commit(r, rev).id for rev in revs]
        next_sha = state.skip(rev_shas)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[b"HEAD"] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_reset(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    commit: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> None:
    """Reset bisect state and return to the original branch/commit.

    Args:
      repo: Path to repository or a Repo object
      commit: Optional commit to reset to (defaults to original branch/commit)
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Get old tree before reset
        try:
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree
        except KeyError:
            old_tree = None

        commit_sha = parse_commit(r, commit).id if commit else None
        state.reset(commit_sha)

        # Update working tree to new HEAD
        try:
            new_head = r.head()
            if new_head:
                new_commit = r[new_head]
                assert isinstance(new_commit, Commit)
                changes = tree_changes(r.object_store, old_tree, new_commit.tree)
                update_working_tree(
                    r, old_tree, new_commit.tree, change_iterator=changes
                )
        except KeyError:
            # No HEAD after reset
            pass


def bisect_log(repo: Union[str, os.PathLike[str], Repo] = ".") -> str:
    """Get the bisect log.

    Args:
      repo: Path to repository or a Repo object

    Returns:
      The bisect log as a string
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        return state.get_log()


def bisect_replay(
    repo: Union[str, os.PathLike[str], Repo],
    log_file: Union[str, os.PathLike[str], BinaryIO],
) -> None:
    """Replay a bisect log.

    Args:
      repo: Path to repository or a Repo object
      log_file: Path to the log file or a file-like object
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        if isinstance(log_file, (str, os.PathLike)):
            with open(log_file) as f:
                log_content = f.read()
        else:
            content = log_file.read()
            log_content = content.decode() if isinstance(content, bytes) else content

        state.replay(log_content)
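

# Illustrative bisect session sketch using the helpers above; "v1.0" and
# test_passes() are hypothetical:
#
#   next_sha = bisect_start("/path/to/repo", bad="HEAD", good="v1.0")
#   while next_sha:
#       if test_passes():
#           next_sha = bisect_good("/path/to/repo")
#       else:
#           next_sha = bisect_bad("/path/to/repo")
#   print(bisect_log("/path/to/repo"))
#   bisect_reset("/path/to/repo")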


def reflog(
    repo: RepoPath = ".", ref: Union[str, bytes] = b"HEAD", all: bool = False
) -> Iterator[Union[Any, tuple[bytes, Any]]]:
    """Show reflog entries for a reference or all references.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name (defaults to HEAD)
      all: If True, show reflogs for all refs (ignores the ref parameter)

    Yields:
      If all=False: ReflogEntry objects
      If all=True: Tuples of (ref_name, ReflogEntry) for all refs with reflogs
    """
    from .reflog import iter_reflogs

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    with open_repo_closing(repo) as r:
        if not all:
            yield from r.read_reflog(ref)
        else:
            logs_dir = os.path.join(r.controldir(), "logs")
            # Use iter_reflogs to discover all reflogs
            for ref_bytes in iter_reflogs(logs_dir):
                # Read the reflog entries for this ref
                for entry in r.read_reflog(ref_bytes):
                    yield (ref_bytes, entry)
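

# Illustrative usage sketch for reflog():
#
#   for entry in reflog("/path/to/repo", ref=b"HEAD"):
#       print(entry)
#   for ref_name, entry in reflog("/path/to/repo", all=True):
#       print(ref_name, entry)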


def reflog_expire(
    repo: RepoPath = ".",
    ref: Optional[Union[str, bytes]] = None,
    all: bool = False,
    expire_time: Optional[int] = None,
    expire_unreachable_time: Optional[int] = None,
    dry_run: bool = False,
) -> dict[bytes, int]:
    """Expire reflog entries based on age and reachability.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name (if not using --all)
      all: If True, expire reflogs for all refs
      expire_time: Expire entries older than this timestamp (seconds since epoch)
      expire_unreachable_time: Expire unreachable entries older than this timestamp
      dry_run: If True, show what would be expired without making changes

    Returns:
      Dictionary mapping ref names to number of expired entries
    """
    import os
    import time

    from .reflog import expire_reflog, iter_reflogs

    if not all and ref is None:
        raise ValueError("Must specify either ref or all=True")

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    # Default expire times if not specified
    if expire_time is None and expire_unreachable_time is None:
        # Default: expire entries older than 90 days, unreachable older than 30 days
        now = int(time.time())
        expire_time = now - (90 * 24 * 60 * 60)
        expire_unreachable_time = now - (30 * 24 * 60 * 60)

    result = {}

    with open_repo_closing(repo) as r:
        # Determine which refs to process
        refs_to_process: list[bytes] = []
        if all:
            logs_dir = os.path.join(r.controldir(), "logs")
            refs_to_process = list(iter_reflogs(logs_dir))
        else:
            assert ref is not None  # Already checked above
            refs_to_process = [ref]

        # Build set of reachable objects if we have unreachable expiration time
        reachable_objects: Optional[set[bytes]] = None
        if expire_unreachable_time is not None:
            from .gc import find_reachable_objects

            reachable_objects = find_reachable_objects(
                r.object_store, r.refs, include_reflogs=False
            )

        # Process each ref
        for ref_name in refs_to_process:
            reflog_path = r._reflog_path(ref_name)
            if not os.path.exists(reflog_path):
                continue

            # Create reachability checker
            def is_reachable(sha: bytes) -> bool:
                if reachable_objects is None:
                    # No unreachable expiration, so assume everything is reachable
                    return True
                return sha in reachable_objects

            # Open the reflog file
            if dry_run:
                # For dry run, just read and count what would be expired
                with open(reflog_path, "rb") as f:
                    from .reflog import read_reflog

                    count = 0
                    for entry in read_reflog(f):
                        is_obj_reachable = is_reachable(entry.new_sha)
                        should_expire = False
                        if is_obj_reachable and expire_time is not None:
                            if entry.timestamp < expire_time:
                                should_expire = True
                        elif (
                            not is_obj_reachable and expire_unreachable_time is not None
                        ):
                            if entry.timestamp < expire_unreachable_time:
                                should_expire = True
                        if should_expire:
                            count += 1
                    result[ref_name] = count
            else:
                # Actually expire entries
                with open(reflog_path, "r+b") as f:  # type: ignore[assignment]
                    count = expire_reflog(
                        f,
                        expire_time=expire_time,
                        expire_unreachable_time=expire_unreachable_time,
                        reachable_checker=is_reachable,
                    )
                    result[ref_name] = count

    return result
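

# Usage sketch (illustrative comment only): preview, then expire, HEAD reflog
# entries older than 30 days:
#
#     import time
#     cutoff = int(time.time()) - 30 * 24 * 60 * 60
#     preview = reflog_expire(".", ref=b"HEAD", expire_time=cutoff, dry_run=True)
#     reflog_expire(".", ref=b"HEAD", expire_time=cutoff)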


def reflog_delete(
    repo: RepoPath = ".",
    ref: Union[str, bytes] = b"HEAD",
    index: int = 0,
    rewrite: bool = False,
) -> None:
    """Delete a specific reflog entry.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name
      index: Reflog entry index (0 = newest, in Git reflog order)
      rewrite: If True, rewrite old_sha of subsequent entries to maintain consistency
    """
    import os

    from .reflog import drop_reflog_entry

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    with open_repo_closing(repo) as r:
        reflog_path = r._reflog_path(ref)
        if not os.path.exists(reflog_path):
            raise ValueError(f"No reflog for ref {ref.decode()}")

        with open(reflog_path, "r+b") as f:
            drop_reflog_entry(f, index, rewrite=rewrite)
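

# Usage sketch (illustrative comment only): drop the newest HEAD reflog entry
# while keeping the old_sha/new_sha chain of the remaining entries consistent:
#
#     reflog_delete(".", ref=b"HEAD", index=0, rewrite=True)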


def lfs_track(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    patterns: Optional[Sequence[str]] = None,
) -> list[str]:
    """Track file patterns with Git LFS.

    Args:
      repo: Path to repository
      patterns: List of file patterns to track (e.g., ["*.bin", "*.pdf"]).
        If None, returns current tracked patterns.

    Returns:
      List of tracked patterns
    """
    from .attrs import GitAttributes

    with open_repo_closing(repo) as r:
        gitattributes_path = os.path.join(r.path, ".gitattributes")

        # Load existing GitAttributes
        if os.path.exists(gitattributes_path):
            gitattributes = GitAttributes.from_file(gitattributes_path)
        else:
            gitattributes = GitAttributes()

        if patterns is None:
            # Return current LFS tracked patterns
            tracked = []
            for pattern_obj, attrs in gitattributes:
                if attrs.get(b"filter") == b"lfs":
                    tracked.append(pattern_obj.pattern.decode())
            return tracked

        # Add new patterns
        for pattern in patterns:
            # Ensure pattern is bytes
            pattern_bytes = pattern.encode() if isinstance(pattern, str) else pattern

            # Set LFS attributes for the pattern
            gitattributes.set_attribute(pattern_bytes, b"filter", b"lfs")
            gitattributes.set_attribute(pattern_bytes, b"diff", b"lfs")
            gitattributes.set_attribute(pattern_bytes, b"merge", b"lfs")
            gitattributes.set_attribute(pattern_bytes, b"text", False)

        # Write updated attributes
        gitattributes.write_to_file(gitattributes_path)

        # Stage the .gitattributes file
        add(r, [".gitattributes"])

        return lfs_track(r)  # Return updated list
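

# Usage sketch (illustrative comment only): route binary assets through LFS
# and list the patterns that are already tracked:
#
#     lfs_track(".", ["*.bin", "*.psd"])
#     print(lfs_track("."))   # patterns currently filtered through LFS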


def lfs_untrack(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    patterns: Optional[Sequence[str]] = None,
) -> list[str]:
    """Untrack file patterns from Git LFS.

    Args:
      repo: Path to repository
      patterns: List of file patterns to untrack

    Returns:
      List of remaining tracked patterns
    """
    from .attrs import GitAttributes

    if not patterns:
        return lfs_track(repo)

    with open_repo_closing(repo) as r:
        gitattributes_path = os.path.join(r.path, ".gitattributes")

        if not os.path.exists(gitattributes_path):
            return []

        # Load existing GitAttributes
        gitattributes = GitAttributes.from_file(gitattributes_path)

        # Remove specified patterns
        for pattern in patterns:
            pattern_bytes = pattern.encode() if isinstance(pattern, str) else pattern

            # Check if pattern is tracked by LFS
            for pattern_obj, attrs in list(gitattributes):
                if (
                    pattern_obj.pattern == pattern_bytes
                    and attrs.get(b"filter") == b"lfs"
                ):
                    gitattributes.remove_pattern(pattern_bytes)
                    break

        # Write updated attributes
        gitattributes.write_to_file(gitattributes_path)

        # Stage the .gitattributes file
        add(r, [".gitattributes"])

        return lfs_track(r)  # Return updated list


def lfs_init(repo: Union[str, os.PathLike[str], Repo] = ".") -> None:
    """Initialize Git LFS in a repository.

    Args:
      repo: Path to repository

    Returns:
      None
    """
    from .lfs import LFSStore

    with open_repo_closing(repo) as r:
        # Create LFS store
        LFSStore.from_repo(r, create=True)

        # Set up Git config for LFS
        config = r.get_config()
        config.set((b"filter", b"lfs"), b"process", b"git-lfs filter-process")
        config.set((b"filter", b"lfs"), b"required", b"true")
        config.set((b"filter", b"lfs"), b"clean", b"git-lfs clean -- %f")
        config.set((b"filter", b"lfs"), b"smudge", b"git-lfs smudge -- %f")
        config.write_to_path()
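

# Usage sketch (illustrative comment only): lfs_init plays roughly the role of
# "git lfs install" scoped to a single repository:
#
#     lfs_init(".")
#     lfs_track(".", ["*.iso"])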


def lfs_clean(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
) -> bytes:
    """Clean a file by converting it to an LFS pointer.

    Args:
      repo: Path to repository
      path: Path to file to clean (relative to repo root)

    Returns:
      LFS pointer content as bytes
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        if path is None:
            raise ValueError("Path must be specified")

        # Get LFS store
        lfs_store = LFSStore.from_repo(r)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Read file content
        full_path = os.path.join(r.path, path)
        with open(full_path, "rb") as f:
            content = f.read()

        # Clean the content (convert to LFS pointer)
        return filter_driver.clean(content)


def lfs_smudge(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    pointer_content: Optional[bytes] = None,
) -> bytes:
    """Smudge an LFS pointer by retrieving the actual content.

    Args:
      repo: Path to repository
      pointer_content: LFS pointer content as bytes

    Returns:
      Actual file content as bytes
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        if pointer_content is None:
            raise ValueError("Pointer content must be specified")

        # Get LFS store
        lfs_store = LFSStore.from_repo(r)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Smudge the pointer (retrieve actual content)
        return filter_driver.smudge(pointer_content)
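

# Usage sketch (illustrative comment only; the path is hypothetical and this
# assumes the clean step leaves the content available in the local LFS store,
# so the two calls round-trip):
#
#     pointer = lfs_clean(".", "assets/model.bin")
#     original = lfs_smudge(".", pointer_content=pointer)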


def lfs_ls_files(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    ref: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, str, int]]:
    """List files tracked by Git LFS.

    Args:
      repo: Path to repository
      ref: Git ref to check (defaults to HEAD)

    Returns:
      List of (path, oid, size) tuples for LFS files
    """
    from .lfs import LFSPointer
    from .object_store import iter_tree_contents

    with open_repo_closing(repo) as r:
        if ref is None:
            ref = b"HEAD"
        elif isinstance(ref, str):
            ref = ref.encode()

        # Get the commit and tree
        try:
            commit = r[ref]
            assert isinstance(commit, Commit)
            tree = r[commit.tree]
            assert isinstance(tree, Tree)
        except KeyError:
            return []

        lfs_files = []

        # Walk the tree
        for path, mode, sha in iter_tree_contents(r.object_store, tree.id):
            assert path is not None
            assert mode is not None
            assert sha is not None
            if not stat.S_ISREG(mode):
                continue

            # Check if it's an LFS pointer
            obj = r.object_store[sha]
            if not isinstance(obj, Blob):
                raise AssertionError(f"Expected Blob object, got {type(obj).__name__}")
            pointer = LFSPointer.from_bytes(obj.data)
            if pointer is not None:
                lfs_files.append((path, pointer.oid, pointer.size))

        return lfs_files
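

# Usage sketch (illustrative comment only; paths come back as bytes):
#
#     for path, oid, size in lfs_ls_files(".", ref="HEAD"):
#         print(path.decode(), oid, size)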


def lfs_migrate(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    include: Optional[list[str]] = None,
    exclude: Optional[list[str]] = None,
    everything: bool = False,
) -> int:
    """Migrate files to Git LFS.

    Args:
      repo: Path to repository
      include: Patterns of files to include
      exclude: Patterns of files to exclude
      everything: Migrate all files above a certain size

    Returns:
      Number of migrated files
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        # Initialize LFS if needed
        lfs_store = LFSStore.from_repo(r, create=True)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Get current index
        index = r.open_index()

        migrated = 0

        # Determine files to migrate
        files_to_migrate = []

        if everything:
            # Migrate all files above 100MB
            for path, entry in index.items():
                full_path = os.path.join(r.path, path.decode())
                if os.path.exists(full_path):
                    size = os.path.getsize(full_path)
                    if size > 100 * 1024 * 1024:  # 100MB
                        files_to_migrate.append(path.decode())
        else:
            # Use include/exclude patterns
            for path, entry in index.items():
                path_str = path.decode()

                # Check include patterns
                if include:
                    matched = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in include
                    )
                    if not matched:
                        continue

                # Check exclude patterns
                if exclude:
                    excluded = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in exclude
                    )
                    if excluded:
                        continue

                files_to_migrate.append(path_str)

        # Migrate files
        for path_str in files_to_migrate:
            full_path = os.path.join(r.path, path_str)
            if not os.path.exists(full_path):
                continue

            # Read file content
            with open(full_path, "rb") as f:
                content = f.read()

            # Convert to LFS pointer
            pointer_content = filter_driver.clean(content)

            # Write pointer back to file
            with open(full_path, "wb") as f:
                f.write(pointer_content)

            # Create blob for pointer content and update index
            blob = Blob()
            blob.data = pointer_content
            r.object_store.add_object(blob)

            st = os.stat(full_path)
            index_entry = index_entry_from_stat(st, blob.id, 0)
            path_bytes = path_str.encode() if isinstance(path_str, str) else path_str
            index[path_bytes] = index_entry
            migrated += 1

        # Write updated index
        index.write()

        # Track patterns if include was specified
        if include:
            lfs_track(r, include)

        return migrated
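

# Usage sketch (illustrative comment only): rewrite matching files in the
# index and working tree as LFS pointers; as the code above shows, committed
# history keeps its original blobs:
#
#     n = lfs_migrate(".", include=["*.iso"], exclude=["fixtures/*"])
#     print(f"migrated {n} file(s)")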


def lfs_pointer_check(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    paths: Optional[Sequence[str]] = None,
) -> dict[str, Optional[Any]]:
    """Check if files are valid LFS pointers.

    Args:
      repo: Path to repository
      paths: List of file paths to check (if None, check all files)

    Returns:
      Dict mapping paths to LFSPointer objects (or None if not a pointer)
    """
    from .lfs import LFSPointer

    with open_repo_closing(repo) as r:
        results = {}

        if paths is None:
            # Check all files in index
            index = r.open_index()
            paths = [path.decode() for path in index]

        for path in paths:
            full_path = os.path.join(r.path, path)
            if os.path.exists(full_path):
                try:
                    with open(full_path, "rb") as f:
                        content = f.read()
                    pointer = LFSPointer.from_bytes(content)
                    results[path] = pointer
                except OSError:
                    results[path] = None
            else:
                results[path] = None

        return results
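

# Usage sketch (illustrative comment only):
#
#     for path, pointer in lfs_pointer_check(".").items():
#         print(path, "pointer" if pointer is not None else "not a pointer")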


def lfs_fetch(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    remote: str = "origin",
    refs: Optional[list[Union[str, bytes]]] = None,
) -> int:
    """Fetch LFS objects from remote.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)
      refs: Specific refs to fetch LFS objects for (default: all refs)

    Returns:
      Number of objects fetched
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config
        config = r.get_config()
        lfs_url_bytes = config.get((b"lfs",), b"url")
        if not lfs_url_bytes:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL
                remote_url_str = remote_url.decode()
                if remote_url_str.endswith(".git"):
                    remote_url_str = remote_url_str[:-4]
                lfs_url = f"{remote_url_str}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url_bytes.decode()

        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods

        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)

        # Find all LFS pointers in the refs
        pointers_to_fetch = []

        if refs is None:
            # Get all refs
            refs = list(r.refs.keys())

        for ref in refs:
            if isinstance(ref, str):
                ref = ref.encode()
            try:
                commit = r[r.refs[ref]]
            except KeyError:
                continue

            # Walk the commit tree
            assert isinstance(commit, Commit)
            for path, mode, sha in r.object_store.iter_tree_contents(commit.tree):
                assert sha is not None
                try:
                    obj = r.object_store[sha]
                except KeyError:
                    pass
                else:
                    if isinstance(obj, Blob):
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            # Check if we already have it
                            try:
                                with store.open_object(pointer.oid):
                                    pass  # Object exists, no need to fetch
                            except KeyError:
                                pointers_to_fetch.append((pointer.oid, pointer.size))

        # Fetch missing objects
        fetched = 0
        for oid, size in pointers_to_fetch:
            content = client.download(oid, size)
            store.write_object([content])
            fetched += 1

        return fetched


def lfs_pull(
    repo: Union[str, os.PathLike[str], Repo] = ".", remote: str = "origin"
) -> int:
    """Pull LFS objects for current checkout.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)

    Returns:
      Number of objects fetched
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # First do a fetch for HEAD
        fetched = lfs_fetch(repo, remote, [b"HEAD"])

        # Then checkout LFS files in working directory
        store = LFSStore.from_repo(r)
        index = r.open_index()

        for path, entry in index.items():
            full_path = os.path.join(r.path, path.decode())
            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()

                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    try:
                        # Replace pointer with actual content
                        with store.open_object(pointer.oid) as lfs_file:
                            lfs_content = lfs_file.read()
                        with open(full_path, "wb") as f:
                            f.write(lfs_content)
                    except KeyError:
                        # Object not available
                        pass

        return fetched
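

# Usage sketch (illustrative comment only): as the code above shows, both
# calls resolve the LFS endpoint from lfs.url, falling back to the remote URL
# plus "/info/lfs":
#
#     lfs_fetch(".", remote="origin")   # populate the local LFS store
#     lfs_pull(".", remote="origin")    # also rewrite pointers in the worktree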


def lfs_push(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    remote: str = "origin",
    refs: Optional[list[Union[str, bytes]]] = None,
) -> int:
    """Push LFS objects to remote.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)
      refs: Specific refs to push LFS objects for (default: current branch)

    Returns:
      Number of objects pushed
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config
        config = r.get_config()
        lfs_url_bytes = config.get((b"lfs",), b"url")
        if not lfs_url_bytes:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL
                remote_url_str = remote_url.decode()
                if remote_url_str.endswith(".git"):
                    remote_url_str = remote_url_str[:-4]
                lfs_url = f"{remote_url_str}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url_bytes.decode()

        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods

        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)

        # Find all LFS objects to push
        if refs is None:
            # Push current branch
            head_ref = r.refs.read_ref(b"HEAD")
            refs = [head_ref] if head_ref else []

        objects_to_push = set()

        for ref in refs:
            if isinstance(ref, str):
                ref = ref.encode()
            try:
                if ref.startswith(b"refs/"):
                    commit = r[r.refs[ref]]
                else:
                    commit = r[ref]
            except KeyError:
                continue

            # Walk the commit tree
            assert isinstance(commit, Commit)
            for path, mode, sha in r.object_store.iter_tree_contents(commit.tree):
                assert sha is not None
                try:
                    obj = r.object_store[sha]
                except KeyError:
                    pass
                else:
                    if isinstance(obj, Blob):
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            objects_to_push.add((pointer.oid, pointer.size))

        # Push objects
        pushed = 0
        for oid, size in objects_to_push:
            try:
                with store.open_object(oid) as f:
                    content = f.read()
            except KeyError:
                # Object not in local store; skip it
                logging.warning("LFS object %s not found locally", oid)
            else:
                client.upload(oid, size, content)
                pushed += 1

        return pushed


def lfs_status(repo: Union[str, os.PathLike[str], Repo] = ".") -> dict[str, list[str]]:
    """Show status of LFS files.

    Args:
      repo: Path to repository

    Returns:
      Dict with status information
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        store = LFSStore.from_repo(r)
        index = r.open_index()

        status: dict[str, list[str]] = {
            "tracked": [],
            "not_staged": [],
            "not_committed": [],
            "not_pushed": [],
            "missing": [],
        }

        # Check working directory files
        for path, entry in index.items():
            path_str = path.decode()
            full_path = os.path.join(r.path, path_str)

            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()

                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    status["tracked"].append(path_str)

                    # Check if object exists locally
                    try:
                        with store.open_object(pointer.oid):
                            pass  # Object exists locally
                    except KeyError:
                        status["missing"].append(path_str)

                    # Check if file has been modified
                    if isinstance(entry, ConflictedIndexEntry):
                        continue  # Skip conflicted entries
                    try:
                        staged_obj = r.object_store[entry.sha]
                    except KeyError:
                        pass
                    else:
                        if not isinstance(staged_obj, Blob):
                            raise AssertionError(
                                f"Expected Blob object, got {type(staged_obj).__name__}"
                            )
                        staged_pointer = LFSPointer.from_bytes(staged_obj.data)
                        if staged_pointer and staged_pointer.oid != pointer.oid:
                            status["not_staged"].append(path_str)

        # TODO: Check for not committed and not pushed files

        return status
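

# Usage sketch (illustrative comment only):
#
#     for bucket, paths in lfs_status(".").items():
#         print(f"{bucket}: {len(paths)} file(s)")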


def worktree_list(repo: RepoPath = ".") -> list[Any]:
    """List all worktrees for a repository.

    Args:
      repo: Path to repository

    Returns:
      List of WorkTreeInfo objects
    """
    from .worktree import list_worktrees

    with open_repo_closing(repo) as r:
        return list_worktrees(r)


def worktree_add(
    repo: RepoPath = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
    branch: Optional[Union[str, bytes]] = None,
    commit: Optional[Union[str, bytes]] = None,
    detach: bool = False,
    force: bool = False,
) -> str:
    """Add a new worktree.

    Args:
      repo: Path to repository
      path: Path for new worktree
      branch: Branch to checkout (creates if doesn't exist)
      commit: Specific commit to checkout
      detach: Create with detached HEAD
      force: Force creation even if branch is already checked out

    Returns:
      Path to the newly created worktree
    """
    from .worktree import add_worktree

    if path is None:
        raise ValueError("Path is required for worktree add")

    with open_repo_closing(repo) as r:
        commit_bytes = commit.encode() if isinstance(commit, str) else commit
        wt_repo = add_worktree(
            r, path, branch=branch, commit=commit_bytes, detach=detach, force=force
        )
        return wt_repo.path
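

# Usage sketch (illustrative comment only; "../feature-wt" and the branch name
# are hypothetical; printing the WorkTreeInfo object is the safe default):
#
#     wt_path = worktree_add(".", "../feature-wt", branch=b"feature")
#     for info in worktree_list("."):
#         print(info)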


def worktree_remove(
    repo: RepoPath = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
    force: bool = False,
) -> None:
    """Remove a worktree.

    Args:
      repo: Path to repository
      path: Path to worktree to remove
      force: Force removal even if there are local changes
    """
    from .worktree import remove_worktree

    if path is None:
        raise ValueError("Path is required for worktree remove")

    with open_repo_closing(repo) as r:
        remove_worktree(r, path, force=force)


def worktree_prune(
    repo: RepoPath = ".", dry_run: bool = False, expire: Optional[int] = None
) -> list[str]:
    """Prune worktree administrative files.

    Args:
      repo: Path to repository
      dry_run: Only show what would be removed
      expire: Only prune worktrees older than this many seconds

    Returns:
      List of pruned worktree names
    """
    from .worktree import prune_worktrees

    with open_repo_closing(repo) as r:
        return prune_worktrees(r, expire=expire, dry_run=dry_run)


def worktree_lock(
    repo: RepoPath = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
    reason: Optional[str] = None,
) -> None:
    """Lock a worktree to prevent it from being pruned.

    Args:
      repo: Path to repository
      path: Path to worktree to lock
      reason: Optional reason for locking
    """
    from .worktree import lock_worktree

    if path is None:
        raise ValueError("Path is required for worktree lock")

    with open_repo_closing(repo) as r:
        lock_worktree(r, path, reason=reason)


def worktree_unlock(
    repo: RepoPath = ".", path: Optional[Union[str, os.PathLike[str]]] = None
) -> None:
    """Unlock a worktree.

    Args:
      repo: Path to repository
      path: Path to worktree to unlock
    """
    from .worktree import unlock_worktree

    if path is None:
        raise ValueError("Path is required for worktree unlock")

    with open_repo_closing(repo) as r:
        unlock_worktree(r, path)


def worktree_move(
    repo: RepoPath = ".",
    old_path: Optional[Union[str, os.PathLike[str]]] = None,
    new_path: Optional[Union[str, os.PathLike[str]]] = None,
) -> None:
    """Move a worktree to a new location.

    Args:
      repo: Path to repository
      old_path: Current path of worktree
      new_path: New path for worktree
    """
    from .worktree import move_worktree

    if old_path is None or new_path is None:
        raise ValueError("Both old_path and new_path are required for worktree move")

    with open_repo_closing(repo) as r:
        move_worktree(r, old_path, new_path)


def worktree_repair(
    repo: RepoPath = ".",
    paths: Optional[list[Union[str, os.PathLike[str]]]] = None,
) -> list[str]:
    """Repair worktree administrative files.

    Args:
      repo: Path to repository
      paths: Optional list of worktree paths to repair. If None, repairs
        connections from the main repository to all linked worktrees.

    Returns:
      List of repaired worktree paths
    """
    from .worktree import repair_worktree

    with open_repo_closing(repo) as r:
        return repair_worktree(r, paths=paths)


def merge_base(
    repo: RepoPath = ".",
    committishes: Optional[Sequence[Union[str, bytes]]] = None,
    all: bool = False,
    octopus: bool = False,
) -> list[bytes]:
    """Find the best common ancestor(s) between commits.

    Args:
      repo: Path to repository
      committishes: List of commit references (branches, tags, commit IDs)
      all: If True, return all merge bases, not just one
      octopus: If True, find merge base of all commits (n-way merge)

    Returns:
      List of commit IDs that are merge bases
    """
    from .graph import find_merge_base, find_octopus_base
    from .objects import Commit
    from .objectspec import parse_object

    if committishes is None or len(committishes) < 2:
        raise ValueError("At least two commits are required")

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Find merge base
        if octopus:
            result = find_octopus_base(r, commit_ids)
        else:
            result = find_merge_base(r, commit_ids)

        # Return first result only if all=False
        if not all and result:
            return [result[0]]
        return result
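

# Usage sketch (illustrative comment only; branch names are hypothetical):
#
#     base = merge_base(".", ["master", "feature"])            # best single base
#     bases = merge_base(".", ["master", "feature"], all=True) # all merge bases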


def is_ancestor(
    repo: RepoPath = ".",
    ancestor: Optional[Union[str, bytes]] = None,
    descendant: Optional[Union[str, bytes]] = None,
) -> bool:
    """Check if one commit is an ancestor of another.

    Args:
      repo: Path to repository
      ancestor: Commit that might be the ancestor
      descendant: Commit that might be the descendant

    Returns:
      True if ancestor is an ancestor of descendant, False otherwise
    """
    from .graph import find_merge_base
    from .objects import Commit
    from .objectspec import parse_object

    if ancestor is None or descendant is None:
        raise ValueError("Both ancestor and descendant are required")

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        ancestor_obj = parse_object(r, ancestor)
        if not isinstance(ancestor_obj, Commit):
            raise ValueError(f"Expected commit, got {ancestor_obj.type_name.decode()}")

        descendant_obj = parse_object(r, descendant)
        if not isinstance(descendant_obj, Commit):
            raise ValueError(
                f"Expected commit, got {descendant_obj.type_name.decode()}"
            )

        # If ancestor is the merge base of (ancestor, descendant), then it's an ancestor
        merge_bases = find_merge_base(r, [ancestor_obj.id, descendant_obj.id])
        return merge_bases == [ancestor_obj.id]
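

# Usage sketch (illustrative comment only; the tag name is hypothetical):
#
#     if is_ancestor(".", ancestor="v1.0", descendant="HEAD"):
#         print("HEAD contains v1.0")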


def independent_commits(
    repo: RepoPath = ".",
    committishes: Optional[Sequence[Union[str, bytes]]] = None,
) -> list[bytes]:
    """Filter commits to only those that are not reachable from others.

    Args:
      repo: Path to repository
      committishes: List of commit references to filter

    Returns:
      List of commit IDs that are not ancestors of any other commits in the list
    """
    from .graph import independent
    from .objects import Commit
    from .objectspec import parse_object

    if committishes is None or len(committishes) == 0:
        return []

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Filter to independent commits
        return independent(r, commit_ids)
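

# Usage sketch (illustrative comment only; branch names are hypothetical):
# keep only the tips that no other listed commit can reach:
#
#     tips = independent_commits(".", ["master", "feature", "hotfix"])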


def mailsplit(
    input_path: Optional[Union[str, os.PathLike[str], IO[bytes]]] = None,
    output_dir: Union[str, os.PathLike[str]] = ".",
    start_number: int = 1,
    precision: int = 4,
    keep_cr: bool = False,
    mboxrd: bool = False,
    is_maildir: bool = False,
) -> list[str]:
    r"""Split an mbox file or Maildir into individual message files.

    This is similar to git mailsplit.

    Args:
      input_path: Path to mbox file, Maildir, or file-like object. If None, reads from stdin.
      output_dir: Directory where individual messages will be written
      start_number: Starting number for output files (default: 1)
      precision: Number of digits for output filenames (default: 4)
      keep_cr: If True, preserve \r in lines ending with \r\n (default: False)
      mboxrd: If True, treat input as mboxrd format and reverse escaping (default: False)
      is_maildir: If True, treat input_path as a Maildir (default: False)

    Returns:
      List of output file paths that were created

    Raises:
      ValueError: If output_dir doesn't exist or input is invalid
      OSError: If there are issues reading/writing files
    """
    from .mbox import split_maildir, split_mbox

    if is_maildir:
        if input_path is None:
            raise ValueError("input_path is required for Maildir splitting")
        if not isinstance(input_path, (str, bytes, os.PathLike)):
            raise ValueError("Maildir splitting requires a path, not a file object")
        # Convert PathLike to str for split_maildir
        maildir_path: Union[str, bytes] = (
            os.fspath(input_path) if isinstance(input_path, os.PathLike) else input_path
        )
        out_dir: Union[str, bytes] = (
            os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
        )
        return split_maildir(
            maildir_path,
            out_dir,
            start_number=start_number,
            precision=precision,
            keep_cr=keep_cr,
        )
    else:
        from typing import BinaryIO, cast

        if input_path is None:
            # Read from stdin
            input_file: Union[str, bytes, BinaryIO] = sys.stdin.buffer
        else:
            # Convert PathLike to str if needed
            if isinstance(input_path, os.PathLike):
                input_file = os.fspath(input_path)
            else:
                # input_path is either str or IO[bytes] here
                input_file = cast(Union[str, BinaryIO], input_path)
        out_dir = (
            os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
        )
        return split_mbox(
            input_file,
            out_dir,
            start_number=start_number,
            precision=precision,
            keep_cr=keep_cr,
            mboxrd=mboxrd,
        )
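

# Usage sketch (illustrative comment only; "patches.mbox" and "out" are
# hypothetical, and the exact returned filenames depend on start_number and
# precision):
#
#     files = mailsplit("patches.mbox", output_dir="out", precision=4)
#     # e.g. ["out/0001", "out/0002", ...]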