# porcelain.py -- Porcelain-like layer on top of Dulwich
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * bisect{_start,_bad,_good,_skip,_reset,_log,_replay}
  26. * branch{_create,_delete,_list}
  27. * check_ignore
  28. * checkout
  29. * checkout_branch
  30. * clone
  31. * cone mode{_init, _set, _add}
  32. * commit
  33. * commit_tree
  34. * daemon
  35. * describe
  36. * diff_tree
  37. * fetch
  38. * filter_branch
  39. * for_each_ref
  40. * grep
  41. * init
  42. * interpret_trailers
  43. * ls_files
  44. * ls_remote
  45. * ls_tree
  46. * mailsplit
  47. * merge
  48. * merge_tree
  49. * mv/move
  50. * prune
  51. * pull
  52. * push
  53. * rm
  54. * remote{_add}
  55. * receive_pack
  56. * replace{_create,_delete,_list}
  57. * rerere{_status,_diff,_forget,_clear,_gc}
  58. * reset
  59. * revert
  60. * sparse_checkout
  61. * submodule_add
  62. * submodule_init
  63. * submodule_list
  64. * rev_list
  65. * tag{_create,_delete,_list}
  66. * upload_pack
  67. * update_server_info
  68. * var
  69. * write_commit_graph
  70. * status
  71. * shortlog
  72. * symbolic_ref
  73. * worktree{_add,_list,_remove,_prune,_lock,_unlock,_move}
  74. These functions are meant to behave similarly to the git subcommands.
  75. Differences in behaviour are considered bugs.
  76. Note: one of the consequences of this is that paths tend to be
  77. interpreted relative to the current working directory rather than relative
  78. to the repository root.
  79. Functions should generally accept both unicode strings and bytestrings
  80. """

__all__ = [
    "CheckoutError",
    "CountObjectsResult",
    "DivergedBranches",
    "Error",
    "NoneStream",
    "RemoteExists",
    "TimezoneFormatError",
    "TransportKwargs",
    "active_branch",
    "add",
    "annotate",
    "archive",
    "bisect_bad",
    "bisect_good",
    "bisect_log",
    "bisect_replay",
    "bisect_reset",
    "bisect_skip",
    "bisect_start",
    "branch_create",
    "branch_delete",
    "branch_list",
    "branch_remotes_list",
    "branches_containing",
    "check_diverged",
    "check_ignore",
    "check_mailmap",
    "checkout",
    "checkout_branch",
    "cherry",
    "cherry_pick",
    "clean",
    "clone",
    "commit",
    "commit_decode",
    "commit_encode",
    "commit_tree",
    "cone_mode_add",
    "cone_mode_init",
    "cone_mode_set",
    "count_objects",
    "daemon",
    "describe",
    "diff",
    "diff_tree",
    "fetch",
    "filter_branch",
    "filter_branches_by_pattern",
    "find_unique_abbrev",
    "for_each_ref",
    "format_patch",
    "fsck",
    "gc",
    "get_branch_merge",
    "get_branch_remote",
    "get_object_by_path",
    "get_remote_repo",
    "get_tree_changes",
    "get_untracked_paths",
    "get_user_timezones",
    "grep",
    "independent_commits",
    "init",
    "interpret_trailers",
    "is_ancestor",
    "is_interactive_rebase",
    "lfs_clean",
    "lfs_fetch",
    "lfs_init",
    "lfs_ls_files",
    "lfs_migrate",
    "lfs_pointer_check",
    "lfs_pull",
    "lfs_push",
    "lfs_smudge",
    "lfs_status",
    "lfs_track",
    "lfs_untrack",
    "log",
    "ls_files",
    "ls_remote",
    "ls_tree",
    "mailinfo",
    "mailsplit",
    "maintenance_register",
    "maintenance_run",
    "maintenance_unregister",
    "merge",
    "merge_base",
    "merge_tree",
    "merged_branches",
    "mv",
    "no_merged_branches",
    "notes_add",
    "notes_list",
    "notes_remove",
    "notes_show",
    "open_repo",
    "open_repo_closing",
    "pack_objects",
    "pack_refs",
    "parse_timezone_format",
    "path_to_tree_path",
    "print_commit",
    "print_name_status",
    "print_tag",
    "prune",
    "pull",
    "push",
    "rebase",
    "receive_pack",
    "reflog",
    "reflog_delete",
    "reflog_expire",
    "remote_add",
    "remote_remove",
    "remove",
    "repack",
    "replace_create",
    "replace_delete",
    "replace_list",
    "rerere",
    "rerere_clear",
    "rerere_diff",
    "rerere_forget",
    "rerere_gc",
    "rerere_status",
    "reset",
    "reset_file",
    "restore",
    "rev_list",
    "revert",
    "set_branch_tracking",
    "shortlog",
    "show",
    "show_blob",
    "show_branch",
    "show_commit",
    "show_object",
    "show_ref",
    "show_tag",
    "show_tree",
    "sparse_checkout",
    "stash_drop",
    "stash_list",
    "stash_pop",
    "stash_push",
    "status",
    "stripspace",
    "submodule_add",
    "submodule_init",
    "submodule_list",
    "submodule_update",
    "switch",
    "symbolic_ref",
    "tag_create",
    "tag_delete",
    "tag_list",
    "unpack_objects",
    "update_head",
    "update_server_info",
    "upload_pack",
    "var",
    "var_list",
    "verify_commit",
    "verify_tag",
    "web_daemon",
    "worktree_add",
    "worktree_list",
    "worktree_lock",
    "worktree_move",
    "worktree_prune",
    "worktree_remove",
    "worktree_repair",
    "worktree_unlock",
    "write_commit_graph",
    "write_tree",
]

import datetime
import fnmatch
import logging
import os
import posixpath
import re
import stat
import sys
import time
from collections import namedtuple
from collections.abc import Callable, Iterable, Iterator, Sequence
from collections.abc import Set as AbstractSet
from contextlib import AbstractContextManager, closing, contextmanager
from dataclasses import dataclass
from io import BytesIO, RawIOBase
from pathlib import Path
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    BinaryIO,
    TextIO,
    TypedDict,
    TypeVar,
    cast,
    overload,
)

if sys.version_info >= (3, 12):
    from typing import override
else:
    from typing_extensions import override

from .._typing import Buffer

if TYPE_CHECKING:
    import urllib3

    from ..filter_branch import CommitData
    from ..gc import GCStats
    from ..maintenance import MaintenanceResult

from .. import replace_me
from ..archive import tar_stream
from ..bisect import BisectState
from ..client import (
    FetchPackResult,
    LsRemoteResult,
    SendPackResult,
    get_transport_and_path,
)
from ..config import Config, StackedConfig
from ..diff_tree import (
    CHANGE_ADD,
    CHANGE_COPY,
    CHANGE_DELETE,
    CHANGE_MODIFY,
    CHANGE_RENAME,
    RENAME_CHANGE_TYPES,
    TreeChange,
    tree_changes,
)
from ..errors import SendPackError
from ..graph import can_fast_forward
from ..ignore import IgnoreFilterManager
from ..index import (
    ConflictedIndexEntry,
    Index,
    IndexEntry,
    _fs_to_tree_path,
    blob_from_path_and_stat,
    build_file_from_blob,
    get_unstaged_changes,
    symlink,
    update_working_tree,
    validate_path_element_default,
    validate_path_element_hfs,
    validate_path_element_ntfs,
)
from ..object_store import BaseObjectStore, tree_lookup_path
from ..objects import (
    Blob,
    Commit,
    ObjectID,
    Tag,
    Tree,
    TreeEntry,
    format_timezone,
    parse_timezone,
    pretty_format_tree_entry,
)
from ..objectspec import (
    parse_commit,
    parse_object,
    parse_ref,
    parse_reftuples,
    parse_tree,
)
from ..pack import UnpackedObject, write_pack_from_container, write_pack_index
from ..patch import (
    MailinfoResult,
    get_summary,
    write_commit_patch,
    write_object_diff,
    write_tree_diff,
)
from ..protocol import ZERO_SHA, Protocol
from ..refs import (
    HEADREF,
    LOCAL_BRANCH_PREFIX,
    LOCAL_REMOTE_PREFIX,
    LOCAL_REPLACE_PREFIX,
    LOCAL_TAG_PREFIX,
    DictRefsContainer,
    Ref,
    SymrefLoop,
    _import_remote_refs,
    filter_ref_prefix,
    local_branch_name,
    local_replace_name,
    parse_remote_ref,
    shorten_ref_name,
)
from ..repo import BaseRepo, Repo, get_user_identity
from ..server import (
    FileSystemBackend,
    ReceivePackHandler,
    TCPGitServer,
    UploadPackHandler,
)
from ..server import update_server_info as server_update_server_info
from ..sparse_patterns import (
    SparseCheckoutConflictError,
    apply_included_paths,
    determine_included_paths,
)
from ..trailers import add_trailer_to_message, format_trailers, parse_trailers
from .lfs import (
    lfs_clean,
    lfs_fetch,
    lfs_init,
    lfs_ls_files,
    lfs_migrate,
    lfs_pointer_check,
    lfs_pull,
    lfs_push,
    lfs_smudge,
    lfs_status,
    lfs_track,
    lfs_untrack,
)
from .notes import (
    notes_add,
    notes_list,
    notes_remove,
    notes_show,
)
from .submodule import (
    submodule_add,
    submodule_init,
    submodule_list,
    submodule_update,
)
from .tag import tag_create, tag_delete, tag_list, verify_tag
from .worktree import (
    worktree_add,
    worktree_list,
    worktree_lock,
    worktree_move,
    worktree_prune,
    worktree_remove,
    worktree_repair,
    worktree_unlock,
)

# Module level tuple definition for status output
GitStatus = namedtuple("GitStatus", "staged unstaged untracked")

# TypeVar for preserving BaseRepo subclass types
T = TypeVar("T", bound="BaseRepo")

# Type alias for common repository parameter pattern
RepoPath = str | os.PathLike[str] | Repo


class TransportKwargs(TypedDict, total=False):
    """Keyword arguments accepted by get_transport_and_path."""

    operation: str | None
    thin_packs: bool
    report_activity: Callable[[int, str], None] | None
    quiet: bool
    include_tags: bool
    username: str | None
    password: str | None
    key_filename: str | None
    ssh_command: str | None
    pool_manager: "urllib3.PoolManager | None"
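
# Example usage (illustrative; the URL and values below are hypothetical).
# Because the TypedDict is declared with total=False, any subset of the
# declared keys type-checks and can be splatted into get_transport_and_path:
#
#     kwargs: TransportKwargs = {"operation": "fetch", "thin_packs": True}
#     client, path = get_transport_and_path("https://example.com/repo.git", **kwargs)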


@dataclass
class CountObjectsResult:
    """Result of counting objects in a repository.

    Attributes:
      count: Number of loose objects
      size: Total size of loose objects in bytes
      in_pack: Number of objects in pack files
      packs: Number of pack files
      size_pack: Total size of pack files in bytes
    """

    count: int
    size: int
    in_pack: int | None = None
    packs: int | None = None
    size_pack: int | None = None


class NoneStream(RawIOBase):
    """Fallback if stdout or stderr are unavailable, does nothing."""

    def read(self, size: int = -1) -> None:
        """Read from stream (returns None as this is a null stream)."""
        return None

    def readall(self) -> bytes:
        """Read all bytes (returns empty bytes).

        Returns:
          Empty bytes object
        """
        return b""

    if sys.version_info >= (3, 12):

        @override
        def readinto(self, b: Buffer) -> int | None:
            return 0

        @override
        def write(self, b: Buffer) -> int | None:
            return len(cast(bytes, b)) if b else 0

    else:

        @override
        def readinto(self, b: bytearray | memoryview) -> int | None:  # type: ignore[override]
            return 0

        @override
        def write(self, b: bytes | bytearray | memoryview) -> int | None:  # type: ignore[override]
            return len(b) if b else 0


default_bytes_out_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stdout, "buffer", None) or NoneStream()
)
default_bytes_err_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stderr, "buffer", None) or NoneStream()
)

DEFAULT_ENCODING = "utf-8"


class Error(Exception):
    """Porcelain-based error."""

    def __init__(self, msg: str) -> None:
        """Initialize Error with message."""
        super().__init__(msg)


class RemoteExists(Error):
    """Raised when the remote already exists."""


class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""


class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""


def parse_timezone_format(tz_str: str) -> int:
    """Parse given string and attempt to return a timezone offset.

    Different formats are considered in the following order:

     - Git internal format: <unix timestamp> <timezone offset>
     - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
     - ISO 8601: e.g. 1995-11-20T19:12:08-0500

    Args:
      tz_str: datetime string
    Returns: Timezone offset as integer
    Raises:
      TimezoneFormatError: if timezone information cannot be extracted
    """
    import re

    # Git internal format
    internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
    if re.match(internal_format_pattern, tz_str):
        try:
            tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
            return tz_internal[0]
        except ValueError:
            pass

    # RFC 2822
    import email.utils

    rfc_2822 = email.utils.parsedate_tz(tz_str)
    if rfc_2822 and rfc_2822[9] is not None:
        return rfc_2822[9]

    # ISO 8601
    # Supported offsets:
    # sHHMM, sHH:MM, sHH
    iso_8601_pattern = re.compile(
        "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
    )
    match = re.search(iso_8601_pattern, tz_str)
    total_secs = 0
    if match:
        sign, hours, minutes = match.groups()
        total_secs += int(hours) * 3600
        if minutes:
            total_secs += int(minutes) * 60
        total_secs = -total_secs if sign == "-" else total_secs
        return total_secs

    # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
    raise TimezoneFormatError(tz_str)
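
# Examples (illustrative values), one per supported format:
#
#     >>> parse_timezone_format("1670000000 +0100")                  # Git internal
#     3600
#     >>> parse_timezone_format("Mon, 20 Nov 1995 19:12:08 -0500")   # RFC 2822
#     -18000
#     >>> parse_timezone_format("1995-11-20T19:12:08-0500")          # ISO 8601
#     -18000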


def get_user_timezones() -> tuple[int, int]:
    """Retrieve local timezone as described in git documentation.

    https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt

    Returns: A tuple containing author timezone, committer timezone.
    """
    local_timezone = time.localtime().tm_gmtoff
    if os.environ.get("GIT_AUTHOR_DATE"):
        author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
    else:
        author_timezone = local_timezone
    if os.environ.get("GIT_COMMITTER_DATE"):
        commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
    else:
        commit_timezone = local_timezone
    return author_timezone, commit_timezone
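
# Example (illustrative): with GIT_AUTHOR_DATE set and GIT_COMMITTER_DATE
# unset, the author offset is parsed from the environment and the committer
# offset falls back to the local timezone.
#
#     os.environ["GIT_AUTHOR_DATE"] = "1995-11-20T19:12:08-0500"
#     author_tz, committer_tz = get_user_timezones()
#     # author_tz == -18000; committer_tz == time.localtime().tm_gmtoff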


@overload
def open_repo(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo(
    path_or_repo: str | os.PathLike[str],
) -> AbstractContextManager[Repo]: ...


def open_repo(
    path_or_repo: str | os.PathLike[str] | T,
) -> AbstractContextManager[T | Repo]:
    """Open an argument that can be a repository or a path for a repository."""
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return Repo(path_or_repo)


@contextmanager
def _noop_context_manager(obj: T) -> Iterator[T]:
    """Context manager that has the same api as closing but does nothing."""
    yield obj


def _get_reflog_message(
    default_message: bytes, explicit_message: bytes | None = None
) -> bytes:
    """Get reflog message, checking GIT_REFLOG_ACTION environment variable.

    Args:
      default_message: Default message to use if no explicit message or env var
      explicit_message: Explicit message passed as argument (takes precedence)
    Returns:
      The reflog message with priority:
      1. explicit_message if provided
      2. GIT_REFLOG_ACTION environment variable if set
      3. default_message otherwise
    """
    if explicit_message is not None:
        return explicit_message
    env_action = os.environ.get("GIT_REFLOG_ACTION")
    if env_action is not None:
        return env_action.encode("utf-8")
    return default_message
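
# Precedence examples (illustrative messages):
#
#     _get_reflog_message(b"commit: msg")                     # -> b"commit: msg"
#     _get_reflog_message(b"commit: msg", b"cherry-pick: x")  # -> b"cherry-pick: x"
#     # With GIT_REFLOG_ACTION=rebase in the environment and no explicit
#     # message, the result would be b"rebase".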


@overload
def open_repo_closing(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo_closing(
    path_or_repo: str | bytes | os.PathLike[str],
) -> AbstractContextManager[Repo]: ...


def open_repo_closing(
    path_or_repo: str | bytes | os.PathLike[str] | T,
) -> AbstractContextManager[T | Repo]:
    """Open an argument that can be a repository or a path for a repository.

    Returns a context manager that closes the repo on exit if the argument
    is a path, and does nothing if the argument is already a repo.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
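
# Typical usage (the path is a made-up example): a path argument is opened
# and closed automatically, while an existing Repo object is passed through
# and left open for the caller.
#
#     with open_repo_closing("/path/to/repo") as r:
#         head = r.head()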


def path_to_tree_path(
    repopath: str | bytes | os.PathLike[str],
    path: str | bytes | os.PathLike[str],
    tree_encoding: str = DEFAULT_ENCODING,
) -> bytes:
    """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding to use for tree paths
    Returns: A path formatted for use in e.g. an index
    """
    # Resolve might return a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    # Convert bytes paths to str for Path
    if isinstance(path, bytes):
        path = os.fsdecode(path)
    path = Path(path)
    resolved_path = path.resolve()

    # Resolve and abspath seem to behave differently regarding symlinks;
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    # Convert bytes paths to str for Path
    if isinstance(repopath, bytes):
        repopath = os.fsdecode(repopath)
    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise
    if sys.platform == "win32":
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
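
# Example (paths are hypothetical): an absolute file path is converted to a
# slash-separated bytes path relative to the repository root, suitable for
# index lookups.
#
#     path_to_tree_path("/repo", "/repo/docs/readme.txt")
#     # -> b"docs/readme.txt"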


class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha: bytes, new_sha: bytes) -> None:
        """Initialize DivergedBranches error with current and new SHA values."""
        self.current_sha = current_sha
        self.new_sha = new_sha


def check_diverged(repo: BaseRepo, current_sha: ObjectID, new_sha: ObjectID) -> None:
    """Check if updating to a sha can be done with fast forwarding.

    Args:
      repo: Repository object
      current_sha: Current head sha
      new_sha: New head sha
    Raises:
      DivergedBranches: if the new sha cannot be reached by fast-forwarding
    """
    try:
        can = can_fast_forward(repo, current_sha, new_sha)
    except KeyError:
        can = False
    if not can:
        raise DivergedBranches(current_sha, new_sha)
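
# Sketch of the intended call pattern (the shas are hypothetical): callers
# catch DivergedBranches to decide how to handle a non-fast-forward update.
#
#     try:
#         check_diverged(repo, current_sha, new_sha)
#     except DivergedBranches:
#         ...  # refuse the update, or require an explicit force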


def archive(
    repo: str | BaseRepo,
    committish: str | bytes | Commit | Tag | None = None,
    outstream: BinaryIO | RawIOBase = default_bytes_out_stream,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
) -> None:
    """Create an archive.

    Args:
      repo: Path of repository for which to generate an archive.
      committish: Commit SHA1 or ref to use
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
    """
    if committish is None:
        committish = "HEAD"
    with open_repo_closing(repo) as repo_obj:
        c = parse_commit(repo_obj, committish)
        tree = repo_obj.object_store[c.tree]
        assert isinstance(tree, Tree)
        for chunk in tar_stream(repo_obj.object_store, tree, c.commit_time):
            outstream.write(chunk)
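
# Example (the paths are hypothetical): stream a tar archive of HEAD into a
# file instead of stdout.
#
#     with open("/tmp/repo.tar", "wb") as f:
#         archive("/path/to/repo", outstream=f)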


def update_server_info(repo: RepoPath = ".") -> None:
    """Update server info files for a repository.

    Args:
      repo: path to the repository
    """
    with open_repo_closing(repo) as r:
        server_update_server_info(r)


def write_commit_graph(repo: RepoPath = ".", reachable: bool = True) -> None:
    """Write a commit graph file for a repository.

    Args:
      repo: path to the repository or a Repo object
      reachable: if True, include all commits reachable from refs.
        if False, only include direct ref targets.
    """
    with open_repo_closing(repo) as r:
        # Get all refs
        refs = list(r.refs.as_dict().values())
        if refs:
            r.object_store.write_commit_graph(refs, reachable=reachable)
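
# Example (illustrative): the flag selects between the full reachable
# history and only the direct ref targets.
#
#     write_commit_graph(".", reachable=True)   # all commits reachable from refs
#     write_commit_graph(".", reachable=False)  # direct ref targets only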


def symbolic_ref(repo: RepoPath, ref_name: str | bytes, force: bool = False) -> None:
    """Set git symbolic ref into HEAD.

    Args:
      repo: path to the repository
      ref_name: short name of the new ref
      force: force setting without checking whether the ref exists in refs/heads
    """
    with open_repo_closing(repo) as repo_obj:
        ref_path = _make_branch_ref(ref_name)
        if not force and ref_path not in repo_obj.refs.keys():
            ref_name_str = (
                ref_name.decode("utf-8", "replace")
                if isinstance(ref_name, bytes)
                else ref_name
            )
            raise Error(f"fatal: ref `{ref_name_str}` is not a ref")
        repo_obj.refs.set_symbolic_ref(HEADREF, ref_path)
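
# Example (branch names are hypothetical): point HEAD at an existing local
# branch, or at a not-yet-created one with force=True.
#
#     symbolic_ref(".", "main")             # HEAD -> refs/heads/main
#     symbolic_ref(".", "wip", force=True)  # skips the refs/heads existence check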


def pack_refs(repo: RepoPath, all: bool = False) -> None:
    """Pack loose references into packed-refs file."""
    with open_repo_closing(repo) as repo_obj:
        repo_obj.refs.pack_refs(all=all)


def _get_variables(repo: RepoPath = ".") -> dict[str, str]:
    """Internal function to get all Git logical variables.

    Args:
      repo: Path to the repository
    Returns:
      A dictionary of all logical variables with values
    """
    with open_repo_closing(repo) as repo_obj:
        config = repo_obj.get_config_stack()

        # Define callbacks for each logical variable
        def get_author_ident() -> str | None:
            """Get GIT_AUTHOR_IDENT."""
            try:
                author_identity = get_user_identity(config, kind="AUTHOR")
                author_tz, _ = get_user_timezones()
                timestamp = int(time.time())
                return f"{author_identity.decode('utf-8', 'replace')} {timestamp} {author_tz:+05d}"
            except Exception:
                return None

        def get_committer_ident() -> str | None:
            """Get GIT_COMMITTER_IDENT."""
            try:
                committer_identity = get_user_identity(config, kind="COMMITTER")
                _, committer_tz = get_user_timezones()
                timestamp = int(time.time())
                return f"{committer_identity.decode('utf-8', 'replace')} {timestamp} {committer_tz:+05d}"
            except Exception:
                return None

        def get_editor() -> str | None:
            """Get GIT_EDITOR."""
            editor = os.environ.get("GIT_EDITOR")
            if editor is None:
                try:
                    editor_bytes = config.get(("core",), "editor")
                    editor = editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    editor = os.environ.get("VISUAL") or os.environ.get("EDITOR")
            return editor

        def get_sequence_editor() -> str | None:
            """Get GIT_SEQUENCE_EDITOR."""
            sequence_editor = os.environ.get("GIT_SEQUENCE_EDITOR")
            if sequence_editor is None:
                try:
                    seq_editor_bytes = config.get(("sequence",), "editor")
                    sequence_editor = seq_editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    # Falls back to GIT_EDITOR if not set
                    sequence_editor = get_editor()
            return sequence_editor

        def get_pager() -> str | None:
            """Get GIT_PAGER."""
            pager = os.environ.get("GIT_PAGER")
            if pager is None:
                try:
                    pager_bytes = config.get(("core",), "pager")
                    pager = pager_bytes.decode("utf-8", "replace")
                except KeyError:
                    pager = os.environ.get("PAGER")
            return pager

        def get_default_branch() -> str:
            """Get GIT_DEFAULT_BRANCH."""
            try:
                default_branch_bytes = config.get(("init",), "defaultBranch")
                return default_branch_bytes.decode("utf-8", "replace")
            except KeyError:
                # Git's default is "master"
                return "master"

        # Dictionary mapping variable names to their getter callbacks
        variable_callbacks: dict[str, Callable[[], str | None]] = {
            "GIT_AUTHOR_IDENT": get_author_ident,
            "GIT_COMMITTER_IDENT": get_committer_ident,
            "GIT_EDITOR": get_editor,
            "GIT_SEQUENCE_EDITOR": get_sequence_editor,
            "GIT_PAGER": get_pager,
            "GIT_DEFAULT_BRANCH": get_default_branch,
        }

        # Build the variables dictionary by calling callbacks
        variables: dict[str, str] = {}
        for var_name, callback in variable_callbacks.items():
            value = callback()
            if value is not None:
                variables[var_name] = value

        return variables
  822. def var_list(repo: RepoPath = ".") -> dict[str, str]:
  823. """List all Git logical variables.
  824. Args:
  825. repo: Path to the repository
  826. Returns:
  827. A dictionary of all logical variables with their values
  828. """
  829. return _get_variables(repo)
  830. def var(repo: RepoPath = ".", variable: str = "GIT_AUTHOR_IDENT") -> str:
  831. """Get the value of a specific Git logical variable.
  832. Args:
  833. repo: Path to the repository
  834. variable: The variable to query (e.g., 'GIT_AUTHOR_IDENT')
  835. Returns:
  836. The value of the requested variable as a string
  837. Raises:
  838. KeyError: If the requested variable has no value
  839. """
  840. variables = _get_variables(repo)
  841. if variable in variables:
  842. return variables[variable]
  843. else:
  844. raise KeyError(f"Variable {variable} has no value")
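

# Illustrative usage sketch (not part of the public API): query logical
# variables. The repository path "example-repo" is a hypothetical placeholder
# and a configured user identity is assumed.
def _example_var_usage() -> None:
    print(var("example-repo", "GIT_DEFAULT_BRANCH"))  # e.g. "master"
    for name, value in var_list("example-repo").items():
        print(f"{name}={value}")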


def commit(
    repo: RepoPath = ".",
    message: str | bytes | Callable[[Any, Commit], bytes] | None = None,
    author: bytes | None = None,
    author_timezone: int | None = None,
    committer: bytes | None = None,
    commit_timezone: int | None = None,
    encoding: bytes | None = None,
    no_verify: bool = False,
    signoff: bool | None = None,
    all: bool = False,
    amend: bool = False,
    sign: bool | None = None,
) -> bytes:
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message (string/bytes or callable that takes
        (repo, commit) and returns bytes)
      author: Optional author name and email
      author_timezone: Author timestamp timezone
      committer: Optional committer name and email
      commit_timezone: Commit timestamp timezone
      encoding: Encoding to use for commit message
      no_verify: Skip pre-commit and commit-msg hooks
      signoff: Add Signed-off-by line to commit message. If None, uses
        format.signoff config.
      all: Automatically stage all tracked files that have been modified
      amend: Replace the tip of the current branch by creating a new commit
      sign: GPG sign the commit. If None, uses commit.gpgsign config.
        If True, signs with default GPG key. If False, does not sign.

    Returns: SHA1 of the new commit
    """
    encoding_str = encoding.decode("ascii") if encoding else DEFAULT_ENCODING
    if isinstance(message, str):
        message = message.encode(encoding_str)
    if isinstance(author, str):
        author = author.encode(encoding_str)
    if isinstance(committer, str):
        committer = committer.encode(encoding_str)
    local_timezone = get_user_timezones()
    if author_timezone is None:
        author_timezone = local_timezone[0]
    if commit_timezone is None:
        commit_timezone = local_timezone[1]

    with open_repo_closing(repo) as r:
        # Handle amend logic
        merge_heads = None
        if amend:
            try:
                head_commit = r[r.head()]
                assert isinstance(head_commit, Commit)
            except KeyError:
                raise ValueError("Cannot amend: no existing commit found")

            # If message not provided, use the message from the current HEAD
            if message is None:
                message = head_commit.message
            # If author not provided, use the author from the current HEAD
            if author is None:
                author = head_commit.author
            if author_timezone is None:
                author_timezone = head_commit.author_timezone
            # Use the parent(s) of the current HEAD as our parent(s)
            merge_heads = list(head_commit.parents)

        # If the -a flag is used, stage all modified tracked files
        if all:
            index = r.open_index()
            normalizer = r.get_blob_normalizer()
            # Pass the normalizer's checkin_normalize method directly
            if normalizer is not None:
                filter_callback = normalizer.checkin_normalize
            else:
                filter_callback = None
            unstaged_changes = list(
                get_unstaged_changes(index, r.path, filter_callback)
            )
            if unstaged_changes:
                # Convert bytes paths to strings for the add function
                modified_files: list[str | bytes | os.PathLike[str]] = []
                for path in unstaged_changes:
                    if isinstance(path, bytes):
                        modified_files.append(path.decode())
                    else:
                        modified_files.append(path)
                add(r, paths=modified_files)

        # For amend, create a dangling commit to avoid adding the current
        # HEAD as a parent
        if amend:
            commit_sha = r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                signoff=signoff,
                merge_heads=merge_heads,
                ref=None,
            )
            # Update HEAD to point to the new commit, with a reflog message
            try:
                old_head = r.refs[HEADREF]
            except KeyError:
                old_head = None
            # Get the actual commit message from the created commit
            commit_obj = r[commit_sha]
            assert isinstance(commit_obj, Commit)
            commit_message = commit_obj.message
            default_message = b"commit (amend): " + commit_message
            # Truncate the message if it is too long for the reflog
            if len(default_message) > 100:
                default_message = default_message[:97] + b"..."
            reflog_message = _get_reflog_message(default_message)
            r.refs.set_if_equals(HEADREF, old_head, commit_sha, message=reflog_message)
            return commit_sha
        else:
            return r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                signoff=signoff,
                merge_heads=merge_heads,
            )
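

# Illustrative usage sketch (not part of the public API): stage all modified
# tracked files and amend the previous commit, keeping its message. The
# repository path is a hypothetical placeholder.
def _example_commit_amend() -> None:
    sha = commit("example-repo", all=True, amend=True, no_verify=True)
    print(sha.decode("ascii"))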


def commit_tree(
    repo: RepoPath,
    tree: ObjectID,
    message: str | bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> ObjectID:
    """Create a new commit object.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Commit message
      author: Optional author name and email
      committer: Optional committer name and email
    """
    with open_repo_closing(repo) as r:
        if isinstance(message, str):
            message = message.encode(DEFAULT_ENCODING)
        return r.get_worktree().commit(
            message=message, tree=tree, committer=committer, author=author
        )


def interpret_trailers(
    message: str | bytes,
    *,
    trailers: list[tuple[str, str]] | None = None,
    trim_empty: bool = False,
    only_trailers: bool = False,
    only_input: bool = False,
    unfold: bool = False,
    parse: bool = False,
    where: str = "end",
    if_exists: str = "addIfDifferentNeighbor",
    if_missing: str = "add",
    separators: str = ":",
) -> bytes:
    r"""Parse and manipulate trailers in a commit message.

    This function implements the functionality of ``git interpret-trailers``,
    allowing parsing and manipulation of structured metadata (trailers) in
    commit messages.

    Trailers are key-value pairs at the end of commit messages, formatted like:

        Signed-off-by: Alice <alice@example.com>
        Reviewed-by: Bob <bob@example.com>

    Args:
      message: The commit message (string or bytes)
      trailers: List of (key, value) tuples to add as new trailers
      trim_empty: Remove trailers with empty values
      only_trailers: Output only the trailers, not the message body
      only_input: Don't add new trailers, only parse existing ones
      unfold: Join multiline trailer values into a single line
      parse: Shorthand for --only-trailers --only-input --unfold
      where: Where to add new trailers ('end', 'start', 'after', 'before')
      if_exists: How to handle duplicate keys
        - 'add': Always add
        - 'replace': Replace all existing
        - 'addIfDifferent': Add only if value differs from all existing
        - 'addIfDifferentNeighbor': Add only if value differs from neighbors
        - 'doNothing': Don't add if key exists
      if_missing: What to do if key doesn't exist ('add' or 'doNothing')
      separators: Valid separator characters (default ':')

    Returns:
      The processed message as bytes

    Examples:
      >>> msg = b"Subject\\n\\nBody text\\n"
      >>> interpret_trailers(msg, trailers=[("Signed-off-by", "Alice <alice@example.com>")])
      b'Subject\\n\\nBody text\\n\\nSigned-off-by: Alice <alice@example.com>\\n'

      >>> msg = b"Subject\\n\\nSigned-off-by: Alice\\n"
      >>> interpret_trailers(msg, only_trailers=True)
      b'Signed-off-by: Alice\\n'
    """
    # Handle --parse shorthand
    if parse:
        only_trailers = True
        only_input = True
        unfold = True

    # Convert message to bytes
    if isinstance(message, str):
        message_bytes = message.encode("utf-8")
    else:
        message_bytes = message

    # Parse existing trailers
    _message_body, parsed_trailers = parse_trailers(message_bytes, separators)

    # Apply unfold if requested
    if unfold:
        for trailer in parsed_trailers:
            # Replace newlines and multiple spaces with a single space
            trailer.value = " ".join(trailer.value.split())

    # Apply trim_empty if requested
    if trim_empty:
        parsed_trailers = [t for t in parsed_trailers if t.value.strip()]

    # Add new trailers if requested and not only_input
    if not only_input and trailers:
        for key, value in trailers:
            message_bytes = add_trailer_to_message(
                message_bytes,
                key,
                value,
                separators[0],  # Use first separator as default
                where=where,
                if_exists=if_exists,
                if_missing=if_missing,
            )
        # Re-parse to get updated trailers for output
        if only_trailers:
            _message_body, parsed_trailers = parse_trailers(message_bytes, separators)

    # Return based on only_trailers flag
    if only_trailers:
        return format_trailers(parsed_trailers)
    else:
        return message_bytes
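

# Illustrative usage sketch (not part of the public API): append a
# Reviewed-by trailer unless an identical neighbor already exists (the
# default 'addIfDifferentNeighbor' policy).
def _example_interpret_trailers() -> None:
    msg = b"Fix parser\n\nDetails here.\n"
    out = interpret_trailers(msg, trailers=[("Reviewed-by", "Bob <bob@example.com>")])
    print(out.decode("utf-8"))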


def stripspace(
    text: str | bytes,
    *,
    strip_comments: bool = False,
    comment_char: str = "#",
    comment_lines: bool = False,
) -> bytes:
    r"""Strip unnecessary whitespace from text.

    This function implements the functionality of ``git stripspace``, commonly
    used to clean up commit messages and other text content.

    Args:
      text: The text to process (string or bytes)
      strip_comments: If True, remove lines that begin with comment_char
      comment_char: The comment character to use (default: "#")
      comment_lines: If True, prepend comment_char to each line

    Returns:
      The processed text as bytes

    The function performs the following operations:

    1. If comment_lines is True, prepend comment_char + space to each line
    2. Strip trailing whitespace from each line
    3. If strip_comments is True, remove lines starting with comment_char
    4. Collapse multiple consecutive blank lines into a single blank line
    5. Remove leading blank lines
    6. Remove trailing blank lines
    7. Ensure the text ends with a newline (unless empty)

    Examples:
      >>> stripspace(b" hello \\n\\n\\nworld \\n\\n")
      b'hello\\n\\nworld\\n'

      >>> stripspace(b"# comment\\ntext\\n", strip_comments=True)
      b'text\\n'

      >>> stripspace(b"line\\n", comment_lines=True)
      b'# line\\n'
    """
    from ..stripspace import stripspace as _stripspace

    # Convert text to bytes
    if isinstance(text, str):
        text_bytes = text.encode("utf-8")
    else:
        text_bytes = text

    # Convert comment_char to bytes
    comment_char_bytes = (
        comment_char.encode("utf-8") if isinstance(comment_char, str) else comment_char
    )

    return _stripspace(
        text_bytes,
        strip_comments=strip_comments,
        comment_char=comment_char_bytes,
        comment_lines=comment_lines,
    )


def init(
    path: str | os.PathLike[str] = ".",
    *,
    bare: bool = False,
    symlinks: bool | None = None,
    object_format: str | None = None,
) -> Repo:
    """Create a new git repository.

    Args:
      path: Path to repository.
      bare: Whether to create a bare repository.
      symlinks: Whether to create actual symlinks (defaults to autodetect)
      object_format: Object format to use ("sha1" or "sha256", defaults to "sha1")

    Returns: A Repo instance
    """
    if not os.path.exists(path):
        os.mkdir(path)

    if bare:
        return Repo.init_bare(path, object_format=object_format)
    else:
        return Repo.init(path, symlinks=symlinks, object_format=object_format)
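

# Illustrative usage sketch (not part of the public API): create a bare
# repository in a hypothetical "mirror.git" directory.
def _example_init_bare() -> None:
    repo = init("mirror.git", bare=True)
    print(repo.path)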


def _filter_transport_kwargs(**kwargs: object) -> TransportKwargs:
    """Filter kwargs to only include parameters accepted by get_transport_and_path.

    Args:
      **kwargs: Arbitrary keyword arguments

    Returns:
      Dictionary containing only the kwargs that get_transport_and_path accepts
    """
    valid_params = {
        "operation",
        "thin_packs",
        "report_activity",
        "quiet",
        "include_tags",
        "username",
        "password",
        "key_filename",
        "ssh_command",
        "pool_manager",
    }
    return cast(TransportKwargs, {k: v for k, v in kwargs.items() if k in valid_params})


def clone(
    source: str | bytes | Repo,
    target: str | os.PathLike[str] | None = None,
    bare: bool = False,
    checkout: bool | None = None,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    outstream: BinaryIO | None = None,
    origin: str | None = "origin",
    depth: int | None = None,
    branch: str | bytes | None = None,
    config: Config | None = None,
    filter_spec: str | None = None,
    protocol_version: int | None = None,
    recurse_submodules: bool = False,
    **kwargs: str | bytes | Sequence[str | bytes],
) -> Repo:
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules
      **kwargs: Additional keyword arguments including refspecs to fetch.
        Can be a bytestring, a string, or a list of bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
        # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()
    if checkout is None:
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        if isinstance(source, Repo):
            raise ValueError("target must be specified when cloning from a Repo object")
        elif isinstance(source, bytes):
            target = source.split(b"/")[-1].decode()
        else:
            target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    mkdir = not os.path.exists(target)

    if isinstance(source, Repo):
        # For direct repo cloning, use LocalGitClient
        from ..client import GitClient, LocalGitClient

        client: GitClient = LocalGitClient(config=config)
        path = source.path
    else:
        source_str = source.decode() if isinstance(source, bytes) else source
        transport_kwargs = _filter_transport_kwargs(**kwargs)
        (client, path) = get_transport_and_path(
            source_str, config=config, **transport_kwargs
        )

    filter_spec_bytes: bytes | None = None
    if filter_spec:
        filter_spec_bytes = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        str(target),  # Convert PathLike to str
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch.decode() if branch else None,  # Convert bytes to str
        progress=lambda data: (errstream.write(data), None)[1],
        depth=depth,
        filter_spec=filter_spec_bytes,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True, recursive=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing
            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
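

# Illustrative usage sketch (not part of the public API): shallow-clone a
# single branch. The URL and target directory are hypothetical placeholders.
def _example_clone_shallow() -> None:
    repo = clone(
        "https://example.com/project.git",
        "project",
        depth=1,
        branch="main",
    )
    print(repo.path)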


def add(
    repo: str | os.PathLike[str] | Repo = ".",
    paths: Sequence[str | bytes | os.PathLike[str]]
    | str
    | bytes
    | os.PathLike[str]
    | None = None,
) -> tuple[list[str], set[str]]:
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files
        from the repository root (mimicking 'git add -A' behavior).

    Returns: Tuple with list of added files and set of ignored files

    If the repository contains ignored directories, the returned set will
    contain the path to an ignored directory (with trailing slash). Individual
    files within ignored directories will not be returned.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        if normalizer is not None:
            filter_callback = normalizer.checkin_normalize
        else:
            filter_callback = None
        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)
        all_unstaged_paths = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        if paths is None:
            # When no paths are specified, add all untracked and modified
            # files from the repository root
            paths = [str(repo_path)]

        relpaths = []
        if isinstance(paths, (str, bytes, os.PathLike)):
            paths = [paths]
        for p in paths:
            # Handle bytes paths by decoding them
            if isinstance(p, bytes):
                p = p.decode("utf-8")
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path
            # Don't resolve symlinks completely - only resolve the parent
            # directory to avoid issues when symlinks point outside the
            # repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()

            try:
                relpath = str(resolved_path.relative_to(repo_path)).replace(os.sep, "/")
            except ValueError as e:
                # Path is not within the repository
                p_str = p.decode() if isinstance(p, bytes) else str(p)
                raise ValueError(
                    f"Path {p_str} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory
                    # we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue
                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)

        r.get_worktree().stage(relpaths)
    return (relpaths, ignored)
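

# Illustrative usage sketch (not part of the public API): stage two files and
# report what was ignored. Paths are hypothetical placeholders.
def _example_add() -> None:
    added, ignored = add("example-repo", paths=["README.md", "src/main.py"])
    print("added:", added, "ignored:", sorted(ignored))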


def _is_subdir(
    subdir: str | os.PathLike[str], parentdir: str | os.PathLike[str]
) -> bool:
    """Check whether subdir is parentdir or a subdir of parentdir.

    If parentdir or subdir is a relative path, it will be disambiguated
    relative to the pwd.
    """
    parentdir_abs = os.path.realpath(parentdir) + os.path.sep
    subdir_abs = os.path.realpath(subdir) + os.path.sep
    return subdir_abs.startswith(parentdir_abs)


# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(
    repo: str | os.PathLike[str] | Repo = ".",
    target_dir: str | os.PathLike[str] | None = None,
) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        config.get_boolean((b"clean",), b"requireForce", True)
        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            # target_dir and r.path are both str, so ap must be str
            assert isinstance(ap, str)
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                if not is_tracked and not is_ignored:
                    os.remove(ap)
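

# Illustrative usage sketch (not part of the public API): remove untracked
# files under a hypothetical build/ subdirectory of the working tree.
def _example_clean() -> None:
    clean("example-repo", target_dir="example-repo/build")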


def remove(
    repo: str | os.PathLike[str] | Repo = ".",
    paths: Sequence[str | bytes | os.PathLike[str]] = [],
    cached: bool = False,
) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the repository root.
      cached: Only remove from index, not from working directory
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        blob_normalizer = r.get_blob_normalizer()
        for p in paths:
            # If the path is absolute, use it as-is. Otherwise, treat it as
            # relative to the repository root.
            p_str = os.fsdecode(p) if isinstance(p, bytes) else str(p)
            if os.path.isabs(p_str):
                full_path = p_str
            else:
                full_path = os.path.join(r.path, p_str)
            tree_path = path_to_tree_path(r.path, full_path)
            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)
            try:
                entry = index[tree_path]
                if isinstance(entry, ConflictedIndexEntry):
                    raise Error(f"{p_str} has conflicts in the index")
                index_sha = entry.sha
            except KeyError as exc:
                raise Error(f"{p_str} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                        # Apply checkin normalization to compare apples to apples
                        if blob_normalizer is not None:
                            blob = blob_normalizer.checkin_normalize(blob, tree_path)
                    except OSError:
                        pass
                    else:
                        try:
                            head_commit = r[r.head()]
                            assert isinstance(head_commit, Commit)
                            committed_sha = tree_lookup_path(
                                r.__getitem__, head_commit.tree, tree_path
                            )[1]
                        except KeyError:
                            committed_sha = None

                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p_str}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p_str}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()


rm = remove
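

# Illustrative usage sketch (not part of the public API): untrack a file
# while keeping it on disk, like `git rm --cached`. The path is hypothetical.
def _example_remove_cached() -> None:
    remove("example-repo", paths=["secrets.env"], cached=True)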


def mv(
    repo: str | os.PathLike[str] | Repo,
    source: str | bytes | os.PathLike[str],
    destination: str | bytes | os.PathLike[str],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination already
        exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)
        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)
        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for the index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in the index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in the filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in the index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)
        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()


move = mv
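

# Illustrative usage sketch (not part of the public API): rename a tracked
# file, overwriting the destination if it exists. Paths are hypothetical.
def _example_mv() -> None:
    mv("example-repo", "old_name.py", "new_name.py", force=True)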


def commit_decode(
    commit: Commit, contents: bytes, default_encoding: str = DEFAULT_ENCODING
) -> str:
    """Decode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.decode(encoding, "replace")


def commit_encode(
    commit: Commit, contents: str, default_encoding: str = DEFAULT_ENCODING
) -> bytes:
    """Encode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.encode(encoding)


def print_commit(
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a human-readable commit log entry.

    Args:
      commit: A `Commit` object
      decode: Function to decode commit data
      outstream: A stream file to write to
    """
    outstream.write("-" * 50 + "\n")
    outstream.write("commit: " + commit.id.decode("ascii") + "\n")
    if len(commit.parents) > 1:
        outstream.write(
            "merge: "
            + "...".join([c.decode("ascii") for c in commit.parents[1:]])
            + "\n"
        )
    outstream.write("Author: " + decode(commit.author) + "\n")
    if commit.author != commit.committer:
        outstream.write("Committer: " + decode(commit.committer) + "\n")

    time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(commit.author_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    if commit.message:
        outstream.write("\n")
        outstream.write(decode(commit.message) + "\n")
        outstream.write("\n")


def print_tag(
    tag: Tag, decode: Callable[[bytes], str], outstream: TextIO = sys.stdout
) -> None:
    """Write a human-readable tag.

    Args:
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream to write to
    """
    outstream.write("Tagger: " + decode(tag.tagger) + "\n")
    time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(tag.message))
    outstream.write("\n")


def show_blob(
    repo: RepoPath,
    blob: Blob,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a blob to a stream.

    Args:
      repo: A `Repo` object
      blob: A `Blob` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write(decode(blob.data))


def show_commit(
    repo: RepoPath,
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    from ..diff import ColorizedDiffStream

    # Create a wrapper for ColorizedDiffStream to handle string/bytes conversion
    class _StreamWrapper:
        def __init__(self, stream: "ColorizedDiffStream") -> None:
            self.stream = stream

        def write(self, data: str | bytes) -> None:
            if isinstance(data, str):
                # Convert string to bytes for ColorizedDiffStream
                self.stream.write(data.encode("utf-8"))
            else:
                self.stream.write(data)

    with open_repo_closing(repo) as r:
        if commit.parents:
            parent_commit = r[commit.parents[0]]
            assert isinstance(parent_commit, Commit)
            # write_tree_diff expects tree ids, so use the parent's tree
            # rather than the parent commit id itself
            base_tree = parent_commit.tree
        else:
            base_tree = None
        # Use the wrapper for ColorizedDiffStream, the direct stream for others
        if isinstance(outstream, ColorizedDiffStream):
            wrapped_stream = _StreamWrapper(outstream)
            print_commit(commit, decode=decode, outstream=wrapped_stream)
            # Write the diff directly to the ColorizedDiffStream as bytes
            write_tree_diff(
                outstream,
                r.object_store,
                base_tree,
                commit.tree,
            )
        else:
            print_commit(commit, decode=decode, outstream=outstream)
            # Traditional path: buffer the diff and write it as decoded text
            diffstream = BytesIO()
            write_tree_diff(diffstream, r.object_store, base_tree, commit.tree)
            diffstream.seek(0)
            outstream.write(commit_decode(commit, diffstream.getvalue()))


def show_tree(
    repo: RepoPath,
    tree: Tree,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tree to a stream.

    Args:
      repo: A `Repo` object
      tree: A `Tree` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    for n in tree:
        outstream.write(decode(n) + "\n")


def show_tag(
    repo: RepoPath,
    tag: Tag,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        print_tag(tag, decode, outstream)
        obj = r[tag.object[1]]
        assert isinstance(obj, (Tree, Blob, Commit, Tag))
        show_object(repo, obj, decode, outstream)


def show_object(
    repo: RepoPath,
    obj: Tree | Blob | Commit | Tag,
    decode: Callable[[bytes], str],
    outstream: TextIO,
) -> None:
    """Show details of a git object."""
    handlers: dict[bytes, Callable[[RepoPath, Any, Any, TextIO], None]] = {
        b"tree": show_tree,
        b"blob": show_blob,
        b"commit": show_commit,
        b"tag": show_tag,
    }
    handler = handlers.get(obj.type_name)
    if handler is None:
        raise ValueError(f"Unknown object type: {obj.type_name.decode()}")
    handler(repo, obj, decode, outstream)


def print_name_status(changes: Iterator[TreeChange]) -> Iterator[str]:
    """Print a simple status summary, listing changed files."""
    for change in changes:
        if not change:
            continue
        if isinstance(change, list):
            change = change[0]
        if change.type == CHANGE_ADD:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "A"
        elif change.type == CHANGE_DELETE:
            assert change.old is not None
            path1 = change.old.path
            assert path1 is not None
            path2 = b""
            kind = "D"
        elif change.type == CHANGE_MODIFY:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "M"
        elif change.type in RENAME_CHANGE_TYPES:
            assert change.old is not None and change.new is not None
            path1 = change.old.path
            assert path1 is not None
            path2_opt = change.new.path
            assert path2_opt is not None
            path2 = path2_opt
            if change.type == CHANGE_RENAME:
                kind = "R"
            elif change.type == CHANGE_COPY:
                kind = "C"
        path1_str = (
            path1.decode("utf-8", errors="replace")
            if isinstance(path1, bytes)
            else path1
        )
        path2_str = (
            path2.decode("utf-8", errors="replace")
            if isinstance(path2, bytes)
            else path2
        )
        yield f"{kind:<8}{path1_str:<20}{path2_str:<20}"


def log(
    repo: RepoPath = ".",
    paths: Sequence[str | bytes] | None = None,
    outstream: TextIO = sys.stdout,
    max_entries: int | None = None,
    reverse: bool = False,
    name_status: bool = False,
) -> None:
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      max_entries: Optional maximum number of entries to display
      reverse: Reverse order in which entries are printed
      name_status: Print name status
    """
    with open_repo_closing(repo) as r:
        try:
            include = [r.head()]
        except KeyError:
            include = []
        # Convert paths to bytes if needed
        paths_bytes = None
        if paths:
            paths_bytes = [p.encode() if isinstance(p, str) else p for p in paths]
        walker = r.get_walker(
            include=include, max_entries=max_entries, paths=paths_bytes, reverse=reverse
        )
        for entry in walker:

            def decode_wrapper(x: bytes) -> str:
                return commit_decode(entry.commit, x)

            print_commit(entry.commit, decode_wrapper, outstream)
            if name_status:
                outstream.writelines(
                    [
                        line + "\n"
                        for line in print_name_status(
                            cast(Iterator[TreeChange], entry.changes())
                        )
                    ]
                )
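

# Illustrative usage sketch (not part of the public API): capture the three
# most recent log entries for a path into a string buffer. The repository
# path and the "src" path filter are hypothetical.
def _example_log_to_buffer() -> None:
    import io

    buf = io.StringIO()
    log("example-repo", paths=["src"], outstream=buf, max_entries=3)
    print(buf.getvalue())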


# TODO(jelmer): better default for encoding?
def show(
    repo: RepoPath = ".",
    objects: Sequence[str | bytes] | None = None,
    outstream: TextIO = sys.stdout,
    default_encoding: str = DEFAULT_ENCODING,
) -> None:
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the commit
    """
    if objects is None:
        objects = ["HEAD"]
    if isinstance(objects, (str, bytes)):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):

                def decode(x: bytes) -> str:
                    return commit_decode(o, x, default_encoding)
            else:

                def decode(x: bytes) -> str:
                    return x.decode(default_encoding)

            assert isinstance(o, (Tree, Blob, Commit, Tag))
            show_object(r, o, decode, outstream)


def diff_tree(
    repo: RepoPath,
    old_tree: str | bytes | Tree,
    new_tree: str | bytes | Tree,
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Compare the content and mode of blobs found via two tree objects.

    Args:
      repo: Path to repository
      old_tree: Id of old tree
      new_tree: Id of new tree
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        if isinstance(old_tree, Tree):
            old_tree_id: ObjectID | None = old_tree.id
        elif isinstance(old_tree, str):
            old_tree_id = ObjectID(old_tree.encode())
        else:
            old_tree_id = ObjectID(old_tree)
        if isinstance(new_tree, Tree):
            new_tree_id: ObjectID | None = new_tree.id
        elif isinstance(new_tree, str):
            new_tree_id = ObjectID(new_tree.encode())
        else:
            new_tree_id = ObjectID(new_tree)
        write_tree_diff(outstream, r.object_store, old_tree_id, new_tree_id)


def diff(
    repo: RepoPath = ".",
    commit: str | bytes | Commit | None = None,
    commit2: str | bytes | Commit | None = None,
    staged: bool = False,
    paths: Sequence[str | bytes] | None = None,
    outstream: BinaryIO = default_bytes_out_stream,
    diff_algorithm: str | None = None,
) -> None:
    """Show diff.

    Args:
      repo: Path to repository
      commit: First commit to compare. If staged is True, compare
        index to this commit. If staged is False, compare working tree
        to this commit. If None, defaults to HEAD for staged and index
        for unstaged.
      commit2: Second commit to compare against first commit. If provided,
        show diff between commit and commit2 (ignoring staged flag).
      staged: If True, show staged changes (index vs commit).
        If False, show unstaged changes (working tree vs commit/index).
        Ignored if commit2 is provided.
      paths: Optional list of paths to limit diff
      outstream: Stream to write to
      diff_algorithm: Algorithm to use for diffing ("myers" or "patience"),
        defaults to the underlying function's default if None
    """
    from .. import diff as diff_module

    with open_repo_closing(repo) as r:
        # Normalize paths to bytes (an empty list is treated like None)
        byte_paths: list[bytes] | None = None
        if paths:
            byte_paths = [
                p.encode("utf-8") if isinstance(p, str) else p for p in paths
            ]

        # Resolve commit refs to SHAs if provided
        if commit is not None:
            if isinstance(commit, Commit):
                # Already a Commit object
                commit_sha = commit.id
                commit_obj = commit
            else:
                # parse_commit handles both refs and SHAs, and always
                # returns a Commit object
                commit_obj = parse_commit(r, commit)
                commit_sha = commit_obj.id
        else:
            commit_sha = None
            commit_obj = None

        if commit2 is not None:
            # Compare two commits
            if isinstance(commit2, Commit):
                commit2_obj = commit2
            else:
                commit2_obj = parse_commit(r, commit2)

            # Get trees from commits
            old_tree = commit_obj.tree if commit_obj else None
            new_tree = commit2_obj.tree

            # Use tree_changes to get the changes and apply path filtering
            changes = r.object_store.tree_changes(old_tree, new_tree)
            for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
                # Skip if paths are specified and this change doesn't match
                if byte_paths:
                    path_to_check = newpath or oldpath
                    assert path_to_check is not None
                    if not any(
                        path_to_check == p or path_to_check.startswith(p + b"/")
                        for p in byte_paths
                    ):
                        continue
                write_object_diff(
                    outstream,
                    r.object_store,
                    (oldpath, oldmode, oldsha),
                    (newpath, newmode, newsha),
                    diff_algorithm=diff_algorithm,
                )
        elif staged:
            # Show staged changes (index vs commit)
            diff_module.diff_index_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        elif commit is not None:
            # Compare working tree to a specific commit
            assert commit_sha is not None  # set whenever commit is not None
            diff_module.diff_working_tree_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        else:
            # Compare working tree to index
            diff_module.diff_working_tree_to_index(
                r, outstream, byte_paths, diff_algorithm=diff_algorithm
            )
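

# Illustrative usage sketch (not part of the public API): write the staged
# diff for a hypothetical repository into a bytes buffer.
def _example_diff_staged() -> None:
    out = BytesIO()
    diff("example-repo", staged=True, outstream=out)
    sys.stdout.write(out.getvalue().decode("utf-8", "replace"))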


def rev_list(
    repo: RepoPath,
    commits: Sequence[str | bytes],
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """List commit objects in reverse chronological order.

    Args:
      repo: Path to repository
      commits: Commits over which to iterate
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        for entry in r.get_walker(
            include=[r[c if isinstance(c, bytes) else c.encode()].id for c in commits]
        ):
            outstream.write(entry.commit.id + b"\n")


def _canonical_part(url: str) -> str:
    name = url.rsplit("/", 1)[-1]
    if name.endswith(".git"):
        name = name[:-4]
    return name


def verify_commit(
    repo: RepoPath,
    committish: str | bytes = "HEAD",
    keyids: list[str] | None = None,
) -> None:
    """Verify the signature on a commit.

    Args:
      repo: Path to repository
      committish: Commit to verify (defaults to HEAD)
      keyids: Optional list of trusted key IDs. If provided, the commit
        must be signed by one of these keys. If not provided, just verifies
        that the commit has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if commit was not signed by a key
        specified in keyids
    """
    from dulwich.signature import get_signature_vendor_for_signature

    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        payload, signature, _sig_type = commit.extract_signature()
        if signature is None:
            return
        vendor = get_signature_vendor_for_signature(
            signature, config=r.get_config_stack(), keyids=keyids
        )
        vendor.verify(payload, signature)


def replace_list(repo: RepoPath) -> list[tuple[ObjectID, ObjectID]]:
    """List all replacement refs.

    Args:
      repo: Path to repository

    Returns:
      List of (object_sha, replacement_sha) tuples, where object_sha is the
      object being replaced and replacement_sha is what it is replaced with
    """
    with open_repo_closing(repo) as r:
        replacements: list[tuple[ObjectID, ObjectID]] = []
        for ref in r.refs.keys():
            if ref.startswith(LOCAL_REPLACE_PREFIX):
                object_sha = ObjectID(ref[len(LOCAL_REPLACE_PREFIX) :])
                replacement_sha = r.refs[ref]
                replacements.append((object_sha, replacement_sha))
        return replacements


def replace_delete(repo: RepoPath, object_sha: ObjectID | str) -> None:
    """Delete a replacement ref.

    Args:
      repo: Path to repository
      object_sha: SHA of the object whose replacement should be removed
    """
    with open_repo_closing(repo) as r:
        # Convert to ObjectID if string
        if isinstance(object_sha, str):
            object_sha_id = ObjectID(object_sha.encode("ascii"))
        else:
            object_sha_id = object_sha
        replace_ref = _make_replace_ref(object_sha_id)
        if replace_ref not in r.refs:
            raise KeyError(
                f"No replacement ref found for {object_sha_id.decode('ascii')}"
            )
        del r.refs[replace_ref]


def replace_create(
    repo: RepoPath,
    object_sha: str | ObjectID,
    replacement_sha: str | ObjectID,
) -> None:
    """Create a replacement ref to replace one object with another.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to replace
      replacement_sha: SHA of the replacement object
    """
    with open_repo_closing(repo) as r:
        # Convert to ObjectID if string
        if isinstance(object_sha, str):
            object_sha_id = ObjectID(object_sha.encode("ascii"))
        else:
            object_sha_id = object_sha
        if isinstance(replacement_sha, str):
            replacement_sha_id = ObjectID(replacement_sha.encode("ascii"))
        else:
            replacement_sha_id = replacement_sha
        # Create the replacement ref
        replace_ref = _make_replace_ref(object_sha_id)
        r.refs[replace_ref] = replacement_sha_id
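

# Illustrative usage sketch (not part of the public API): record a
# replacement and list the active replacements. The SHAs here are
# hypothetical placeholders, not real objects.
def _example_replace() -> None:
    replace_create("example-repo", "0" * 40, "1" * 40)
    for original, replacement in replace_list("example-repo"):
        print(original.decode("ascii"), "->", replacement.decode("ascii"))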


def reset(
    repo: str | os.PathLike[str] | Repo,
    mode: str,
    treeish: str | bytes | Commit | Tree | Tag = "HEAD",
) -> None:
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to
    """
    with open_repo_closing(repo) as r:
        # Parse the target tree
        tree = parse_tree(r, treeish)

        # Only parse as commit if treeish is not a Tree object
        if isinstance(treeish, Tree):
            # For Tree objects, we can't determine the commit, so skip updating HEAD
            target_commit = None
        else:
            target_commit = parse_commit(r, treeish)

        # Update HEAD to point to the target commit
        if target_commit is not None:
            # Get the current HEAD value for set_if_equals
            try:
                old_head = r.refs[HEADREF]
            except KeyError:
                old_head = None

            # Create reflog message
            treeish_str = (
                treeish.decode("utf-8")
                if isinstance(treeish, bytes)
                else str(treeish)
                if not isinstance(treeish, (Commit, Tree, Tag))
                else target_commit.id.decode("ascii")
            )
            default_message = f"reset: moving to {treeish_str}".encode()
            reflog_message = _get_reflog_message(default_message)

            # Update HEAD with reflog message
            r.refs.set_if_equals(
                HEADREF, old_head, target_commit.id, message=reflog_message
            )

        if mode == "soft":
            # Soft reset: only update HEAD, leave index and working tree unchanged
            return

        elif mode == "mixed":
            # Mixed reset: update HEAD and index, but leave working tree unchanged
            from ..object_store import iter_tree_contents

            # Open the index
            index = r.open_index()

            # Clear the current index
            index.clear()

            # Populate index from the target tree
            for entry in iter_tree_contents(r.object_store, tree.id):
                # Create an IndexEntry from the tree entry, using zeros for the
                # filesystem-specific fields since we're not touching the working tree
                assert (
                    entry.mode is not None
                    and entry.sha is not None
                    and entry.path is not None
                )
                index_entry = IndexEntry(
                    ctime=(0, 0),
                    mtime=(0, 0),
                    dev=0,
                    ino=0,
                    mode=entry.mode,
                    uid=0,
                    gid=0,
                    size=0,  # Size is 0 since we're not reading from disk
                    sha=entry.sha,
                    flags=0,
                )
                index[entry.path] = index_entry

            # Write the updated index
            index.write()

        elif mode == "hard":
            # Hard reset: update HEAD, index, and working tree.
            # Use the current index tree as the old tree so deletions are applied.
            index = r.open_index()
            if len(index) > 0:
                index_tree_id = index.commit(r.object_store)
            else:
                # Empty index
                index_tree_id = None

            # Get configuration for working tree updates
            honor_filemode, validate_path_element, symlink_fn = (
                _get_worktree_update_config(r)
            )

            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(
                r.object_store, index_tree_id, tree.id, want_unchanged=True
            )
            update_working_tree(
                r,
                index_tree_id,
                tree.id,
                change_iterator=changes,
                honor_filemode=honor_filemode,
                validate_path_element=validate_path_element,
                symlink_fn=symlink_fn,
                force_remove_untracked=True,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=True,  # Allow overwriting modified files
            )
        else:
            raise Error(f"Invalid reset mode: {mode}")
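

# Example (sketch; the repository path is hypothetical): a soft reset only
# moves HEAD, while a hard reset also rewrites the index and working tree,
# discarding uncommitted changes.
#
#     reset("/path/to/repo", "soft", "HEAD")
#     reset("/path/to/repo", "hard", b"origin/main")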


def get_remote_repo(
    repo: Repo, remote_location: str | bytes | None = None
) -> tuple[str | None, str]:
    """Get the remote repository information.

    Args:
      repo: Local repository object
      remote_location: Optional remote name or URL; defaults to the branch remote

    Returns:
      Tuple of (remote_name, remote_url) where remote_name may be None
      if remote_location is a URL rather than a configured remote
    """
    config = repo.get_config()
    if remote_location is None:
        remote_location = get_branch_remote(repo)
    if isinstance(remote_location, str):
        encoded_location = remote_location.encode()
    else:
        encoded_location = remote_location

    section = (b"remote", encoded_location)

    remote_name: str | None = None
    if config.has_section(section):
        remote_name = encoded_location.decode()
        encoded_location = config.get(section, "url")

    return (remote_name, encoded_location.decode())


def push(
    repo: RepoPath,
    remote_location: str | bytes | None = None,
    refspecs: str | bytes | Sequence[str | bytes] | None = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    force: bool = False,
    **kwargs: object,
) -> SendPackResult:
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
      **kwargs: Additional keyword arguments for the client

    Returns:
      SendPackResult describing the outcome of the push
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Check if mirror mode is enabled
        mirror_mode = False
        if remote_name:
            try:
                mirror_mode_val = r.get_config_stack().get_boolean(
                    (b"remote", remote_name.encode()), b"mirror"
                )
                if mirror_mode_val is not None:
                    mirror_mode = mirror_mode_val
            except KeyError:
                pass

        if mirror_mode:
            # Mirror mode: push all refs and delete non-existent ones
            refspecs = []
            for ref in r.refs.keys():
                # Push all refs to the same name on the remote
                refspecs.append(ref + b":" + ref)
        elif refspecs is None:
            refspecs = [active_branch(r)]

        # Normalize refspecs to bytes
        if isinstance(refspecs, str):
            refspecs_bytes: bytes | list[bytes] = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_bytes = refspecs
        else:
            refspecs_bytes = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_bytes.append(spec.encode())
                else:
                    refspecs_bytes.append(spec)

        # Get the client and path
        transport_kwargs = _filter_transport_kwargs(**kwargs)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **transport_kwargs,
        )

        selected_refs = []
        remote_changed_refs: dict[Ref, ObjectID | None] = {}

        def update_refs(refs: dict[Ref, ObjectID]) -> dict[Ref, ObjectID]:
            remote_refs = DictRefsContainer(refs)  # type: ignore[arg-type]
            selected_refs.extend(
                parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
            )
            new_refs: dict[Ref, ObjectID] = {}

            # In mirror mode, delete remote refs that don't exist locally
            if mirror_mode:
                local_refs = set(r.refs.keys())
                for remote_ref in refs.keys():
                    if remote_ref not in local_refs:
                        new_refs[remote_ref] = ZERO_SHA
                        remote_changed_refs[remote_ref] = None

            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    assert rh is not None
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error(
                            f"No valid ref {lh.decode() if isinstance(lh, bytes) else lh} in local repository"
                        ) from exc
                    assert rh is not None
                    if not force_ref and rh in refs:
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:

            def generate_pack_data_wrapper(
                have: AbstractSet[ObjectID],
                want: AbstractSet[ObjectID],
                *,
                ofs_delta: bool = False,
                progress: Callable[..., None] | None = None,
            ) -> tuple[int, Iterator[UnpackedObject]]:
                # Wrap to match the expected signature; convert AbstractSet to
                # set since generate_pack_data expects set
                return r.generate_pack_data(
                    set(have), set(want), progress=progress, ofs_delta=ofs_delta
                )

            result = client.send_pack(
                path.encode(),
                update_refs,
                generate_pack_data=generate_pack_data_wrapper,
                progress=lambda data: (errstream.write(data), None)[1],
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        for ref, error in (result.ref_status or {}).items():  # type: ignore[assignment]
            if error is not None:
                errstream.write(
                    f"Push of ref {ref.decode('utf-8', 'replace')} failed: {error}\n".encode(
                        err_encoding
                    )
                )
            else:
                errstream.write(
                    f"Ref {ref.decode('utf-8', 'replace')} updated\n".encode()
                )

        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)

    # Trigger auto GC if needed; return only afterwards so the GC code is
    # actually reachable
    from ..gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return result
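

# Example (sketch; remote name and refspec are hypothetical): pushing the
# current branch to "origin", then force-pushing an explicit refspec.
#
#     push("/path/to/repo", "origin")
#     push("/path/to/repo", "origin", "refs/heads/main:refs/heads/main", force=True)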


def pull(
    repo: RepoPath,
    remote_location: str | bytes | None = None,
    refspecs: str | bytes | Sequence[str | bytes] | None = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    fast_forward: bool = True,
    ff_only: bool = False,
    force: bool = False,
    filter_spec: str | None = None,
    protocol_version: int | None = None,
    **kwargs: object,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestrings/strings.
      outstream: A stream file to write output
      errstream: A stream file to write errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: If True, allow overwriting local changes in the working tree.
        If False, pull will abort if it would overwrite uncommitted changes.
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: Desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      **kwargs: Additional keyword arguments for the client
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []

        if refspecs is None:
            refspecs_normalized: bytes | list[bytes] = [b"HEAD"]
        elif isinstance(refspecs, str):
            refspecs_normalized = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_normalized = refspecs
        else:
            refspecs_normalized = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_normalized.append(spec.encode())
                else:
                    refspecs_normalized.append(spec)

        def determine_wants(
            remote_refs: dict[Ref, ObjectID], depth: int | None = None
        ) -> list[ObjectID]:
            remote_refs_container = DictRefsContainer(remote_refs)  # type: ignore[arg-type]
            selected_refs.extend(
                parse_reftuples(
                    remote_refs_container, r.refs, refspecs_normalized, force=force
                )
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if lh is not None
                and lh in remote_refs
                and remote_refs[lh] not in r.object_store
            ]

        transport_kwargs = _filter_transport_kwargs(**kwargs)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **transport_kwargs,
        )
        if filter_spec:
            filter_spec_bytes: bytes | None = filter_spec.encode("ascii")
        else:
            filter_spec_bytes = None

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            determine_wants=determine_wants,  # type: ignore[arg-type] # Function matches protocol but mypy can't verify
            filter_spec=filter_spec_bytes,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[HEADREF]
            old_commit = r[old_head]
            assert isinstance(old_commit, Commit)
            old_tree_id = old_commit.tree
        except KeyError:
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh is not None and rh in r.refs:
                try:
                    assert lh is not None
                    followed_ref = r.refs.follow(rh)[1]
                    assert followed_ref is not None
                    lh_ref = fetch_result.refs[lh]
                    assert lh_ref is not None
                    check_diverged(r, followed_ref, lh_ref)
                except DivergedBranches as exc:
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        assert lh is not None
                        merge_ref = fetch_result.refs[lh]
                        assert merge_ref is not None
                        _merge_result, conflicts = _do_merge(r, merge_ref)
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating the ref since the merge already updated HEAD
                        continue
            if rh is not None and lh is not None:
                lh_value = fetch_result.refs[lh]
                if lh_value is not None:
                    r.refs[Ref(rh)] = lh_value

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            lh, rh, _ = selected_refs[0]
            if lh is not None:
                ref_value = fetch_result.refs[lh]
                if ref_value is not None:
                    r[b"HEAD"] = ref_value

        # Update the working tree to match the new HEAD. Skip if a merge was
        # performed, as the merge already updates the working tree.
        if not merged and old_tree_id is not None:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            new_tree_id = head_commit.tree
            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(r.object_store, old_tree_id, new_tree_id)
            update_working_tree(
                r,
                old_tree_id,
                new_tree_id,
                change_iterator=changes,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=force,
            )
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from ..gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
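

# Example (sketch; the remote name is hypothetical): a fast-forward-only pull
# of the default refspec (HEAD), mirroring ``git pull --ff-only``.
#
#     pull("/path/to/repo", "origin", ff_only=True)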


def status(
    repo: str | os.PathLike[str] | Repo = ".",
    ignored: bool = False,
    untracked_files: str = "normal",
) -> GitStatus:
    """Return staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
        "no": do not return untracked files
        "normal": return untracked directories, not their contents
        "all": include all files in untracked directories

        Using untracked_files="no" can be faster than "all" when the worktree
        contains many untracked files/directories.

        Using untracked_files="normal" provides a good balance, only showing
        directories that are entirely untracked without listing all their contents.

    Returns: GitStatus tuple,
      staged - dict with lists of staged paths (diff index/HEAD)
      unstaged - list of unstaged paths (diff index/working-tree)
      untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # Open the index once and reuse it for both staged and unstaged checks
        index = r.open_index()

        # 1. Get status of staged
        tracked_changes = get_tree_changes(r, index)

        # 2. Get status of unstaged
        normalizer = r.get_blob_normalizer()
        # Pass the normalizer's checkin_normalize method directly
        if normalizer is not None:
            filter_callback = normalizer.checkin_normalize
        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)

        unstaged_changes = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
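

# Example (sketch; the path is hypothetical): the returned GitStatus is a
# tuple of (staged, unstaged, untracked), so it can be unpacked directly, and
# the staged dict uses the "add"/"delete"/"modify" keys described above.
#
#     staged, unstaged, untracked = status("/path/to/repo", untracked_files="no")
#     newly_added = staged["add"]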


def shortlog(
    repo: str | os.PathLike[str] | Repo,
    summary_only: bool = False,
    sort_by_commits: bool = False,
) -> list[dict[str, str]]:
    """Summarize commits by author, like git shortlog.

    Args:
      repo: Path to repository or Repo object.
      summary_only: If True, only show counts per author.
      sort_by_commits: If True, sort authors by number of commits.

    Returns:
      A list where each item is a dict containing:
        - "author": the author's name as a string
        - "messages": all commit messages concatenated into a single string
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker()
        authors: dict[str, list[str]] = {}

        for entry in walker:
            commit = entry.commit
            author = commit.author.decode(commit.encoding or "utf-8")
            message = commit.message.decode(commit.encoding or "utf-8").strip()
            authors.setdefault(author, []).append(message)

        # Convert messages to a single string per author
        items: list[dict[str, str]] = [
            {"author": author, "messages": "\n".join(msgs)}
            for author, msgs in authors.items()
        ]

        if sort_by_commits:
            # Sort by number of commits (lines in messages)
            items.sort(key=lambda x: len(x["messages"].splitlines()), reverse=True)

        return items
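

# Example (sketch; the path is hypothetical): printing a commit count per
# author, busiest author first, derived from the concatenated messages.
#
#     for item in shortlog("/path/to/repo", sort_by_commits=True):
#         print(item["author"], len(item["messages"].splitlines()))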


def _walk_working_dir_paths(
    frompath: str | bytes | os.PathLike[str],
    basepath: str | bytes | os.PathLike[str],
    prune_dirnames: Callable[[str, list[str]], list[str]] | None = None,
) -> Iterator[tuple[str | bytes, bool]]:
    """Get path, is_dir for files in working dir from frompath.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
      prune_dirnames: Optional callback to prune dirnames during os.walk;
        dirnames will be set to the result of prune_dirnames(dirpath, dirnames)
    """
    for dirpath, dirnames, filenames in os.walk(frompath):  # type: ignore[type-var]
        # Skip .git and below.
        if ".git" in dirnames:
            dirnames.remove(".git")
            if dirpath != basepath:
                continue

        if ".git" in filenames:
            filenames.remove(".git")
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True  # type: ignore[misc]

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)  # type: ignore[call-overload]
            yield filepath, False

        if prune_dirnames:
            dirnames[:] = prune_dirnames(dirpath, dirnames)  # type: ignore[arg-type]


def get_untracked_paths(
    frompath: str | bytes | os.PathLike[str],
    basepath: str | bytes | os.PathLike[str],
    index: Index,
    exclude_ignored: bool = False,
    untracked_files: str = "all",
) -> Iterator[str]:
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded; no files inside the directory will be returned.
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    # Normalize paths to str
    frompath_str = os.fsdecode(os.fspath(frompath))
    basepath_str = os.fsdecode(os.fspath(basepath))

    with open_repo_closing(basepath_str) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def directory_has_non_ignored_files(dir_path: str, base_rel_path: str) -> bool:
        """Recursively check if a directory contains any non-ignored files."""
        try:
            for entry in os.listdir(dir_path):
                entry_path = os.path.join(dir_path, entry)
                rel_entry = os.path.join(base_rel_path, entry)
                if os.path.isfile(entry_path):
                    if ignore_manager.is_ignored(rel_entry) is not True:
                        return True
                elif os.path.isdir(entry_path):
                    if directory_has_non_ignored_files(entry_path, rel_entry):
                        return True
            return False
        except OSError:
            # If we can't read the directory, assume it has non-ignored files
            return True

    def prune_dirnames(dirpath: str, dirnames: list[str]) -> list[str]:
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            ip = os.path.join(os.path.relpath(path, basepath_str), "")

            # Check if the directory is ignored
            if ignore_manager.is_ignored(ip) is True:
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath_str), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert the directory path to a tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath_str, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    rel_path_base = os.path.relpath(path, basepath_str)
                    rel_path_from = os.path.join(
                        os.path.relpath(path, frompath_str), ""
                    )

                    # If excluding ignored, check whether the directory
                    # contains any non-ignored files
                    if exclude_ignored:
                        if not directory_has_non_ignored_files(path, rel_path_base):
                            # Directory only contains ignored files, skip it
                            del dirnames[i]
                            continue

                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(rel_path_base)
                    if not exclude_ignored or not is_ignored:
                        untracked_dir_list.append(rel_path_from)
                    del dirnames[i]

        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            if not is_dir:
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath_str, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)
                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath_str), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list
    yield from ignored_dirs
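

# Example (sketch; paths are hypothetical, and the index would come from an
# open repository): listing untracked paths relative to the repository root,
# collapsing fully-untracked directories the way ``git status`` does in its
# default ("normal") mode.
#
#     index = ...  # an open dulwich Index for the repository (assumed)
#     for p in get_untracked_paths("/repo", "/repo", index, untracked_files="normal"):
#         print(p)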


def grep(
    repo: RepoPath,
    pattern: str | bytes,
    *,
    outstream: TextIO = sys.stdout,
    rev: str | bytes | None = None,
    pathspecs: Sequence[str | bytes] | None = None,
    ignore_case: bool = False,
    line_number: bool = False,
    max_depth: int | None = None,
    respect_ignores: bool = True,
) -> None:
    """Search for a pattern in tracked files.

    Args:
      repo: Path to repository or Repo object
      pattern: Regular expression pattern to search for
      outstream: Stream to write results to
      rev: Revision to search in (defaults to HEAD)
      pathspecs: Optional list of path patterns to limit the search
      ignore_case: Whether to perform case-insensitive matching
      line_number: Whether to output line numbers
      max_depth: Maximum directory depth to search
      respect_ignores: Whether to respect .gitignore patterns
    """
    from ..object_store import iter_tree_contents

    # Compile the pattern
    flags = re.IGNORECASE if ignore_case else 0
    try:
        if isinstance(pattern, bytes):
            compiled_pattern = re.compile(pattern, flags)
        else:
            compiled_pattern = re.compile(pattern.encode("utf-8"), flags)
    except re.error as e:
        raise ValueError(f"Invalid regular expression: {e}") from e

    with open_repo_closing(repo) as r:
        # Get the tree to search
        if rev is None:
            try:
                commit = r[b"HEAD"]
                assert isinstance(commit, Commit)
            except KeyError as e:
                raise ValueError("No HEAD commit found") from e
        else:
            rev_bytes = rev if isinstance(rev, bytes) else rev.encode("utf-8")
            commit_obj = parse_commit(r, rev_bytes)
            if commit_obj is None:
                raise ValueError(f"Invalid revision: {rev}")
            commit = commit_obj

        tree = r[commit.tree]
        assert isinstance(tree, Tree)

        # Set up the ignore filter if requested
        ignore_manager = None
        if respect_ignores:
            ignore_manager = IgnoreFilterManager.from_repo(r)

        # Convert pathspecs to bytes
        pathspecs_bytes: list[bytes] | None = None
        if pathspecs:
            pathspecs_bytes = [
                p if isinstance(p, bytes) else p.encode("utf-8") for p in pathspecs
            ]

        # Iterate through all files in the tree
        for entry in iter_tree_contents(r.object_store, tree.id):
            path, mode, sha = entry.path, entry.mode, entry.sha
            assert path is not None
            assert mode is not None
            assert sha is not None

            # Skip directories
            if stat.S_ISDIR(mode):
                continue

            # Check max depth
            if max_depth is not None:
                depth = path.count(b"/")
                if depth > max_depth:
                    continue

            # Check pathspecs
            if pathspecs_bytes:
                matches_pathspec = False
                for pathspec in pathspecs_bytes:
                    # Simple prefix matching (could be enhanced with full pathspec support)
                    if path.startswith(pathspec) or fnmatch.fnmatch(
                        path.decode("utf-8", errors="replace"),
                        pathspec.decode("utf-8", errors="replace"),
                    ):
                        matches_pathspec = True
                        break
                if not matches_pathspec:
                    continue

            # Check ignore patterns
            if ignore_manager:
                path_str = path.decode("utf-8", errors="replace")
                if ignore_manager.is_ignored(path_str) is True:
                    continue

            # Get the blob content
            blob = r[sha]
            assert isinstance(blob, Blob)

            # Search for the pattern in the blob, line by line
            content = blob.data
            lines = content.split(b"\n")
            for line_num, line in enumerate(lines, 1):
                if compiled_pattern.search(line):
                    path_str = path.decode("utf-8", errors="replace")
                    line_str = line.decode("utf-8", errors="replace")
                    if line_number:
                        outstream.write(f"{path_str}:{line_num}:{line_str}\n")
                    else:
                        outstream.write(f"{path_str}:{line_str}\n")
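

# Example (sketch; the pattern and pathspec are hypothetical): searching
# tracked files under "src" for "TODO", case-insensitively, with line numbers,
# similar to ``git grep -in TODO -- src``.
#
#     grep("/path/to/repo", "TODO", pathspecs=["src"], ignore_case=True,
#          line_number=True)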


def get_tree_changes(
    repo: RepoPath, index: Index | None = None
) -> dict[str, list[str | bytes]]:
    """Return add/delete/modify changes to tree by comparing the index to HEAD.

    Args:
      repo: Repo path or object
      index: Optional Index object to reuse (avoids re-opening the index)

    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        if index is None:
            index = r.open_index()

        # Compares the index to HEAD and determines changes.
        # Iterate through the changes and report add/delete/modify.
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes: dict[str, list[str | bytes]] = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            tree_id = head_commit.tree
        except KeyError:
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            if not change[0][0]:
                assert change[0][1] is not None
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                assert change[0][0] is not None
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                assert change[0][0] is not None
                tracked_changes["modify"].append(change[0][0])
            else:
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes


def daemon(
    path: str | os.PathLike[str] = ".",
    address: str | None = None,
    port: int | None = None,
) -> None:
    """Run a daemon serving Git requests over TCP/IP.

    Args:
      path: Path to the directory to serve.
      address: Optional address to listen on (defaults to localhost)
      port: Optional port to listen on (defaults to 9418, the standard git port)
    """
    # TODO(jelmer): Support git-daemon-export-ok and --export-all.
    backend = FileSystemBackend(os.fspath(path))
    server = TCPGitServer(backend, address or "localhost", port or 9418)
    server.serve_forever()
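

# Example (sketch): serving the current directory over the git protocol on
# the standard port; a client could then reach it via git://localhost/.
# Note this call blocks until the process is stopped.
#
#     daemon(".", address="localhost", port=9418)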


def web_daemon(
    path: str | os.PathLike[str] = ".",
    address: str | None = None,
    port: int | None = None,
) -> None:
    """Run a daemon serving Git requests over HTTP.

    Args:
      path: Path to the directory to serve
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to 80)
    """
    from ..web import (
        WSGIRequestHandlerLogger,
        WSGIServerLogger,
        make_server,
        make_wsgi_chain,
    )

    backend = FileSystemBackend(os.fspath(path))
    app = make_wsgi_chain(backend)
    server = make_server(
        address or "::",
        port or 80,
        app,
        handler_class=WSGIRequestHandlerLogger,
        server_class=WSGIServerLogger,
    )
    server.serve_forever()


def upload_pack(
    path: str | os.PathLike[str] = ".",
    inf: BinaryIO | None = None,
    outf: BinaryIO | None = None,
) -> int:
    """Upload a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with the client
      outf: Output stream to communicate with the client

    Returns: Exit code (always 0)
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = UploadPackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def receive_pack(
    path: str | os.PathLike[str] = ".",
    inf: BinaryIO | None = None,
    outf: BinaryIO | None = None,
) -> int:
    """Receive a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with the client
      outf: Output stream to communicate with the client

    Returns: Exit code (always 0)
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = ReceivePackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def _make_branch_ref(name: str | bytes) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_branch_name(name)


def _make_replace_ref(name: str | bytes | ObjectID) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_replace_name(name)


def branch_delete(repo: RepoPath, name: str | bytes | Sequence[str | bytes]) -> None:
    """Delete a branch.

    Args:
      repo: Path to the repository
      name: Name of the branch, or a list of branch names to delete
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, (list, tuple)):
            names = name
        else:
            names = [name]
        for branch_name in names:
            del r.refs[_make_branch_ref(branch_name)]
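

# Example (sketch; branch names are hypothetical): ``name`` may be a single
# branch or a list of branches, and str and bytes names can be mixed.
#
#     branch_delete("/path/to/repo", ["feature-a", b"feature-b"])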


def branch_create(
    repo: str | os.PathLike[str] | Repo,
    name: str | bytes,
    objectish: str | bytes | None = None,
    force: bool = False,
) -> None:
    """Create a branch.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point the new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"

        # Try to expand branch shorthand before parsing
        original_objectish = objectish
        objectish_bytes = (
            objectish.encode(DEFAULT_ENCODING)
            if isinstance(objectish, str)
            else objectish
        )
        if Ref(b"refs/remotes/" + objectish_bytes) in r.refs:
            objectish = b"refs/remotes/" + objectish_bytes
        elif local_branch_name(objectish_bytes) in r.refs:
            objectish = local_branch_name(objectish_bytes)

        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        default_message = (
            b"branch: Created from " + original_objectish.encode(DEFAULT_ENCODING)
            if isinstance(original_objectish, str)
            else b"branch: Created from " + original_objectish
        )
        ref_message = _get_reflog_message(default_message)
        if force:
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                name_str = name.decode() if isinstance(name, bytes) else name
                raise Error(f"Branch with name {name_str} already exists.")

        # Check if we should set up tracking
        config = r.get_config_stack()
        try:
            auto_setup_merge = config.get((b"branch",), b"autoSetupMerge").decode()
        except KeyError:
            auto_setup_merge = "true"  # Default value

        # Determine if the objectish refers to a remote-tracking branch
        objectish_ref = None
        if original_objectish != "HEAD":
            # Try to resolve objectish as a ref
            objectish_bytes = (
                original_objectish.encode(DEFAULT_ENCODING)
                if isinstance(original_objectish, str)
                else original_objectish
            )
            if Ref(objectish_bytes) in r.refs:
                objectish_ref = objectish_bytes
            elif Ref(b"refs/remotes/" + objectish_bytes) in r.refs:
                objectish_ref = b"refs/remotes/" + objectish_bytes
            elif local_branch_name(objectish_bytes) in r.refs:
                objectish_ref = local_branch_name(objectish_bytes)
        else:
            # HEAD might point to a remote-tracking branch
            head_ref = r.refs.follow(HEADREF)[0][1]
            if head_ref.startswith(b"refs/remotes/"):
                objectish_ref = head_ref

        # Set up tracking if appropriate
        if objectish_ref and (
            (auto_setup_merge == "always")
            or (
                auto_setup_merge == "true"
                and objectish_ref.startswith(b"refs/remotes/")
            )
        ):
            # Extract the remote name and branch from the ref
            if objectish_ref.startswith(b"refs/remotes/"):
                parts = objectish_ref[len(b"refs/remotes/") :].split(b"/", 1)
                if len(parts) == 2:
                    remote_name = parts[0]
                    remote_branch = local_branch_name(parts[1])
                    # Set up tracking
                    repo_config = r.get_config()
                    branch_name_bytes = (
                        name.encode(DEFAULT_ENCODING) if isinstance(name, str) else name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"remote", remote_name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"merge", remote_branch
                    )
                    repo_config.write_to_path()
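

# Example (sketch; names are hypothetical): creating a branch from a
# remote-tracking ref; with the default branch.autoSetupMerge ("true") this
# also records tracking configuration for the new branch.
#
#     branch_create("/path/to/repo", "feature", objectish="origin/main")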


def filter_branches_by_pattern(branches: Iterable[bytes], pattern: str) -> list[bytes]:
    """Filter branches by fnmatch pattern.

    Args:
      branches: Iterable of branch names as bytes
      pattern: Pattern to match against

    Returns:
      List of filtered branch names
    """
    return [
        branch for branch in branches if fnmatch.fnmatchcase(branch.decode(), pattern)
    ]


def branch_list(repo: RepoPath) -> list[Ref]:
    """List all branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without the refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        branches: list[Ref] = list(r.refs.keys(base=Ref(LOCAL_BRANCH_PREFIX)))

        # Check for branch.sort configuration
        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse the sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = local_branch_name(branch_name)
                sha = r.refs[ref]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date. Python's sort is ascending by default,
            # which for dates means oldest first. Use the branch name as a
            # secondary key so the ordering is stable and consistent.
            if reverse:
                # For a reverse sort we want newest dates first but
                # alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key, fall back to the default
            branches.sort()

        return branches
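

# Example (sketch; the path is hypothetical): branch names come back as bytes
# without the refs/heads/ prefix, in the order dictated by branch.sort.
#
#     for name in branch_list("/path/to/repo"):
#         print(name.decode())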


def branch_remotes_list(repo: RepoPath) -> list[bytes]:
    """List the names of all remote-tracking branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without the refs/remotes/ prefix but including
      the remote name, e.g. b'origin/main' for refs/remotes/origin/main)
    """
    with open_repo_closing(repo) as r:
        branches = [bytes(ref) for ref in r.refs.keys(base=Ref(LOCAL_REMOTE_PREFIX))]

        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse the sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = LOCAL_REMOTE_PREFIX + branch_name
                sha = r.refs[Ref(ref)]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date. Python's sort is ascending by default,
            # which for dates means oldest first. Use the branch name as a
            # secondary key so the ordering is stable and consistent.
            if reverse:
                # For a reverse sort we want newest dates first but
                # alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key
            raise ValueError(f"Unknown sort key: {sort_key}")

        return branches


def _get_branch_merge_status(repo: RepoPath) -> Iterator[tuple[bytes, bool]]:
    """Get merge status for all branches relative to the current HEAD.

    Args:
      repo: Path to the repository

    Yields:
      Tuple of (``branch_name``, ``is_merged``) where:
        - ``branch_name``: Branch name without the refs/heads/ prefix
        - ``is_merged``: True if the branch is merged into HEAD, False otherwise
    """
    with open_repo_closing(repo) as r:
        current_sha = r.refs[HEADREF]

        for branch_ref, branch_sha in r.refs.as_dict(base=Ref(b"refs/heads/")).items():
            # A branch is fully merged if it is an ancestor of HEAD
            is_merged = can_fast_forward(r, branch_sha, current_sha)
            yield branch_ref, is_merged


def merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without the refs/heads/ prefix) that are merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if is_merged:
            yield branch_name


def no_merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have not been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without the refs/heads/ prefix) that are not merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if not is_merged:
            yield branch_name


def branches_containing(repo: RepoPath, commit: str) -> Iterator[bytes]:
    """List branches that contain the specified commit.

    Args:
      repo: Path to the repository
      commit: Commit-ish string (SHA, branch name, tag, etc.)

    Yields:
      Branch names (without the refs/heads/ prefix) that contain the commit

    Raises:
      ValueError: If the commit reference is malformed
      KeyError: If the commit reference does not exist
    """
    with open_repo_closing(repo) as r:
        commit_obj = parse_commit(r, commit)
        commit_sha = commit_obj.id

        for branch_ref, branch_sha in r.refs.as_dict(
            base=Ref(LOCAL_BRANCH_PREFIX)
        ).items():
            if can_fast_forward(r, commit_sha, branch_sha):
                yield branch_ref


def active_branch(repo: RepoPath) -> bytes:
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open

    Returns:
      Branch name

    Raises:
      KeyError: if the repository does not have a working tree
      IndexError: if HEAD is floating
      ValueError: if HEAD points at a ref outside refs/heads/
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(HEADREF)[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        return active_ref[len(LOCAL_BRANCH_PREFIX) :]


def get_branch_remote(repo: str | os.PathLike[str] | Repo) -> bytes:
    """Return the active branch's remote name.

    Falls back to b"origin" if the branch has no remote configured.

    Args:
      repo: Repository to open

    Returns:
      Remote name

    Raises:
      KeyError: if the repository does not have a working tree
    """
    with open_repo_closing(repo) as r:
        branch_name = active_branch(r.path)
        config = r.get_config()
        try:
            remote_name = config.get((b"branch", branch_name), b"remote")
        except KeyError:
            remote_name = b"origin"
        return remote_name


def get_branch_merge(repo: RepoPath, branch_name: bytes | None = None) -> bytes:
    """Return the branch's merge reference (upstream branch), if any.

    Args:
      repo: Repository to open
      branch_name: Name of the branch (defaults to the active branch)

    Returns:
      Merge reference name (e.g. b"refs/heads/main")

    Raises:
      KeyError: if the branch does not have a merge configuration
    """
    with open_repo_closing(repo) as r:
        if branch_name is None:
            branch_name = active_branch(r.path)
        config = r.get_config()
        return config.get((b"branch", branch_name), b"merge")


def set_branch_tracking(
    repo: str | os.PathLike[str] | Repo,
    branch_name: bytes,
    remote_name: bytes,
    remote_ref: bytes,
) -> None:
    """Set up branch tracking configuration.

    Args:
      repo: Repository to open
      branch_name: Name of the local branch
      remote_name: Name of the remote (e.g. b"origin")
      remote_ref: Remote reference to track (e.g. b"refs/heads/main")
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        config.set((b"branch", branch_name), b"remote", remote_name)
        config.set((b"branch", branch_name), b"merge", remote_ref)
        config.write_to_path()
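

# Example (sketch; names are hypothetical): making a local "feature" branch
# track origin's "main", which is equivalent to setting branch.feature.remote
# and branch.feature.merge in the repository config.
#
#     set_branch_tracking("/path/to/repo", b"feature", b"origin", b"refs/heads/main")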


def fetch(
    repo: RepoPath,
    remote_location: str | bytes | None = None,
    outstream: TextIO = sys.stdout,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    message: bytes | None = None,
    depth: int | None = None,
    prune: bool = False,
    prune_tags: bool = False,
    force: bool = False,
    operation: str | None = None,
    thin_packs: bool = True,
    report_activity: Callable[[int, str], None] | None = None,
    quiet: bool = False,
    include_tags: bool = False,
    username: str | None = None,
    password: str | None = None,
    key_filename: str | None = None,
    ssh_command: str | None = None,
    shallow_since: str | None = None,
    shallow_exclude: list[str] | None = None,
) -> FetchPackResult:
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune refs removed on the remote
      prune_tags: Prune tags removed on the remote
      force: Force fetching even if it would overwrite local changes
      operation: Git operation for authentication (e.g., "fetch")
      thin_packs: Whether to use thin packs
      report_activity: Optional callback for reporting transport activity
      quiet: Whether to suppress progress output
      include_tags: Whether to include tags
      username: Username for authentication
      password: Password for authentication
      key_filename: SSH key filename
      ssh_command: SSH command to use
      shallow_since: Deepen or shorten the history to include commits after this date
      shallow_exclude: Deepen or shorten the history to exclude commits reachable from these refs

    Returns:
      FetchPackResult with the refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        default_message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        message = _get_reflog_message(default_message, message)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            operation=operation,
            thin_packs=thin_packs,
            report_activity=report_activity,
            quiet=quiet,
            include_tags=include_tags,
            username=username,
            password=password,
            key_filename=key_filename,
            ssh_command=ssh_command,
        )

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            depth=depth,
            shallow_since=shallow_since,
            shallow_exclude=shallow_exclude,
        )
        if remote_name is not None:
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )

    # Trigger auto GC if needed
    from ..gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return fetch_result
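

# Example (sketch; the remote name is hypothetical): a shallow fetch of the
# last 10 commits from "origin", pruning refs that were removed remotely, and
# inspecting the refs reported by the server.
#
#     result = fetch("/path/to/repo", "origin", depth=10, prune=True)
#     print(result.refs)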


def for_each_ref(
    repo: Repo | str = ".",
    pattern: str | bytes | None = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob(7)-style pattern to filter the refs with

    Returns: List of bytes tuples with: (sha, object_type, ref_name)
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        if pattern:
            matching_refs: dict[Ref, ObjectID] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False

                # git for-each-ref uses glob(7)-style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                if len(ref_parts) > len(pattern_parts):
                    continue

                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break

                if matches:
                    matching_refs[ref] = sha

            refs = matching_refs

        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]

        return ret


def show_ref(
    repo: Repo | str = ".",
    patterns: list[str | bytes] | None = None,
    head: bool = False,
    branches: bool = False,
    tags: bool = False,
    dereference: bool = False,
    verify: bool = False,
) -> list[tuple[bytes, bytes]]:
    """List references in a local repository.

    Args:
      repo: Path to the repository
      patterns: Optional list of patterns to filter refs (matched from the end)
      head: Show the HEAD reference
      branches: Limit to local branches (refs/heads/)
      tags: Limit to local tags (refs/tags/)
      dereference: Dereference tags into object IDs
      verify: Enable stricter reference checking (exact path match)

    Returns: List of tuples with (sha, ref_name) or (sha, ref_name^{}) for dereferenced tags
    """
    # Convert string patterns to bytes
    byte_patterns: list[bytes] | None = None
    if patterns:
        byte_patterns = [os.fsencode(p) if isinstance(p, str) else p for p in patterns]

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Filter by branches/tags if specified
        if branches or tags:
            prefixes = []
            if branches:
                prefixes.append(LOCAL_BRANCH_PREFIX)
            if tags:
                prefixes.append(LOCAL_TAG_PREFIX)
            filtered_refs = filter_ref_prefix(refs, prefixes)
        else:
            # By default, show tags, heads, and remote refs (but not HEAD)
            filtered_refs = filter_ref_prefix(refs, [b"refs/"])

        # Add HEAD if requested
        if head and HEADREF in refs:
            filtered_refs[HEADREF] = refs[HEADREF]

        # Filter by patterns if specified
        if byte_patterns:
            matching_refs: dict[Ref, ObjectID] = {}
            for ref, sha in filtered_refs.items():
                for pattern in byte_patterns:
                    if verify:
                        # Verify mode requires an exact match
                        if ref == pattern:
                            matching_refs[ref] = sha
                            break
                    else:
                        # Pattern matching from the end of the full name;
                        # only complete parts are matched. E.g., "master"
                        # matches "refs/heads/master" but not
                        # "refs/heads/mymaster".
                        pattern_parts = pattern.split(b"/")
                        ref_parts = ref.split(b"/")

                        # Try to match from the end
                        if len(pattern_parts) <= len(ref_parts):
                            # Check if the end of the ref matches the pattern
                            matches = True
                            for i in range(len(pattern_parts)):
                                if (
                                    ref_parts[-(len(pattern_parts) - i)]
                                    != pattern_parts[i]
                                ):
                                    matches = False
                                    break
                            if matches:
                                matching_refs[ref] = sha
                                break

            filtered_refs = matching_refs

        # Sort by ref name
        sorted_refs = sorted(filtered_refs.items(), key=lambda x: x[0])

        # Build the result list
        result: list[tuple[bytes, bytes]] = []
        for ref, sha in sorted_refs:
            result.append((sha, ref))

            # Dereference tags if requested
            if dereference and ref.startswith(LOCAL_TAG_PREFIX):
                try:
                    obj = r.get_object(sha)
                    # Peel tag objects to get the underlying commit/object
                    while obj.type_name == b"tag":
                        assert isinstance(obj, Tag)
                        _obj_class, sha = obj.object
                        obj = r.get_object(sha)
                    result.append((sha, ref + b"^{}"))
                except KeyError:
                    # Object not found, skip dereferencing
                    pass

        return result
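

# Example (sketch; the path is hypothetical): listing tags with annotated
# tags peeled to the objects they point at, like ``git show-ref --tags -d``;
# peeled entries come back with a ref name ending in ``^{}``.
#
#     for sha, ref in show_ref("/path/to/repo", tags=True, dereference=True):
#         print(sha.decode(), ref.decode())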


def show_branch(
    repo: Repo | str = ".",
    branches: list[str | bytes] | None = None,
    all_branches: bool = False,
    remotes: bool = False,
    current: bool = False,
    topo_order: bool = False,
    more: int | None = None,
    list_branches: bool = False,
    independent_branches: bool = False,
    merge_base: bool = False,
) -> list[str]:
    """Display branches and their commits.

    Args:
      repo: Path to the repository
      branches: List of specific branches to show (default: all local branches)
      all_branches: Show both local and remote branches
      remotes: Show only remote branches
      current: Include the current branch if not specified
      topo_order: Show in topological order instead of chronological
      more: Show N more commits beyond the common ancestor (negative to show only headers)
      list_branches: Synonym for more=-1 (show only branch headers)
      independent_branches: Show only branches not reachable from others
      merge_base: Show merge bases instead of the commit list

    Returns:
      List of output lines
    """
    from ..graph import find_octopus_base, independent

    output_lines: list[str] = []

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Determine which branches to show
        branch_refs: dict[Ref, ObjectID] = {}

        if branches:
            # Specific branches requested
            for branch in branches:
                branch_bytes = (
                    os.fsencode(branch) if isinstance(branch, str) else branch
                )
                # Try as a full ref name first
                branch_ref_check = Ref(branch_bytes)
                if branch_ref_check in refs:
                    branch_refs[branch_ref_check] = refs[branch_ref_check]
                else:
                    # Try as a branch name
                    branch_ref = local_branch_name(branch_bytes)
                    if branch_ref in refs:
                        branch_refs[branch_ref] = refs[branch_ref]
                    # Try as a remote branch
                    else:
                        remote_ref = Ref(LOCAL_REMOTE_PREFIX + branch_bytes)
                        if remote_ref in refs:
                            branch_refs[remote_ref] = refs[remote_ref]
        else:
            # Default behavior: show local branches
            if all_branches:
                # Show both local and remote branches
                branch_refs = filter_ref_prefix(
                    refs, [LOCAL_BRANCH_PREFIX, LOCAL_REMOTE_PREFIX]
                )
            elif remotes:
                # Show only remote branches
                branch_refs = filter_ref_prefix(refs, [LOCAL_REMOTE_PREFIX])
            else:
                # Show only local branches
                branch_refs = filter_ref_prefix(refs, [LOCAL_BRANCH_PREFIX])

        # Add the current branch if requested and not already included
        if current:
            try:
                head_refs, _ = r.refs.follow(HEADREF)
                if head_refs:
                    head_ref = head_refs[0]
                    if head_ref not in branch_refs and head_ref in refs:
                        branch_refs[head_ref] = refs[head_ref]
            except (KeyError, TypeError):
                # HEAD doesn't point to a branch or doesn't exist
                pass

        if not branch_refs:
            return output_lines

        # Sort branches for consistent output
        sorted_branches = sorted(branch_refs.items(), key=lambda x: x[0])
        branch_sha_list: list[ObjectID] = [sha for _, sha in sorted_branches]

        # Handle the --independent flag
        if independent_branches:
            independent_shas = independent(r, branch_sha_list)
            for ref_name, sha in sorted_branches:
                if sha in independent_shas:
                    ref_str = os.fsdecode(shorten_ref_name(ref_name))
                    output_lines.append(ref_str)
            return output_lines

        # Handle the --merge-base flag
        if merge_base:
            if len(branch_sha_list) < 2:
                # Need at least 2 branches for a merge base
                return output_lines
            merge_bases = find_octopus_base(r, branch_sha_list)
            for sha in merge_bases:
                output_lines.append(sha.decode("ascii"))
            return output_lines

        # Get the current branch for marking
        current_branch: bytes | None = None
        try:
            head_refs, _ = r.refs.follow(HEADREF)
            if head_refs:
                current_branch = head_refs[0]
        except (KeyError, TypeError):
            pass

        # Collect commit information for each branch
        branch_commits: list[tuple[bytes, str]] = []  # (sha, message)
        for ref_name, sha in sorted_branches:
            try:
                commit = r[sha]
                if isinstance(commit, Commit):
  3711. message = commit.message.decode("utf-8", errors="replace").split(
  3712. "\n"
  3713. )[0]
  3714. else:
  3715. message = ""
  3716. branch_commits.append((sha, message))
  3717. except KeyError:
  3718. branch_commits.append((sha, ""))
  3719. # Handle --list flag (show only branch headers)
  3720. if list_branches or (more is not None and more < 0):
  3721. # Just show the branch headers
  3722. for i, (ref_name, sha) in enumerate(sorted_branches):
  3723. is_current = ref_name == current_branch
  3724. marker = "*" if is_current else "!"
  3725. # Create spacing for alignment
  3726. prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
  3727. ref_str = os.fsdecode(shorten_ref_name(ref_name))
  3728. _, message = branch_commits[i]
  3729. output_lines.append(f"{prefix}[{ref_str}] {message}")
  3730. return output_lines
  3731. # Build commit history for visualization
  3732. # Collect all commits reachable from any branch
  3733. all_commits: dict[
  3734. bytes, tuple[int, list[bytes], str]
  3735. ] = {} # sha -> (timestamp, parents, message)
  3736. def collect_commits(sha: bytes, branch_idx: int, visited: set[bytes]) -> None:
  3737. """Recursively collect commits."""
  3738. if sha in visited:
  3739. return
  3740. visited.add(sha)
  3741. try:
  3742. commit = r[sha]
  3743. except KeyError:
  3744. # Commit not found, stop traversal
  3745. pass
  3746. else:
  3747. if not isinstance(commit, Commit):
  3748. return
                timestamp = commit.commit_time
                parents = commit.parents
                message = commit.message.decode("utf-8", errors="replace").split(
                    "\n"
                )[0]
  3756. if sha not in all_commits:
  3757. all_commits[sha] = (timestamp, parents, message)
  3758. # Recurse to parents
  3759. for parent in parents:
  3760. collect_commits(parent, branch_idx, visited)
  3761. # Collect commits from all branches
  3762. for i, (_, sha) in enumerate(sorted_branches):
  3763. collect_commits(sha, i, set())
  3764. # Find common ancestor
  3765. common_ancestor_sha = None
  3766. if len(branch_sha_list) >= 2:
  3767. try:
  3768. merge_bases = find_octopus_base(r, branch_sha_list)
  3769. if merge_bases:
  3770. common_ancestor_sha = merge_bases[0]
  3771. except (KeyError, IndexError):
  3772. pass
        # Sort commits in reverse chronological order (newest first).
        # TODO: Implement proper topological ordering for topo_order; it
        # currently falls back to chronological order.
        sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])
  3781. # Determine how many commits to show
  3782. if more is not None:
  3783. # Find index of common ancestor
  3784. if common_ancestor_sha and common_ancestor_sha in all_commits:
  3785. ancestor_idx = next(
  3786. (
  3787. i
  3788. for i, (sha, _) in enumerate(sorted_commits)
  3789. if sha == common_ancestor_sha
  3790. ),
  3791. None,
  3792. )
  3793. if ancestor_idx is not None:
  3794. # Show commits up to ancestor + more
  3795. sorted_commits = sorted_commits[: ancestor_idx + 1 + more]
  3796. # Determine which branches contain which commits
  3797. branch_contains: list[set[bytes]] = []
  3798. for ref_name, sha in sorted_branches:
  3799. reachable = set()
  3800. def mark_reachable(commit_sha: bytes) -> None:
  3801. if commit_sha in reachable:
  3802. return
  3803. reachable.add(commit_sha)
  3804. if commit_sha in all_commits:
  3805. _, parents, _ = all_commits[commit_sha]
  3806. for parent in parents:
  3807. mark_reachable(parent)
  3808. mark_reachable(sha)
  3809. branch_contains.append(reachable)
  3810. # Output branch headers
  3811. for i, (ref_name, sha) in enumerate(sorted_branches):
  3812. is_current = ref_name == current_branch
  3813. marker = "*" if is_current else "!"
  3814. # Create spacing for alignment
  3815. prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
  3816. ref_str = os.fsdecode(shorten_ref_name(ref_name))
  3817. _, message = branch_commits[i]
  3818. output_lines.append(f"{prefix}[{ref_str}] {message}")
  3819. # Output separator
  3820. output_lines.append("-" * (len(sorted_branches) + 2))
  3821. # Output commits
  3822. for commit_sha, (_, _, message) in sorted_commits:
  3823. # Build marker string
  3824. markers = []
  3825. for i, (ref_name, branch_sha) in enumerate(sorted_branches):
  3826. if commit_sha == branch_sha:
  3827. # This is the tip of the branch
  3828. markers.append("*")
  3829. elif commit_sha in branch_contains[i]:
  3830. # This commit is in the branch
  3831. markers.append("+")
  3832. else:
  3833. # This commit is not in the branch
  3834. markers.append(" ")
  3835. marker_str = "".join(markers)
  3836. output_lines.append(f"{marker_str} [{message}]")
            # git show-branch refuses to handle more than 26 branches; stop
            # emitting commit lines once that limit is exceeded
            if len(sorted_branches) > 26:
                break
  3840. return output_lines
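# Illustrative sketch (the repository path and branch names are
# hypothetical): print a show-branch style summary for two branches.
#
#     for line in show_branch("/path/to/repo", branches=["main", "topic"]):
#         print(line)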
  3841. def ls_remote(
  3842. remote: str | bytes,
  3843. config: Config | None = None,
  3844. operation: str | None = None,
  3845. thin_packs: bool = True,
  3846. report_activity: Callable[[int, str], None] | None = None,
  3847. quiet: bool = False,
  3848. include_tags: bool = False,
  3849. username: str | None = None,
  3850. password: str | None = None,
  3851. key_filename: str | None = None,
  3852. ssh_command: str | None = None,
  3853. ) -> LsRemoteResult:
  3854. """List the refs in a remote.
  3855. Args:
  3856. remote: Remote repository location
  3857. config: Configuration to use
      operation: Operation type ("pull" or "push"), used when looking up credentials
  3859. thin_packs: Whether to use thin packs
  3860. report_activity: Function to report activity
  3861. quiet: Whether to suppress output
  3862. include_tags: Whether to include tags
  3863. username: Username for authentication
  3864. password: Password for authentication
  3865. key_filename: SSH key filename
  3866. ssh_command: SSH command to use
  3867. Returns:
  3868. LsRemoteResult object with refs and symrefs
  3869. """
  3870. if config is None:
  3871. config = StackedConfig.default()
  3872. remote_str = remote.decode() if isinstance(remote, bytes) else remote
  3873. client, host_path = get_transport_and_path(
  3874. remote_str,
  3875. config=config,
  3876. operation=operation,
  3877. thin_packs=thin_packs,
  3878. report_activity=report_activity,
  3879. quiet=quiet,
  3880. include_tags=include_tags,
  3881. username=username,
  3882. password=password,
  3883. key_filename=key_filename,
  3884. ssh_command=ssh_command,
  3885. )
  3886. return client.get_refs(
  3887. host_path.encode() if isinstance(host_path, str) else host_path
  3888. )
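# Illustrative sketch (the URL is hypothetical; per the docstring above, the
# result's refs attribute is assumed to map ref names to SHAs):
#
#     result = ls_remote("https://example.com/repo.git")
#     for ref, sha in result.refs.items():
#         if ref.startswith(b"refs/heads/"):
#             print(sha.decode("ascii"), ref.decode())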
  3889. def repack(repo: RepoPath, write_bitmaps: bool = False) -> None:
  3890. """Repack loose files in a repository.
  3891. Currently this only packs loose objects.
  3892. Args:
  3893. repo: Path to the repository
  3894. write_bitmaps: Whether to write bitmap indexes for packs
  3895. """
  3896. with open_repo_closing(repo) as r:
  3897. r.object_store.pack_loose_objects()
  3898. if write_bitmaps:
  3899. # Update pack cache to pick up newly created packs
  3900. r.object_store._update_pack_cache()
  3901. r.object_store.generate_pack_bitmaps(r.refs.as_dict())
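# Illustrative sketch: pack the loose objects of the repository in the
# current directory and write bitmap indexes for the resulting packs.
#
#     repack(".", write_bitmaps=True)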
  3902. def pack_objects(
  3903. repo: RepoPath,
  3904. object_ids: Sequence[ObjectID],
  3905. packf: BinaryIO,
  3906. idxf: BinaryIO | None,
  3907. delta_window_size: int | None = None,
  3908. deltify: bool | None = None,
  3909. reuse_deltas: bool = True,
  3910. pack_index_version: int | None = None,
  3911. ) -> None:
  3912. """Pack objects into a file.
  3913. Args:
  3914. repo: Path to the repository
  3915. object_ids: List of object ids to write
  3916. packf: File-like object to write to
  3917. idxf: File-like object to write to (can be None)
  3918. delta_window_size: Sliding window size for searching for deltas;
  3919. Set to None for default window size.
  3920. deltify: Whether to deltify objects
  3921. reuse_deltas: Allow reuse of existing deltas while deltifying
  3922. pack_index_version: Pack index version to use (1, 2, or 3). If None, uses default version.
  3923. """
  3924. with open_repo_closing(repo) as r:
  3925. entries, data_sum = write_pack_from_container(
  3926. packf.write,
  3927. r.object_store,
  3928. [(oid, None) for oid in object_ids],
  3929. deltify=deltify,
  3930. delta_window_size=delta_window_size,
  3931. reuse_deltas=reuse_deltas,
  3932. object_format=r.object_format,
  3933. )
  3934. if idxf is not None:
  3935. index_entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
  3936. write_pack_index(idxf, index_entries, data_sum, version=pack_index_version)
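# Illustrative sketch (the object ID is a hypothetical placeholder): write
# selected objects to a pack file with an accompanying index.
#
#     with open("out.pack", "wb") as packf, open("out.idx", "wb") as idxf:
#         pack_objects(".", [b"<40-hex-sha>"], packf, idxf,
#                      pack_index_version=2)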
  3937. def ls_tree(
  3938. repo: RepoPath,
  3939. treeish: str | bytes | Commit | Tree | Tag = b"HEAD",
  3940. outstream: TextIO | BinaryIO = sys.stdout,
  3941. recursive: bool = False,
  3942. name_only: bool = False,
  3943. ) -> None:
  3944. """List contents of a tree.
  3945. Args:
  3946. repo: Path to the repository
  3947. treeish: Tree id to list
  3948. outstream: Output stream (defaults to stdout)
  3949. recursive: Whether to recursively list files
  3950. name_only: Only print item name
  3951. """
    import io

    def list_tree(store: BaseObjectStore, treeid: ObjectID, base: bytes) -> None:
  3953. tree = store[treeid]
  3954. assert isinstance(tree, Tree)
  3955. for name, mode, sha in tree.iteritems():
  3956. assert name is not None
  3957. assert mode is not None
  3958. assert sha is not None
  3959. if base:
  3960. name = posixpath.join(base, name)
            # Note: typing.BinaryIO is a plain class, so isinstance() checks
            # against it are False for ordinary streams; detect text streams
            # via io.TextIOBase instead.
            if name_only:
                if isinstance(outstream, io.TextIOBase):
                    outstream.write(name.decode("utf-8", "replace") + "\n")
                else:
                    outstream.write(name + b"\n")
            else:
                formatted = pretty_format_tree_entry(name, mode, sha)
                if isinstance(outstream, io.TextIOBase):
                    outstream.write(formatted)
                else:
                    outstream.write(formatted.encode("utf-8"))
  3972. if stat.S_ISDIR(mode) and recursive:
  3973. list_tree(store, sha, name)
  3974. with open_repo_closing(repo) as r:
  3975. tree = parse_tree(r, treeish)
  3976. list_tree(r.object_store, tree.id, b"")
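# Illustrative sketch: capture a recursive listing of HEAD into a text
# buffer instead of writing to stdout.
#
#     import io
#     buf = io.StringIO()
#     ls_tree(".", b"HEAD", outstream=buf, recursive=True)
#     print(buf.getvalue(), end="")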
  3977. def remote_add(
  3978. repo: RepoPath,
  3979. name: bytes | str,
  3980. url: bytes | str,
  3981. ) -> None:
  3982. """Add a remote.
  3983. Args:
  3984. repo: Path to the repository
  3985. name: Remote name
  3986. url: Remote URL
  3987. """
  3988. if not isinstance(name, bytes):
  3989. name = name.encode(DEFAULT_ENCODING)
  3990. if not isinstance(url, bytes):
  3991. url = url.encode(DEFAULT_ENCODING)
  3992. with open_repo_closing(repo) as r:
  3993. c = r.get_config()
  3994. section = (b"remote", name)
  3995. if c.has_section(section):
  3996. raise RemoteExists(f"Remote {name.decode()} already exists")
  3997. c.set(section, b"url", url)
  3998. c.write_to_path()
def remote_remove(repo: RepoPath, name: bytes | str) -> None:
  4000. """Remove a remote.
  4001. Args:
  4002. repo: Path to the repository
  4003. name: Remote name
  4004. """
  4005. if not isinstance(name, bytes):
  4006. name = name.encode(DEFAULT_ENCODING)
  4007. with open_repo_closing(repo) as r:
  4008. c = r.get_config()
  4009. section = (b"remote", name)
  4010. del c[section]
  4011. c.write_to_path()
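# Illustrative sketch (the remote name and URL are hypothetical): register
# a remote, then remove it again.
#
#     remote_add(".", "upstream", "https://example.com/repo.git")
#     remote_remove(".", "upstream")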
  4012. def _quote_path(path: str) -> str:
  4013. """Quote a path using C-style quoting similar to git's core.quotePath.
  4014. Args:
  4015. path: Path to quote
  4016. Returns:
  4017. Quoted path string
  4018. """
  4019. # Check if path needs quoting (non-ASCII or special characters)
  4020. needs_quoting = False
  4021. for char in path:
  4022. if ord(char) > 127 or char in '"\\':
  4023. needs_quoting = True
  4024. break
  4025. if not needs_quoting:
  4026. return path
  4027. # Apply C-style quoting
  4028. quoted = '"'
  4029. for char in path:
  4030. if ord(char) > 127:
  4031. # Non-ASCII character, encode as octal escape
  4032. utf8_bytes = char.encode("utf-8")
  4033. for byte in utf8_bytes:
  4034. quoted += f"\\{byte:03o}"
  4035. elif char == '"':
  4036. quoted += '\\"'
  4037. elif char == "\\":
  4038. quoted += "\\\\"
  4039. else:
  4040. quoted += char
  4041. quoted += '"'
  4042. return quoted
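# Behaviour of _quote_path, for illustration:
#
#     _quote_path("plain.txt")  ->  'plain.txt'
#     _quote_path('a"b')        ->  '"a\\"b"'
#     _quote_path("тест.txt")   ->  '"\\321\\202\\320\\265\\321\\201\\321\\202.txt"'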
  4043. def check_ignore(
  4044. repo: RepoPath,
  4045. paths: Sequence[str | bytes | os.PathLike[str]],
  4046. no_index: bool = False,
  4047. quote_path: bool = True,
  4048. ) -> Iterator[str]:
  4049. r"""Debug gitignore files.
  4050. Args:
  4051. repo: Path to the repository
  4052. paths: List of paths to check for
  4053. no_index: Don't check index
  4054. quote_path: If True, quote non-ASCII characters in returned paths using
  4055. C-style octal escapes (e.g. "тест.txt" becomes "\\321\\202\\320\\265\\321\\201\\321\\202.txt").
  4056. If False, return raw unicode paths.
    Returns: Iterator over the ignored paths
  4058. """
  4059. with open_repo_closing(repo) as r:
  4060. index = r.open_index()
  4061. ignore_manager = IgnoreFilterManager.from_repo(r)
  4062. for original_path in paths:
  4063. # Convert path to string for consistent handling
  4064. original_path_fspath = os.fspath(original_path)
  4065. # Normalize to str
  4066. original_path_str = os.fsdecode(original_path_fspath)
  4067. if not no_index and path_to_tree_path(r.path, original_path_str) in index:
  4068. continue
  4069. # Preserve whether the original path had a trailing slash
  4070. had_trailing_slash = original_path_str.endswith(("/", os.path.sep))
  4071. if os.path.isabs(original_path_str):
  4072. path = os.path.relpath(original_path_str, r.path)
  4073. # Normalize Windows paths to use forward slashes
  4074. if os.path.sep != "/":
  4075. path = path.replace(os.path.sep, "/")
  4076. else:
  4077. path = original_path_str
  4078. # Restore trailing slash if it was in the original
  4079. if had_trailing_slash and not path.endswith("/"):
  4080. path = path + "/"
  4081. # For directories, check with trailing slash to get correct ignore behavior
  4082. test_path = path
  4083. path_without_slash = path.rstrip("/")
  4084. is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))
  4085. # If this is a directory path, ensure we test it correctly
  4086. if is_directory and not path.endswith("/"):
  4087. test_path = path + "/"
  4088. if ignore_manager.is_ignored(test_path):
  4089. # Return relative path (like git does) when absolute path was provided
  4090. if os.path.isabs(original_path):
  4091. output_path = path
  4092. else:
  4093. output_path = original_path # type: ignore[assignment]
  4094. yield _quote_path(output_path) if quote_path else output_path
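# Illustrative sketch (the paths are hypothetical): print which of the
# given paths are ignored, quoting non-ASCII names as described above.
#
#     for ignored in check_ignore(".", ["build/", "src/main.c"]):
#         print(ignored)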
  4095. def _get_current_head_tree(repo: Repo) -> ObjectID | None:
  4096. """Get the current HEAD tree ID.
  4097. Args:
  4098. repo: Repository object
  4099. Returns:
  4100. Tree ID of current HEAD, or None if no HEAD exists (empty repo)
  4101. """
  4102. try:
  4103. current_head = repo.refs[HEADREF]
  4104. current_commit = repo[current_head]
  4105. assert isinstance(current_commit, Commit), "Expected a Commit object"
  4106. tree_id: ObjectID = current_commit.tree
  4107. return tree_id
  4108. except KeyError:
  4109. # No HEAD yet (empty repo)
  4110. return None
  4111. def _check_uncommitted_changes(
  4112. repo: Repo, target_tree_id: ObjectID, force: bool = False
  4113. ) -> None:
  4114. """Check for uncommitted changes that would conflict with a checkout/switch.
  4115. Args:
  4116. repo: Repository object
  4117. target_tree_id: Tree ID to check conflicts against
  4118. force: If True, skip the check
  4119. Raises:
  4120. CheckoutError: If there are conflicting local changes
  4121. """
  4122. if force:
  4123. return
  4124. # Get current HEAD tree for comparison
  4125. current_tree_id = _get_current_head_tree(repo)
  4126. if current_tree_id is None:
  4127. # No HEAD yet (empty repo)
  4128. return
  4129. status_report = status(repo)
  4130. changes = []
  4131. # staged is a dict with 'add', 'delete', 'modify' keys
  4132. if isinstance(status_report.staged, dict):
  4133. changes.extend(status_report.staged.get("add", []))
  4134. changes.extend(status_report.staged.get("delete", []))
  4135. changes.extend(status_report.staged.get("modify", []))
  4136. # unstaged is a list
  4137. changes.extend(status_report.unstaged)
  4138. if changes:
  4139. # Check if any changes would conflict with checkout
  4140. target_tree_obj = repo[target_tree_id]
  4141. assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
  4142. target_tree = target_tree_obj
  4143. for change in changes:
  4144. if isinstance(change, str):
  4145. change = change.encode(DEFAULT_ENCODING)
  4146. try:
  4147. target_tree.lookup_path(repo.object_store.__getitem__, change)
  4148. except KeyError:
  4149. # File doesn't exist in target tree - change can be preserved
  4150. pass
  4151. else:
  4152. # File exists in target tree - would overwrite local changes
  4153. raise CheckoutError(
  4154. f"Your local changes to '{change.decode()}' would be "
  4155. "overwritten. Please commit or stash before switching."
  4156. )
  4157. def _get_worktree_update_config(
  4158. repo: Repo,
  4159. ) -> tuple[
  4160. bool,
  4161. Callable[[bytes], bool],
  4162. Callable[[str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None],
  4163. ]:
  4164. """Get configuration for working tree updates.
  4165. Args:
  4166. repo: Repository object
  4167. Returns:
  4168. Tuple of (honor_filemode, validate_path_element, symlink_fn)
  4169. """
  4170. config = repo.get_config()
  4171. honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
    # The git config keys are core.protectNTFS / core.protectHFS: section
    # b"core", name b"protectNTFS" (not b"core.protectNTFS").
    if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
        validate_path_element = validate_path_element_ntfs
    elif config.get_boolean(b"core", b"protectHFS", sys.platform == "darwin"):
  4175. validate_path_element = validate_path_element_hfs
  4176. else:
  4177. validate_path_element = validate_path_element_default
  4178. if config.get_boolean(b"core", b"symlinks", True):
  4179. def symlink_wrapper(
  4180. source: str | bytes | os.PathLike[str],
  4181. target: str | bytes | os.PathLike[str],
  4182. ) -> None:
  4183. symlink(source, target) # type: ignore[arg-type,unused-ignore]
  4184. symlink_fn = symlink_wrapper
  4185. else:
  4186. def symlink_fallback(
  4187. source: str | bytes | os.PathLike[str],
  4188. target: str | bytes | os.PathLike[str],
  4189. ) -> None:
  4190. mode = "w" + ("b" if isinstance(source, bytes) else "")
  4191. with open(target, mode) as f:
  4192. f.write(source)
  4193. symlink_fn = symlink_fallback
  4194. return honor_filemode, validate_path_element, symlink_fn
  4195. def _perform_tree_switch(
  4196. repo: Repo,
  4197. current_tree_id: ObjectID | None,
  4198. target_tree_id: ObjectID,
  4199. force: bool = False,
  4200. ) -> None:
  4201. """Perform the actual working tree switch.
  4202. Args:
  4203. repo: Repository object
  4204. current_tree_id: Current tree ID (or None for empty repo)
  4205. target_tree_id: Target tree ID to switch to
  4206. force: If True, force removal of untracked files and allow overwriting modified files
  4207. """
  4208. honor_filemode, validate_path_element, symlink_fn = _get_worktree_update_config(
  4209. repo
  4210. )
  4211. # Get blob normalizer for line ending conversion
  4212. blob_normalizer = repo.get_blob_normalizer()
  4213. # Update working tree
  4214. tree_change_iterator: Iterator[TreeChange] = tree_changes(
  4215. repo.object_store, current_tree_id, target_tree_id
  4216. )
  4217. update_working_tree(
  4218. repo,
  4219. current_tree_id,
  4220. target_tree_id,
  4221. change_iterator=tree_change_iterator,
  4222. honor_filemode=honor_filemode,
  4223. validate_path_element=validate_path_element,
  4224. symlink_fn=symlink_fn,
  4225. force_remove_untracked=force,
  4226. blob_normalizer=blob_normalizer,
  4227. allow_overwrite_modified=force,
  4228. )
  4229. def update_head(
  4230. repo: RepoPath,
  4231. target: str | bytes,
  4232. detached: bool = False,
  4233. new_branch: str | bytes | None = None,
  4234. ) -> None:
  4235. """Update HEAD to point at a new branch/commit.
  4236. Note that this does not actually update the working tree.
  4237. Args:
  4238. repo: Path to the repository
      target: Branch or committish to switch to
      detached: Create a detached head
  4241. new_branch: New branch to create
  4242. """
  4243. with open_repo_closing(repo) as r:
  4244. if new_branch is not None:
  4245. to_set = _make_branch_ref(new_branch)
  4246. else:
  4247. to_set = HEADREF
  4248. if detached:
  4249. # TODO(jelmer): Provide some way so that the actual ref gets
  4250. # updated rather than what it points to, so the delete isn't
  4251. # necessary.
  4252. del r.refs[to_set]
  4253. r.refs[to_set] = parse_commit(r, target).id
  4254. else:
  4255. r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
  4256. if new_branch is not None:
  4257. r.refs.set_symbolic_ref(HEADREF, to_set)
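# Illustrative sketch (ref names are hypothetical); the working tree is
# deliberately left untouched by update_head:
#
#     update_head(".", "feature")              # HEAD -> refs/heads/feature
#     update_head(".", "v1.0", detached=True)  # detached HEAD at the tag's commit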
  4258. def checkout(
  4259. repo: str | os.PathLike[str] | Repo,
  4260. target: str | bytes | Commit | Tag | None = None,
  4261. force: bool = False,
  4262. new_branch: bytes | str | None = None,
  4263. paths: list[bytes | str] | None = None,
  4264. ) -> None:
  4265. """Switch to a branch or commit, updating both HEAD and the working tree.
  4266. This is similar to 'git checkout', allowing you to switch to a branch,
  4267. tag, or specific commit. Unlike update_head, this function also updates
  4268. the working tree to match the target.
  4269. Args:
  4270. repo: Path to repository or repository object
  4271. target: Branch name, tag, or commit SHA to checkout. If None and paths is specified,
  4272. restores files from HEAD
  4273. force: Force checkout even if there are local changes
  4274. new_branch: Create a new branch at target (like git checkout -b)
  4275. paths: List of specific paths to checkout. If specified, only these paths are updated
  4276. and HEAD is not changed
  4277. Raises:
  4278. CheckoutError: If checkout cannot be performed due to conflicts
  4279. KeyError: If the target reference cannot be found
  4280. """
  4281. with open_repo_closing(repo) as r:
  4282. # Store the original target for later reference checks
  4283. original_target = target
  4284. worktree = r.get_worktree()
  4285. # Handle path-specific checkout (like git checkout -- <paths>)
  4286. if paths is not None:
  4287. # Convert paths to bytes
  4288. byte_paths = []
  4289. for path in paths:
  4290. if isinstance(path, str):
  4291. byte_paths.append(path.encode(DEFAULT_ENCODING))
  4292. else:
  4293. byte_paths.append(path)
  4294. # If no target specified, use HEAD
  4295. if target is None:
  4296. try:
  4297. target = r.refs[HEADREF]
  4298. except KeyError:
  4299. raise CheckoutError("No HEAD reference found")
  4300. else:
  4301. if isinstance(target, str):
  4302. target = target.encode(DEFAULT_ENCODING)
  4303. # Get the target commit and tree
  4304. target_tree = parse_tree(r, target)
  4305. # Get blob normalizer for line ending conversion
  4306. blob_normalizer = r.get_blob_normalizer()
  4307. # Restore specified paths from target tree
  4308. for path in byte_paths:
  4309. try:
  4310. # Look up the path in the target tree
  4311. mode, sha = target_tree.lookup_path(
  4312. r.object_store.__getitem__, path
  4313. )
  4314. obj = r[sha]
  4315. assert isinstance(obj, Blob), "Expected a Blob object"
  4316. except KeyError:
  4317. # Path doesn't exist in target tree
  4318. pass
  4319. else:
  4320. # Create directories if needed
  4321. # Handle path as string
  4322. if isinstance(path, bytes):
  4323. path_str = path.decode(DEFAULT_ENCODING)
  4324. else:
  4325. path_str = path
  4326. file_path = os.path.join(r.path, path_str)
  4327. os.makedirs(os.path.dirname(file_path), exist_ok=True)
  4328. # Write the file content
  4329. if stat.S_ISREG(mode):
  4330. # Apply checkout filters (smudge)
  4331. if blob_normalizer:
  4332. obj = blob_normalizer.checkout_normalize(obj, path)
  4333. flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
  4334. if sys.platform == "win32":
  4335. flags |= os.O_BINARY
  4336. with os.fdopen(os.open(file_path, flags, mode), "wb") as f:
  4337. f.write(obj.data)
  4338. # Update the index
  4339. worktree.stage(path)
  4340. return
  4341. # Normal checkout (switching branches/commits)
  4342. if target is None:
  4343. raise ValueError("Target must be specified for branch/commit checkout")
  4344. if isinstance(target, str):
  4345. target_bytes = target.encode(DEFAULT_ENCODING)
  4346. elif isinstance(target, bytes):
  4347. target_bytes = target
  4348. else:
  4349. # For Commit/Tag objects, we'll use their SHA
  4350. target_bytes = target.id
  4351. if isinstance(new_branch, str):
  4352. new_branch = new_branch.encode(DEFAULT_ENCODING)
  4353. # Parse the target to get the commit
  4354. assert (
  4355. original_target is not None
  4356. ) # Guaranteed by earlier check for normal checkout
  4357. target_commit = parse_commit(r, original_target)
  4358. target_tree_id = target_commit.tree
  4359. # Get current HEAD tree for comparison
  4360. current_tree_id = _get_current_head_tree(r)
  4361. # Check for uncommitted changes if not forcing
  4362. if current_tree_id is not None:
  4363. _check_uncommitted_changes(r, target_tree_id, force)
  4364. # Update working tree
  4365. _perform_tree_switch(r, current_tree_id, target_tree_id, force)
  4366. # Update HEAD
  4367. if new_branch:
  4368. # Create new branch and switch to it
  4369. branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
  4370. update_head(r, new_branch)
  4371. # Set up tracking if creating from a remote branch
  4372. if isinstance(original_target, bytes) and target_bytes.startswith(
  4373. LOCAL_REMOTE_PREFIX
  4374. ):
  4375. try:
  4376. remote_name, branch_name = parse_remote_ref(target_bytes)
  4377. # Set tracking to refs/heads/<branch> on the remote
  4378. set_branch_tracking(
  4379. r, new_branch, remote_name, local_branch_name(branch_name)
  4380. )
  4381. except ValueError:
  4382. # Invalid remote ref format, skip tracking setup
  4383. pass
  4384. else:
  4385. # Check if target is a branch name (with or without refs/heads/ prefix)
  4386. branch_ref = None
  4387. if (
  4388. isinstance(original_target, (str, bytes))
  4389. and target_bytes in r.refs.keys()
  4390. ):
  4391. if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
  4392. branch_ref = target_bytes
  4393. else:
  4394. # Try adding refs/heads/ prefix
  4395. potential_branch = (
  4396. _make_branch_ref(target_bytes)
  4397. if isinstance(original_target, (str, bytes))
  4398. else None
  4399. )
  4400. if potential_branch in r.refs.keys():
  4401. branch_ref = potential_branch
  4402. if branch_ref:
  4403. # It's a branch - update HEAD symbolically
  4404. update_head(r, branch_ref)
  4405. else:
  4406. # It's a tag, other ref, or commit SHA - detached HEAD
  4407. update_head(r, target_commit.id.decode("ascii"), detached=True)
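# Illustrative sketch (branch and file names are hypothetical):
#
#     checkout(".", b"main")                       # switch to branch "main"
#     checkout(".", b"main", new_branch=b"topic")  # like `git checkout -b topic main`
#     checkout(".", paths=[b"README.md"])          # restore one file from HEAD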
  4408. def restore(
  4409. repo: str | os.PathLike[str] | Repo,
  4410. paths: list[bytes | str],
  4411. source: str | bytes | Commit | Tag | None = None,
  4412. staged: bool = False,
  4413. worktree: bool = True,
  4414. ) -> None:
  4415. """Restore working tree files.
  4416. This is similar to 'git restore', allowing you to restore specific files
  4417. from a commit or the index without changing HEAD.
  4418. Args:
  4419. repo: Path to repository or repository object
  4420. paths: List of specific paths to restore
  4421. source: Branch name, tag, or commit SHA to restore from. If None, restores
  4422. staged files from HEAD, or worktree files from index
  4423. staged: Restore files in the index (--staged)
  4424. worktree: Restore files in the working tree (default: True)
  4425. Raises:
  4426. CheckoutError: If restore cannot be performed
  4427. ValueError: If neither staged nor worktree is specified
  4428. KeyError: If the source reference cannot be found
  4429. """
  4430. if not staged and not worktree:
  4431. raise ValueError("At least one of staged or worktree must be True")
  4432. with open_repo_closing(repo) as r:
  4433. from ..index import _fs_to_tree_path, build_file_from_blob
  4434. # Determine the source tree
  4435. if source is None:
  4436. if staged:
  4437. # Restoring staged files from HEAD
  4438. try:
  4439. source = r.refs[HEADREF]
  4440. except KeyError:
  4441. raise CheckoutError("No HEAD reference found")
  4442. elif worktree:
  4443. # Restoring worktree files from index
                from ..index import ConflictedIndexEntry
  4445. index = r.open_index()
  4446. for path in paths:
  4447. if isinstance(path, str):
  4448. tree_path = _fs_to_tree_path(path)
  4449. else:
  4450. tree_path = path
  4451. try:
  4452. index_entry = index[tree_path]
  4453. if isinstance(index_entry, ConflictedIndexEntry):
  4454. raise CheckoutError(
  4455. f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' has conflicts"
  4456. )
  4457. blob = r[index_entry.sha]
  4458. assert isinstance(blob, Blob), "Expected a Blob object"
  4459. full_path = os.path.join(os.fsencode(r.path), tree_path)
  4460. mode = index_entry.mode
  4461. # Use build_file_from_blob to write the file
  4462. build_file_from_blob(blob, mode, full_path)
  4463. except KeyError:
  4464. # Path doesn't exist in index
  4465. raise CheckoutError(
  4466. f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not in index"
  4467. )
  4468. return
  4469. # source is not None at this point
  4470. assert source is not None
  4471. # Get the source tree
  4472. source_tree = parse_tree(r, treeish=source)
  4473. # Restore specified paths from source tree
  4474. for path in paths:
  4475. if isinstance(path, str):
  4476. tree_path = _fs_to_tree_path(path)
  4477. else:
  4478. tree_path = path
  4479. try:
  4480. # Look up the path in the source tree
  4481. mode, sha = source_tree.lookup_path(
  4482. r.object_store.__getitem__, tree_path
  4483. )
  4484. blob = r[sha]
  4485. assert isinstance(blob, Blob), "Expected a Blob object"
  4486. except KeyError:
  4487. # Path doesn't exist in source tree
  4488. raise CheckoutError(
  4489. f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not found in source"
  4490. )
  4491. full_path = os.path.join(os.fsencode(r.path), tree_path)
  4492. if worktree:
  4493. # Use build_file_from_blob to restore to working tree
  4494. build_file_from_blob(blob, mode, full_path)
  4495. if staged:
  4496. # Update the index with the blob from source
  4497. from ..index import IndexEntry
  4498. index = r.open_index()
  4499. # When only updating staged (not worktree), we want to reset the index
  4500. # to the source, but invalidate the stat cache so Git knows to check
  4501. # the worktree file. Use zeros for stat fields.
  4502. if not worktree:
  4503. # Invalidate stat cache by using zeros
  4504. new_entry = IndexEntry(
  4505. ctime=(0, 0),
  4506. mtime=(0, 0),
  4507. dev=0,
  4508. ino=0,
  4509. mode=mode,
  4510. uid=0,
  4511. gid=0,
  4512. size=0,
  4513. sha=sha,
  4514. )
  4515. else:
  4516. # If we also updated worktree, use actual stat
  4517. from ..index import index_entry_from_stat
  4518. st = os.lstat(full_path)
  4519. new_entry = index_entry_from_stat(st, sha, mode)
  4520. index[tree_path] = new_entry
  4521. index.write()
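# Illustrative sketch (paths and tag are hypothetical):
#
#     restore(".", ["src/app.py"])                               # worktree from index
#     restore(".", ["src/app.py"], staged=True, worktree=False)  # index from HEAD
#     restore(".", ["src/app.py"], source=b"v1.0")               # worktree from a tag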
  4522. def switch(
  4523. repo: str | os.PathLike[str] | Repo,
  4524. target: str | bytes | Commit | Tag,
  4525. create: str | bytes | None = None,
  4526. force: bool = False,
  4527. detach: bool = False,
  4528. ) -> None:
  4529. """Switch branches.
  4530. This is similar to 'git switch', allowing you to switch to a different
  4531. branch or commit, updating both HEAD and the working tree.
  4532. Args:
  4533. repo: Path to repository or repository object
  4534. target: Branch name, tag, or commit SHA to switch to
  4535. create: Create a new branch at target before switching (like git switch -c)
  4536. force: Force switch even if there are local changes
  4537. detach: Switch to a commit in detached HEAD state (like git switch --detach)
  4538. Raises:
  4539. CheckoutError: If switch cannot be performed due to conflicts
  4540. KeyError: If the target reference cannot be found
  4541. ValueError: If both create and detach are specified
  4542. """
  4543. if create and detach:
  4544. raise ValueError("Cannot use both create and detach options")
  4545. with open_repo_closing(repo) as r:
  4546. # Store the original target for later reference checks
  4547. original_target = target
  4548. if isinstance(target, str):
  4549. target_bytes = target.encode(DEFAULT_ENCODING)
  4550. elif isinstance(target, bytes):
  4551. target_bytes = target
  4552. else:
  4553. # For Commit/Tag objects, we'll use their SHA
  4554. target_bytes = target.id
  4555. if isinstance(create, str):
  4556. create = create.encode(DEFAULT_ENCODING)
  4557. # Parse the target to get the commit
  4558. target_commit = parse_commit(r, original_target)
  4559. target_tree_id = target_commit.tree
  4560. # Get current HEAD tree for comparison
  4561. current_tree_id = _get_current_head_tree(r)
  4562. # Check for uncommitted changes if not forcing
  4563. if current_tree_id is not None:
  4564. _check_uncommitted_changes(r, target_tree_id, force)
  4565. # Update working tree
  4566. _perform_tree_switch(r, current_tree_id, target_tree_id, force)
  4567. # Update HEAD
  4568. if create:
  4569. # Create new branch and switch to it
  4570. branch_create(r, create, objectish=target_commit.id.decode("ascii"))
  4571. update_head(r, create)
  4572. # Set up tracking if creating from a remote branch
  4573. if isinstance(original_target, bytes) and target_bytes.startswith(
  4574. LOCAL_REMOTE_PREFIX
  4575. ):
  4576. try:
  4577. remote_name, branch_name = parse_remote_ref(target_bytes)
  4578. # Set tracking to refs/heads/<branch> on the remote
  4579. set_branch_tracking(
  4580. r, create, remote_name, local_branch_name(branch_name)
  4581. )
  4582. except ValueError:
  4583. # Invalid remote ref format, skip tracking setup
  4584. pass
  4585. elif detach:
  4586. # Detached HEAD mode
  4587. update_head(r, target_commit.id.decode("ascii"), detached=True)
  4588. else:
  4589. # Check if target is a branch name (with or without refs/heads/ prefix)
  4590. branch_ref = None
  4591. if (
  4592. isinstance(original_target, (str, bytes))
  4593. and target_bytes in r.refs.keys()
  4594. ):
  4595. if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
  4596. branch_ref = target_bytes
  4597. else:
  4598. # Try adding refs/heads/ prefix
  4599. potential_branch = (
  4600. _make_branch_ref(target_bytes)
  4601. if isinstance(original_target, (str, bytes))
  4602. else None
  4603. )
  4604. if potential_branch in r.refs.keys():
  4605. branch_ref = potential_branch
  4606. if branch_ref:
  4607. # It's a branch - update HEAD symbolically
  4608. update_head(r, branch_ref)
  4609. else:
  4610. # It's a tag, other ref, or commit SHA
  4611. # In git switch, this would be an error unless --detach is used
  4612. raise CheckoutError(
  4613. f"'{target_bytes.decode(DEFAULT_ENCODING)}' is not a branch. "
  4614. "Use detach=True to switch to a commit in detached HEAD state."
  4615. )
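# Illustrative sketch (branch and tag names are hypothetical):
#
#     switch(".", "main")                  # switch to an existing branch
#     switch(".", "main", create="topic")  # like `git switch -c topic main`
#     switch(".", "v1.0", detach=True)     # detached HEAD at a tag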
  4616. def reset_file(
  4617. repo: Repo,
  4618. file_path: str,
  4619. target: str | bytes | Commit | Tree | Tag = b"HEAD",
  4620. symlink_fn: Callable[
  4621. [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
  4622. ]
  4623. | None = None,
  4624. ) -> None:
  4625. """Reset the file to specific commit or branch.
  4626. Args:
  4627. repo: dulwich Repo object
  4628. file_path: file to reset, relative to the repository path
  4629. target: branch or commit or b'HEAD' to reset
  4630. symlink_fn: Function to use for creating symlinks
  4631. """
  4632. tree = parse_tree(repo, treeish=target)
  4633. tree_path = _fs_to_tree_path(file_path)
  4634. file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
  4635. full_path = os.path.join(os.fsencode(repo.path), tree_path)
  4636. blob = repo.object_store[file_entry[1]]
  4637. assert isinstance(blob, Blob)
  4638. mode = file_entry[0]
  4639. build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
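# Illustrative sketch (the file path is hypothetical): reset a single file
# back to its state in HEAD.
#
#     from dulwich.repo import Repo
#
#     with Repo(".") as r:
#         reset_file(r, "docs/index.md", target=b"HEAD")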
  4640. @replace_me(since="0.22.9", remove_in="0.24.0")
  4641. def checkout_branch(
  4642. repo: str | os.PathLike[str] | Repo,
  4643. target: bytes | str,
  4644. force: bool = False,
  4645. ) -> None:
  4646. """Switch branches or restore working tree files.
  4647. This is now a wrapper around the general checkout() function.
  4648. Preserved for backward compatibility.
  4649. Args:
  4650. repo: dulwich Repo object
  4651. target: branch name or commit sha to checkout
  4652. force: true or not to force checkout
  4653. """
  4654. # Simply delegate to the new checkout function
  4655. return checkout(repo, target, force=force)
  4656. def sparse_checkout(
  4657. repo: str | os.PathLike[str] | Repo,
  4658. patterns: list[str] | None = None,
  4659. force: bool = False,
  4660. cone: bool | None = None,
  4661. ) -> None:
  4662. """Perform a sparse checkout in the repository (either 'full' or 'cone mode').
  4663. Perform sparse checkout in either 'cone' (directory-based) mode or
  4664. 'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.
  4665. If ``cone`` is ``None``, the mode is inferred from the repository's
  4666. ``core.sparseCheckoutCone`` config setting.
  4667. Steps:
  4668. 1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
  4669. 2) Determine which paths in the index are included vs. excluded.
  4670. - If ``cone=True``, use "cone-compatible" directory-based logic.
  4671. - If ``cone=False``, use standard .gitignore-style matching.
  4672. 3) Update the index's skip-worktree bits and add/remove files in
  4673. the working tree accordingly.
  4674. 4) If ``force=False``, refuse to remove files that have local modifications.
  4675. Args:
  4676. repo: Path to the repository or a Repo object.
  4677. patterns: Optional list of sparse-checkout patterns to write.
  4678. force: Whether to force removal of locally modified files (default False).
  4679. cone: Boolean indicating cone mode (True/False). If None, read from config.
  4680. Returns:
  4681. None
  4682. """
  4683. with open_repo_closing(repo) as repo_obj:
  4684. # --- 0) Possibly infer 'cone' from config ---
  4685. if cone is None:
  4686. cone = repo_obj.get_worktree().infer_cone_mode()
  4687. # --- 1) Read or write patterns ---
  4688. if patterns is None:
  4689. lines = repo_obj.get_worktree().get_sparse_checkout_patterns()
  4690. if lines is None:
  4691. raise Error("No sparse checkout patterns found.")
  4692. else:
  4693. lines = patterns
  4694. repo_obj.get_worktree().set_sparse_checkout_patterns(patterns)
  4695. # --- 2) Determine the set of included paths ---
  4696. index = repo_obj.open_index()
  4697. included_paths = determine_included_paths(index, lines, cone)
  4698. # --- 3) Apply those results to the index & working tree ---
  4699. try:
  4700. apply_included_paths(repo_obj, included_paths, force=force)
  4701. except SparseCheckoutConflictError as exc:
  4702. raise CheckoutError(*exc.args) from exc
  4703. def cone_mode_init(repo: str | os.PathLike[str] | Repo) -> None:
  4704. """Initialize a repository to use sparse checkout in 'cone' mode.
  4705. Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
  4706. Writes an initial ``.git/info/sparse-checkout`` file that includes only
  4707. top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
  4708. Then performs a sparse checkout to update the working tree accordingly.
  4709. If no directories are specified, then only top-level files are included:
  4710. https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
  4711. Args:
  4712. repo: Path to the repository or a Repo object.
  4713. Returns:
  4714. None
  4715. """
  4716. with open_repo_closing(repo) as repo_obj:
  4717. repo_obj.get_worktree().configure_for_cone_mode()
  4718. patterns = ["/*", "!/*/"] # root-level files only
  4719. sparse_checkout(repo_obj, patterns, force=True, cone=True)
  4720. def cone_mode_set(
  4721. repo: str | os.PathLike[str] | Repo, dirs: Sequence[str], force: bool = False
  4722. ) -> None:
  4723. """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.
  4724. Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
  4725. Writes new patterns so that only the specified directories (and top-level files)
  4726. remain in the working tree, and applies the sparse checkout update.
  4727. Args:
  4728. repo: Path to the repository or a Repo object.
  4729. dirs: List of directory names to include.
  4730. force: Whether to forcibly discard local modifications (default False).
  4731. Returns:
  4732. None
  4733. """
  4734. with open_repo_closing(repo) as repo_obj:
  4735. repo_obj.get_worktree().configure_for_cone_mode()
  4736. repo_obj.get_worktree().set_cone_mode_patterns(dirs=dirs)
  4737. new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
  4738. # Finally, apply the patterns and update the working tree
  4739. sparse_checkout(repo_obj, new_patterns, force=force, cone=True)
  4740. def cone_mode_add(
  4741. repo: str | os.PathLike[str] | Repo, dirs: Sequence[str], force: bool = False
  4742. ) -> None:
  4743. """Add new directories to the existing 'cone-mode' sparse-checkout patterns.
  4744. Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
  4745. lines to include the specified directories, and then performs a sparse
  4746. checkout to update the working tree accordingly.
  4747. Args:
  4748. repo: Path to the repository or a Repo object.
  4749. dirs: List of directory names to add to the sparse-checkout.
  4750. force: Whether to forcibly discard local modifications (default False).
  4751. Returns:
  4752. None
  4753. """
  4754. with open_repo_closing(repo) as repo_obj:
  4755. repo_obj.get_worktree().configure_for_cone_mode()
  4756. # Do not pass base patterns as dirs
  4757. base_patterns = ["/*", "!/*/"]
  4758. existing_dirs = [
  4759. pat.strip("/")
  4760. for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
  4761. if pat not in base_patterns
  4762. ]
  4763. added_dirs = existing_dirs + list(dirs or [])
  4764. repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
  4765. new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
  4766. sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
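# Illustrative sketch (directory names are hypothetical): enable cone-mode
# sparse checkout, then grow the checked-out set incrementally.
#
#     cone_mode_init(".")                  # top-level files only
#     cone_mode_set(".", ["src", "docs"])  # replace the directory set
#     cone_mode_add(".", ["tests"])        # add one more directory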
  4767. def check_mailmap(repo: RepoPath, contact: str | bytes) -> bytes:
  4768. """Check canonical name and email of contact.
  4769. Args:
  4770. repo: Path to the repository
  4771. contact: Contact name and/or email
  4772. Returns: Canonical contact data
  4773. """
  4774. with open_repo_closing(repo) as r:
  4775. from ..mailmap import Mailmap
  4776. try:
  4777. mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
  4778. except FileNotFoundError:
  4779. mailmap = Mailmap()
  4780. contact_bytes = (
  4781. contact.encode(DEFAULT_ENCODING) if isinstance(contact, str) else contact
  4782. )
  4783. result = mailmap.lookup(contact_bytes)
  4784. if isinstance(result, bytes):
  4785. return result
  4786. else:
  4787. # Convert tuple back to bytes format
  4788. name, email = result
  4789. if name is None:
  4790. name = b""
  4791. if email is None:
  4792. email = b""
  4793. return name + b" <" + email + b">"
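# Illustrative sketch (the identity is hypothetical): returns the canonical
# identity from .mailmap, or the input identity if no mapping exists.
#
#     check_mailmap(".", "Jane Doe <jane@example.com>")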
  4794. def fsck(repo: RepoPath) -> Iterator[tuple[bytes, Exception]]:
  4795. """Check a repository.
  4796. Args:
  4797. repo: A path to the repository
  4798. Returns: Iterator over errors/warnings
  4799. """
  4800. with open_repo_closing(repo) as r:
  4801. # TODO(jelmer): check pack files
  4802. # TODO(jelmer): check graph
  4803. # TODO(jelmer): check refs
  4804. for sha in r.object_store:
  4805. o = r.object_store[sha]
  4806. try:
  4807. o.check()
  4808. except Exception as e:
  4809. yield (sha, e)
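# Illustrative sketch: report any corrupt objects found in the repository.
#
#     for sha, err in fsck("."):
#         print(sha.decode("ascii"), err)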
  4810. def stash_list(
  4811. repo: str | os.PathLike[str] | Repo,
  4812. ) -> Iterator[tuple[int, tuple[bytes, bytes]]]:
  4813. """List all stashes in a repository."""
  4814. with open_repo_closing(repo) as r:
  4815. from ..stash import Stash
  4816. stash = Stash.from_repo(r)
  4817. entries = stash.stashes()
  4818. # Convert Entry objects to (old_sha, new_sha) tuples
  4819. return enumerate([(entry.old_sha, entry.new_sha) for entry in entries])
  4820. def stash_push(repo: str | os.PathLike[str] | Repo) -> None:
  4821. """Push a new stash onto the stack."""
  4822. with open_repo_closing(repo) as r:
  4823. from ..stash import Stash
  4824. stash = Stash.from_repo(r)
  4825. stash.push()
  4826. def stash_pop(repo: str | os.PathLike[str] | Repo) -> None:
  4827. """Pop a stash from the stack."""
  4828. with open_repo_closing(repo) as r:
  4829. from ..stash import Stash
  4830. stash = Stash.from_repo(r)
  4831. stash.pop(0)
  4832. def stash_drop(repo: str | os.PathLike[str] | Repo, index: int) -> None:
  4833. """Drop a stash from the stack."""
  4834. with open_repo_closing(repo) as r:
  4835. from ..stash import Stash
  4836. stash = Stash.from_repo(r)
  4837. stash.drop(index)
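# Illustrative sketch: stash local changes, inspect the stash stack, then
# reapply the most recent entry.
#
#     stash_push(".")
#     for index, (old_sha, new_sha) in stash_list("."):
#         print(index, new_sha.decode("ascii"))
#     stash_pop(".")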
  4838. def ls_files(repo: RepoPath) -> list[bytes]:
  4839. """List all files in an index."""
  4840. with open_repo_closing(repo) as r:
  4841. return sorted(r.open_index())
  4842. def find_unique_abbrev(
  4843. object_store: BaseObjectStore, object_id: str | bytes, min_length: int = 7
  4844. ) -> str:
  4845. """Find the shortest unique abbreviation for an object ID.
  4846. Args:
  4847. object_store: Object store to search in
  4848. object_id: The full object ID to abbreviate
  4849. min_length: Minimum length of abbreviation (default 7)
  4850. Returns:
  4851. The shortest unique prefix of the object ID (at least min_length chars)
  4852. """
  4853. if isinstance(object_id, bytes):
  4854. hex_id = object_id.decode("ascii")
  4855. else:
  4856. hex_id = object_id
  4857. # Start with minimum length
  4858. for length in range(min_length, len(hex_id) + 1):
  4859. prefix = hex_id[:length]
  4860. matches = 0
  4861. # Check if this prefix is unique
  4862. for obj_id in object_store:
  4863. if obj_id.decode("ascii").startswith(prefix):
  4864. matches += 1
  4865. if matches > 1:
  4866. # Not unique, need more characters
  4867. break
  4868. if matches == 1:
  4869. # Found unique prefix
  4870. return prefix
  4871. # If we get here, return the full ID
  4872. return hex_id
  4873. def describe(repo: str | os.PathLike[str] | Repo, abbrev: int | None = None) -> str:
  4874. """Describe the repository version.
  4875. Args:
  4876. repo: git repository
  4877. abbrev: number of characters of commit to take, default is 7
  4878. Returns: a string description of the current git revision
  4879. Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
  4880. """
  4881. abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
  4882. # Get the repository
  4883. with open_repo_closing(repo) as r:
  4884. # Get a list of all tags
  4885. refs = r.get_refs()
  4886. tags = {}
        for key, value in refs.items():
            key_str = key.decode()
            if "tags" not in key_str:
                continue
            obj = r.get_object(value)
  4892. _, tag = key_str.rsplit("/", 1)
  4893. if isinstance(obj, Tag):
  4894. # Annotated tag case
  4895. commit = r.get_object(obj.object[1])
  4896. else:
  4897. # Lightweight tag case - obj is already the commit
  4898. commit = obj
  4899. if not isinstance(commit, Commit):
  4900. raise AssertionError(
  4901. f"Expected Commit object, got {type(commit).__name__}"
  4902. )
  4903. tag_info: list[Any] = [
  4904. datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
  4905. commit.id.decode("ascii"),
  4906. ]
  4907. tags[tag] = tag_info
  4908. # Sort tags by datetime (first element of the value list)
  4909. sorted_tags = sorted(
  4910. tags.items(), key=lambda tag_item: tag_item[1][0], reverse=True
  4911. )
  4912. # Get the latest commit
  4913. latest_commit = r[r.head()]
  4914. # If there are no tags, return the latest commit
  4915. if len(sorted_tags) == 0:
  4916. if abbrev is not None:
  4917. return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
  4918. return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
  4919. # We're now 0 commits from the top
  4920. commit_count = 0
  4921. # Walk through all commits
  4922. walker = r.get_walker()
  4923. for entry in walker:
  4924. # Check if tag
  4925. commit_id = entry.commit.id.decode("ascii")
  4926. for tag_item in sorted_tags:
  4927. tag_name = tag_item[0]
  4928. tag_commit = tag_item[1][1]
  4929. if commit_id == tag_commit:
  4930. if commit_count == 0:
  4931. return tag_name
  4932. else:
  4933. if abbrev is not None:
  4934. abbrev_hash = latest_commit.id.decode("ascii")[abbrev_slice]
  4935. else:
  4936. abbrev_hash = find_unique_abbrev(
  4937. r.object_store, latest_commit.id
  4938. )
  4939. return f"{tag_name}-{commit_count}-g{abbrev_hash}"
  4940. commit_count += 1
  4941. # Return plain commit if no parent tag can be found
  4942. if abbrev is not None:
  4943. return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
  4944. return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
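# Illustrative sketch; the output formats follow the docstring above:
#
#     describe(".")            # e.g. "v0.1-5-gabcdefh", or "gabcdefh" with no tags
#     describe(".", abbrev=8)  # use 8 characters of the commit hash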
  4945. def get_object_by_path(
  4946. repo: RepoPath,
  4947. path: str | bytes,
  4948. committish: str | bytes | Commit | Tag | None = None,
  4949. ) -> Blob | Tree | Commit | Tag:
  4950. """Get an object by path.
  4951. Args:
  4952. repo: A path to the repository
  4953. path: Path to look up
  4954. committish: Commit to look up path in
  4955. Returns: A `ShaFile` object
  4956. """
  4957. if committish is None:
  4958. committish = "HEAD"
  4959. # Get the repository
  4960. with open_repo_closing(repo) as r:
  4961. commit = parse_commit(r, committish)
  4962. base_tree = commit.tree
  4963. if not isinstance(path, bytes):
  4964. path = commit_encode(commit, path)
  4965. (_mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
  4966. obj = r[sha]
  4967. assert isinstance(obj, (Blob, Tree, Commit, Tag))
  4968. return obj
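# Illustrative sketch (the path is hypothetical): read the raw contents of
# a file as stored at HEAD.
#
#     obj = get_object_by_path(".", "README.md")
#     if isinstance(obj, Blob):
#         data = obj.data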
  4969. def write_tree(repo: RepoPath) -> bytes:
  4970. """Write a tree object from the index.
  4971. Args:
  4972. repo: Repository for which to write tree
  4973. Returns: tree id for the tree that was written
  4974. """
  4975. with open_repo_closing(repo) as r:
  4976. return r.open_index().commit(r.object_store)
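# Illustrative sketch: write the current index as a tree object and print
# its ID.
#
#     tree_id = write_tree(".")
#     print(tree_id.decode("ascii"))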


def _do_merge(
    r: Repo,
    merge_commit_id: ObjectID,
    no_commit: bool = False,
    no_ff: bool = False,
    message: bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> tuple[ObjectID | None, list[bytes]]:
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from ..graph import find_merge_base
    from ..merge import recursive_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[HEADREF]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"
    merge_commit = r[merge_commit_id]
    assert isinstance(merge_commit, Commit), "Expected a Commit object"

    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])
    if not merge_bases:
        raise Error("No common ancestor found")

    # Use the first merge base for fast-forward checks
    base_commit_id = merge_bases[0]

    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Check for fast-forward
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[HEADREF] = merge_commit_id

        # Update the working directory
        changes = tree_changes(r.object_store, head_commit.tree, merge_commit.tree)
        update_working_tree(
            r, head_commit.tree, merge_commit.tree, change_iterator=changes
        )
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Perform recursive merge (handles multiple merge bases automatically)
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = recursive_merge(
        r.object_store, merge_bases, head_commit, merge_commit, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n".encode()
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[HEADREF] = merge_commit_obj.id
    return (merge_commit_obj.id, [])


def _do_octopus_merge(
    r: Repo,
    merge_commit_ids: list[ObjectID],
    no_commit: bool = False,
    no_ff: bool = False,
    message: bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> tuple[ObjectID | None, list[bytes]]:
    """Internal octopus merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_ids: List of commit SHAs to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit (ignored for octopus)
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from ..graph import find_octopus_base
    from ..merge import octopus_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[HEADREF]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"

    # Get all commits to merge
    other_commits = []
    for merge_commit_id in merge_commit_ids:
        merge_commit = r[merge_commit_id]
        assert isinstance(merge_commit, Commit), "Expected a Commit object"
        # Check if we're trying to merge the same commit as HEAD
        if head_commit_id == merge_commit_id:
            # Skip this commit, it's already merged
            continue
        other_commits.append(merge_commit)

    # If no commits to merge after filtering, we're already up to date
    if not other_commits:
        return (None, [])

    # If only one commit to merge, use regular merge
    if len(other_commits) == 1:
        return _do_merge(
            r, other_commits[0].id, no_commit, no_ff, message, author, committer
        )

    # Find the octopus merge base
    all_commit_ids = [head_commit_id] + [c.id for c in other_commits]
    merge_bases = find_octopus_base(r, all_commit_ids)
    if not merge_bases:
        raise Error("No common ancestor found")

    # Fast-forward doesn't really apply to octopus merges, so we always
    # create a merge commit.

    # Perform octopus merge
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = octopus_merge(
        r.object_store, merge_bases, head_commit, other_commits, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts:
        # Octopus merge refuses to proceed with conflicts; don't commit
        return (None, conflicts)

    if no_commit:
        # Don't create a commit if no_commit is True
        return (None, [])

    # Create merge commit with multiple parents
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id] + [c.id for c in other_commits]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        # Generate default message from the abbreviated merged commit ids
        short_ids = [commit_id.decode()[:7] for commit_id in merge_commit_ids]
        message = f"Merge commits {', '.join(short_ids)}\n".encode()
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[HEADREF] = merge_commit_obj.id
    return (merge_commit_obj.id, [])


def merge(
    repo: str | os.PathLike[str] | Repo,
    committish: str | bytes | Commit | Tag | Sequence[str | bytes | Commit | Tag],
    no_commit: bool = False,
    no_ff: bool = False,
    message: bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> tuple[bytes | None, list[bytes]]:
    """Merge one or more commits into the current branch.

    Args:
      repo: Repository to merge into
      committish: Commit(s) to merge. Can be a single commit or a sequence of
        commits. When merging more than two heads, the octopus merge strategy
        is used.
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference or commit cannot be found
    """
    with open_repo_closing(repo) as r:
        # Handle both single commit and multiple commits
        if isinstance(committish, (list, tuple)):
            # Multiple commits - use octopus merge
            merge_commit_ids = []
            for c in committish:
                try:
                    merge_commit_ids.append(parse_commit(r, c).id)
                except KeyError:
                    raise Error(
                        f"Cannot find commit '{c.decode() if isinstance(c, bytes) else c}'"
                    )
            if len(merge_commit_ids) == 1:
                # Only one commit, use regular merge
                result = _do_merge(
                    r, merge_commit_ids[0], no_commit, no_ff, message, author, committer
                )
            else:
                # Multiple commits, use octopus merge
                result = _do_octopus_merge(
                    r, merge_commit_ids, no_commit, no_ff, message, author, committer
                )
        else:
            # Single commit - use regular merge
            # Type narrowing: committish is not a sequence in this branch
            single_committish = cast(str | bytes | Commit | Tag, committish)
            try:
                merge_commit_id = parse_commit(r, single_committish).id
            except KeyError:
                raise Error(
                    f"Cannot find commit '{single_committish.decode() if isinstance(single_committish, bytes) else single_committish}'"
                )
            result = _do_merge(
                r, merge_commit_id, no_commit, no_ff, message, author, committer
            )

        # Trigger auto GC if needed
        from ..gc import maybe_auto_gc

        maybe_auto_gc(r)

        return result
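
# Illustrative usage sketch (placeholders assumed): merge a branch and report
# conflicts. "." and "origin/feature" are example arguments, not fixtures.
#
#     sha, conflicts = merge(".", "origin/feature")
#     if conflicts:
#         print("conflicts in:", ", ".join(c.decode() for c in conflicts))
#     elif sha is None:
#         print("already up to date (or commit deferred by no_commit)")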


def unpack_objects(
    pack_path: str | os.PathLike[str], target: str | os.PathLike[str] = "."
) -> int:
    """Unpack objects from a pack file into the repository.

    Args:
      pack_path: Path to the pack file to unpack
      target: Path to the repository to unpack into

    Returns:
      Number of objects unpacked
    """
    from ..pack import Pack

    with open_repo_closing(target) as r:
        pack_basename = os.path.splitext(pack_path)[0]
        with Pack(pack_basename, object_format=r.object_store.object_format) as pack:
            count = 0
            for unpacked in pack.iter_unpacked():
                obj = unpacked.sha_file()
                r.object_store.add_object(obj)
                count += 1
            return count


def merge_tree(
    repo: RepoPath,
    base_tree: str | bytes | Tree | Commit | Tag | None,
    our_tree: str | bytes | Tree | Commit | Tag,
    their_tree: str | bytes | Tree | Commit | Tag,
) -> tuple[bytes, list[bytes]]:
    """Perform a three-way tree merge without touching the working directory.

    This is similar to git merge-tree, performing a merge at the tree level
    without creating commits or updating any references.

    Args:
      repo: Repository containing the trees
      base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
      our_tree: Tree-ish of our side of the merge
      their_tree: Tree-ish of their side of the merge

    Returns:
      A tuple of (merged_tree_id, conflicts), where merged_tree_id is the
      SHA-1 of the merged tree and conflicts is a list of paths (as bytes)
      that had conflicts.

    Raises:
      KeyError: If any of the tree-ish arguments cannot be resolved
    """
    from ..merge import Merger

    with open_repo_closing(repo) as r:
        # Resolve tree-ish arguments to actual trees
        base = parse_tree(r, base_tree) if base_tree else None
        ours = parse_tree(r, our_tree)
        theirs = parse_tree(r, their_tree)

        # Perform the merge
        gitattributes = r.get_gitattributes()
        config = r.get_config()
        merger = Merger(r.object_store, gitattributes, config)
        merged_tree, conflicts = merger.merge_trees(base, ours, theirs)

        # Add the merged tree to the object store
        r.object_store.add_object(merged_tree)

        return merged_tree.id, conflicts
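
# Illustrative sketch (assumed ref names): a pure tree-level merge that leaves
# the index and working directory untouched, mirroring `git merge-tree`.
#
#     tree_id, conflicts = merge_tree(".", "main", "HEAD", "topic")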


def cherry(
    repo: str | os.PathLike[str] | Repo,
    upstream: str | bytes | None = None,
    head: str | bytes | None = None,
    limit: str | bytes | None = None,
    verbose: bool = False,
) -> list[tuple[str, bytes, bytes | None]]:
    """Find commits not merged upstream.

    Args:
      repo: Repository path or object
      upstream: Upstream branch (default: the configured tracking branch,
        falling back to HEAD^)
      head: Head branch (default: HEAD)
      limit: Limit commits to those after this ref
      verbose: Include commit messages in output

    Returns:
      List of tuples (status, commit_sha, message) where status is '+' or '-':
      '+' means the commit is not in upstream, '-' means an equivalent patch
      exists upstream. message is None unless verbose=True.
    """
    from ..patch import commit_patch_id

    with open_repo_closing(repo) as r:
        # Resolve upstream
        if upstream is None:
            # Try to find tracking branch
            upstream_found = False
            head_refs, _ = r.refs.follow(HEADREF)
            if head_refs:
                head_ref = head_refs[0]
                if head_ref.startswith(b"refs/heads/"):
                    config = r.get_config()
                    branch_name = head_ref[len(b"refs/heads/") :]
                    try:
                        upstream_ref = config.get((b"branch", branch_name), b"merge")
                    except KeyError:
                        upstream_ref = None
                    if upstream_ref:
                        try:
                            remote_name = config.get(
                                (b"branch", branch_name), b"remote"
                            )
                        except KeyError:
                            remote_name = None
                        if remote_name:
                            # Build the tracking branch ref
                            upstream_refname = Ref(
                                b"refs/remotes/"
                                + remote_name
                                + b"/"
                                + upstream_ref.split(b"/")[-1]
                            )
                            if upstream_refname in r.refs:
                                upstream = upstream_refname
                                upstream_found = True
            if not upstream_found:
                # Default to HEAD^ if no tracking branch found
                head_commit = r[HEADREF]
                if isinstance(head_commit, Commit) and head_commit.parents:
                    upstream = head_commit.parents[0]
                else:
                    raise ValueError("Could not determine upstream branch")

        # Resolve head
        if head is None:
            head = b"HEAD"

        # Convert strings to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(head, str):
            head = head.encode("utf-8")
        if limit is not None and isinstance(limit, str):
            limit = limit.encode("utf-8")

        # Resolve refs to commit IDs
        assert upstream is not None
        upstream_obj = r[upstream]
        head_obj = r[head]
        upstream_id = upstream_obj.id
        head_id = head_obj.id

        # Get limit commit ID if specified
        limit_id = None
        if limit is not None:
            limit_id = r[limit].id

        # Find all commits reachable from head but not from upstream.
        # This is equivalent to: git rev-list ^upstream head
        walker = r.get_walker([head_id], exclude=[upstream_id])
        head_commits = []
        for entry in walker:
            commit = entry.commit
            # Apply limit if specified
            if limit_id is not None:
                # Stop when we reach the limit commit
                if commit.id == limit_id:
                    break
            head_commits.append(commit.id)

        # Compute patch IDs for upstream commits
        upstream_walker = r.get_walker([upstream_id])
        upstream_patch_ids = {}  # Maps patch_id -> commit_id for debugging
        for entry in upstream_walker:
            commit = entry.commit
            pid = commit_patch_id(r.object_store, commit.id)
            upstream_patch_ids[pid] = commit.id

        # For each head commit, check if an equivalent patch exists upstream
        results: list[tuple[str, bytes, bytes | None]] = []
        for commit_id in reversed(head_commits):  # Show oldest first
            obj = r.object_store[commit_id]
            assert isinstance(obj, Commit)
            commit = obj
            pid = commit_patch_id(r.object_store, commit_id)
            if pid in upstream_patch_ids:
                status = "-"
            else:
                status = "+"
            message = None
            if verbose:
                message = commit.message.split(b"\n")[0]  # First line only
            results.append((status, commit_id, message))

        return results
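
# Illustrative sketch: list commits on HEAD whose patch-equivalents are
# missing upstream; "refs/remotes/origin/main" is an assumed ref name.
#
#     for status, sha, msg in cherry(
#         ".", upstream=b"refs/remotes/origin/main", verbose=True
#     ):
#         print(status, sha.decode()[:7], (msg or b"").decode("utf-8", "replace"))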


def cherry_pick(  # noqa: D417
    repo: str | os.PathLike[str] | Repo,
    committish: str | bytes | Commit | Tag | None,
    no_commit: bool = False,
    continue_: bool = False,
    abort: bool = False,
) -> bytes | None:
    r"""Cherry-pick a commit onto the current branch.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick (can be None only when resuming or aborting)
      no_commit: If True, do not create a commit after applying changes
      ``continue_``: Resume an in-progress cherry-pick after resolving conflicts if True
      abort: Abort an in-progress cherry-pick

    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there
      were conflicts

    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from ..merge import three_way_merge

    # Validate that committish is provided when needed
    if not (continue_ or abort) and committish is None:
        raise ValueError("committish is required when not using --continue or --abort")

    with open_repo_closing(repo) as r:
        # Handle abort
        if abort:
            # Clean up any cherry-pick state
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass
            # Reset index to HEAD
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            r.get_worktree().reset_index(head_commit.tree)
            return None

        # Handle continue
        if continue_:
            # Check if there's a cherry-pick in progress
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")

            # Check for unresolved conflicts
            if r.open_index().has_conflicts():
                raise Error("Unresolved conflicts remain")

            # Create the commit
            tree_id = r.open_index().commit(r.object_store)

            # Read saved message if any
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                assert isinstance(cherry_pick_commit, Commit)
                message = cherry_pick_commit.message

            assert isinstance(cherry_pick_commit, Commit)
            new_commit = r.get_worktree().commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )

            # Clean up state files
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass

            return new_commit

        # Normal cherry-pick operation
        # Get current HEAD
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        # Parse the commit to cherry-pick
        # committish cannot be None here due to validation above
        assert committish is not None
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )

        # Check if commit has parents
        assert isinstance(cherry_pick_commit, Commit)
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")

        # Get parent of cherry-pick commit
        parent_commit = r[cherry_pick_commit.parents[0]]
        assert isinstance(parent_commit, Commit)

        # Perform three-way merge
        assert isinstance(head_commit, Commit)
        merged_tree, conflicts = three_way_merge(
            r.object_store, parent_commit, head_commit, cherry_pick_commit
        )

        # Add merged tree to object store
        r.object_store.add_object(merged_tree)

        # Update working tree and index:
        # reset the index to match the merged tree
        r.get_worktree().reset_index(merged_tree.id)

        # Update the working tree from the new index, allowing overwrites
        # because we're applying the merge result
        assert isinstance(head_commit, Commit)
        changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
        update_working_tree(
            r,
            head_commit.tree,
            merged_tree.id,
            change_iterator=changes,
            allow_overwrite_modified=True,
        )

        if conflicts:
            # Save state for later continuation
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")
            # Save commit message
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)
            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )

        if no_commit:
            return None

        # Create the commit
        new_commit = r.get_worktree().commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )
        return new_commit
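
# Illustrative sketch of the conflict/resume cycle; "0123abc" is an assumed
# commit id. On conflicts, cherry_pick() raises Error after writing
# CHERRY_PICK_HEAD, so a caller resumes with continue_=True once resolved.
#
#     try:
#         cherry_pick(".", "0123abc")
#     except Error:
#         ...  # resolve conflicts in the working tree, stage the fixes
#         cherry_pick(".", None, continue_=True)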


def revert(
    repo: str | os.PathLike[str] | Repo,
    commits: str | bytes | Commit | Tag | Sequence[str | bytes | Commit | Tag],
    no_commit: bool = False,
    message: str | bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> bytes | None:
    """Revert one or more commits.

    This creates a new commit that undoes the changes introduced by the
    specified commits. Unlike reset, revert creates a new commit that
    preserves history.

    Args:
      repo: Path to repository or repository object
      commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
      no_commit: If True, apply changes to index/working tree but don't commit
      message: Optional commit message (default: "Revert <original subject>")
      author: Optional author for revert commit
      committer: Optional committer for revert commit

    Returns:
      SHA1 of the new revert commit, or None if no_commit=True

    Raises:
      Error: If revert fails due to conflicts or other issues
    """
    from ..merge import three_way_merge

    # Normalize commits to a list
    if isinstance(commits, (str, bytes, Commit, Tag)):
        commits = [commits]

    with open_repo_closing(repo) as r:
        # Convert string refs to bytes
        commits_to_revert = []
        for commit_ref in commits:
            if isinstance(commit_ref, str):
                commit_ref = commit_ref.encode("utf-8")
            commit = parse_commit(r, commit_ref)
            commits_to_revert.append(commit)

        # Get current HEAD
        try:
            head_commit_id = r.refs[HEADREF]
        except KeyError:
            raise Error("No HEAD reference found")

        head_commit = r[head_commit_id]
        assert isinstance(head_commit, Commit)
        current_tree = head_commit.tree

        # Process commits in order
        for commit_to_revert in commits_to_revert:
            # For revert, we want to apply the inverse of the commit.
            # This means using the commit's tree as "base" and its parent
            # as "theirs".
            if not commit_to_revert.parents:
                raise Error(
                    f"Cannot revert commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - it has no parents"
                )

            # For simplicity, we only handle commits with one parent (no merge commits)
            if len(commit_to_revert.parents) > 1:
                raise Error(
                    f"Cannot revert merge commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - not yet implemented"
                )

            parent_commit = r[commit_to_revert.parents[0]]
            assert isinstance(parent_commit, Commit)

            # Perform three-way merge:
            # - base: the commit we're reverting (what we want to remove)
            # - ours: current HEAD (what we have now)
            # - theirs: parent of commit being reverted (what we want to go back to)
            assert isinstance(commit_to_revert, Commit)
            head_for_merge = r[head_commit_id]
            assert isinstance(head_for_merge, Commit)
            merged_tree, conflicts = three_way_merge(
                r.object_store,
                commit_to_revert,  # base
                head_for_merge,  # ours
                parent_commit,  # theirs
            )

            if conflicts:
                # Update working tree with conflicts
                changes = tree_changes(r.object_store, current_tree, merged_tree.id)
                update_working_tree(
                    r, current_tree, merged_tree.id, change_iterator=changes
                )
                conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
                raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")

            # Add merged tree to object store
            r.object_store.add_object(merged_tree)

            # Update working tree
            changes = tree_changes(r.object_store, current_tree, merged_tree.id)
            update_working_tree(
                r, current_tree, merged_tree.id, change_iterator=changes
            )
            current_tree = merged_tree.id

            if not no_commit:
                # Create revert commit
                revert_commit = Commit()
                revert_commit.tree = merged_tree.id
                revert_commit.parents = [head_commit_id]

                # Set author/committer
                if author is None:
                    author = get_user_identity(r.get_config_stack())
                if committer is None:
                    committer = author
                revert_commit.author = author
                revert_commit.committer = committer

                # Set timestamps
                timestamp = int(time.time())
                timezone = 0  # UTC
                revert_commit.author_time = timestamp
                revert_commit.author_timezone = timezone
                revert_commit.commit_time = timestamp
                revert_commit.commit_timezone = timezone

                # Set message
                if message is None:
                    # Extract original commit subject
                    original_message = commit_to_revert.message
                    if isinstance(original_message, bytes):
                        original_message = original_message.decode("utf-8", "replace")
                    subject = original_message.split("\n")[0]
                    message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
                elif isinstance(message, str):
                    message = message.encode("utf-8")
                revert_commit.message = message

                # Add commit to object store
                r.object_store.add_object(revert_commit)

                # Update HEAD
                r.refs[HEADREF] = revert_commit.id
                head_commit_id = revert_commit.id

        return head_commit_id if not no_commit else None
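
# Illustrative sketch: revert two assumed commits in one call; each revert
# becomes its own commit on top of HEAD, and the final HEAD SHA is returned.
#
#     new_head = revert(".", ["abc1234", "def5678"])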


def gc(
    repo: RepoPath,
    auto: bool = False,
    aggressive: bool = False,
    prune: bool = True,
    grace_period: int | None = 1209600,  # 2 weeks default
    dry_run: bool = False,
    progress: Callable[[str], None] | None = None,
) -> "GCStats":
    """Run garbage collection on a repository.

    Args:
      repo: Path to the repository or a Repo object
      auto: If True, only run gc if needed
      aggressive: If True, use more aggressive settings
      prune: If True, prune unreachable objects
      grace_period: Grace period in seconds for pruning (default 2 weeks)
      dry_run: If True, only report what would be done
      progress: Optional progress callback

    Returns:
      GCStats object with garbage collection statistics
    """
    from ..gc import garbage_collect

    with open_repo_closing(repo) as r:
        return garbage_collect(
            r,
            auto=auto,
            aggressive=aggressive,
            prune=prune,
            grace_period=grace_period,
            dry_run=dry_run,
            progress=progress,
        )
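
# Illustrative sketch: a dry-run garbage collection with a one-week grace
# period; the returned GCStats comes straight from the ..gc module.
#
#     stats = gc(".", prune=True, grace_period=7 * 24 * 3600, dry_run=True)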


def prune(
    repo: RepoPath,
    grace_period: int | None = None,
    dry_run: bool = False,
    progress: Callable[[str], None] | None = None,
) -> None:
    """Prune/clean up a repository's object store.

    This removes temporary files that were left behind by interrupted
    pack operations.

    Args:
      repo: Path to the repository or a Repo object
      grace_period: Grace period in seconds for removing temporary files
        (default 2 weeks)
      dry_run: If True, only report what would be done
      progress: Optional progress callback
    """
    with open_repo_closing(repo) as r:
        if progress:
            progress("Pruning temporary files")
        if not dry_run:
            r.object_store.prune(grace_period=grace_period)


def maintenance_run(
    repo: RepoPath,
    tasks: list[str] | None = None,
    auto: bool = False,
    progress: Callable[[str], None] | None = None,
) -> "MaintenanceResult":
    """Run maintenance tasks on a repository.

    Args:
      repo: Path to the repository or a Repo object
      tasks: Optional list of specific task names to run
        (e.g., ['gc', 'commit-graph', 'pack-refs'])
      auto: If True, only run tasks if needed
      progress: Optional progress callback

    Returns:
      MaintenanceResult object with task execution results
    """
    from ..maintenance import run_maintenance

    with open_repo_closing(repo) as r:
        return run_maintenance(r, tasks=tasks, auto=auto, progress=progress)


def maintenance_register(repo: RepoPath) -> None:
    """Register a repository for background maintenance.

    This adds the repository to the global maintenance.repo config and sets
    up recommended configuration for scheduled maintenance.

    Args:
      repo: Path to the repository or repository object
    """
    from ..maintenance import register_repository

    with open_repo_closing(repo) as r:
        register_repository(r)


def maintenance_unregister(repo: RepoPath, force: bool = False) -> None:
    """Unregister a repository from background maintenance.

    This removes the repository from the global maintenance.repo config.

    Args:
      repo: Path to the repository or repository object
      force: If True, don't error if repository is not registered
    """
    from ..maintenance import unregister_repository

    with open_repo_closing(repo) as r:
        unregister_repository(r, force=force)


def count_objects(repo: RepoPath = ".", verbose: bool = False) -> CountObjectsResult:
    """Count unpacked objects and their disk usage.

    Args:
      repo: Path to repository or repository object
      verbose: Whether to return verbose information

    Returns:
      CountObjectsResult object with detailed statistics
    """
    from ..object_store import DiskObjectStore

    with open_repo_closing(repo) as r:
        object_store = r.object_store

        # Count loose objects
        loose_count = 0
        loose_size = 0
        for sha in object_store._iter_loose_objects():
            loose_count += 1
            assert isinstance(object_store, DiskObjectStore)
            path = object_store._get_shafile_path(sha)
            try:
                stat_info = os.stat(path)
                # Git uses disk usage, not file size. st_blocks is always in
                # 512-byte blocks per POSIX standard
                st_blocks = getattr(stat_info, "st_blocks", None)
                if st_blocks is not None:
                    # Available on Linux and macOS
                    loose_size += st_blocks * 512
                else:
                    # Fallback for Windows
                    loose_size += stat_info.st_size
            except FileNotFoundError:
                # Object may have been removed between iteration and stat
                pass

        if not verbose:
            return CountObjectsResult(count=loose_count, size=loose_size)

        # Count pack information
        pack_count = len(object_store.packs)
        in_pack_count = 0
        pack_size = 0
        for pack in object_store.packs:
            in_pack_count += len(pack)
            # Get pack file size
            pack_path = pack._data_path
            try:
                pack_size += os.path.getsize(pack_path)
            except FileNotFoundError:
                pass
            # Get index file size
            idx_path = pack._idx_path
            try:
                pack_size += os.path.getsize(idx_path)
            except FileNotFoundError:
                pass

        return CountObjectsResult(
            count=loose_count,
            size=loose_size,
            in_pack=in_pack_count,
            packs=pack_count,
            size_pack=pack_size,
        )
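
# Illustrative sketch: mirror `git count-objects -v` style reporting; the
# field names match the CountObjectsResult constructed above.
#
#     result = count_objects(".", verbose=True)
#     print(f"count={result.count} size={result.size}")
#     print(f"in-pack={result.in_pack} packs={result.packs} size-pack={result.size_pack}")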


def is_interactive_rebase(repo: Repo | str) -> bool:
    """Check if an interactive rebase is in progress.

    Args:
      repo: Repository to check

    Returns:
      True if interactive rebase is in progress, False otherwise
    """
    with open_repo_closing(repo) as r:
        state_manager = r.get_rebase_state_manager()
        if not state_manager.exists():
            return False
        # Check if todo file exists
        todo = state_manager.load_todo()
        return todo is not None


def rebase(
    repo: Repo | str,
    upstream: bytes | str,
    onto: bytes | str | None = None,
    branch: bytes | str | None = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
    interactive: bool = False,
    edit_todo: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
      repo: Repository to rebase in
      upstream: Upstream branch/commit to rebase onto
      onto: Specific commit to rebase onto (defaults to upstream)
      branch: Branch to rebase (defaults to current branch)
      abort: Abort an in-progress rebase
      continue_rebase: Continue an in-progress rebase
      skip: Skip current commit and continue rebase
      interactive: Start an interactive rebase
      edit_todo: Edit the todo list of an interactive rebase

    Returns:
      List of new commit SHAs created by rebase

    Raises:
      Error: If rebase fails or conflicts occur
    """
    # TODO: Avoid importing from ..cli
    from ..cli import launch_editor
    from ..rebase import (
        RebaseConflict,
        RebaseError,
        Rebaser,
        process_interactive_rebase,
        start_interactive,
    )
    from ..rebase import (
        edit_todo as edit_todo_func,
    )

    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)

        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))

        if edit_todo:
            # Edit the todo list of an interactive rebase
            try:
                edit_todo_func(r, launch_editor)
                print("Todo list updated. Continue with 'rebase --continue'")
                return []
            except RebaseError as e:
                raise Error(str(e))

        if continue_rebase:
            try:
                if interactive:
                    # Continue interactive rebase
                    is_complete, pause_reason = process_interactive_rebase(
                        r, editor_callback=launch_editor
                    )
                    if is_complete:
                        return [c.id for c in rebaser._done]
                    else:
                        if pause_reason == "conflict":
                            raise Error("Conflicts detected. Resolve and continue.")
                        elif pause_reason == "edit":
                            print("Stopped for editing. Make changes and continue.")
                        elif pause_reason == "break":
                            print("Rebase paused at break. Continue when ready.")
                        else:
                            print(f"Rebase paused: {pause_reason}")
                        return []
                else:
                    # Continue regular rebase
                    result = rebaser.continue_()
                    if result is None:
                        # Rebase complete
                        return [c.id for c in rebaser._done]
                    elif isinstance(result, tuple) and result[1]:
                        # Still have conflicts
                        raise Error(
                            f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                        )
            except RebaseError as e:
                raise Error(str(e))

        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None

        try:
            if interactive:
                # Start interactive rebase
                todo = start_interactive(r, upstream, onto, branch, launch_editor)
                # Process the todo list
                is_complete, pause_reason = process_interactive_rebase(
                    r, todo, editor_callback=launch_editor
                )
                if is_complete:
                    return [c.id for c in rebaser._done]
                else:
                    if pause_reason == "conflict":
                        raise Error("Conflicts detected. Resolve and continue.")
                    elif pause_reason == "edit":
                        print("Stopped for editing. Make changes and continue.")
                    elif pause_reason == "break":
                        print("Rebase paused at break. Continue when ready.")
                    else:
                        print(f"Rebase paused: {pause_reason}")
                    return []
            else:
                # Regular rebase
                rebaser.start(upstream, onto, branch)

                # Continue rebase automatically
                result = rebaser.continue_()
                if result is not None:
                    # Conflicts
                    raise RebaseConflict(result[1])

                # Return the SHAs of the rebased commits
                return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))
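
# Illustrative sketch: rebase the current branch onto an assumed upstream and
# surface conflicts to the caller.
#
#     try:
#         new_shas = rebase(".", "origin/main")
#     except Error as e:
#         # resolve conflicts, then: rebase(".", "origin/main", continue_rebase=True)
#         print("rebase stopped:", e)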


def annotate(
    repo: RepoPath,
    path: str | bytes,
    committish: str | bytes | Commit | Tag | None = None,
) -> list[tuple[tuple[Commit, TreeEntry], bytes]]:
    """Annotate the history of a file.

    Args:
      repo: Path to the repository
      path: Path to annotate
      committish: Commit id to find path in

    Returns:
      List of ((Commit, TreeEntry), line) tuples
    """
    if committish is None:
        committish = "HEAD"
    from ..annotate import annotate_lines

    with open_repo_closing(repo) as r:
        commit_id = parse_commit(r, committish).id
        # Ensure path is bytes
        if isinstance(path, str):
            path = path.encode()
        return annotate_lines(r.object_store, commit_id, path)


blame = annotate
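
# Illustrative sketch: print a minimal blame listing for an assumed file.
#
#     for (commit, _entry), line in annotate(".", "README.md"):
#         print(commit.id.decode()[:7], line.rstrip(b"\n").decode("utf-8", "replace"))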


def filter_branch(
    repo: RepoPath = ".",
    branch: str | bytes = "HEAD",
    *,
    filter_fn: Callable[[Commit], "CommitData | None"] | None = None,
    filter_author: Callable[[bytes], bytes | None] | None = None,
    filter_committer: Callable[[bytes], bytes | None] | None = None,
    filter_message: Callable[[bytes], bytes | None] | None = None,
    tree_filter: Callable[[ObjectID, str], ObjectID | None] | None = None,
    index_filter: Callable[[ObjectID, str], ObjectID | None] | None = None,
    parent_filter: Callable[[Sequence[ObjectID]], list[ObjectID]] | None = None,
    commit_filter: Callable[[Commit, ObjectID], ObjectID | None] | None = None,
    subdirectory_filter: str | bytes | None = None,
    prune_empty: bool = False,
    tag_name_filter: Callable[[bytes], bytes | None] | None = None,
    force: bool = False,
    keep_original: bool = True,
    refs: list[bytes] | None = None,
) -> dict[ObjectID, ObjectID]:
    """Rewrite branch history by creating new commits with filtered properties.

    This is similar to git filter-branch, allowing you to rewrite commit
    history by modifying trees, parents, author, committer, or commit messages.

    Args:
      repo: Path to repository
      branch: Branch to rewrite (defaults to HEAD)
      filter_fn: Optional callable that takes a Commit object and returns
        a dict of updated fields (author, committer, message, etc.)
      filter_author: Optional callable that takes author bytes and returns
        updated author bytes or None to keep unchanged
      filter_committer: Optional callable that takes committer bytes and
        returns updated committer bytes or None to keep unchanged
      filter_message: Optional callable that takes commit message bytes
        and returns updated message bytes
      tree_filter: Optional callable that takes (tree_sha, temp_dir) and
        returns new tree SHA after modifying working directory
      index_filter: Optional callable that takes (tree_sha, temp_index_path)
        and returns new tree SHA after modifying index
      parent_filter: Optional callable that takes parent list and returns
        modified parent list
      commit_filter: Optional callable that takes (Commit, tree_sha) and
        returns new commit SHA or None to skip commit
      subdirectory_filter: Optional subdirectory path to extract as new root
      prune_empty: Whether to prune commits that become empty
      tag_name_filter: Optional callable to rename tags
      force: Force operation even if branch has been filtered before
      keep_original: Keep original refs under refs/original/
      refs: List of refs to rewrite (defaults to [branch])

    Returns:
      Dict mapping old commit SHAs to new commit SHAs

    Raises:
      Error: If branch is already filtered and force is False
    """
    from ..filter_branch import CommitFilter, filter_refs

    with open_repo_closing(repo) as r:
        # Parse branch/committish
        if isinstance(branch, str):
            branch = branch.encode()

        # Determine which refs to process
        if refs is None:
            if branch == b"HEAD":
                # Resolve HEAD to actual branch
                try:
                    resolved = r.refs.follow(HEADREF)
                    if resolved and resolved[0]:
                        # resolved is a list of (refname, sha) tuples
                        resolved_ref = resolved[0][-1]
                        if resolved_ref and resolved_ref != b"HEAD":
                            refs = [resolved_ref]
                        else:
                            # HEAD points directly to a commit
                            refs = [b"HEAD"]
                    else:
                        refs = [b"HEAD"]
                except SymrefLoop:
                    refs = [b"HEAD"]
            else:
                # Convert branch name to full ref if needed
                if not branch.startswith(b"refs/"):
                    branch = local_branch_name(branch)
                refs = [branch]

        # Convert subdirectory filter to bytes if needed
        if subdirectory_filter:
            if isinstance(subdirectory_filter, str):
                subdirectory_filter = subdirectory_filter.encode()
        else:
            subdirectory_filter = None

        # Create commit filter
        filter_obj = CommitFilter(
            r.object_store,
            filter_fn=filter_fn,
            filter_author=filter_author,
            filter_committer=filter_committer,
            filter_message=filter_message,
            tree_filter=tree_filter,
            index_filter=index_filter,
            parent_filter=parent_filter,
            commit_filter=commit_filter,
            subdirectory_filter=subdirectory_filter,
            prune_empty=prune_empty,
            tag_name_filter=tag_name_filter,
        )

        # Tag callback for renaming tags
        def rename_tag(old_ref: Ref, new_ref: Ref) -> None:
            # Copy tag to new name
            r.refs[new_ref] = r.refs[old_ref]
            # Delete old tag
            del r.refs[old_ref]

        # Filter refs
        try:
            return filter_refs(
                r.refs,
                r.object_store,
                refs,
                filter_obj,
                keep_original=keep_original,
                force=force,
                tag_callback=rename_tag if tag_name_filter else None,
            )
        except ValueError as e:
            raise Error(str(e)) from e
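
# Illustrative sketch: strip a "WIP: " prefix from every commit message on an
# assumed branch "main"; the return value maps old SHAs to rewritten ones.
#
#     def drop_wip(message: bytes) -> bytes:
#         return message[5:] if message.startswith(b"WIP: ") else message
#
#     mapping = filter_branch(".", "main", filter_message=drop_wip)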


def format_patch(
    repo: RepoPath = ".",
    committish: ObjectID | tuple[ObjectID, ObjectID] | None = None,
    outstream: TextIO = sys.stdout,
    outdir: str | os.PathLike[str] | None = None,
    n: int = 1,
    stdout: bool = False,
    version: str | None = None,
) -> list[str]:
    """Generate patches suitable for git am.

    Args:
      repo: Path to repository
      committish: Commit-ish or commit range to generate patches for.
        Can be a single commit id, or a tuple of (start, end) commit ids
        for a range. If None, formats the last n commits from HEAD.
      outstream: Stream to write to if stdout=True
      outdir: Directory to write patch files to (default: current directory)
      n: Number of patches to generate if committish is None
      stdout: Write patches to stdout instead of files
      version: Version string to include in patches (default: Dulwich version)

    Returns:
      List of patch filenames that were created (empty if stdout=True)
    """
    from io import BytesIO

    if outdir is None:
        outdir = "."

    filenames = []
    with open_repo_closing(repo) as r:
        # Determine which commits to format
        commits_to_format = []

        if committish is None:
            # Get the last n commits from HEAD
            try:
                walker = r.get_walker()
                for entry in walker:
                    commits_to_format.append(entry.commit)
                    if len(commits_to_format) >= n:
                        break
                commits_to_format.reverse()
            except KeyError:
                # No HEAD or empty repository
                pass
        elif isinstance(committish, tuple):
            # Handle commit range (start, end)
            start_commit, end_commit = committish
            # Extract commit IDs from commit objects if needed
            start_id = (
                start_commit.id if isinstance(start_commit, Commit) else start_commit
            )
            end_id = end_commit.id if isinstance(end_commit, Commit) else end_commit
            # Walk from end back to start
            walker = r.get_walker(include=[end_id], exclude=[start_id])
            for entry in walker:
                commits_to_format.append(entry.commit)
            commits_to_format.reverse()
        else:
            # Single commit
            commit = r.object_store[committish]
            assert isinstance(commit, Commit)
            commits_to_format.append(commit)

        # Generate patches
        total = len(commits_to_format)
        for i, commit in enumerate(commits_to_format, 1):
            assert isinstance(commit, Commit)
            # Get the parent
            if commit.parents:
                parent_id = commit.parents[0]
                parent = r.object_store[parent_id]
                assert isinstance(parent, Commit)
            else:
                parent = None

            # Generate the diff
            diff_content = BytesIO()
            if parent:
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    parent.tree,
                    commit.tree,
                )
            else:
                # Initial commit - diff against empty tree
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    None,
                    commit.tree,
                )

            # Generate patch with commit metadata
            if stdout:
                # Get binary stream from TextIO
                if hasattr(outstream, "buffer"):
                    binary_out: IO[bytes] = outstream.buffer
                else:
                    # Fallback for non-text streams
                    binary_out = outstream  # type: ignore[assignment]
                write_commit_patch(
                    binary_out,
                    commit,
                    diff_content.getvalue(),
                    (i, total),
                    version=version,
                )
            else:
                # Generate filename
                summary = get_summary(commit)
                filename = os.path.join(outdir, f"{i:04d}-{summary}.patch")
                with open(filename, "wb") as f:
                    write_commit_patch(
                        f,
                        commit,
                        diff_content.getvalue(),
                        (i, total),
                        version=version,
                    )
                filenames.append(filename)

    return filenames
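
# Illustrative sketch: write the last three commits as am-ready patch files
# into an assumed existing "patches/" directory.
#
#     files = format_patch(".", n=3, outdir="patches")
#     print("\n".join(files))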


def bisect_start(
    repo: str | os.PathLike[str] | Repo = ".",
    bad: str | bytes | Commit | Tag | None = None,
    good: str
    | bytes
    | Commit
    | Tag
    | Sequence[str | bytes | Commit | Tag]
    | None = None,
    paths: Sequence[bytes] | None = None,
    no_checkout: bool = False,
    term_bad: str = "bad",
    term_good: str = "good",
) -> bytes | None:
    """Start a new bisect session.

    Args:
      repo: Path to repository or a Repo object
      bad: The bad commit (defaults to HEAD)
      good: List of good commits or a single good commit
      paths: Optional paths to limit bisect to
      no_checkout: If True, don't checkout commits during bisect
      term_bad: Term to use for bad commits (default: "bad")
      term_good: Term to use for good commits (default: "good")

    Returns:
      The SHA of the next commit to test, or None if there is not yet enough
      information (both bad and good commits) to pick one
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Convert single good commit to sequence
        if good is not None and isinstance(good, (str, bytes, Commit, Tag)):
            good = [good]

        # Parse commits
        bad_sha = parse_commit(r, bad).id if bad else None
        good_shas = [parse_commit(r, g).id for g in good] if good else None

        state.start(bad_sha, good_shas, paths, no_checkout, term_bad, term_good)

        # Return the next commit to test if we have both good and bad
        if bad_sha and good_shas:
            next_sha = state._find_next_commit()
            if next_sha and not no_checkout:
                # Checkout the next commit
                old_commit = r[r.head()]
                assert isinstance(old_commit, Commit)
                old_tree = old_commit.tree if r.head() else None
                r.refs[HEADREF] = next_sha
                commit = r[next_sha]
                assert isinstance(commit, Commit)
                changes = tree_changes(r.object_store, old_tree, commit.tree)
                update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
            return next_sha
        return None


def bisect_bad(
    repo: str | os.PathLike[str] | Repo = ".",
    rev: str | bytes | Commit | Tag | None = None,
) -> bytes | None:
    """Mark a commit as bad.

    Args:
      repo: Path to repository or a Repo object
      rev: Commit to mark as bad (defaults to HEAD)

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_bad(rev_sha)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[HEADREF] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_good(
    repo: str | os.PathLike[str] | Repo = ".",
    rev: str | bytes | Commit | Tag | None = None,
) -> bytes | None:
    """Mark a commit as good.

    Args:
      repo: Path to repository or a Repo object
      rev: Commit to mark as good (defaults to HEAD)

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_good(rev_sha)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[HEADREF] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_skip(
    repo: str | os.PathLike[str] | Repo = ".",
    revs: str
    | bytes
    | Commit
    | Tag
    | Sequence[str | bytes | Commit | Tag]
    | None = None,
) -> bytes | None:
    """Skip one or more commits.

    Args:
      repo: Path to repository or a Repo object
      revs: List of commits to skip (defaults to [HEAD])

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        if revs is None:
            rev_shas = None
        else:
            # Convert single rev to sequence
            if isinstance(revs, (str, bytes, Commit, Tag)):
                revs = [revs]
            rev_shas = [parse_commit(r, rev).id for rev in revs]
        next_sha = state.skip(rev_shas)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[HEADREF] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_reset(
    repo: str | os.PathLike[str] | Repo = ".",
    commit: str | bytes | Commit | Tag | None = None,
) -> None:
    """Reset bisect state and return to original branch/commit.

    Args:
      repo: Path to repository or a Repo object
      commit: Optional commit to reset to (defaults to original branch/commit)
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Get old tree before reset
        try:
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree
        except KeyError:
            old_tree = None

        commit_sha = parse_commit(r, commit).id if commit else None
        state.reset(commit_sha)

        # Update working tree to new HEAD
        try:
            new_head = r.head()
            if new_head:
                new_commit = r[new_head]
                assert isinstance(new_commit, Commit)
                changes = tree_changes(r.object_store, old_tree, new_commit.tree)
                update_working_tree(
                    r, old_tree, new_commit.tree, change_iterator=changes
                )
        except KeyError:
            # No HEAD after reset
            pass
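
# Illustrative walkthrough of a bisect session with assumed revisions: mark
# the endpoints, test, and narrow until no candidate remains. run_tests() is
# a hypothetical test hook supplied by the caller.
#
#     next_sha = bisect_start(".", bad="HEAD", good="v1.0")
#     while next_sha:
#         ok = run_tests()
#         next_sha = bisect_good(".") if ok else bisect_bad(".")
#     bisect_reset(".")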


def bisect_log(repo: str | os.PathLike[str] | Repo = ".") -> str:
    """Get the bisect log.

    Args:
      repo: Path to repository or a Repo object

    Returns:
      The bisect log as a string
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        return state.get_log()


def bisect_replay(
    repo: str | os.PathLike[str] | Repo,
    log_file: str | os.PathLike[str] | BinaryIO,
) -> None:
    """Replay a bisect log.

    Args:
      repo: Path to repository or a Repo object
      log_file: Path to the log file or file-like object
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        if isinstance(log_file, (str, os.PathLike)):
            with open(log_file) as f:
                log_content = f.read()
        else:
            content = log_file.read()
            log_content = content.decode() if isinstance(content, bytes) else content

        state.replay(log_content)


def reflog(
    repo: RepoPath = ".", ref: str | bytes = b"HEAD", all: bool = False
) -> Iterator[Any | tuple[bytes, Any]]:
    """Show reflog entries for a reference or all references.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name (defaults to HEAD)
      all: If True, show reflogs for all refs (ignores the ref parameter)

    Yields:
      If all=False: ReflogEntry objects
      If all=True: Tuples of (ref_name, ReflogEntry) for all refs with reflogs
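
    Example (illustrative; the repository path is hypothetical):
      >>> for entry in reflog("/path/to/repo", ref=b"HEAD"):
      ...     print(entry.new_sha, entry.message)
      >>> for ref_name, entry in reflog("/path/to/repo", all=True):
      ...     print(ref_name, entry.new_sha)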
  6444. """
    import os

    from ..reflog import iter_reflogs

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    with open_repo_closing(repo) as r:
        if not all:
            yield from r.read_reflog(ref)
        else:
            logs_dir = os.path.join(r.controldir(), "logs")
            # Use iter_reflogs to discover all reflogs
            for ref_bytes in iter_reflogs(logs_dir):
                # Read the reflog entries for this ref
                for entry in r.read_reflog(ref_bytes):
                    yield (ref_bytes, entry)


def reflog_expire(
    repo: RepoPath = ".",
    ref: str | bytes | None = None,
    all: bool = False,
    expire_time: int | None = None,
    expire_unreachable_time: int | None = None,
    dry_run: bool = False,
) -> dict[bytes, int]:
    """Expire reflog entries based on age and reachability.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name (if not using --all)
      all: If True, expire reflogs for all refs
      expire_time: Expire entries older than this timestamp (seconds since epoch)
      expire_unreachable_time: Expire unreachable entries older than this timestamp
      dry_run: If True, show what would be expired without making changes

    Returns:
      Dictionary mapping ref names to number of expired entries
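
    Example (an illustrative dry run over all refs; the path is hypothetical):
      >>> counts = reflog_expire("/path/to/repo", all=True, dry_run=True)
      >>> for ref_name, count in counts.items():
      ...     print(ref_name.decode(), count)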
  6477. """
    import os
    import time

    from ..reflog import expire_reflog, iter_reflogs

    if not all and ref is None:
        raise ValueError("Must specify either ref or all=True")

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    # Default expire times if not specified
    if expire_time is None and expire_unreachable_time is None:
        # Default: expire entries older than 90 days, unreachable older than 30 days
        now = int(time.time())
        expire_time = now - (90 * 24 * 60 * 60)
        expire_unreachable_time = now - (30 * 24 * 60 * 60)

    result = {}
    with open_repo_closing(repo) as r:
        # Determine which refs to process
        refs_to_process: list[bytes] = []
        if all:
            logs_dir = os.path.join(r.controldir(), "logs")
            refs_to_process = list(iter_reflogs(logs_dir))
        else:
            assert ref is not None  # Already checked above
            refs_to_process = [ref]

        # Build set of reachable objects if we have unreachable expiration time
        reachable_objects: set[ObjectID] | None = None
        if expire_unreachable_time is not None:
            from ..gc import find_reachable_objects

            reachable_objects = find_reachable_objects(
                r.object_store, r.refs, include_reflogs=False
            )

        # Process each ref
        for ref_name in refs_to_process:
            reflog_path = r._reflog_path(ref_name)
            if not os.path.exists(reflog_path):
                continue

            # Create reachability checker
            def is_reachable(sha: bytes) -> bool:
                if reachable_objects is None:
                    # No unreachable expiration, so assume everything is reachable
                    return True
                return sha in reachable_objects

            # Open the reflog file
            if dry_run:
                # For dry run, just read and count what would be expired
                with open(reflog_path, "rb") as f:
                    from ..reflog import read_reflog

                    count = 0
                    for entry in read_reflog(f):
                        is_obj_reachable = is_reachable(entry.new_sha)
                        should_expire = False
                        if is_obj_reachable and expire_time is not None:
                            if entry.timestamp < expire_time:
                                should_expire = True
                        elif (
                            not is_obj_reachable and expire_unreachable_time is not None
                        ):
                            if entry.timestamp < expire_unreachable_time:
                                should_expire = True
                        if should_expire:
                            count += 1
                result[ref_name] = count
            else:
                # Actually expire entries
                with open(reflog_path, "r+b") as f:  # type: ignore[assignment]
                    count = expire_reflog(
                        f,
                        expire_time=expire_time,
                        expire_unreachable_time=expire_unreachable_time,
                        reachable_checker=is_reachable,
                    )
                result[ref_name] = count

    return result


def reflog_delete(
    repo: RepoPath = ".",
    ref: str | bytes = b"HEAD",
    index: int = 0,
    rewrite: bool = False,
) -> None:
    """Delete a specific reflog entry.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name
      index: Reflog entry index (0 = newest, in Git reflog order)
      rewrite: If True, rewrite old_sha of subsequent entries to maintain consistency
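
    Example (illustrative; drops the newest HEAD entry of a hypothetical repo):
      >>> reflog_delete("/path/to/repo", b"HEAD", index=0, rewrite=True)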
  6562. """
    import os

    from ..reflog import drop_reflog_entry

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    with open_repo_closing(repo) as r:
        reflog_path = r._reflog_path(ref)
        if not os.path.exists(reflog_path):
            raise ValueError(f"No reflog for ref {ref.decode()}")

        with open(reflog_path, "r+b") as f:
            drop_reflog_entry(f, index, rewrite=rewrite)


def merge_base(
    repo: RepoPath = ".",
    committishes: Sequence[str | bytes] | None = None,
    all: bool = False,
    octopus: bool = False,
) -> list[ObjectID]:
    """Find the best common ancestor(s) between commits.

    Args:
      repo: Path to repository
      committishes: List of commit references (branches, tags, commit IDs)
      all: If True, return all merge bases, not just one
      octopus: If True, find merge base of all commits (n-way merge)

    Returns:
      List of commit IDs that are merge bases
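
    Example (illustrative; the branch names and path are hypothetical):
      >>> bases = merge_base("/path/to/repo", ["feature", "main"])
      >>> all_bases = merge_base("/path/to/repo", ["feature", "main"], all=True)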
  6587. """
    from ..graph import find_merge_base, find_octopus_base
    from ..objectspec import parse_object

    if committishes is None or len(committishes) < 2:
        raise ValueError("At least two commits are required")

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Find merge base
        if octopus:
            result = find_octopus_base(r, commit_ids)
        else:
            result = find_merge_base(r, commit_ids)

        # Return first result only if all=False
        if not all and result:
            return [result[0]]
        return result


def is_ancestor(
    repo: RepoPath = ".",
    ancestor: str | bytes | None = None,
    descendant: str | bytes | None = None,
) -> bool:
    """Check if one commit is an ancestor of another.

    Args:
      repo: Path to repository
      ancestor: Commit that might be the ancestor
      descendant: Commit that might be the descendant

    Returns:
      True if ancestor is an ancestor of descendant, False otherwise
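
    Example (illustrative; the tag name and path are hypothetical):
      >>> if is_ancestor("/path/to/repo", ancestor="v1.0", descendant="HEAD"):
      ...     print("v1.0 is reachable from HEAD")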
  6621. """
    from ..graph import find_merge_base
    from ..objectspec import parse_object

    if ancestor is None or descendant is None:
        raise ValueError("Both ancestor and descendant are required")

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        ancestor_obj = parse_object(r, ancestor)
        if not isinstance(ancestor_obj, Commit):
            raise ValueError(f"Expected commit, got {ancestor_obj.type_name.decode()}")

        descendant_obj = parse_object(r, descendant)
        if not isinstance(descendant_obj, Commit):
            raise ValueError(
                f"Expected commit, got {descendant_obj.type_name.decode()}"
            )

        # If ancestor is the merge base of (ancestor, descendant), then it's an ancestor
        merge_bases = find_merge_base(r, [ancestor_obj.id, descendant_obj.id])
        return merge_bases == [ancestor_obj.id]


def independent_commits(
    repo: RepoPath = ".",
    committishes: Sequence[str | bytes] | None = None,
) -> list[ObjectID]:
    """Filter commits to only those that are not reachable from others.

    Args:
      repo: Path to repository
      committishes: List of commit references to filter

    Returns:
      List of commit IDs that are not ancestors of any other commits in the list
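
    Example (illustrative; the branch names and path are hypothetical):
      >>> heads = independent_commits(
      ...     "/path/to/repo", ["topic-a", "topic-b", "main"]
      ... )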
  6649. """
    from ..graph import independent
    from ..objectspec import parse_object

    if committishes is None or len(committishes) == 0:
        return []

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Filter to independent commits
        return independent(r, commit_ids)


def mailsplit(
    input_path: str | os.PathLike[str] | IO[bytes] | None = None,
    output_dir: str | os.PathLike[str] = ".",
    start_number: int = 1,
    precision: int = 4,
    keep_cr: bool = False,
    mboxrd: bool = False,
    is_maildir: bool = False,
) -> list[str]:
    r"""Split an mbox file or Maildir into individual message files.

    This is similar to git mailsplit.

    Args:
      input_path: Path to mbox file, Maildir, or file-like object. If None, reads from stdin.
      output_dir: Directory where individual messages will be written
      start_number: Starting number for output files (default: 1)
      precision: Number of digits for output filenames (default: 4)
      keep_cr: If True, preserve \r in lines ending with \r\n (default: False)
      mboxrd: If True, treat input as mboxrd format and reverse escaping (default: False)
      is_maildir: If True, treat input_path as a Maildir (default: False)

    Returns:
      List of output file paths that were created

    Raises:
      ValueError: If output_dir doesn't exist or input is invalid
      OSError: If there are issues reading/writing files
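
    Example (illustrative; the mbox file and output directory are hypothetical,
    and the resulting names depend on start_number and precision):
      >>> paths = mailsplit("patches.mbox", output_dir="out", precision=4)
      >>> # paths would look like ["out/0001", "out/0002", ...]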
  6688. """
    from ..mbox import split_maildir, split_mbox

    if is_maildir:
        if input_path is None:
            raise ValueError("input_path is required for Maildir splitting")
        if not isinstance(input_path, (str, bytes, os.PathLike)):
            raise ValueError("Maildir splitting requires a path, not a file object")
        # Convert PathLike to str for split_maildir
        maildir_path: str | bytes = (
            os.fspath(input_path) if isinstance(input_path, os.PathLike) else input_path
        )
        out_dir: str | bytes = (
            os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
        )
        return split_maildir(
            maildir_path,
            out_dir,
            start_number=start_number,
            precision=precision,
            keep_cr=keep_cr,
        )
    else:
        if input_path is None:
            # Read from stdin
            input_file: str | bytes | BinaryIO = sys.stdin.buffer
        else:
            # Convert PathLike to str if needed
            if isinstance(input_path, os.PathLike):
                input_file = os.fspath(input_path)
            else:
                # input_path is either str or IO[bytes] here
                input_file = cast(str | BinaryIO, input_path)
        out_dir = (
            os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
        )
        return split_mbox(
            input_file,
            out_dir,
            start_number=start_number,
            precision=precision,
            keep_cr=keep_cr,
            mboxrd=mboxrd,
        )


def mailinfo(
    input_path: str | os.PathLike[str] | IO[bytes] | IO[str] | None = None,
    msg_file: str | os.PathLike[str] | None = None,
    patch_file: str | os.PathLike[str] | None = None,
    keep_subject: bool = False,
    keep_non_patch: bool = False,
    encoding: str | None = None,
    scissors: bool = False,
    message_id: bool = False,
) -> MailinfoResult:
    """Extract patch information from an email message.

    This is similar to git mailinfo.

    Args:
      input_path: Path to email file or file-like object. If None, reads from stdin.
      msg_file: Path to write commit message. If None, message not written to file.
      patch_file: Path to write patch content. If None, patch not written to file.
      keep_subject: If True, keep subject intact without munging (-k)
      keep_non_patch: If True, only strip [PATCH] from brackets (-b)
      encoding: Character encoding to use (default: detect from message)
      scissors: If True, remove everything before scissors line
      message_id: If True, include Message-ID in commit message (-m)

    Returns:
      MailinfoResult with parsed information

    Raises:
      ValueError: If message is malformed or missing required fields
      OSError: If there are issues reading/writing files

    Example:
      >>> result = mailinfo("patch.eml", "msg", "patch")
      >>> print(f"Author: {result.author_name} <{result.author_email}>")
      >>> print(f"Subject: {result.subject}")
    """
    from ..mbox import mailinfo as mbox_mailinfo

    if input_path is None:
        # Read from stdin
        input_file: str | bytes | BinaryIO | TextIO = sys.stdin.buffer
    else:
        # Convert PathLike to str if needed
        if isinstance(input_path, os.PathLike):
            input_file = os.fspath(input_path)
        else:
            # input_path is either str or IO[bytes] or IO[str] here
            input_file = cast(str | BinaryIO | TextIO, input_path)

    result = mbox_mailinfo(
        input_file,
        keep_subject=keep_subject,
        keep_non_patch=keep_non_patch,
        encoding=encoding,
        scissors=scissors,
        message_id=message_id,
    )

    # Write message to file if requested
    if msg_file is not None:
        msg_path = (
            os.fspath(msg_file) if isinstance(msg_file, os.PathLike) else msg_file
        )
        with open(msg_path, "w", encoding=encoding or "utf-8") as f:
            f.write(result.message)
            if not result.message.endswith("\n"):
                f.write("\n")

    # Write patch to file if requested
    if patch_file is not None:
        patch_path = (
            os.fspath(patch_file) if isinstance(patch_file, os.PathLike) else patch_file
        )
        with open(patch_path, "w", encoding=encoding or "utf-8") as f:
            f.write(result.patch)

    return result


def rerere(repo: RepoPath = ".") -> tuple[list[tuple[bytes, str]], list[bytes]]:
    """Record current conflict resolutions and apply known resolutions.

    This reads conflicted files from the working tree and records them
    in the rerere cache. If rerere.autoupdate is enabled and a known
    resolution exists, it will be automatically applied.

    Args:
      repo: Path to the repository

    Returns:
      Tuple of:
        - List of tuples (path, conflict_id) for recorded conflicts
        - List of paths where resolutions were automatically applied
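
    Example (illustrative; assumes a merge with conflicts in a hypothetical repo):
      >>> recorded, applied = rerere("/path/to/repo")
      >>> for path, conflict_id in recorded:
      ...     print(path, conflict_id)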
  6809. """
    from ..rerere import _has_conflict_markers, rerere_auto

    with open_repo_closing(repo) as r:
        # Get conflicts from the index (if available)
        index = r.open_index()
        conflicts = []
        for path, entry in index.items():
            if isinstance(entry, ConflictedIndexEntry):
                conflicts.append(path)

        # Also scan working tree for files with conflict markers.
        # This is needed because merge() doesn't always create ConflictedIndexEntry.
        if not conflicts:
            working_tree = r.path
            for path in index:
                file_path = os.path.join(working_tree, os.fsdecode(path))
                try:
                    with open(file_path, "rb") as f:
                        content = f.read()
                    if _has_conflict_markers(content):
                        conflicts.append(path)
                except (FileNotFoundError, IsADirectoryError, PermissionError):
                    pass

        # Record conflicts and apply known resolutions
        working_tree = r.path
        return rerere_auto(r, working_tree, conflicts)


def rerere_status(repo: RepoPath = ".") -> list[tuple[str, bool]]:
    """Get the status of all conflicts in the rerere cache.

    Args:
      repo: Path to the repository

    Returns:
      List of tuples (conflict_id, has_resolution)
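
    Example (illustrative; the repository path is hypothetical):
      >>> for conflict_id, has_resolution in rerere_status("/path/to/repo"):
      ...     print(conflict_id, "resolved" if has_resolution else "unresolved")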
  6840. """
    from ..rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        return cache.status()


def rerere_diff(
    repo: RepoPath = ".", conflict_id: str | None = None
) -> list[tuple[str, bytes, bytes | None]]:
    """Show differences for recorded rerere conflicts.

    Args:
      repo: Path to the repository
      conflict_id: Optional specific conflict ID to show

    Returns:
      List of tuples (conflict_id, preimage, postimage)
    """
    from ..rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        if conflict_id:
            preimage, postimage = cache.diff(conflict_id)
            if preimage is not None:
                return [(conflict_id, preimage, postimage)]
            return []

        # Show all conflicts
        results = []
        for cid, _has_res in cache.status():
            preimage, postimage = cache.diff(cid)
            if preimage is not None:
                results.append((cid, preimage, postimage))
        return results


def rerere_forget(repo: RepoPath = ".", pathspec: str | bytes | None = None) -> None:
    """Forget recorded rerere resolutions for a pathspec.

    Args:
      repo: Path to the repository
      pathspec: Path to forget. Pathspec matching is not yet implemented:
        passing a pathspec raises NotImplementedError, and omitting it
        forgets all recorded resolutions.
    """
    from ..rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        if pathspec:
            # TODO: Implement pathspec matching
            # For now, we need to track which conflict IDs correspond to which paths
            raise NotImplementedError("Pathspec matching not yet implemented")
        # Forget all conflicts (the case when called with no pathspec after resolving)
        cache.clear()


def rerere_clear(repo: RepoPath = ".") -> None:
    """Clear all recorded rerere resolutions.

    Args:
      repo: Path to the repository
    """
    from ..rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        cache.clear()


def rerere_gc(repo: RepoPath = ".", max_age_days: int = 60) -> None:
    """Garbage collect old rerere resolutions.

    Args:
      repo: Path to the repository
      max_age_days: Maximum age in days for keeping resolutions
    """
    from ..rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        cache.gc(max_age_days)