# porcelain.py -- Porcelain-like layer on top of Dulwich
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * bisect{_start,_bad,_good,_skip,_reset,_log,_replay}
  26. * branch{_create,_delete,_list}
  27. * check_ignore
  28. * checkout
  29. * checkout_branch
  30. * clone
  31. * cone mode{_init, _set, _add}
  32. * commit
  33. * commit_tree
  34. * daemon
  35. * describe
  36. * diff_tree
  37. * fetch
  38. * filter_branch
  39. * for_each_ref
  40. * grep
  41. * init
  42. * interpret_trailers
  43. * ls_files
  44. * ls_remote
  45. * ls_tree
  46. * mailsplit
  47. * merge
  48. * merge_tree
  49. * mv/move
  50. * prune
  51. * pull
  52. * push
  53. * rm
  54. * remote{_add}
  55. * receive_pack
  56. * replace{_create,_delete,_list}
  57. * rerere{_status,_diff,_forget,_clear,_gc}
  58. * reset
  59. * revert
  60. * sparse_checkout
  61. * submodule_add
  62. * submodule_init
  63. * submodule_list
  64. * rev_list
  65. * tag{_create,_delete,_list}
  66. * upload_pack
  67. * update_server_info
  68. * var
  69. * write_commit_graph
  70. * status
  71. * shortlog
  72. * symbolic_ref
  73. * worktree{_add,_list,_remove,_prune,_lock,_unlock,_move}
  74. These functions are meant to behave similarly to the git subcommands.
  75. Differences in behaviour are considered bugs.
  76. Note: one of the consequences of this is that paths tend to be
  77. interpreted relative to the current working directory rather than relative
  78. to the repository root.
  79. Functions should generally accept both unicode strings and bytestrings
  80. """

import datetime
import fnmatch
import logging
import os
import posixpath
import re
import stat
import sys
import time
from collections import namedtuple
from collections.abc import Callable, Iterable, Iterator, Sequence
from collections.abc import Set as AbstractSet
from contextlib import AbstractContextManager, closing, contextmanager
from dataclasses import dataclass
from io import BytesIO, RawIOBase
from pathlib import Path
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    BinaryIO,
    TextIO,
    TypedDict,
    TypeVar,
    cast,
    overload,
)

if sys.version_info >= (3, 12):
    from typing import override
else:
    from typing_extensions import override

from ._typing import Buffer

if TYPE_CHECKING:
    import urllib3

    from .filter_branch import CommitData
    from .gc import GCStats
    from .maintenance import MaintenanceResult

from . import replace_me
from .archive import tar_stream
from .bisect import BisectState
from .client import (
    FetchPackResult,
    LsRemoteResult,
    SendPackResult,
    get_transport_and_path,
)
from .config import Config, ConfigFile, StackedConfig, read_submodules
from .diff_tree import (
    CHANGE_ADD,
    CHANGE_COPY,
    CHANGE_DELETE,
    CHANGE_MODIFY,
    CHANGE_RENAME,
    RENAME_CHANGE_TYPES,
    TreeChange,
    tree_changes,
)
from .errors import SendPackError
from .graph import can_fast_forward
from .ignore import IgnoreFilterManager
from .index import (
    ConflictedIndexEntry,
    Index,
    IndexEntry,
    _fs_to_tree_path,
    blob_from_path_and_stat,
    build_file_from_blob,
    build_index_from_tree,
    get_unstaged_changes,
    index_entry_from_stat,
    symlink,
    update_working_tree,
    validate_path_element_default,
    validate_path_element_hfs,
    validate_path_element_ntfs,
)
from .object_store import BaseObjectStore, tree_lookup_path
from .objects import (
    Blob,
    Commit,
    ObjectID,
    Tag,
    Tree,
    TreeEntry,
    format_timezone,
    parse_timezone,
    pretty_format_tree_entry,
)
from .objectspec import (
    parse_commit,
    parse_object,
    parse_ref,
    parse_reftuples,
    parse_tree,
)
from .pack import UnpackedObject, write_pack_from_container, write_pack_index
from .patch import (
    MailinfoResult,
    get_summary,
    write_commit_patch,
    write_object_diff,
    write_tree_diff,
)
from .protocol import ZERO_SHA, Protocol
from .refs import (
    HEADREF,
    LOCAL_BRANCH_PREFIX,
    LOCAL_NOTES_PREFIX,
    LOCAL_REMOTE_PREFIX,
    LOCAL_REPLACE_PREFIX,
    LOCAL_TAG_PREFIX,
    DictRefsContainer,
    Ref,
    SymrefLoop,
    _import_remote_refs,
    filter_ref_prefix,
    local_branch_name,
    local_replace_name,
    local_tag_name,
    parse_remote_ref,
    shorten_ref_name,
)
from .repo import BaseRepo, Repo, get_user_identity
from .server import (
    FileSystemBackend,
    ReceivePackHandler,
    TCPGitServer,
    UploadPackHandler,
)
from .server import update_server_info as server_update_server_info
from .sparse_patterns import (
    SparseCheckoutConflictError,
    apply_included_paths,
    determine_included_paths,
)
from .trailers import add_trailer_to_message, format_trailers, parse_trailers

# Module level tuple definition for status output
GitStatus = namedtuple("GitStatus", "staged unstaged untracked")

# TypeVar for preserving BaseRepo subclass types
T = TypeVar("T", bound="BaseRepo")

# Type alias for common repository parameter pattern
RepoPath = str | os.PathLike[str] | Repo


class TransportKwargs(TypedDict, total=False):
    """Keyword arguments accepted by get_transport_and_path."""

    operation: str | None
    thin_packs: bool
    report_activity: Callable[[int, str], None] | None
    quiet: bool
    include_tags: bool
    username: str | None
    password: str | None
    key_filename: str | None
    ssh_command: str | None
    pool_manager: "urllib3.PoolManager | None"
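
# Illustrative construction (not from the upstream module; the URL is a
# placeholder): because TransportKwargs is declared with total=False, any
# subset of these keys forms a valid value and can be unpacked into
# get_transport_and_path.
#
#   kwargs: TransportKwargs = {"operation": "fetch", "thin_packs": True}
#   client, path = get_transport_and_path("https://example.com/repo.git", **kwargs)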


@dataclass
class CountObjectsResult:
    """Result of counting objects in a repository.

    Attributes:
      count: Number of loose objects
      size: Total size of loose objects in bytes
      in_pack: Number of objects in pack files
      packs: Number of pack files
      size_pack: Total size of pack files in bytes
    """

    count: int
    size: int
    in_pack: int | None = None
    packs: int | None = None
    size_pack: int | None = None


class NoneStream(RawIOBase):
    """Fallback if stdout or stderr are unavailable, does nothing."""

    def read(self, size: int = -1) -> None:
        """Read from stream (returns None as this is a null stream)."""
        return None

    def readall(self) -> bytes:
        """Read all bytes (returns empty bytes).

        Returns:
          Empty bytes object
        """
        return b""

    if sys.version_info >= (3, 12):

        @override
        def readinto(self, b: Buffer) -> int | None:
            return 0

        @override
        def write(self, b: Buffer) -> int | None:
            return len(cast(bytes, b)) if b else 0

    else:

        @override
        def readinto(self, b: bytearray | memoryview) -> int | None:  # type: ignore[override]
            return 0

        @override
        def write(self, b: bytes | bytearray | memoryview) -> int | None:  # type: ignore[override]
            return len(b) if b else 0


default_bytes_out_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stdout, "buffer", None) or NoneStream()
)
default_bytes_err_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stderr, "buffer", None) or NoneStream()
)
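
# Behavior sketch (illustrative, not part of the upstream module): when
# sys.stdout has no .buffer attribute, the defaults above fall back to a
# NoneStream, which swallows writes and yields nothing on reads, matching the
# methods defined in the class:
#
#   >>> ns = NoneStream()
#   >>> ns.write(b"ignored")
#   7
#   >>> ns.readall()
#   b''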

DEFAULT_ENCODING = "utf-8"


class Error(Exception):
    """Porcelain-based error."""

    def __init__(self, msg: str) -> None:
        """Initialize Error with message."""
        super().__init__(msg)


class RemoteExists(Error):
    """Raised when the remote already exists."""


class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""


class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""


def parse_timezone_format(tz_str: str) -> int:
    """Parse given string and attempt to return a timezone offset.

    Different formats are considered in the following order:

     - Git internal format: <unix timestamp> <timezone offset>
     - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
     - ISO 8601: e.g. 1995-11-20T19:12:08-0500

    Args:
      tz_str: datetime string
    Returns: Timezone offset as integer
    Raises:
      TimezoneFormatError: if timezone information cannot be extracted
    """
    import re

    # Git internal format
    internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
    if re.match(internal_format_pattern, tz_str):
        try:
            tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
            return tz_internal[0]
        except ValueError:
            pass

    # RFC 2822
    import email.utils

    rfc_2822 = email.utils.parsedate_tz(tz_str)
    if rfc_2822 and rfc_2822[9] is not None:
        return rfc_2822[9]

    # ISO 8601
    # Supported offsets:
    # sHHMM, sHH:MM, sHH
    iso_8601_pattern = re.compile(
        "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
    )
    match = re.search(iso_8601_pattern, tz_str)
    total_secs = 0
    if match:
        sign, hours, minutes = match.groups()
        total_secs += int(hours) * 3600
        if minutes:
            total_secs += int(minutes) * 60
        total_secs = -total_secs if sign == "-" else total_secs
        return total_secs

    # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
    raise TimezoneFormatError(tz_str)
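
# Illustrative examples (not part of the upstream module), one per accepted
# format; offsets come back in seconds east of UTC:
#
#   >>> parse_timezone_format("1528968566 +0200")                 # Git internal
#   7200
#   >>> parse_timezone_format("Mon, 20 Nov 1995 19:12:08 -0500")  # RFC 2822
#   -18000
#   >>> parse_timezone_format("1995-11-20T19:12:08+01:00")        # ISO 8601
#   3600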


def get_user_timezones() -> tuple[int, int]:
    """Retrieve local timezone as described in git documentation.

    https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt
    Returns: A tuple containing author timezone, committer timezone.
    """
    local_timezone = time.localtime().tm_gmtoff

    if os.environ.get("GIT_AUTHOR_DATE"):
        author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
    else:
        author_timezone = local_timezone
    if os.environ.get("GIT_COMMITTER_DATE"):
        commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
    else:
        commit_timezone = local_timezone

    return author_timezone, commit_timezone
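
# Illustrative sketch (not part of the upstream module): with GIT_AUTHOR_DATE
# set, only the author offset is taken from the environment; the committer
# offset falls back to the local zone.
#
#   >>> os.environ["GIT_AUTHOR_DATE"] = "1528968566 +0200"
#   >>> author_tz, committer_tz = get_user_timezones()
#   >>> author_tz
#   7200
#   >>> committer_tz == time.localtime().tm_gmtoff   # GIT_COMMITTER_DATE unset
#   True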


@overload
def open_repo(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo(
    path_or_repo: str | os.PathLike[str],
) -> AbstractContextManager[Repo]: ...


def open_repo(
    path_or_repo: str | os.PathLike[str] | T,
) -> AbstractContextManager[T | Repo]:
    """Open an argument that can be a repository or a path for a repository."""
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return Repo(path_or_repo)


@contextmanager
def _noop_context_manager(obj: T) -> Iterator[T]:
    """Context manager that has the same api as closing but does nothing."""
    yield obj


def _get_reflog_message(
    default_message: bytes, explicit_message: bytes | None = None
) -> bytes:
    """Get reflog message, checking GIT_REFLOG_ACTION environment variable.

    Args:
      default_message: Default message to use if no explicit message or env var
      explicit_message: Explicit message passed as argument (takes precedence)

    Returns:
      The reflog message with priority:
      1. explicit_message if provided
      2. GIT_REFLOG_ACTION environment variable if set
      3. default_message otherwise
    """
    if explicit_message is not None:
        return explicit_message
    env_action = os.environ.get("GIT_REFLOG_ACTION")
    if env_action is not None:
        return env_action.encode("utf-8")
    return default_message
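
# Precedence sketch (illustrative, not part of the upstream module):
#
#   >>> _get_reflog_message(b"commit: initial")
#   b'commit: initial'
#   >>> os.environ["GIT_REFLOG_ACTION"] = "pull"
#   >>> _get_reflog_message(b"commit: initial")          # env var overrides default
#   b'pull'
#   >>> _get_reflog_message(b"commit: initial", b"merge")  # explicit wins over both
#   b'merge'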


@overload
def open_repo_closing(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo_closing(
    path_or_repo: str | bytes | os.PathLike[str],
) -> AbstractContextManager[Repo]: ...


def open_repo_closing(
    path_or_repo: str | bytes | os.PathLike[str] | T,
) -> AbstractContextManager[T | Repo]:
    """Open an argument that can be a repository or a path for a repository.

    Returns a context manager that will close the repo on exit if the argument
    is a path, else does nothing if the argument is a repo.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
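
# Typical usage (illustrative; "/path/to/repo" is a placeholder): a path is
# opened here and closed when the block exits, while an already-open Repo is
# passed through untouched and stays open for the caller.
#
#   with open_repo_closing("/path/to/repo") as r:
#       head = r.head()          # repo closed at block exit
#
#   existing = Repo("/path/to/repo")
#   with open_repo_closing(existing) as r:
#       head = r.head()          # no-op context manager; caller still owns it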


def path_to_tree_path(
    repopath: str | bytes | os.PathLike[str],
    path: str | bytes | os.PathLike[str],
    tree_encoding: str = DEFAULT_ENCODING,
) -> bytes:
    """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding to use for tree paths
    Returns: A path formatted for use in e.g. an index
    """
    # Resolve might return a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    # Convert bytes paths to str for Path
    if isinstance(path, bytes):
        path = os.fsdecode(path)
    path = Path(path)
    resolved_path = path.resolve()

    # Resolve and abspath seem to behave differently regarding symlinks;
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    # Convert bytes paths to str for Path
    if isinstance(repopath, bytes):
        repopath = os.fsdecode(repopath)
    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise
    if sys.platform == "win32":
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
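
# Illustrative example (hypothetical paths): the result is bytes, relative to
# the repository root, and uses forward slashes even on Windows.
#
#   >>> path_to_tree_path("/home/user/repo", "/home/user/repo/src/main.py")
#   b'src/main.py'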


class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha: bytes, new_sha: bytes) -> None:
        """Initialize DivergedBranches error with current and new SHA values."""
        self.current_sha = current_sha
        self.new_sha = new_sha


def check_diverged(repo: BaseRepo, current_sha: ObjectID, new_sha: ObjectID) -> None:
    """Check if updating to a sha can be done with fast forwarding.

    Args:
      repo: Repository object
      current_sha: Current head sha
      new_sha: New head sha
    """
    try:
        can = can_fast_forward(repo, current_sha, new_sha)
    except KeyError:
        can = False
    if not can:
        raise DivergedBranches(current_sha, new_sha)
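
# Illustrative handling sketch (not part of the upstream module): callers such
# as pull/push helpers can catch DivergedBranches and inspect the two SHAs.
#
#   try:
#       check_diverged(repo, current_sha, new_sha)
#   except DivergedBranches as e:
#       print(f"cannot fast-forward {e.current_sha!r} -> {e.new_sha!r}")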


def archive(
    repo: str | BaseRepo,
    committish: str | bytes | Commit | Tag | None = None,
    outstream: BinaryIO | RawIOBase = default_bytes_out_stream,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
) -> None:
    """Create an archive.

    Args:
      repo: Path of repository for which to generate an archive.
      committish: Commit SHA1 or ref to use
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
    """
    if committish is None:
        committish = "HEAD"
    with open_repo_closing(repo) as repo_obj:
        c = parse_commit(repo_obj, committish)
        tree = repo_obj.object_store[c.tree]
        assert isinstance(tree, Tree)
        for chunk in tar_stream(repo_obj.object_store, tree, c.commit_time):
            outstream.write(chunk)
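
# Usage sketch (illustrative; file names are placeholders): any binary
# file-like object works as outstream, so the tar stream can go straight
# to disk instead of stdout.
#
#   with open("snapshot.tar", "wb") as f:
#       archive("/path/to/repo", committish="HEAD", outstream=f)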


def update_server_info(repo: RepoPath = ".") -> None:
    """Update server info files for a repository.

    Args:
      repo: path to the repository
    """
    with open_repo_closing(repo) as r:
        server_update_server_info(r)


def write_commit_graph(repo: RepoPath = ".", reachable: bool = True) -> None:
    """Write a commit graph file for a repository.

    Args:
      repo: path to the repository or a Repo object
      reachable: if True, include all commits reachable from refs.
        if False, only include direct ref targets.
    """
    with open_repo_closing(repo) as r:
        # Get all refs
        refs = list(r.refs.as_dict().values())
        if refs:
            r.object_store.write_commit_graph(refs, reachable=reachable)


def symbolic_ref(repo: RepoPath, ref_name: str | bytes, force: bool = False) -> None:
    """Set git symbolic ref into HEAD.

    Args:
      repo: path to the repository
      ref_name: short name of the new ref
      force: force setting without checking if it exists in refs/heads
    """
    with open_repo_closing(repo) as repo_obj:
        ref_path = _make_branch_ref(ref_name)
        if not force and ref_path not in repo_obj.refs.keys():
            ref_name_str = (
                ref_name.decode("utf-8", "replace")
                if isinstance(ref_name, bytes)
                else ref_name
            )
            raise Error(f"fatal: ref `{ref_name_str}` is not a ref")
        repo_obj.refs.set_symbolic_ref(HEADREF, ref_path)
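
# Usage sketch (illustrative; the path is a placeholder): point HEAD at an
# existing local branch, or force it for a branch that does not exist yet.
#
#   symbolic_ref("/path/to/repo", "main")             # HEAD -> refs/heads/main
#   symbolic_ref("/path/to/repo", "wip", force=True)  # skip refs/heads check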


def pack_refs(repo: RepoPath, all: bool = False) -> None:
    """Pack loose references into packed-refs file."""
    with open_repo_closing(repo) as repo_obj:
        repo_obj.refs.pack_refs(all=all)


def _get_variables(repo: RepoPath = ".") -> dict[str, str]:
    """Internal function to get all Git logical variables.

    Args:
      repo: Path to the repository
    Returns:
      A dictionary of all logical variables with values
    """
    with open_repo_closing(repo) as repo_obj:
        config = repo_obj.get_config_stack()

        # Define callbacks for each logical variable
        def get_author_ident() -> str | None:
            """Get GIT_AUTHOR_IDENT."""
            try:
                author_identity = get_user_identity(config, kind="AUTHOR")
                author_tz, _ = get_user_timezones()
                timestamp = int(time.time())
                return f"{author_identity.decode('utf-8', 'replace')} {timestamp} {author_tz:+05d}"
            except Exception:
                return None

        def get_committer_ident() -> str | None:
            """Get GIT_COMMITTER_IDENT."""
            try:
                committer_identity = get_user_identity(config, kind="COMMITTER")
                _, committer_tz = get_user_timezones()
                timestamp = int(time.time())
                return f"{committer_identity.decode('utf-8', 'replace')} {timestamp} {committer_tz:+05d}"
            except Exception:
                return None

        def get_editor() -> str | None:
            """Get GIT_EDITOR."""
            editor = os.environ.get("GIT_EDITOR")
            if editor is None:
                try:
                    editor_bytes = config.get(("core",), "editor")
                    editor = editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    editor = os.environ.get("VISUAL") or os.environ.get("EDITOR")
            return editor

        def get_sequence_editor() -> str | None:
            """Get GIT_SEQUENCE_EDITOR."""
            sequence_editor = os.environ.get("GIT_SEQUENCE_EDITOR")
            if sequence_editor is None:
                try:
                    seq_editor_bytes = config.get(("sequence",), "editor")
                    sequence_editor = seq_editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    # Falls back to GIT_EDITOR if not set
                    sequence_editor = get_editor()
            return sequence_editor

        def get_pager() -> str | None:
            """Get GIT_PAGER."""
            pager = os.environ.get("GIT_PAGER")
            if pager is None:
                try:
                    pager_bytes = config.get(("core",), "pager")
                    pager = pager_bytes.decode("utf-8", "replace")
                except KeyError:
                    pager = os.environ.get("PAGER")
            return pager

        def get_default_branch() -> str:
            """Get GIT_DEFAULT_BRANCH."""
            try:
                default_branch_bytes = config.get(("init",), "defaultBranch")
                return default_branch_bytes.decode("utf-8", "replace")
            except KeyError:
                # Git's default is "master"
                return "master"

        # Dictionary mapping variable names to their getter callbacks
        variable_callbacks: dict[str, Callable[[], str | None]] = {
            "GIT_AUTHOR_IDENT": get_author_ident,
            "GIT_COMMITTER_IDENT": get_committer_ident,
            "GIT_EDITOR": get_editor,
            "GIT_SEQUENCE_EDITOR": get_sequence_editor,
            "GIT_PAGER": get_pager,
            "GIT_DEFAULT_BRANCH": get_default_branch,
        }

        # Build the variables dictionary by calling callbacks
        variables: dict[str, str] = {}
        for var_name, callback in variable_callbacks.items():
            value = callback()
            if value is not None:
                variables[var_name] = value

        return variables


def var_list(repo: RepoPath = ".") -> dict[str, str]:
    """List all Git logical variables.

    Args:
      repo: Path to the repository

    Returns:
      A dictionary of all logical variables with their values
    """
    return _get_variables(repo)


def var(repo: RepoPath = ".", variable: str = "GIT_AUTHOR_IDENT") -> str:
    """Get the value of a specific Git logical variable.

    Args:
      repo: Path to the repository
      variable: The variable to query (e.g., 'GIT_AUTHOR_IDENT')

    Returns:
      The value of the requested variable as a string

    Raises:
      KeyError: If the requested variable has no value
    """
    variables = _get_variables(repo)
    if variable in variables:
        return variables[variable]
    else:
        raise KeyError(f"Variable {variable} has no value")
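

# Example (illustrative sketch): querying logical variables, mirroring
# ``git var``; the repository path is hypothetical.
#
#   print(var("/path/to/repo", "GIT_AUTHOR_IDENT"))
#   for name, value in var_list("/path/to/repo").items():
#       print(f"{name}={value}")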


def commit(
    repo: RepoPath = ".",
    message: str | bytes | Callable[[Any, Commit], bytes] | None = None,
    author: str | bytes | None = None,
    author_timezone: int | None = None,
    committer: str | bytes | None = None,
    commit_timezone: int | None = None,
    encoding: bytes | None = None,
    no_verify: bool = False,
    signoff: bool | None = None,
    all: bool = False,
    amend: bool = False,
    sign: bool | None = None,
) -> bytes:
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message (string/bytes or callable that takes
        (repo, commit) and returns bytes)
      author: Optional author name and email
      author_timezone: Author timestamp timezone
      committer: Optional committer name and email
      commit_timezone: Commit timestamp timezone
      encoding: Encoding to use for commit message
      no_verify: Skip pre-commit and commit-msg hooks
      signoff: Add Signed-off-by line to commit message. If None, uses format.signoff config.
      all: Automatically stage all tracked files that have been modified
      amend: Replace the tip of the current branch by creating a new commit
      sign: GPG sign the commit. If None, uses commit.gpgsign config.
        If True, signs with default GPG key. If False, does not sign.

    Returns: SHA1 of the new commit
    """
    encoding_str = encoding.decode("ascii") if encoding else DEFAULT_ENCODING
    if isinstance(message, str):
        message = message.encode(encoding_str)
    if isinstance(author, str):
        author = author.encode(encoding_str)
    if isinstance(committer, str):
        committer = committer.encode(encoding_str)
    local_timezone = get_user_timezones()
    if author_timezone is None:
        author_timezone = local_timezone[0]
    if commit_timezone is None:
        commit_timezone = local_timezone[1]

    with open_repo_closing(repo) as r:
        # Handle amend logic
        merge_heads = None
        if amend:
            try:
                head_commit = r[r.head()]
                assert isinstance(head_commit, Commit)
            except KeyError:
                raise ValueError("Cannot amend: no existing commit found")

            # If message not provided, use the message from the current HEAD
            if message is None:
                message = head_commit.message
            # If author not provided, use the author from the current HEAD
            if author is None:
                author = head_commit.author
            if author_timezone is None:
                author_timezone = head_commit.author_timezone
            # Use the parent(s) of the current HEAD as our parent(s)
            merge_heads = list(head_commit.parents)

        # If -a flag is used, stage all modified tracked files
        if all:
            index = r.open_index()
            normalizer = r.get_blob_normalizer()

            # Create a wrapper that handles the bytes -> Blob conversion
            if normalizer is not None:

                def filter_callback(data: bytes, path: bytes) -> bytes:
                    blob = Blob()
                    blob.data = data
                    normalized_blob = normalizer.checkin_normalize(blob, path)
                    data_bytes: bytes = normalized_blob.data
                    return data_bytes

            else:
                filter_callback = None

            unstaged_changes = list(
                get_unstaged_changes(index, r.path, filter_callback)
            )
            if unstaged_changes:
                # Convert bytes paths to strings for add function
                modified_files: list[str | bytes | os.PathLike[str]] = []
                for path in unstaged_changes:
                    if isinstance(path, bytes):
                        modified_files.append(path.decode())
                    else:
                        modified_files.append(path)
                add(r, paths=modified_files)

        # For amend, create dangling commit to avoid adding current HEAD as parent
        if amend:
            commit_sha = r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                signoff=signoff,
                merge_heads=merge_heads,
                ref=None,
            )
            # Update HEAD to point to the new commit with reflog message
            try:
                old_head = r.refs[HEADREF]
            except KeyError:
                old_head = None
            # Get the actual commit message from the created commit
            commit_obj = r[commit_sha]
            assert isinstance(commit_obj, Commit)
            commit_message = commit_obj.message
            default_message = b"commit (amend): " + commit_message
            # Truncate message if too long for reflog
            if len(default_message) > 100:
                default_message = default_message[:97] + b"..."
            reflog_message = _get_reflog_message(default_message)
            r.refs.set_if_equals(HEADREF, old_head, commit_sha, message=reflog_message)
            return commit_sha
        else:
            return r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                signoff=signoff,
                merge_heads=merge_heads,
            )
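

# Example (illustrative sketch): committing staged changes, then amending the
# message; the path and identities are hypothetical.
#
#   sha = commit("/path/to/repo", message=b"Initial import",
#                author=b"Alice <alice@example.com>")
#   sha = commit("/path/to/repo", message=b"Initial import (fixed)", amend=True)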


def commit_tree(
    repo: RepoPath,
    tree: ObjectID,
    message: str | bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> ObjectID:
    """Create a new commit object.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Commit message
      author: Optional author name and email
      committer: Optional committer name and email
    """
    with open_repo_closing(repo) as r:
        if isinstance(message, str):
            message = message.encode(DEFAULT_ENCODING)
        return r.get_worktree().commit(
            message=message, tree=tree, committer=committer, author=author
        )
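

# Example (illustrative sketch): creating a commit from an existing tree id,
# here the tree of the current HEAD commit; all values are hypothetical.
#
#   with open_repo_closing("/path/to/repo") as r:
#       head = r[r.head()]
#       new_sha = commit_tree(r, head.tree, message=b"Rebuilt from tree")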


def interpret_trailers(
    message: str | bytes,
    *,
    trailers: list[tuple[str, str]] | None = None,
    trim_empty: bool = False,
    only_trailers: bool = False,
    only_input: bool = False,
    unfold: bool = False,
    parse: bool = False,
    where: str = "end",
    if_exists: str = "addIfDifferentNeighbor",
    if_missing: str = "add",
    separators: str = ":",
) -> bytes:
    r"""Parse and manipulate trailers in a commit message.

    This function implements the functionality of ``git interpret-trailers``,
    allowing parsing and manipulation of structured metadata (trailers) in
    commit messages.

    Trailers are key-value pairs at the end of commit messages, formatted like:

        Signed-off-by: Alice <alice@example.com>
        Reviewed-by: Bob <bob@example.com>

    Args:
      message: The commit message (string or bytes)
      trailers: List of (key, value) tuples to add as new trailers
      trim_empty: Remove trailers with empty values
      only_trailers: Output only the trailers, not the message body
      only_input: Don't add new trailers, only parse existing ones
      unfold: Join multiline trailer values into a single line
      parse: Shorthand for --only-trailers --only-input --unfold
      where: Where to add new trailers ('end', 'start', 'after', 'before')
      if_exists: How to handle duplicate keys
        - 'add': Always add
        - 'replace': Replace all existing
        - 'addIfDifferent': Add only if value differs from all existing
        - 'addIfDifferentNeighbor': Add only if value differs from neighbors
        - 'doNothing': Don't add if key exists
      if_missing: What to do if key doesn't exist ('add' or 'doNothing')
      separators: Valid separator characters (default ':')

    Returns:
      The processed message as bytes

    Examples:
      >>> msg = b"Subject\\n\\nBody text\\n"
      >>> interpret_trailers(msg, trailers=[("Signed-off-by", "Alice <alice@example.com>")])
      b'Subject\\n\\nBody text\\n\\nSigned-off-by: Alice <alice@example.com>\\n'

      >>> msg = b"Subject\\n\\nSigned-off-by: Alice\\n"
      >>> interpret_trailers(msg, only_trailers=True)
      b'Signed-off-by: Alice\\n'
    """
    # Handle --parse shorthand
    if parse:
        only_trailers = True
        only_input = True
        unfold = True

    # Convert message to bytes
    if isinstance(message, str):
        message_bytes = message.encode("utf-8")
    else:
        message_bytes = message

    # Parse existing trailers
    _message_body, parsed_trailers = parse_trailers(message_bytes, separators)

    # Apply unfold if requested
    if unfold:
        for trailer in parsed_trailers:
            # Replace newlines and multiple spaces with single space
            trailer.value = " ".join(trailer.value.split())

    # Apply trim_empty if requested
    if trim_empty:
        parsed_trailers = [t for t in parsed_trailers if t.value.strip()]

    # Add new trailers if requested and not only_input
    if not only_input and trailers:
        for key, value in trailers:
            message_bytes = add_trailer_to_message(
                message_bytes,
                key,
                value,
                separators[0],  # Use first separator as default
                where=where,
                if_exists=if_exists,
                if_missing=if_missing,
            )
        # Re-parse to get updated trailers for output
        if only_trailers:
            _message_body, parsed_trailers = parse_trailers(message_bytes, separators)

    # Return based on only_trailers flag
    if only_trailers:
        return format_trailers(parsed_trailers)
    else:
        return message_bytes


def stripspace(
    text: str | bytes,
    *,
    strip_comments: bool = False,
    comment_char: str = "#",
    comment_lines: bool = False,
) -> bytes:
    r"""Strip unnecessary whitespace from text.

    This function implements the functionality of ``git stripspace``, commonly
    used to clean up commit messages and other text content.

    Args:
      text: The text to process (string or bytes)
      strip_comments: If True, remove lines that begin with comment_char
      comment_char: The comment character to use (default: "#")
      comment_lines: If True, prepend comment_char to each line

    Returns:
      The processed text as bytes

    The function performs the following operations:

    1. If comment_lines is True, prepend comment_char + space to each line
    2. Strip trailing whitespace from each line
    3. If strip_comments is True, remove lines starting with comment_char
    4. Collapse multiple consecutive blank lines into a single blank line
    5. Remove leading blank lines
    6. Remove trailing blank lines
    7. Ensure the text ends with a newline (unless empty)

    Examples:
      >>> stripspace(b" hello \\n\\n\\nworld \\n\\n")
      b'hello\\n\\nworld\\n'

      >>> stripspace(b"# comment\\ntext\\n", strip_comments=True)
      b'text\\n'

      >>> stripspace(b"line\\n", comment_lines=True)
      b'# line\\n'
    """
    from .stripspace import stripspace as _stripspace

    # Convert text to bytes
    if isinstance(text, str):
        text_bytes = text.encode("utf-8")
    else:
        text_bytes = text

    # Convert comment_char to bytes
    comment_char_bytes = (
        comment_char.encode("utf-8") if isinstance(comment_char, str) else comment_char
    )

    return _stripspace(
        text_bytes,
        strip_comments=strip_comments,
        comment_char=comment_char_bytes,
        comment_lines=comment_lines,
    )


def init(
    path: str | os.PathLike[str] = ".",
    *,
    bare: bool = False,
    symlinks: bool | None = None,
) -> Repo:
    """Create a new git repository.

    Args:
      path: Path to repository.
      bare: Whether to create a bare repository.
      symlinks: Whether to create actual symlinks (defaults to autodetect)

    Returns: A Repo instance
    """
    if not os.path.exists(path):
        os.mkdir(path)

    if bare:
        return Repo.init_bare(path)
    else:
        return Repo.init(path, symlinks=symlinks)
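

# Example (illustrative sketch): initializing a working repository and a bare
# one; paths are hypothetical.
#
#   r = init("/tmp/newrepo")
#   server = init("/srv/git/project.git", bare=True)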


def _filter_transport_kwargs(**kwargs: object) -> TransportKwargs:
    """Filter kwargs to only include parameters accepted by get_transport_and_path.

    Args:
      **kwargs: Arbitrary keyword arguments

    Returns:
      Dictionary containing only the kwargs that get_transport_and_path accepts
    """
    valid_params = {
        "operation",
        "thin_packs",
        "report_activity",
        "quiet",
        "include_tags",
        "username",
        "password",
        "key_filename",
        "ssh_command",
        "pool_manager",
    }
    return cast(TransportKwargs, {k: v for k, v in kwargs.items() if k in valid_params})


def clone(
    source: str | bytes | Repo,
    target: str | os.PathLike[str] | None = None,
    bare: bool = False,
    checkout: bool | None = None,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    outstream: BinaryIO | None = None,
    origin: str | None = "origin",
    depth: int | None = None,
    branch: str | bytes | None = None,
    config: Config | None = None,
    filter_spec: str | None = None,
    protocol_version: int | None = None,
    recurse_submodules: bool = False,
    **kwargs: str | bytes | Sequence[str | bytes],
) -> Repo:
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules
      **kwargs: Additional keyword arguments including refspecs to fetch.
        Can be a bytestring, a string, or a list of bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
        # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()

    if checkout is None:
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        if isinstance(source, Repo):
            raise ValueError("target must be specified when cloning from a Repo object")
        elif isinstance(source, bytes):
            target = source.split(b"/")[-1].decode()
        else:
            target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    mkdir = not os.path.exists(target)

    if isinstance(source, Repo):
        # For direct repo cloning, use LocalGitClient
        from .client import GitClient, LocalGitClient

        client: GitClient = LocalGitClient(config=config)
        path = source.path
    else:
        source_str = source.decode() if isinstance(source, bytes) else source
        transport_kwargs = _filter_transport_kwargs(**kwargs)
        (client, path) = get_transport_and_path(
            source_str, config=config, **transport_kwargs
        )

    filter_spec_bytes: bytes | None = None
    if filter_spec:
        filter_spec_bytes = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        str(target),  # Convert PathLike to str
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch.decode() if branch else None,  # Convert bytes to str
        progress=lambda data: (errstream.write(data), None)[1],
        depth=depth,
        filter_spec=filter_spec_bytes,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True, recursive=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing
            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
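

# Example (illustrative sketch): shallow-cloning a single branch; the URL and
# target path are hypothetical.
#
#   r = clone("https://example.com/project.git", "/tmp/project",
#             depth=1, branch="main")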


def add(
    repo: str | os.PathLike[str] | Repo = ".",
    paths: Sequence[str | bytes | os.PathLike[str]]
    | str
    | bytes
    | os.PathLike[str]
    | None = None,
) -> tuple[list[str], set[str]]:
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files
        from the repository root (mimicking 'git add -A' behavior).

    Returns: Tuple with list of added files and set of ignored files

    If the repository contains ignored directories, the returned set will
    contain the path to an ignored directory (with trailing slash). Individual
    files within ignored directories will not be returned.

    Note: When paths=None, this function adds all untracked and modified files
    from the entire repository, mimicking 'git add -A' behavior.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()

        if normalizer is not None:

            def filter_callback(data: bytes, path: bytes) -> bytes:
                blob = Blob()
                blob.data = data
                normalized_blob = normalizer.checkin_normalize(blob, path)
                data_bytes: bytes = normalized_blob.data
                return data_bytes

        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)
        all_unstaged_paths = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        if not paths:
            # When no paths specified, add all untracked and modified files from repo root
            paths = [str(repo_path)]

        relpaths = []
        if isinstance(paths, (str, bytes, os.PathLike)):
            paths = [paths]
        for p in paths:
            # Handle bytes paths by decoding them
            if isinstance(p, bytes):
                p = p.decode("utf-8")
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path

            # Don't resolve symlinks completely - only resolve the parent directory
            # to avoid issues when symlinks point outside the repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()

            try:
                relpath = str(resolved_path.relative_to(repo_path)).replace(os.sep, "/")
            except ValueError as e:
                # Path is not within the repository
                p_str = p.decode() if isinstance(p, bytes) else str(p)
                raise ValueError(
                    f"Path {p_str} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue

                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)

        r.get_worktree().stage(relpaths)
    return (relpaths, ignored)
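

# Example (illustrative sketch): staging one file, then everything; the paths
# are hypothetical.
#
#   added, ignored = add("/path/to/repo", paths=["src/main.py"])
#   added, ignored = add("/path/to/repo")  # stage all changes, like git add -A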


def _is_subdir(
    subdir: str | os.PathLike[str], parentdir: str | os.PathLike[str]
) -> bool:
    """Check whether subdir is parentdir or a subdir of parentdir.

    If parentdir or subdir is a relative path, it will be disambiguated
    relative to the pwd.
    """
    parentdir_abs = os.path.realpath(parentdir) + os.path.sep
    subdir_abs = os.path.realpath(subdir) + os.path.sep
    return subdir_abs.startswith(parentdir_abs)


# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(
    repo: str | os.PathLike[str] | Repo = ".",
    target_dir: str | os.PathLike[str] | None = None,
) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        config.get_boolean((b"clean",), b"requireForce", True)

        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            # target_dir and r.path are both str, so ap must be str
            assert isinstance(ap, str)
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                if not is_tracked and not is_ignored:
                    os.remove(ap)
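

# Example (illustrative sketch): removing untracked files under a build
# directory only; the paths are hypothetical.
#
#   clean("/path/to/repo", target_dir="/path/to/repo/build")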


def remove(
    repo: str | os.PathLike[str] | Repo = ".",
    paths: Sequence[str | bytes | os.PathLike[str]] = [],
    cached: bool = False,
) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the repository root.
      cached: Only remove from index, not from working directory
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        blob_normalizer = r.get_blob_normalizer()
        for p in paths:
            # If path is absolute, use it as-is. Otherwise, treat it as relative to repo
            p_str = os.fsdecode(p) if isinstance(p, bytes) else str(p)
            if os.path.isabs(p_str):
                full_path = p_str
            else:
                # Treat relative paths as relative to the repository root
                full_path = os.path.join(r.path, p_str)
            tree_path = path_to_tree_path(r.path, full_path)
            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)
            try:
                entry = index[tree_path]
                if isinstance(entry, ConflictedIndexEntry):
                    raise Error(f"{p_str} has conflicts in the index")
                index_sha = entry.sha
            except KeyError as exc:
                raise Error(f"{p_str} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                        # Apply checkin normalization to compare apples to apples
                        if blob_normalizer is not None:
                            blob = blob_normalizer.checkin_normalize(blob, tree_path)
                    except OSError:
                        pass
                    else:
                        try:
                            head_commit = r[r.head()]
                            assert isinstance(head_commit, Commit)
                            committed_sha = tree_lookup_path(
                                r.__getitem__, head_commit.tree, tree_path
                            )[1]
                        except KeyError:
                            committed_sha = None

                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p_str}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p_str}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()


rm = remove
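

# Example (illustrative sketch): untracking a file while keeping it on disk;
# the paths are hypothetical.
#
#   remove("/path/to/repo", paths=["config/local.ini"], cached=True)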


def mv(
    repo: str | os.PathLike[str] | Repo,
    source: str | bytes | os.PathLike[str],
    destination: str | bytes | os.PathLike[str],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination already
        exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)
        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)
        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)
        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()


move = mv


def commit_decode(
    commit: Commit, contents: bytes, default_encoding: str = DEFAULT_ENCODING
) -> str:
    """Decode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.decode(encoding, "replace")


def commit_encode(
    commit: Commit, contents: str, default_encoding: str = DEFAULT_ENCODING
) -> bytes:
    """Encode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.encode(encoding)


def print_commit(
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a human-readable commit log entry.

    Args:
      commit: A `Commit` object
      decode: Function to decode commit data
      outstream: A stream file to write to
    """
    outstream.write("-" * 50 + "\n")
    outstream.write("commit: " + commit.id.decode("ascii") + "\n")
    if len(commit.parents) > 1:
        outstream.write(
            "merge: "
            + "...".join([c.decode("ascii") for c in commit.parents[1:]])
            + "\n"
        )
    outstream.write("Author: " + decode(commit.author) + "\n")
    if commit.author != commit.committer:
        outstream.write("Committer: " + decode(commit.committer) + "\n")

    time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(commit.author_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    if commit.message:
        outstream.write("\n")
        outstream.write(decode(commit.message) + "\n")
        outstream.write("\n")


def print_tag(
    tag: Tag, decode: Callable[[bytes], str], outstream: TextIO = sys.stdout
) -> None:
    """Write a human-readable tag.

    Args:
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream to write to
    """
    outstream.write("Tagger: " + decode(tag.tagger) + "\n")
    time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(tag.message))
    outstream.write("\n")


def show_blob(
    repo: RepoPath,
    blob: Blob,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a blob to a stream.

    Args:
      repo: A `Repo` object
      blob: A `Blob` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write(decode(blob.data))


def show_commit(
    repo: RepoPath,
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    from .diff import ColorizedDiffStream

    # Create a wrapper for ColorizedDiffStream to handle string/bytes conversion
    class _StreamWrapper:
        def __init__(self, stream: "ColorizedDiffStream") -> None:
            self.stream = stream

        def write(self, data: str | bytes) -> None:
            if isinstance(data, str):
                # Convert string to bytes for ColorizedDiffStream
                self.stream.write(data.encode("utf-8"))
            else:
                self.stream.write(data)

    with open_repo_closing(repo) as r:
        if commit.parents:
            parent_commit = r[commit.parents[0]]
            assert isinstance(parent_commit, Commit)
            base_tree = parent_commit.tree
        else:
            base_tree = None

        # Use wrapper for ColorizedDiffStream, direct stream for others
        if isinstance(outstream, ColorizedDiffStream):
            wrapped_stream = _StreamWrapper(outstream)
            print_commit(commit, decode=decode, outstream=wrapped_stream)
            # Write diff directly to the ColorizedDiffStream as bytes,
            # diffing against the first parent's tree
            write_tree_diff(
                outstream,
                r.object_store,
                base_tree,
                commit.tree,
            )
        else:
            print_commit(commit, decode=decode, outstream=outstream)
            # Traditional path: buffer diff and write as decoded text
            diffstream = BytesIO()
            write_tree_diff(diffstream, r.object_store, base_tree, commit.tree)
            diffstream.seek(0)
            outstream.write(commit_decode(commit, diffstream.getvalue()))


def show_tree(
    repo: RepoPath,
    tree: Tree,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tree to a stream.

    Args:
      repo: A `Repo` object
      tree: A `Tree` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    for n in tree:
        outstream.write(decode(n) + "\n")


def show_tag(
    repo: RepoPath,
    tag: Tag,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        print_tag(tag, decode, outstream)
        obj = r[tag.object[1]]
        assert isinstance(obj, (Tree, Blob, Commit, Tag))
        show_object(repo, obj, decode, outstream)


def show_object(
    repo: RepoPath,
    obj: Tree | Blob | Commit | Tag,
    decode: Callable[[bytes], str],
    outstream: TextIO,
) -> None:
    """Show details of a git object."""
    handlers: dict[bytes, Callable[[RepoPath, Any, Any, TextIO], None]] = {
        b"tree": show_tree,
        b"blob": show_blob,
        b"commit": show_commit,
        b"tag": show_tag,
    }
    handler = handlers.get(obj.type_name)
    if handler is None:
        raise ValueError(f"Unknown object type: {obj.type_name.decode()}")
    handler(repo, obj, decode, outstream)


def print_name_status(changes: Iterator[TreeChange]) -> Iterator[str]:
    """Print a simple status summary, listing changed files."""
    for change in changes:
        if not change:
            continue
        if isinstance(change, list):
            change = change[0]
        if change.type == CHANGE_ADD:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "A"
        elif change.type == CHANGE_DELETE:
            assert change.old is not None
            path1 = change.old.path
            assert path1 is not None
            path2 = b""
            kind = "D"
        elif change.type == CHANGE_MODIFY:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "M"
        elif change.type in RENAME_CHANGE_TYPES:
            assert change.old is not None and change.new is not None
            path1 = change.old.path
            assert path1 is not None
            path2_opt = change.new.path
            assert path2_opt is not None
            path2 = path2_opt
            if change.type == CHANGE_RENAME:
                kind = "R"
            elif change.type == CHANGE_COPY:
                kind = "C"
        path1_str = (
            path1.decode("utf-8", errors="replace")
            if isinstance(path1, bytes)
            else path1
        )
        path2_str = (
            path2.decode("utf-8", errors="replace")
            if isinstance(path2, bytes)
            else path2
        )
        yield f"{kind:<8}{path1_str:<20}{path2_str:<20}"


def log(
    repo: RepoPath = ".",
    paths: Sequence[str | bytes] | None = None,
    outstream: TextIO = sys.stdout,
    max_entries: int | None = None,
    reverse: bool = False,
    name_status: bool = False,
) -> None:
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      max_entries: Optional maximum number of entries to display
      reverse: Reverse order in which entries are printed
      name_status: Print name status
    """
    with open_repo_closing(repo) as r:
        try:
            include = [r.head()]
        except KeyError:
            include = []
        # Convert paths to bytes if needed
        paths_bytes = None
        if paths:
            paths_bytes = [p.encode() if isinstance(p, str) else p for p in paths]
        walker = r.get_walker(
            include=include, max_entries=max_entries, paths=paths_bytes, reverse=reverse
        )
        for entry in walker:

            def decode_wrapper(x: bytes) -> str:
                return commit_decode(entry.commit, x)

            print_commit(entry.commit, decode_wrapper, outstream)
            if name_status:
                outstream.writelines(
                    [
                        line + "\n"
                        for line in print_name_status(
                            cast(Iterator[TreeChange], entry.changes())
                        )
                    ]
                )
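

# Example (illustrative sketch): capturing the five most recent log entries
# as text; the repository path is hypothetical.
#
#   from io import StringIO
#   buf = StringIO()
#   log("/path/to/repo", outstream=buf, max_entries=5)
#   print(buf.getvalue())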


# TODO(jelmer): better default for encoding?
def show(
    repo: RepoPath = ".",
    objects: Sequence[str | bytes] | None = None,
    outstream: TextIO = sys.stdout,
    default_encoding: str = DEFAULT_ENCODING,
) -> None:
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the
        commit
    """
    if objects is None:
        objects = ["HEAD"]
    if isinstance(objects, (str, bytes)):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):

                def decode(x: bytes) -> str:
                    return commit_decode(o, x, default_encoding)

            else:

                def decode(x: bytes) -> str:
                    return x.decode(default_encoding)

            assert isinstance(o, (Tree, Blob, Commit, Tag))
            show_object(r, o, decode, outstream)


def diff_tree(
    repo: RepoPath,
    old_tree: str | bytes | Tree,
    new_tree: str | bytes | Tree,
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Compares the content and mode of blobs found via two tree objects.

    Args:
      repo: Path to repository
      old_tree: Id of old tree
      new_tree: Id of new tree
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        if isinstance(old_tree, Tree):
            old_tree_id: ObjectID | None = old_tree.id
        elif isinstance(old_tree, str):
            old_tree_id = ObjectID(old_tree.encode())
        else:
            old_tree_id = ObjectID(old_tree)
        if isinstance(new_tree, Tree):
            new_tree_id: ObjectID | None = new_tree.id
        elif isinstance(new_tree, str):
            new_tree_id = ObjectID(new_tree.encode())
        else:
            new_tree_id = ObjectID(new_tree)
        write_tree_diff(outstream, r.object_store, old_tree_id, new_tree_id)


def diff(
    repo: RepoPath = ".",
    commit: str | bytes | Commit | None = None,
    commit2: str | bytes | Commit | None = None,
    staged: bool = False,
    paths: Sequence[str | bytes] | None = None,
    outstream: BinaryIO = default_bytes_out_stream,
    diff_algorithm: str | None = None,
) -> None:
    """Show diff.

    Args:
      repo: Path to repository
      commit: First commit to compare. If staged is True, compare
        index to this commit. If staged is False, compare working tree
        to this commit. If None, defaults to HEAD for staged and index
        for unstaged.
      commit2: Second commit to compare against first commit. If provided,
        show diff between commit and commit2 (ignoring staged flag).
      staged: If True, show staged changes (index vs commit).
        If False, show unstaged changes (working tree vs commit/index).
        Ignored if commit2 is provided.
      paths: Optional list of paths to limit diff
      outstream: Stream to write to
      diff_algorithm: Algorithm to use for diffing ("myers" or "patience"),
        defaults to the underlying function's default if None
    """
    from . import diff as diff_module

    with open_repo_closing(repo) as r:
        # Normalize paths to bytes; an empty list behaves like None
        byte_paths: list[bytes] | None = None
        if paths:
            byte_paths = [
                p.encode("utf-8") if isinstance(p, str) else p for p in paths
            ]

        # Resolve commit refs to SHAs if provided
        if commit is not None:
            if isinstance(commit, Commit):
                # Already a Commit object
                commit_sha = commit.id
                commit_obj = commit
            else:
                # parse_commit handles both refs and SHAs, and always returns a Commit object
                commit_obj = parse_commit(r, commit)
                commit_sha = commit_obj.id
        else:
            commit_sha = None
            commit_obj = None

        if commit2 is not None:
            # Compare two commits
            if isinstance(commit2, Commit):
                commit2_obj = commit2
            else:
                commit2_obj = parse_commit(r, commit2)

            # Get trees from commits
            old_tree = commit_obj.tree if commit_obj else None
            new_tree = commit2_obj.tree

            # Use tree_changes to get the changes and apply path filtering
            changes = r.object_store.tree_changes(old_tree, new_tree)
            for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
                # Skip if paths are specified and this change doesn't match
                if byte_paths:
                    path_to_check = newpath or oldpath
                    assert path_to_check is not None
                    if not any(
                        path_to_check == p or path_to_check.startswith(p + b"/")
                        for p in byte_paths
                    ):
                        continue
                write_object_diff(
                    outstream,
                    r.object_store,
                    (oldpath, oldmode, oldsha),
                    (newpath, newmode, newsha),
                    diff_algorithm=diff_algorithm,
                )
        elif staged:
            # Show staged changes (index vs commit)
            diff_module.diff_index_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        elif commit is not None:
            # Compare working tree to a specific commit
            assert commit_sha is not None  # set whenever commit is not None
            diff_module.diff_working_tree_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        else:
            # Compare working tree to index
            diff_module.diff_working_tree_to_index(
                r, outstream, byte_paths, diff_algorithm=diff_algorithm
            )
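

# Example (illustrative sketch): writing staged and unstaged diffs to stdout;
# the repository path is hypothetical.
#
#   import sys
#   diff("/path/to/repo", staged=True, outstream=sys.stdout.buffer)
#   diff("/path/to/repo", paths=["src/"], outstream=sys.stdout.buffer)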


def rev_list(
    repo: RepoPath,
    commits: Sequence[str | bytes],
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Lists commit objects in reverse chronological order.

    Args:
      repo: Path to repository
      commits: Commits over which to iterate
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        for entry in r.get_walker(
            include=[r[c if isinstance(c, bytes) else c.encode()].id for c in commits]
        ):
            outstream.write(entry.commit.id + b"\n")


def _canonical_part(url: str) -> str:
    """Return the canonical repository name derived from a URL."""
    name = url.rsplit("/", 1)[-1]
    if name.endswith(".git"):
        name = name[:-4]
    return name


def submodule_add(
    repo: str | os.PathLike[str] | Repo,
    url: str,
    path: str | os.PathLike[str] | None = None,
    name: str | None = None,
) -> None:
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live
      name: Name for the submodule
    """
    with open_repo_closing(repo) as r:
        if path is None:
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = os.fsdecode(path) if path is not None else None
        if name is None:
            raise Error("Submodule name must be specified or derivable from path")

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", os.fsdecode(path))
        config.write_to_path()


def submodule_init(repo: str | os.PathLike[str] | Repo) -> None:
    """Initialize submodules.

    Args:
      repo: Path to repository
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        for path, url, name in read_submodules(gitmodules_path):
            config.set((b"submodule", name), b"active", True)
            config.set((b"submodule", name), b"url", url)
        config.write_to_path()


def submodule_list(repo: RepoPath) -> Iterator[tuple[str, str]]:
    """List submodules.

    Args:
      repo: Path to repository
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        head_commit = r[r.head()]
        assert isinstance(head_commit, Commit)
        for path, sha in iter_cached_submodules(r.object_store, head_commit.tree):
            yield path.decode(DEFAULT_ENCODING), sha.decode(DEFAULT_ENCODING)


def submodule_update(
    repo: str | os.PathLike[str] | Repo,
    paths: Sequence[str | bytes | os.PathLike[str]] | None = None,
    init: bool = False,
    force: bool = False,
    recursive: bool = False,
    errstream: BinaryIO | None = None,
) -> None:
    """Update submodules.

    Args:
      repo: Path to repository
      paths: Optional list of specific submodule paths to update. If None, updates all.
      init: If True, initialize submodules first
      force: Force update even if local changes exist
      recursive: If True, recursively update nested submodules
      errstream: Error stream for error messages
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        if init:
            submodule_init(r)

        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")

        # Get list of submodules to update
        submodules_to_update = []
        head_commit = r[r.head()]
        assert isinstance(head_commit, Commit)
        for path, sha in iter_cached_submodules(r.object_store, head_commit.tree):
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )
            if paths is None or path_str in paths:
                submodules_to_update.append((path, sha))

        # Read submodule configuration
        for path, target_sha in submodules_to_update:
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )

            # Find the submodule name from .gitmodules
            submodule_name: bytes | None = None
            for sm_path, sm_url, sm_name in read_submodules(gitmodules_path):
                if sm_path == path:
                    submodule_name = sm_name
                    break

            if not submodule_name:
                continue

            # Get the URL from config
            section = (
                b"submodule",
                submodule_name
                if isinstance(submodule_name, bytes)
                else submodule_name.encode(),
            )
            try:
                url_value = config.get(section, b"url")
                if isinstance(url_value, bytes):
                    url = url_value.decode(DEFAULT_ENCODING)
                else:
                    url = url_value
            except KeyError:
                # URL not in config, skip this submodule
                continue

            # Get or create the submodule repository paths
            submodule_path = os.path.join(r.path, path_str)
            submodule_git_dir = os.path.join(r.controldir(), "modules", path_str)

            # Clone or fetch the submodule
            if not os.path.exists(submodule_git_dir):
                # Clone the submodule as bare repository
                os.makedirs(os.path.dirname(submodule_git_dir), exist_ok=True)

                # Clone to the git directory
                sub_repo = clone(url, submodule_git_dir, bare=True, checkout=False)
                sub_repo.close()

                # Create the submodule directory if it doesn't exist
                if not os.path.exists(submodule_path):
                    os.makedirs(submodule_path)

                # Create .git file in the submodule directory
                relative_git_dir = os.path.relpath(submodule_git_dir, submodule_path)
                git_file_path = os.path.join(submodule_path, ".git")
                with open(git_file_path, "w") as f:
                    f.write(f"gitdir: {relative_git_dir}\n")

                # Set up working directory configuration
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    sub_config = sub_repo.get_config()
                    sub_config.set(
                        (b"core",),
                        b"worktree",
                        os.path.abspath(submodule_path).encode(),
                    )
                    sub_config.write_to_path()

                    # Checkout the target commit
                    sub_repo.refs[HEADREF] = target_sha

                    # Build the index and checkout files
                    tree = sub_repo[target_sha]
                    if hasattr(tree, "tree"):  # If it's a commit, get the tree
                        tree_id = tree.tree
                    else:
                        tree_id = target_sha

                    build_index_from_tree(
                        submodule_path,
                        sub_repo.index_path(),
                        sub_repo.object_store,
                        tree_id,
                    )
            else:
                # Fetch and checkout in existing submodule
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    # Fetch from remote
                    client, path_segments = get_transport_and_path(url)
                    client.fetch(path_segments.encode(), sub_repo)

                    # Update to the target commit
                    sub_repo.refs[HEADREF] = target_sha

                    # Reset the working directory
                    reset(sub_repo, "hard", target_sha)

            # Recursively update nested submodules if requested
            if recursive:
                submodule_gitmodules = os.path.join(submodule_path, ".gitmodules")
                if os.path.exists(submodule_gitmodules):
                    submodule_update(
                        submodule_path,
                        paths=None,
                        init=True,  # Always initialize nested submodules
                        force=force,
                        recursive=True,
                        errstream=errstream,
                    )


def tag_create(
    repo: RepoPath,
    tag: str | bytes,
    author: str | bytes | None = None,
    message: str | bytes | None = None,
    annotated: bool = False,
    objectish: str | bytes = "HEAD",
    tag_time: int | None = None,
    tag_timezone: int | None = None,
    sign: bool | None = None,
    encoding: str = DEFAULT_ENCODING,
) -> None:
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG sign the tag; pass True to sign with the default GPG key
        (user.signingkey if configured). Defaults to None, in which case
        the tag.gpgSign configuration decides.
      encoding: Encoding to use for tag messages
    """
    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)

        if isinstance(tag, str):
            tag = tag.encode(encoding)
        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                author = get_user_identity(r.get_config_stack())
            elif isinstance(author, str):
                author = author.encode(encoding)
            else:
                assert isinstance(author, bytes)
            tag_obj.tagger = author
            if isinstance(message, str):
                message = message.encode(encoding)
            elif isinstance(message, bytes):
                pass
            else:
                message = b""
            tag_obj.message = message + "\n".encode(encoding)
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                tag_timezone = get_user_timezones()[1]
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone.encode())
            tag_obj.tag_timezone = tag_timezone

            # Check if we should sign the tag
            config = r.get_config_stack()
            if sign is None:
                # Check tag.gpgSign configuration when sign is not explicitly set
                try:
                    should_sign = config.get_boolean(
                        (b"tag",), b"gpgsign", default=False
                    )
                except KeyError:
                    should_sign = False  # Default to not signing if no config
            else:
                should_sign = sign

            # Get the signing key from config if signing is enabled
            keyid = None
            if should_sign:
                try:
                    keyid_bytes = config.get((b"user",), b"signingkey")
                    keyid = keyid_bytes.decode() if keyid_bytes else None
                except KeyError:
                    keyid = None
                tag_obj.sign(keyid)

            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id
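

# Illustrative usage sketch only (not part of the dulwich API): the repository
# path and tag names below are hypothetical.
def _example_tag_create() -> None:
    # A lightweight tag pointing at HEAD:
    tag_create("/tmp/example-repo", "v0.1")
    # An annotated tag; the tagger falls back to the configured user identity
    # and, with sign=None, tag.gpgSign decides whether the tag is signed.
    tag_create(
        "/tmp/example-repo",
        "v1.0",
        message="First stable release",
        annotated=True,
    )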


def verify_commit(
    repo: RepoPath,
    committish: str | bytes = "HEAD",
    keyids: list[str] | None = None,
) -> None:
    """Verify GPG signature on a commit.

    Args:
      repo: Path to repository
      committish: Commit to verify (defaults to HEAD)
      keyids: Optional list of trusted key IDs. If provided, the commit
        must be signed by one of these keys. If not provided, just verifies
        that the commit has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if commit was not signed by a key
        specified in keyids
    """
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        commit.verify(keyids)


def verify_tag(
    repo: RepoPath,
    tagname: str | bytes,
    keyids: list[str] | None = None,
) -> None:
    """Verify GPG signature on a tag.

    Args:
      repo: Path to repository
      tagname: Name of tag to verify
      keyids: Optional list of trusted key IDs. If provided, the tag
        must be signed by one of these keys. If not provided, just verifies
        that the tag has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if tag was not signed by a key
        specified in keyids
    """
    with open_repo_closing(repo) as r:
        if isinstance(tagname, str):
            tagname = tagname.encode()
        tag_ref = _make_tag_ref(tagname)
        tag_id = r.refs[tag_ref]
        tag_obj = r[tag_id]
        if not isinstance(tag_obj, Tag):
            raise Error(f"{tagname!r} does not point to a tag object")
        tag_obj.verify(keyids)


def tag_list(repo: RepoPath, outstream: TextIO = sys.stdout) -> list[Ref]:
    """List all tags.

    Args:
      repo: Path to repository
      outstream: Stream to write tags to
    """
    with open_repo_closing(repo) as r:
        tags: list[Ref] = sorted(r.refs.as_dict(Ref(b"refs/tags")))
        return tags
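

# Illustrative usage sketch only: enumerate tags in a hypothetical repository.
# tag_list returns short tag names (without refs/tags/) sorted alphabetically.
def _example_tag_list() -> None:
    for tag in tag_list("/tmp/example-repo"):
        print(tag.decode(DEFAULT_ENCODING))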


def tag_delete(repo: RepoPath, name: str | bytes | Sequence[str | bytes]) -> None:
    """Remove a tag.

    Args:
      repo: Path to repository
      name: Name of tag to remove, or a list of tag names
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, (bytes, str)):
            names = [name]
        elif isinstance(name, list):
            names = name
        else:
            raise Error(f"Unexpected tag name type {name!r}")
        for name in names:
            del r.refs[_make_tag_ref(name)]


def _make_notes_ref(name: bytes) -> bytes:
    """Make a notes ref name."""
    if name.startswith(b"refs/notes/"):
        return name
    return LOCAL_NOTES_PREFIX + name


def notes_add(
    repo: RepoPath,
    object_sha: bytes,
    note: bytes,
    ref: bytes = b"commits",
    author: bytes | None = None,
    committer: bytes | None = None,
    message: bytes | None = None,
) -> bytes:
    """Add or update a note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to annotate
      note: Note content
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
      author: Author identity (defaults to committer)
      committer: Committer identity (defaults to config)
      message: Commit message for the notes update

    Returns:
      SHA of the new notes commit
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(note, str):
            note = note.encode(DEFAULT_ENCODING)
        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.set_note(
            object_sha,
            note,
            notes_ref,
            author=author,
            committer=committer,
            message=message,
            config=config,
        )
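

# Illustrative usage sketch only: attach a note to HEAD and read it back via
# notes_show (defined below). The repository path is hypothetical; both calls
# use the default refs/notes/commits ref.
def _example_notes_roundtrip() -> None:
    notes_add("/tmp/example-repo", b"HEAD", b"Reviewed and benchmarked.")
    print(notes_show("/tmp/example-repo", b"HEAD"))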


def notes_remove(
    repo: RepoPath,
    object_sha: bytes,
    ref: bytes = b"commits",
    author: bytes | None = None,
    committer: bytes | None = None,
    message: bytes | None = None,
) -> bytes | None:
    """Remove a note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to remove notes from
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
      author: Author identity (defaults to committer)
      committer: Committer identity (defaults to config)
      message: Commit message for the notes removal

    Returns:
      SHA of the new notes commit, or None if no note existed
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.remove_note(
            object_sha,
            notes_ref,
            author=author,
            committer=committer,
            message=message,
            config=config,
        )


def notes_show(
    repo: str | os.PathLike[str] | Repo, object_sha: bytes, ref: bytes = b"commits"
) -> bytes | None:
    """Show the note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)

    Returns:
      Note content as bytes, or None if no note exists
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.get_note(object_sha, notes_ref, config=config)


def notes_list(repo: RepoPath, ref: bytes = b"commits") -> list[tuple[ObjectID, bytes]]:
    """List all notes in a notes ref.

    Args:
      repo: Path to repository
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)

    Returns:
      List of tuples of (object_sha, note_content)
    """
    with open_repo_closing(repo) as r:
        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()
        return r.notes.list_notes(notes_ref, config=config)


def replace_list(repo: RepoPath) -> list[tuple[ObjectID, ObjectID]]:
    """List all replacement refs.

    Args:
      repo: Path to repository

    Returns:
      List of tuples of (object_sha, replacement_sha) where object_sha is the
      object being replaced and replacement_sha is what it's replaced with
    """
    with open_repo_closing(repo) as r:
        replacements: list[tuple[ObjectID, ObjectID]] = []
        for ref in r.refs.keys():
            if ref.startswith(LOCAL_REPLACE_PREFIX):
                object_sha = ObjectID(ref[len(LOCAL_REPLACE_PREFIX) :])
                replacement_sha = r.refs[ref]
                replacements.append((object_sha, replacement_sha))
        return replacements


def replace_delete(repo: RepoPath, object_sha: ObjectID | str) -> None:
    """Delete a replacement ref.

    Args:
      repo: Path to repository
      object_sha: SHA of the object whose replacement should be removed
    """
    with open_repo_closing(repo) as r:
        # Convert to ObjectID if string
        if isinstance(object_sha, str):
            object_sha_id = ObjectID(object_sha.encode("ascii"))
        else:
            object_sha_id = object_sha

        replace_ref = _make_replace_ref(object_sha_id)
        if replace_ref not in r.refs:
            raise KeyError(
                f"No replacement ref found for {object_sha_id.decode('ascii')}"
            )
        del r.refs[replace_ref]


def replace_create(
    repo: RepoPath,
    object_sha: str | ObjectID,
    replacement_sha: str | ObjectID,
) -> None:
    """Create a replacement ref to replace one object with another.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to replace
      replacement_sha: SHA of the replacement object
    """
    with open_repo_closing(repo) as r:
        # Convert to ObjectID if string
        if isinstance(object_sha, str):
            object_sha_id = ObjectID(object_sha.encode("ascii"))
        else:
            object_sha_id = object_sha
        if isinstance(replacement_sha, str):
            replacement_sha_id = ObjectID(replacement_sha.encode("ascii"))
        else:
            replacement_sha_id = replacement_sha

        # Create the replacement ref
        replace_ref = _make_replace_ref(object_sha_id)
        r.refs[replace_ref] = replacement_sha_id
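

# Illustrative usage sketch only: create, list, and delete a replacement ref.
# Both 40-character SHAs are placeholders, not real object IDs.
def _example_replace() -> None:
    broken = "1111111111111111111111111111111111111111"
    fixed = "2222222222222222222222222222222222222222"
    replace_create("/tmp/example-repo", broken, fixed)
    for original, replacement in replace_list("/tmp/example-repo"):
        print(f"{original.decode()} -> {replacement.decode()}")
    replace_delete("/tmp/example-repo", broken)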


def reset(
    repo: str | os.PathLike[str] | Repo,
    mode: str,
    treeish: str | bytes | Commit | Tree | Tag = "HEAD",
) -> None:
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to
    """
    with open_repo_closing(repo) as r:
        # Parse the target tree
        tree = parse_tree(r, treeish)

        # Only parse as commit if treeish is not a Tree object
        if isinstance(treeish, Tree):
            # For Tree objects, we can't determine the commit, skip updating HEAD
            target_commit = None
        else:
            target_commit = parse_commit(r, treeish)

        # Update HEAD to point to the target commit
        if target_commit is not None:
            # Get the current HEAD value for set_if_equals
            try:
                old_head = r.refs[HEADREF]
            except KeyError:
                old_head = None

            # Create reflog message
            treeish_str = (
                treeish.decode("utf-8")
                if isinstance(treeish, bytes)
                else str(treeish)
                if not isinstance(treeish, (Commit, Tree, Tag))
                else target_commit.id.hex()
            )
            default_message = f"reset: moving to {treeish_str}".encode()
            reflog_message = _get_reflog_message(default_message)

            # Update HEAD with reflog message
            r.refs.set_if_equals(
                HEADREF, old_head, target_commit.id, message=reflog_message
            )

        if mode == "soft":
            # Soft reset: only update HEAD, leave index and working tree unchanged
            return

        elif mode == "mixed":
            # Mixed reset: update HEAD and index, but leave working tree unchanged
            from .object_store import iter_tree_contents

            # Open the index
            index = r.open_index()

            # Clear the current index
            index.clear()

            # Populate index from the target tree
            for entry in iter_tree_contents(r.object_store, tree.id):
                # Create an IndexEntry from the tree entry
                # Use zeros for filesystem-specific fields since we're not touching the working tree
                assert (
                    entry.mode is not None
                    and entry.sha is not None
                    and entry.path is not None
                )
                index_entry = IndexEntry(
                    ctime=(0, 0),
                    mtime=(0, 0),
                    dev=0,
                    ino=0,
                    mode=entry.mode,
                    uid=0,
                    gid=0,
                    size=0,  # Size will be 0 since we're not reading from disk
                    sha=entry.sha,
                    flags=0,
                )
                index[entry.path] = index_entry

            # Write the updated index
            index.write()

        elif mode == "hard":
            # Hard reset: update HEAD, index, and working tree
            # For reset --hard, use current index tree as old tree to get proper deletions
            index = r.open_index()
            if len(index) > 0:
                index_tree_id = index.commit(r.object_store)
            else:
                # Empty index
                index_tree_id = None

            # Get configuration for working tree updates
            honor_filemode, validate_path_element, symlink_fn = (
                _get_worktree_update_config(r)
            )
            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(
                r.object_store, index_tree_id, tree.id, want_unchanged=True
            )
            update_working_tree(
                r,
                index_tree_id,
                tree.id,
                change_iterator=changes,
                honor_filemode=honor_filemode,
                validate_path_element=validate_path_element,
                symlink_fn=symlink_fn,
                force_remove_untracked=True,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=True,  # Allow overwriting modified files
            )
        else:
            raise Error(f"Invalid reset mode: {mode}")
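

# Illustrative usage sketch only: the three reset modes. The repository path
# and the branch name "refs/heads/stable" are hypothetical; any committish
# accepted by parse_commit/parse_tree should work as the target.
def _example_reset_modes() -> None:
    target = "refs/heads/stable"
    reset("/tmp/example-repo", "soft", target)   # move HEAD only
    reset("/tmp/example-repo", "mixed", target)  # HEAD + index
    reset("/tmp/example-repo", "hard", target)   # HEAD + index + working tree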


def get_remote_repo(
    repo: Repo, remote_location: str | bytes | None = None
) -> tuple[str | None, str]:
    """Get the remote repository information.

    Args:
      repo: Local repository object
      remote_location: Optional remote name or URL; defaults to branch remote

    Returns:
      Tuple of (remote_name, remote_url) where remote_name may be None
      if remote_location is a URL rather than a configured remote
    """
    config = repo.get_config()
    if remote_location is None:
        remote_location = get_branch_remote(repo)
    if isinstance(remote_location, str):
        encoded_location = remote_location.encode()
    else:
        encoded_location = remote_location

    section = (b"remote", encoded_location)

    remote_name: str | None = None

    if config.has_section(section):
        remote_name = encoded_location.decode()
        encoded_location = config.get(section, "url")
    else:
        remote_name = None

    return (remote_name, encoded_location.decode())


def push(
    repo: RepoPath,
    remote_location: str | bytes | None = None,
    refspecs: str | bytes | Sequence[str | bytes] | None = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    force: bool = False,
    **kwargs: object,
) -> SendPackResult:
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
      **kwargs: Additional keyword arguments for the client

    Returns:
      SendPackResult with the status of the pushed refs
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Check if mirror mode is enabled
        mirror_mode = False
        if remote_name:
            try:
                mirror_mode_val = r.get_config_stack().get_boolean(
                    (b"remote", remote_name.encode()), b"mirror"
                )
                if mirror_mode_val is not None:
                    mirror_mode = mirror_mode_val
            except KeyError:
                pass

        if mirror_mode:
            # Mirror mode: push all refs and delete non-existent ones
            refspecs = []
            for ref in r.refs.keys():
                # Push all refs to the same name on remote
                refspecs.append(ref + b":" + ref)
        elif refspecs is None:
            refspecs = [active_branch(r)]

        # Normalize refspecs to bytes
        if isinstance(refspecs, str):
            refspecs_bytes: bytes | list[bytes] = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_bytes = refspecs
        else:
            refspecs_bytes = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_bytes.append(spec.encode())
                else:
                    refspecs_bytes.append(spec)

        # Get the client and path
        transport_kwargs = _filter_transport_kwargs(**kwargs)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **transport_kwargs,
        )

        selected_refs = []
        remote_changed_refs: dict[Ref, ObjectID | None] = {}

        def update_refs(refs: dict[Ref, ObjectID]) -> dict[Ref, ObjectID]:
            remote_refs = DictRefsContainer(refs)  # type: ignore[arg-type]
            selected_refs.extend(
                parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
            )
            new_refs: dict[Ref, ObjectID] = {}

            # In mirror mode, delete remote refs that don't exist locally
            if mirror_mode:
                local_refs = set(r.refs.keys())
                for remote_ref in refs.keys():
                    if remote_ref not in local_refs:
                        new_refs[remote_ref] = ZERO_SHA
                        remote_changed_refs[remote_ref] = None

            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    assert rh is not None
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error(
                            f"No valid ref {lh.decode() if isinstance(lh, bytes) else lh} in local repository"
                        ) from exc
                    assert rh is not None
                    if not force_ref and rh in refs:
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:

            def generate_pack_data_wrapper(
                have: AbstractSet[ObjectID],
                want: AbstractSet[ObjectID],
                *,
                ofs_delta: bool = False,
                progress: Callable[..., None] | None = None,
            ) -> tuple[int, Iterator[UnpackedObject]]:
                # Wrap to match the expected signature
                # Convert AbstractSet to set since generate_pack_data expects set
                return r.generate_pack_data(
                    set(have), set(want), progress=progress, ofs_delta=ofs_delta
                )

            result = client.send_pack(
                path.encode(),
                update_refs,
                generate_pack_data=generate_pack_data_wrapper,
                progress=lambda data: (errstream.write(data), None)[1],
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        for ref, error in (result.ref_status or {}).items():  # type: ignore[assignment]
            if error is not None:
                errstream.write(
                    f"Push of ref {ref.decode('utf-8', 'replace')} failed: {error}\n".encode(
                        err_encoding
                    )
                )
            else:
                errstream.write(
                    f"Ref {ref.decode('utf-8', 'replace')} updated\n".encode()
                )

        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)

    # Trigger auto GC if needed. Note: the return must come after this block,
    # otherwise the auto-GC code would be unreachable.
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return result
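

# Illustrative usage sketch only: push one branch to a hypothetical "origin"
# remote and report per-ref status from the returned SendPackResult.
def _example_push() -> None:
    result = push(
        "/tmp/example-repo", "origin", "refs/heads/main:refs/heads/main"
    )
    for ref, error in (result.ref_status or {}).items():
        print(ref, "ok" if error is None else error)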


def pull(
    repo: RepoPath,
    remote_location: str | bytes | None = None,
    refspecs: str | bytes | Sequence[str | bytes] | None = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    fast_forward: bool = True,
    ff_only: bool = False,
    force: bool = False,
    filter_spec: str | None = None,
    protocol_version: int | None = None,
    **kwargs: object,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: If True, allow overwriting local changes in the working tree.
        If False, pull will abort if it would overwrite uncommitted changes.
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
      **kwargs: Additional keyword arguments for the client
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        selected_refs = []

        if refspecs is None:
            refspecs_normalized: bytes | list[bytes] = [b"HEAD"]
        elif isinstance(refspecs, str):
            refspecs_normalized = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_normalized = refspecs
        else:
            refspecs_normalized = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_normalized.append(spec.encode())
                else:
                    refspecs_normalized.append(spec)

        def determine_wants(
            remote_refs: dict[Ref, ObjectID], depth: int | None = None
        ) -> list[ObjectID]:
            remote_refs_container = DictRefsContainer(remote_refs)  # type: ignore[arg-type]
            selected_refs.extend(
                parse_reftuples(
                    remote_refs_container, r.refs, refspecs_normalized, force=force
                )
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if lh is not None
                and lh in remote_refs
                and remote_refs[lh] not in r.object_store
            ]

        transport_kwargs = _filter_transport_kwargs(**kwargs)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **transport_kwargs,
        )

        if filter_spec:
            filter_spec_bytes: bytes | None = filter_spec.encode("ascii")
        else:
            filter_spec_bytes = None

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            determine_wants=determine_wants,  # type: ignore[arg-type] # Function matches protocol but mypy can't verify
            filter_spec=filter_spec_bytes,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[HEADREF]
            old_commit = r[old_head]
            assert isinstance(old_commit, Commit)
            old_tree_id = old_commit.tree
        except KeyError:
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh is not None and rh in r.refs:
                try:
                    assert lh is not None
                    followed_ref = r.refs.follow(rh)[1]
                    assert followed_ref is not None
                    lh_ref = fetch_result.refs[lh]
                    assert lh_ref is not None
                    check_diverged(r, followed_ref, lh_ref)
                except DivergedBranches as exc:
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        assert lh is not None
                        merge_ref = fetch_result.refs[lh]
                        assert merge_ref is not None
                        _merge_result, conflicts = _do_merge(r, merge_ref)
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            if rh is not None and lh is not None:
                lh_value = fetch_result.refs[lh]
                if lh_value is not None:
                    r.refs[Ref(rh)] = lh_value

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            lh, rh, _ = selected_refs[0]
            if lh is not None:
                ref_value = fetch_result.refs[lh]
                if ref_value is not None:
                    r[b"HEAD"] = ref_value

        # Update working tree to match the new HEAD
        # Skip if merge was performed as merge already updates the working tree
        if not merged and old_tree_id is not None:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            new_tree_id = head_commit.tree
            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(r.object_store, old_tree_id, new_tree_id)
            update_working_tree(
                r,
                old_tree_id,
                new_tree_id,
                change_iterator=changes,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=force,
            )
        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
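

# Illustrative usage sketch only: a fast-forward-only pull from a hypothetical
# "origin" remote. With ff_only=True, DivergedBranches propagates to the
# caller instead of a merge being attempted.
def _example_pull() -> None:
    pull("/tmp/example-repo", "origin", "refs/heads/main", ff_only=True)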


def status(
    repo: str | os.PathLike[str] | Repo = ".",
    ignored: bool = False,
    untracked_files: str = "normal",
) -> GitStatus:
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
        "no": do not return untracked files
        "normal": return untracked directories, not their contents
        "all": include all files in untracked directories

    Using untracked_files="no" can be faster than "all" when the worktree
    contains many untracked files/directories.

    Using untracked_files="normal" provides a good balance, only showing
    directories that are entirely untracked without listing all their contents.

    Returns: GitStatus tuple,
        staged -    dict with lists of staged paths (diff index/HEAD)
        unstaged -  list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # Open the index once and reuse it for both staged and unstaged checks
        index = r.open_index()

        # 1. Get status of staged
        tracked_changes = get_tree_changes(r, index)

        # 2. Get status of unstaged
        normalizer = r.get_blob_normalizer()

        # Create a wrapper that handles the bytes -> Blob conversion
        if normalizer is not None:

            def filter_callback(data: bytes, path: bytes) -> bytes:
                blob = Blob()
                blob.data = data
                normalized_blob = normalizer.checkin_normalize(blob, path)
                result_data: bytes = normalized_blob.data
                return result_data

        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)

        unstaged_changes = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
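

# Illustrative usage sketch only: reading the GitStatus tuple fields described
# in the docstring above. The repository path is hypothetical.
def _example_status() -> None:
    st = status("/tmp/example-repo", untracked_files="normal")
    print("staged adds:", st.staged["add"])
    print("unstaged:", st.unstaged)
    print("untracked:", st.untracked)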


def shortlog(
    repo: str | os.PathLike[str] | Repo,
    summary_only: bool = False,
    sort_by_commits: bool = False,
) -> list[dict[str, str]]:
    """Summarize commits by author, like git shortlog.

    Args:
      repo: Path to repository or Repo object.
      summary_only: If True, only show counts per author.
      sort_by_commits: If True, sort authors by number of commits.

    Returns:
      A list where each item is a dict containing:
        - "author": the author's name as a string
        - "messages": all commit messages concatenated into a single string
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker()
        authors: dict[str, list[str]] = {}

        for entry in walker:
            commit = entry.commit
            # commit.encoding is stored as bytes (e.g. b"UTF-8"); decode it
            # before using it as a codec name, falling back to utf-8.
            encoding_name = (
                commit.encoding.decode("ascii") if commit.encoding else "utf-8"
            )
            author = commit.author.decode(encoding_name)
            message = commit.message.decode(encoding_name).strip()
            authors.setdefault(author, []).append(message)

        # Convert messages to single string per author
        items: list[dict[str, str]] = [
            {"author": author, "messages": "\n".join(msgs)}
            for author, msgs in authors.items()
        ]

        if sort_by_commits:
            # Sort by number of commits (lines in messages)
            items.sort(key=lambda x: len(x["messages"].splitlines()), reverse=True)

        return items
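

# Illustrative usage sketch only: a "git shortlog -sn"-style summary using the
# same line-count heuristic the sort above uses. The path is hypothetical.
def _example_shortlog() -> None:
    for item in shortlog("/tmp/example-repo", sort_by_commits=True):
        count = len(item["messages"].splitlines())
        print(f"{count:6d}  {item['author']}")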


def _walk_working_dir_paths(
    frompath: str | bytes | os.PathLike[str],
    basepath: str | bytes | os.PathLike[str],
    prune_dirnames: Callable[[str, list[str]], list[str]] | None = None,
) -> Iterator[tuple[str | bytes, bool]]:
    """Get path, is_dir for files in working dir from frompath.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
      prune_dirnames: Optional callback to prune dirnames during os.walk
        dirnames will be set to result of prune_dirnames(dirpath, dirnames)
    """
    # Convert paths to strings for os.walk compatibility
    for dirpath, dirnames, filenames in os.walk(frompath):  # type: ignore[type-var]
        # Skip .git and below.
        if ".git" in dirnames:
            dirnames.remove(".git")
            if dirpath != basepath:
                continue

        if ".git" in filenames:
            filenames.remove(".git")
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True  # type: ignore[misc]

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)  # type: ignore[call-overload]
            yield filepath, False

        if prune_dirnames:
            dirnames[:] = prune_dirnames(dirpath, dirnames)  # type: ignore[arg-type]


def get_untracked_paths(
    frompath: str | bytes | os.PathLike[str],
    basepath: str | bytes | os.PathLike[str],
    index: Index,
    exclude_ignored: bool = False,
    untracked_files: str = "all",
) -> Iterator[str]:
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    # Normalize paths to str
    frompath_str = os.fsdecode(os.fspath(frompath))
    basepath_str = os.fsdecode(os.fspath(basepath))

    with open_repo_closing(basepath_str) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def directory_has_non_ignored_files(dir_path: str, base_rel_path: str) -> bool:
        """Recursively check if directory contains any non-ignored files."""
        try:
            for entry in os.listdir(dir_path):
                entry_path = os.path.join(dir_path, entry)
                rel_entry = os.path.join(base_rel_path, entry)
                if os.path.isfile(entry_path):
                    if ignore_manager.is_ignored(rel_entry) is not True:
                        return True
                elif os.path.isdir(entry_path):
                    if directory_has_non_ignored_files(entry_path, rel_entry):
                        return True
            return False
        except OSError:
            # If we can't read the directory, assume it has non-ignored files
            return True

    def prune_dirnames(dirpath: str, dirnames: list[str]) -> list[str]:
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            ip = os.path.join(os.path.relpath(path, basepath_str), "")

            # Check if directory is ignored
            if ignore_manager.is_ignored(ip) is True:
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath_str), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert directory path to tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath_str, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    rel_path_base = os.path.relpath(path, basepath_str)
                    rel_path_from = os.path.join(
                        os.path.relpath(path, frompath_str), ""
                    )

                    # If excluding ignored, check if directory contains any non-ignored files
                    if exclude_ignored:
                        if not directory_has_non_ignored_files(path, rel_path_base):
                            # Directory only contains ignored files, skip it
                            del dirnames[i]
                            continue

                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(rel_path_base)
                    if not exclude_ignored or not is_ignored:
                        untracked_dir_list.append(rel_path_from)
                    del dirnames[i]

        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            if not is_dir:
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath_str, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath_str), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list
    yield from ignored_dirs


def grep(
    repo: RepoPath,
    pattern: str | bytes,
    *,
    outstream: TextIO = sys.stdout,
    rev: str | bytes | None = None,
    pathspecs: Sequence[str | bytes] | None = None,
    ignore_case: bool = False,
    line_number: bool = False,
    max_depth: int | None = None,
    respect_ignores: bool = True,
) -> None:
    """Search for a pattern in tracked files.

    Args:
      repo: Path to repository or Repo object
      pattern: Regular expression pattern to search for
      outstream: Stream to write results to
      rev: Revision to search in (defaults to HEAD)
      pathspecs: Optional list of path patterns to limit search
      ignore_case: Whether to perform case-insensitive matching
      line_number: Whether to output line numbers
      max_depth: Maximum directory depth to search
      respect_ignores: Whether to respect .gitignore patterns
    """
    from .object_store import iter_tree_contents

    # Compile the pattern
    flags = re.IGNORECASE if ignore_case else 0
    try:
        if isinstance(pattern, bytes):
            compiled_pattern = re.compile(pattern, flags)
        else:
            compiled_pattern = re.compile(pattern.encode("utf-8"), flags)
    except re.error as e:
        raise ValueError(f"Invalid regular expression: {e}") from e

    with open_repo_closing(repo) as r:
        # Get the tree to search
        if rev is None:
            try:
                commit = r[b"HEAD"]
                assert isinstance(commit, Commit)
            except KeyError as e:
                raise ValueError("No HEAD commit found") from e
        else:
            rev_bytes = rev if isinstance(rev, bytes) else rev.encode("utf-8")
            commit_obj = parse_commit(r, rev_bytes)
            if commit_obj is None:
                raise ValueError(f"Invalid revision: {rev}")
            commit = commit_obj

        tree = r[commit.tree]
        assert isinstance(tree, Tree)

        # Set up ignore filter if requested
        ignore_manager = None
        if respect_ignores:
            ignore_manager = IgnoreFilterManager.from_repo(r)

        # Convert pathspecs to bytes
        pathspecs_bytes: list[bytes] | None = None
        if pathspecs:
            pathspecs_bytes = [
                p if isinstance(p, bytes) else p.encode("utf-8") for p in pathspecs
            ]

        # Iterate through all files in the tree
        for entry in iter_tree_contents(r.object_store, tree.id):
            path, mode, sha = entry.path, entry.mode, entry.sha
            assert path is not None
            assert mode is not None
            assert sha is not None

            # Skip directories
            if stat.S_ISDIR(mode):
                continue

            # Check max depth
            if max_depth is not None:
                depth = path.count(b"/")
                if depth > max_depth:
                    continue

            # Check pathspecs
            if pathspecs_bytes:
                matches_pathspec = False
                for pathspec in pathspecs_bytes:
                    # Simple prefix matching (could be enhanced with full pathspec support)
                    if path.startswith(pathspec) or fnmatch.fnmatch(
                        path.decode("utf-8", errors="replace"),
                        pathspec.decode("utf-8", errors="replace"),
                    ):
                        matches_pathspec = True
                        break
                if not matches_pathspec:
                    continue

            # Check ignore patterns
            if ignore_manager:
                path_str = path.decode("utf-8", errors="replace")
                if ignore_manager.is_ignored(path_str) is True:
                    continue

            # Get the blob content
            blob = r[sha]
            assert isinstance(blob, Blob)

            # Search for pattern in the blob
            content = blob.data
            lines = content.split(b"\n")
            for line_num, line in enumerate(lines, 1):
                if compiled_pattern.search(line):
                    path_str = path.decode("utf-8", errors="replace")
                    line_str = line.decode("utf-8", errors="replace")
                    if line_number:
                        outstream.write(f"{path_str}:{line_num}:{line_str}\n")
                    else:
                        outstream.write(f"{path_str}:{line_str}\n")
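

# Illustrative usage sketch only: case-insensitive search with line numbers,
# restricted to a hypothetical "src/" prefix at the HEAD revision.
def _example_grep() -> None:
    grep(
        "/tmp/example-repo",
        r"todo|fixme",
        rev="HEAD",
        pathspecs=["src/"],
        ignore_case=True,
        line_number=True,
    )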


def get_tree_changes(
    repo: RepoPath, index: Index | None = None
) -> dict[str, list[str | bytes]]:
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object
      index: optional Index object to reuse (avoids re-opening the index)

    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        if index is None:
            index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes: dict[str, list[str | bytes]] = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            tree_id = head_commit.tree
        except KeyError:
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            if not change[0][0]:
                assert change[0][1] is not None
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                assert change[0][0] is not None
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                assert change[0][0] is not None
                tracked_changes["modify"].append(change[0][0])
            else:
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes


def daemon(
    path: str | os.PathLike[str] = ".",
    address: str | None = None,
    port: int | None = None,
) -> None:
    """Run a daemon serving Git requests over TCP/IP.

    Args:
      path: Path to the directory to serve.
      address: Optional address to listen on (defaults to localhost)
      port: Optional port to listen on (defaults to TCP_GIT_PORT, 9418)
    """
    # TODO(jelmer): Support git-daemon-export-ok and --export-all.
    backend = FileSystemBackend(os.fspath(path))
    server = TCPGitServer(backend, address or "localhost", port or 9418)
    server.serve_forever()


def web_daemon(
    path: str | os.PathLike[str] = ".",
    address: str | None = None,
    port: int | None = None,
) -> None:
    """Run a daemon serving Git requests over HTTP.

    Args:
      path: Path to the directory to serve
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to 80)
    """
    from .web import (
        WSGIRequestHandlerLogger,
        WSGIServerLogger,
        make_server,
        make_wsgi_chain,
    )

    backend = FileSystemBackend(os.fspath(path))
    app = make_wsgi_chain(backend)
    server = make_server(
        address or "::",
        port or 80,
        app,
        handler_class=WSGIRequestHandlerLogger,
        server_class=WSGIServerLogger,
    )
    server.serve_forever()
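

# Illustrative usage sketch only: serve repositories under a hypothetical
# /srv/git over HTTP on a local port. The call blocks forever; daemon() above
# is the git:// equivalent.
def _example_serve() -> None:
    web_daemon("/srv/git", address="127.0.0.1", port=8000)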


def upload_pack(
    path: str | os.PathLike[str] = ".",
    inf: BinaryIO | None = None,
    outf: BinaryIO | None = None,
) -> int:
    """Upload a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = UploadPackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def receive_pack(
    path: str | os.PathLike[str] = ".",
    inf: BinaryIO | None = None,
    outf: BinaryIO | None = None,
) -> int:
    """Receive a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = ReceivePackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def _make_branch_ref(name: str | bytes) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_branch_name(name)


def _make_tag_ref(name: str | bytes) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_tag_name(name)


def _make_replace_ref(name: str | bytes | ObjectID) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return local_replace_name(name)


def branch_delete(repo: RepoPath, name: str | bytes | Sequence[str | bytes]) -> None:
    """Delete a branch.

    Args:
      repo: Path to the repository
      name: Name of the branch
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, (list, tuple)):
            names = name
        else:
            names = [name]
        for branch_name in names:
            del r.refs[_make_branch_ref(branch_name)]


def branch_create(
    repo: str | os.PathLike[str] | Repo,
    name: str | bytes,
    objectish: str | bytes | None = None,
    force: bool = False,
) -> None:
    """Create a branch.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"

        # Try to expand branch shorthand before parsing
        original_objectish = objectish
        objectish_bytes = (
            objectish.encode(DEFAULT_ENCODING)
            if isinstance(objectish, str)
            else objectish
        )
        if Ref(b"refs/remotes/" + objectish_bytes) in r.refs:
            objectish = b"refs/remotes/" + objectish_bytes
        elif local_branch_name(objectish_bytes) in r.refs:
            objectish = local_branch_name(objectish_bytes)

        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        default_message = (
            b"branch: Created from " + original_objectish.encode(DEFAULT_ENCODING)
            if isinstance(original_objectish, str)
            else b"branch: Created from " + original_objectish
        )
        ref_message = _get_reflog_message(default_message)
        if force:
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                name_str = name.decode() if isinstance(name, bytes) else name
                raise Error(f"Branch with name {name_str} already exists.")

        # Check if we should set up tracking
        config = r.get_config_stack()
        try:
            auto_setup_merge = config.get((b"branch",), b"autoSetupMerge").decode()
        except KeyError:
            auto_setup_merge = "true"  # Default value

        # Determine if the objectish refers to a remote-tracking branch
        objectish_ref = None
        if original_objectish != "HEAD":
            # Try to resolve objectish as a ref
            objectish_bytes = (
                original_objectish.encode(DEFAULT_ENCODING)
                if isinstance(original_objectish, str)
                else original_objectish
            )
            if Ref(objectish_bytes) in r.refs:
                objectish_ref = objectish_bytes
            elif Ref(b"refs/remotes/" + objectish_bytes) in r.refs:
                objectish_ref = b"refs/remotes/" + objectish_bytes
            elif local_branch_name(objectish_bytes) in r.refs:
                objectish_ref = local_branch_name(objectish_bytes)
        else:
            # HEAD might point to a remote-tracking branch
            head_ref = r.refs.follow(HEADREF)[0][1]
            if head_ref.startswith(b"refs/remotes/"):
                objectish_ref = head_ref

        # Set up tracking if appropriate
        if objectish_ref and (
            (auto_setup_merge == "always")
            or (
                auto_setup_merge == "true"
                and objectish_ref.startswith(b"refs/remotes/")
            )
        ):
            # Extract remote name and branch from the ref
            if objectish_ref.startswith(b"refs/remotes/"):
                parts = objectish_ref[len(b"refs/remotes/") :].split(b"/", 1)
                if len(parts) == 2:
                    remote_name = parts[0]
                    remote_branch = local_branch_name(parts[1])

                    # Set up tracking
                    repo_config = r.get_config()
                    branch_name_bytes = (
                        name.encode(DEFAULT_ENCODING) if isinstance(name, str) else name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"remote", remote_name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"merge", remote_branch
                    )
                    repo_config.write_to_path()
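

# Illustrative usage sketch only: create a branch from a hypothetical
# remote-tracking ref. Because the start point lives under refs/remotes/ and
# branch.autoSetupMerge defaults to "true", branch_create records
# branch.<name>.remote and branch.<name>.merge as tracking configuration.
def _example_branch_create() -> None:
    branch_create("/tmp/example-repo", "feature-x", objectish="origin/main")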


def filter_branches_by_pattern(branches: Iterable[bytes], pattern: str) -> list[bytes]:
    """Filter branches by fnmatch pattern.

    Args:
      branches: Iterable of branch names as bytes
      pattern: Pattern to match against

    Returns:
      List of filtered branch names
    """
    return [
        branch for branch in branches if fnmatch.fnmatchcase(branch.decode(), pattern)
    ]


def branch_list(repo: RepoPath) -> list[Ref]:
    """List all branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        branches: list[Ref] = list(r.refs.keys(base=Ref(LOCAL_BRANCH_PREFIX)))

        # Check for branch.sort configuration
        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = local_branch_name(branch_name)
                sha = r.refs[ref]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date
            # Note: Python's sort naturally orders smaller values first (ascending)
            # For dates, this means oldest first by default
            # Use a stable sort with branch name as secondary key for consistent ordering
            if reverse:
                # For reverse sort, we want newest dates first but alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key, fall back to default
            branches.sort()

        return branches
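

# Illustrative usage sketch only: list branch short names. Setting the
# branch.sort config key (e.g. to "-committerdate") changes the order that
# branch_list returns, as implemented above.
def _example_branch_list() -> None:
    for name in branch_list("/tmp/example-repo"):
        print(name.decode(DEFAULT_ENCODING))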


def branch_remotes_list(repo: RepoPath) -> list[bytes]:
    """List the short names of all remote-tracking branches.

    Args:
      repo: Path to the repository

    Returns:
      List of remote-tracking branch names without the refs/remotes/ prefix
      but including the remote name (e.g. b'origin/main')
    """
    with open_repo_closing(repo) as r:
        branches = [bytes(ref) for ref in r.refs.keys(base=Ref(LOCAL_REMOTE_PREFIX))]

        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = LOCAL_REMOTE_PREFIX + branch_name
                sha = r.refs[Ref(ref)]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date
            # Note: Python's sort naturally orders smaller values first (ascending)
            # For dates, this means oldest first by default
            # Use a stable sort with branch name as secondary key for consistent ordering
            if reverse:
                # For reverse sort, we want newest dates first but alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key
            raise ValueError(f"Unknown sort key: {sort_key}")

        return branches


def _get_branch_merge_status(repo: RepoPath) -> Iterator[tuple[bytes, bool]]:
    """Get merge status for all branches relative to current HEAD.

    Args:
      repo: Path to the repository

    Yields:
      Tuples of (``branch_name``, ``is_merged``) where:
        - ``branch_name``: Branch name without refs/heads/ prefix
        - ``is_merged``: True if branch is merged into HEAD, False otherwise
    """
    with open_repo_closing(repo) as r:
        current_sha = r.refs[HEADREF]
        for branch_ref, branch_sha in r.refs.as_dict(base=Ref(b"refs/heads/")).items():
            # Check if branch is an ancestor of HEAD (fully merged)
            is_merged = can_fast_forward(r, branch_sha, current_sha)
            yield branch_ref, is_merged


def merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without refs/heads/ prefix) that are merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if is_merged:
            yield branch_name
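
# Illustrative sketch (hypothetical repository path): branches already
# merged into HEAD are the usual candidates for cleanup.
#
#     for name in merged_branches("/tmp/repo"):
#         print(name.decode())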


def no_merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have not been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without refs/heads/ prefix) that are not merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if not is_merged:
            yield branch_name


def branches_containing(repo: RepoPath, commit: str) -> Iterator[bytes]:
    """List branches that contain the specified commit.

    Args:
      repo: Path to the repository
      commit: Commit-ish string (SHA, branch name, tag, etc.)

    Yields:
      Branch names (without refs/heads/ prefix) that contain the commit

    Raises:
      ValueError: If the commit reference is malformed
      KeyError: If the commit reference does not exist
    """
    with open_repo_closing(repo) as r:
        commit_obj = parse_commit(r, commit)
        commit_sha = commit_obj.id
        for branch_ref, branch_sha in r.refs.as_dict(
            base=Ref(LOCAL_BRANCH_PREFIX)
        ).items():
            if can_fast_forward(r, commit_sha, branch_sha):
                yield branch_ref


def active_branch(repo: RepoPath) -> bytes:
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open

    Returns:
      branch name

    Raises:
      KeyError: if the repository does not have a working tree
      ValueError: if HEAD does not point at a branch (detached HEAD)
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(HEADREF)[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        return active_ref[len(LOCAL_BRANCH_PREFIX) :]
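
# Illustrative sketch (hypothetical path); a detached HEAD raises
# ValueError:
#
#     try:
#         print(active_branch("/tmp/repo").decode())
#     except ValueError:
#         print("detached HEAD")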


def get_branch_remote(repo: str | os.PathLike[str] | Repo) -> bytes:
    """Return the active branch's remote name, if any.

    Args:
      repo: Repository to open

    Returns:
      remote name

    Raises:
      KeyError: if the repository does not have a working tree
    """
    with open_repo_closing(repo) as r:
        branch_name = active_branch(r.path)
        config = r.get_config()
        try:
            remote_name = config.get((b"branch", branch_name), b"remote")
        except KeyError:
            remote_name = b"origin"
        return remote_name


def get_branch_merge(repo: RepoPath, branch_name: bytes | None = None) -> bytes:
    """Return the branch's merge reference (upstream branch), if any.

    Args:
      repo: Repository to open
      branch_name: Name of the branch (defaults to active branch)

    Returns:
      merge reference name (e.g. b"refs/heads/main")

    Raises:
      KeyError: if the branch does not have a merge configuration
    """
    with open_repo_closing(repo) as r:
        if branch_name is None:
            branch_name = active_branch(r.path)
        config = r.get_config()
        return config.get((b"branch", branch_name), b"merge")


def set_branch_tracking(
    repo: str | os.PathLike[str] | Repo,
    branch_name: bytes,
    remote_name: bytes,
    remote_ref: bytes,
) -> None:
    """Set up branch tracking configuration.

    Args:
      repo: Repository to open
      branch_name: Name of the local branch
      remote_name: Name of the remote (e.g. b"origin")
      remote_ref: Remote reference to track (e.g. b"refs/heads/main")
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        config.set((b"branch", branch_name), b"remote", remote_name)
        config.set((b"branch", branch_name), b"merge", remote_ref)
        config.write_to_path()
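
# Illustrative sketch (hypothetical names): after this call the repository
# config contains ``branch.feature.remote = origin`` and
# ``branch.feature.merge = refs/heads/feature``.
#
#     set_branch_tracking("/tmp/repo", b"feature", b"origin", b"refs/heads/feature")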


def fetch(
    repo: RepoPath,
    remote_location: str | bytes | None = None,
    outstream: TextIO = sys.stdout,
    errstream: BinaryIO | RawIOBase = default_bytes_err_stream,
    message: bytes | None = None,
    depth: int | None = None,
    prune: bool = False,
    prune_tags: bool = False,
    force: bool = False,
    operation: str | None = None,
    thin_packs: bool = True,
    report_activity: Callable[[int, str], None] | None = None,
    quiet: bool = False,
    include_tags: bool = False,
    username: str | None = None,
    password: str | None = None,
    key_filename: str | None = None,
    ssh_command: str | None = None,
    shallow_since: str | None = None,
    shallow_exclude: list[str] | None = None,
) -> FetchPackResult:
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags
      force: Force fetching even if it would overwrite local changes
      operation: Git operation for authentication (e.g., "fetch")
      thin_packs: Whether to use thin packs
      report_activity: Optional callback for reporting transport activity
      quiet: Whether to suppress progress output
      include_tags: Whether to include tags
      username: Username for authentication
      password: Password for authentication
      key_filename: SSH key filename
      ssh_command: SSH command to use
      shallow_since: Deepen or shorten the history to include commits after this date
      shallow_exclude: Deepen or shorten the history to exclude commits reachable from these refs

    Returns:
      FetchPackResult object with the refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        default_message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        message = _get_reflog_message(default_message, message)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            operation=operation,
            thin_packs=thin_packs,
            report_activity=report_activity,
            quiet=quiet,
            include_tags=include_tags,
            username=username,
            password=password,
            key_filename=key_filename,
            ssh_command=ssh_command,
        )

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            depth=depth,
            shallow_since=shallow_since,
            shallow_exclude=shallow_exclude,
        )
        if remote_name is not None:
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return fetch_result
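
# Illustrative sketch (hypothetical remote): fetch from origin with a
# shallow depth and prune deleted remote-tracking refs.
#
#     result = fetch("/tmp/repo", "origin", depth=1, prune=True)
#     for ref, sha in result.refs.items():
#         print(ref, sha)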


def for_each_ref(
    repo: Repo | str = ".",
    pattern: str | bytes | None = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with

    Returns:
      List of bytes tuples with: (sha, object_type, ref_name)
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        if pattern:
            matching_refs: dict[Ref, ObjectID] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False
                # git for-each-ref uses glob (7) style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                if len(ref_parts) > len(pattern_parts):
                    continue
                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break
                if matches:
                    matching_refs[ref] = sha
            refs = matching_refs

        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]
        return ret
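
# Illustrative sketch: list tag refs the way ``git for-each-ref
# "refs/tags/*"`` would (hypothetical repository path).
#
#     for sha, type_name, ref in for_each_ref("/tmp/repo", "refs/tags/*"):
#         print(sha.decode(), type_name.decode(), ref.decode())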


def show_ref(
    repo: Repo | str = ".",
    patterns: list[str | bytes] | None = None,
    head: bool = False,
    branches: bool = False,
    tags: bool = False,
    dereference: bool = False,
    verify: bool = False,
) -> list[tuple[bytes, bytes]]:
    """List references in a local repository.

    Args:
      repo: Path to the repository
      patterns: Optional list of patterns to filter refs (matched from the end)
      head: Show the HEAD reference
      branches: Limit to local branches (refs/heads/)
      tags: Limit to local tags (refs/tags/)
      dereference: Dereference tags into object IDs
      verify: Enable stricter reference checking (exact path match)

    Returns:
      List of tuples with (sha, ref_name) or (sha, ref_name^{}) for dereferenced tags
    """
    # Convert string patterns to bytes
    byte_patterns: list[bytes] | None = None
    if patterns:
        byte_patterns = [os.fsencode(p) if isinstance(p, str) else p for p in patterns]

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Filter by branches/tags if specified
        if branches or tags:
            prefixes = []
            if branches:
                prefixes.append(LOCAL_BRANCH_PREFIX)
            if tags:
                prefixes.append(LOCAL_TAG_PREFIX)
            filtered_refs = filter_ref_prefix(refs, prefixes)
        else:
            # By default, show tags, heads, and remote refs (but not HEAD)
            filtered_refs = filter_ref_prefix(refs, [b"refs/"])

        # Add HEAD if requested
        if head and HEADREF in refs:
            filtered_refs[HEADREF] = refs[HEADREF]

        # Filter by patterns if specified
        if byte_patterns:
            matching_refs: dict[Ref, ObjectID] = {}
            for ref, sha in filtered_refs.items():
                for pattern in byte_patterns:
                    if verify:
                        # Verify mode requires exact match
                        if ref == pattern:
                            matching_refs[ref] = sha
                            break
                    else:
                        # Pattern matching from the end of the full name
                        # Only complete parts are matched
                        # E.g., "master" matches "refs/heads/master" but not "refs/heads/mymaster"
                        pattern_parts = pattern.split(b"/")
                        ref_parts = ref.split(b"/")

                        # Try to match from the end
                        if len(pattern_parts) <= len(ref_parts):
                            # Check if the end of ref matches the pattern
                            matches = True
                            for i in range(len(pattern_parts)):
                                if (
                                    ref_parts[-(len(pattern_parts) - i)]
                                    != pattern_parts[i]
                                ):
                                    matches = False
                                    break
                            if matches:
                                matching_refs[ref] = sha
                                break
            filtered_refs = matching_refs

        # Sort by ref name
        sorted_refs = sorted(filtered_refs.items(), key=lambda x: x[0])

        # Build result list
        result: list[tuple[bytes, bytes]] = []
        for ref, sha in sorted_refs:
            result.append((sha, ref))

            # Dereference tags if requested
            if dereference and ref.startswith(LOCAL_TAG_PREFIX):
                try:
                    obj = r.get_object(sha)
                    # Peel tag objects to get the underlying commit/object
                    while obj.type_name == b"tag":
                        assert isinstance(obj, Tag)
                        _obj_class, sha = obj.object
                        obj = r.get_object(sha)
                    result.append((sha, ref + b"^{}"))
                except KeyError:
                    # Object not found, skip dereferencing
                    pass

        return result
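
# Illustrative sketch: resolve "main" the way ``git show-ref main`` would,
# peeling annotated tags (hypothetical repository path).
#
#     for sha, ref in show_ref("/tmp/repo", patterns=["main"], dereference=True):
#         print(sha.decode(), ref.decode())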


def show_branch(
    repo: Repo | str = ".",
    branches: list[str | bytes] | None = None,
    all_branches: bool = False,
    remotes: bool = False,
    current: bool = False,
    topo_order: bool = False,
    more: int | None = None,
    list_branches: bool = False,
    independent_branches: bool = False,
    merge_base: bool = False,
) -> list[str]:
    """Display branches and their commits.

    Args:
      repo: Path to the repository
      branches: List of specific branches to show (default: all local branches)
      all_branches: Show both local and remote branches
      remotes: Show only remote branches
      current: Include current branch if not specified
      topo_order: Show in topological order instead of chronological
      more: Show N more commits beyond common ancestor (negative to show only headers)
      list_branches: Synonym for more=-1 (show only branch headers)
      independent_branches: Show only branches not reachable from others
      merge_base: Show merge bases instead of commit list

    Returns:
      List of output lines
    """
    from .graph import find_octopus_base, independent

    output_lines: list[str] = []

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Determine which branches to show
        branch_refs: dict[Ref, ObjectID] = {}
        if branches:
            # Specific branches requested
            for branch in branches:
                branch_bytes = (
                    os.fsencode(branch) if isinstance(branch, str) else branch
                )
                # Try as full ref name first
                branch_ref_check = Ref(branch_bytes)
                if branch_ref_check in refs:
                    branch_refs[branch_ref_check] = refs[branch_ref_check]
                else:
                    # Try as branch name
                    branch_ref = local_branch_name(branch_bytes)
                    if branch_ref in refs:
                        branch_refs[branch_ref] = refs[branch_ref]
                    # Try as remote branch
                    else:
                        remote_ref = Ref(LOCAL_REMOTE_PREFIX + branch_bytes)
                        if remote_ref in refs:
                            branch_refs[remote_ref] = refs[remote_ref]
        else:
            # Default behavior: show local branches
            if all_branches:
                # Show both local and remote branches
                branch_refs = filter_ref_prefix(
                    refs, [LOCAL_BRANCH_PREFIX, LOCAL_REMOTE_PREFIX]
                )
            elif remotes:
                # Show only remote branches
                branch_refs = filter_ref_prefix(refs, [LOCAL_REMOTE_PREFIX])
            else:
                # Show only local branches
                branch_refs = filter_ref_prefix(refs, [LOCAL_BRANCH_PREFIX])

        # Add current branch if requested and not already included
        if current:
            try:
                head_refs, _ = r.refs.follow(HEADREF)
                if head_refs:
                    head_ref = head_refs[0]
                    if head_ref not in branch_refs and head_ref in refs:
                        branch_refs[head_ref] = refs[head_ref]
            except (KeyError, TypeError):
                # HEAD doesn't point to a branch or doesn't exist
                pass

        if not branch_refs:
            return output_lines

        # Sort branches for consistent output
        sorted_branches = sorted(branch_refs.items(), key=lambda x: x[0])
        branch_sha_list: list[ObjectID] = [sha for _, sha in sorted_branches]

        # Handle --independent flag
        if independent_branches:
            independent_shas = independent(r, branch_sha_list)
            for ref_name, sha in sorted_branches:
                if sha in independent_shas:
                    ref_str = os.fsdecode(shorten_ref_name(ref_name))
                    output_lines.append(ref_str)
            return output_lines

        # Handle --merge-base flag
        if merge_base:
            if len(branch_sha_list) < 2:
                # Need at least 2 branches for merge base
                return output_lines
            merge_bases = find_octopus_base(r, branch_sha_list)
            for sha in merge_bases:
                output_lines.append(sha.decode("ascii"))
            return output_lines

        # Get current branch for marking
        current_branch: bytes | None = None
        try:
            head_refs, _ = r.refs.follow(HEADREF)
            if head_refs:
                current_branch = head_refs[0]
        except (KeyError, TypeError):
            pass

        # Collect commit information for each branch
        branch_commits: list[tuple[bytes, str]] = []  # (sha, message)
        for ref_name, sha in sorted_branches:
            try:
                commit = r[sha]
                if hasattr(commit, "message"):
                    message = commit.message.decode("utf-8", errors="replace").split(
                        "\n"
                    )[0]
                else:
                    message = ""
                branch_commits.append((sha, message))
            except KeyError:
                branch_commits.append((sha, ""))

        # Handle --list flag (show only branch headers)
        if list_branches or (more is not None and more < 0):
            # Just show the branch headers
            for i, (ref_name, sha) in enumerate(sorted_branches):
                is_current = ref_name == current_branch
                marker = "*" if is_current else "!"
                # Create spacing for alignment
                prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
                ref_str = os.fsdecode(shorten_ref_name(ref_name))
                _, message = branch_commits[i]
                output_lines.append(f"{prefix}[{ref_str}] {message}")
            return output_lines

        # Build commit history for visualization
        # Collect all commits reachable from any branch
        all_commits: dict[
            bytes, tuple[int, list[bytes], str]
        ] = {}  # sha -> (timestamp, parents, message)

        def collect_commits(sha: bytes, branch_idx: int, visited: set[bytes]) -> None:
            """Recursively collect commits."""
            if sha in visited:
                return
            visited.add(sha)
            try:
                commit = r[sha]
                if not hasattr(commit, "commit_time"):
                    return
                timestamp = commit.commit_time
                parents = commit.parents if hasattr(commit, "parents") else []
                message = (
                    commit.message.decode("utf-8", errors="replace").split("\n")[0]
                    if hasattr(commit, "message")
                    else ""
                )
                if sha not in all_commits:
                    all_commits[sha] = (timestamp, parents, message)
                # Recurse to parents
                for parent in parents:
                    collect_commits(parent, branch_idx, visited)
            except KeyError:
                # Commit not found, stop traversal
                pass

        # Collect commits from all branches
        for i, (_, sha) in enumerate(sorted_branches):
            collect_commits(sha, i, set())

        # Find common ancestor
        common_ancestor_sha = None
        if len(branch_sha_list) >= 2:
            try:
                merge_bases = find_octopus_base(r, branch_sha_list)
                if merge_bases:
                    common_ancestor_sha = merge_bases[0]
            except (KeyError, IndexError):
                pass

        # Sort commits (chronological by default, or topological if requested)
        if topo_order:
            # Topological sort is more complex, for now use chronological
            # TODO: Implement proper topological ordering
            sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])
        else:
            # Reverse chronological order (newest first)
            sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])

        # Determine how many commits to show
        if more is not None:
            # Find index of common ancestor
            if common_ancestor_sha and common_ancestor_sha in all_commits:
                ancestor_idx = next(
                    (
                        i
                        for i, (sha, _) in enumerate(sorted_commits)
                        if sha == common_ancestor_sha
                    ),
                    None,
                )
                if ancestor_idx is not None:
                    # Show commits up to ancestor + more
                    sorted_commits = sorted_commits[: ancestor_idx + 1 + more]

        # Determine which branches contain which commits
        branch_contains: list[set[bytes]] = []
        for ref_name, sha in sorted_branches:
            reachable = set()

            def mark_reachable(commit_sha: bytes) -> None:
                if commit_sha in reachable:
                    return
                reachable.add(commit_sha)
                if commit_sha in all_commits:
                    _, parents, _ = all_commits[commit_sha]
                    for parent in parents:
                        mark_reachable(parent)

            mark_reachable(sha)
            branch_contains.append(reachable)

        # Output branch headers
        for i, (ref_name, sha) in enumerate(sorted_branches):
            is_current = ref_name == current_branch
            marker = "*" if is_current else "!"
            # Create spacing for alignment
            prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
            ref_str = os.fsdecode(shorten_ref_name(ref_name))
            _, message = branch_commits[i]
            output_lines.append(f"{prefix}[{ref_str}] {message}")

        # Output separator
        output_lines.append("-" * (len(sorted_branches) + 2))

        # Output commits
        for commit_sha, (_, _, message) in sorted_commits:
            # Build marker string
            markers = []
            for i, (ref_name, branch_sha) in enumerate(sorted_branches):
                if commit_sha == branch_sha:
                    # This is the tip of the branch
                    markers.append("*")
                elif commit_sha in branch_contains[i]:
                    # This commit is in the branch
                    markers.append("+")
                else:
                    # This commit is not in the branch
                    markers.append(" ")
            marker_str = "".join(markers)
            output_lines.append(f"{marker_str} [{message}]")

            # Limit output to 26 branches (git show-branch limitation)
            if len(sorted_branches) > 26:
                break

        return output_lines


def ls_remote(
    remote: str | bytes,
    config: Config | None = None,
    operation: str | None = None,
    thin_packs: bool = True,
    report_activity: Callable[[int, str], None] | None = None,
    quiet: bool = False,
    include_tags: bool = False,
    username: str | None = None,
    password: str | None = None,
    key_filename: str | None = None,
    ssh_command: str | None = None,
) -> LsRemoteResult:
    """List the refs in a remote.

    Args:
      remote: Remote repository location
      config: Configuration to use
      operation: Operation type
      thin_packs: Whether to use thin packs
      report_activity: Function to report activity
      quiet: Whether to suppress output
      include_tags: Whether to include tags
      username: Username for authentication
      password: Password for authentication
      key_filename: SSH key filename
      ssh_command: SSH command to use

    Returns:
      LsRemoteResult object with refs and symrefs
    """
    if config is None:
        config = StackedConfig.default()
    remote_str = remote.decode() if isinstance(remote, bytes) else remote
    client, host_path = get_transport_and_path(
        remote_str,
        config=config,
        operation=operation,
        thin_packs=thin_packs,
        report_activity=report_activity,
        quiet=quiet,
        include_tags=include_tags,
        username=username,
        password=password,
        key_filename=key_filename,
        ssh_command=ssh_command,
    )
    return client.get_refs(
        host_path.encode() if isinstance(host_path, str) else host_path
    )
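
# Illustrative sketch (hypothetical URL): list the refs advertised by a
# remote without cloning it.
#
#     result = ls_remote("https://example.com/project.git")
#     for ref, sha in sorted(result.refs.items()):
#         print(sha.decode(), ref.decode())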


def repack(repo: RepoPath, write_bitmaps: bool = False) -> None:
    """Repack loose files in a repository.

    Currently this only packs loose objects.

    Args:
      repo: Path to the repository
      write_bitmaps: Whether to write bitmap indexes for packs
    """
    with open_repo_closing(repo) as r:
        r.object_store.pack_loose_objects()
        if write_bitmaps:
            # Update pack cache to pick up newly created packs
            r.object_store._update_pack_cache()
            r.object_store.generate_pack_bitmaps(r.refs.as_dict())


def pack_objects(
    repo: RepoPath,
    object_ids: Sequence[ObjectID],
    packf: BinaryIO,
    idxf: BinaryIO | None,
    delta_window_size: int | None = None,
    deltify: bool | None = None,
    reuse_deltas: bool = True,
    pack_index_version: int | None = None,
) -> None:
    """Pack objects into a file.

    Args:
      repo: Path to the repository
      object_ids: List of object ids to write
      packf: File-like object to write to
      idxf: File-like object to write to (can be None)
      delta_window_size: Sliding window size for searching for deltas;
        Set to None for default window size.
      deltify: Whether to deltify objects
      reuse_deltas: Allow reuse of existing deltas while deltifying
      pack_index_version: Pack index version to use (1, 2, or 3). If None,
        uses default version.
    """
    with open_repo_closing(repo) as r:
        entries, data_sum = write_pack_from_container(
            packf.write,
            r.object_store,
            [(oid, None) for oid in object_ids],
            deltify=deltify,
            delta_window_size=delta_window_size,
            reuse_deltas=reuse_deltas,
        )
        if idxf is not None:
            index_entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
            write_pack_index(idxf, index_entries, data_sum, version=pack_index_version)


def ls_tree(
    repo: RepoPath,
    treeish: str | bytes | Commit | Tree | Tag = b"HEAD",
    outstream: TextIO | BinaryIO = sys.stdout,
    recursive: bool = False,
    name_only: bool = False,
) -> None:
    """List contents of a tree.

    Args:
      repo: Path to the repository
      treeish: Tree id to list
      outstream: Output stream (defaults to stdout)
      recursive: Whether to recursively list files
      name_only: Only print item name
    """

    def list_tree(store: BaseObjectStore, treeid: ObjectID, base: bytes) -> None:
        tree = store[treeid]
        assert isinstance(tree, Tree)
        for name, mode, sha in tree.iteritems():
            assert name is not None
            assert mode is not None
            assert sha is not None
            if base:
                name = posixpath.join(base, name)
            if name_only:
                if isinstance(outstream, BinaryIO):
                    outstream.write(name + b"\n")
                else:
                    outstream.write(name.decode("utf-8", "replace") + "\n")
            else:
                formatted = pretty_format_tree_entry(name, mode, sha)
                if isinstance(outstream, BinaryIO):
                    outstream.write(formatted.encode("utf-8"))
                else:
                    outstream.write(formatted)
            if stat.S_ISDIR(mode) and recursive:
                list_tree(store, sha, name)

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        list_tree(r.object_store, tree.id, b"")
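
# Illustrative sketch: recursively list file names at HEAD into an
# io.StringIO buffer (hypothetical repository path).
#
#     import io
#     buf = io.StringIO()
#     ls_tree("/tmp/repo", b"HEAD", outstream=buf, recursive=True, name_only=True)
#     print(buf.getvalue())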


def remote_add(
    repo: RepoPath,
    name: bytes | str,
    url: bytes | str,
) -> None:
    """Add a remote.

    Args:
      repo: Path to the repository
      name: Remote name
      url: Remote URL
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    if not isinstance(url, bytes):
        url = url.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b"remote", name)
        if c.has_section(section):
            raise RemoteExists(f"Remote {name.decode()} already exists")
        c.set(section, b"url", url)
        c.write_to_path()


def remote_remove(repo: Repo, name: bytes | str) -> None:
    """Remove a remote.

    Args:
      repo: Path to the repository
      name: Remote name
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b"remote", name)
        del c[section]
        c.write_to_path()


def _quote_path(path: str) -> str:
    """Quote a path using C-style quoting similar to git's core.quotePath.

    Args:
      path: Path to quote

    Returns:
      Quoted path string
    """
    # Check if path needs quoting (non-ASCII or special characters)
    needs_quoting = False
    for char in path:
        if ord(char) > 127 or char in '"\\':
            needs_quoting = True
            break

    if not needs_quoting:
        return path

    # Apply C-style quoting
    quoted = '"'
    for char in path:
        if ord(char) > 127:
            # Non-ASCII character, encode as octal escape
            utf8_bytes = char.encode("utf-8")
            for byte in utf8_bytes:
                quoted += f"\\{byte:03o}"
        elif char == '"':
            quoted += '\\"'
        elif char == "\\":
            quoted += "\\\\"
        else:
            quoted += char
    quoted += '"'
    return quoted
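
# Illustrative sketch: non-ASCII path components are octal-escaped,
# matching git's core.quotePath behaviour; ASCII paths pass through.
#
#     _quote_path("тест.txt")   # -> '"\\321\\202\\320\\265\\321\\201\\321\\202.txt"'
#     _quote_path("plain.txt")  # -> 'plain.txt'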


def check_ignore(
    repo: RepoPath,
    paths: Sequence[str | bytes | os.PathLike[str]],
    no_index: bool = False,
    quote_path: bool = True,
) -> Iterator[str]:
    r"""Debug gitignore files.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
      quote_path: If True, quote non-ASCII characters in returned paths using
        C-style octal escapes (e.g. "тест.txt" becomes
        "\\321\\202\\320\\265\\321\\201\\321\\202.txt"). If False, return
        raw unicode paths.

    Yields:
      Ignored file paths
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for original_path in paths:
            # Convert path to string for consistent handling
            original_path_fspath = os.fspath(original_path)
            # Normalize to str
            original_path_str = os.fsdecode(original_path_fspath)
            if not no_index and path_to_tree_path(r.path, original_path_str) in index:
                continue

            # Preserve whether the original path had a trailing slash
            had_trailing_slash = original_path_str.endswith(("/", os.path.sep))

            if os.path.isabs(original_path_str):
                path = os.path.relpath(original_path_str, r.path)
                # Normalize Windows paths to use forward slashes
                if os.path.sep != "/":
                    path = path.replace(os.path.sep, "/")
            else:
                path = original_path_str

            # Restore trailing slash if it was in the original
            if had_trailing_slash and not path.endswith("/"):
                path = path + "/"

            # For directories, check with trailing slash to get correct ignore behavior
            test_path = path
            path_without_slash = path.rstrip("/")
            is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))

            # If this is a directory path, ensure we test it correctly
            if is_directory and not path.endswith("/"):
                test_path = path + "/"

            if ignore_manager.is_ignored(test_path):
                # Return relative path (like git does) when absolute path was provided
                if os.path.isabs(original_path):
                    output_path = path
                else:
                    output_path = original_path  # type: ignore[assignment]
                yield _quote_path(output_path) if quote_path else output_path
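
# Illustrative sketch (hypothetical paths): check_ignore is a generator,
# so drain it to see which paths match an ignore rule.
#
#     for ignored in check_ignore("/tmp/repo", ["build/", "src/main.py"]):
#         print(ignored)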


def _get_current_head_tree(repo: Repo) -> ObjectID | None:
    """Get the current HEAD tree ID.

    Args:
      repo: Repository object

    Returns:
      Tree ID of current HEAD, or None if no HEAD exists (empty repo)
    """
    try:
        current_head = repo.refs[HEADREF]
        current_commit = repo[current_head]
        assert isinstance(current_commit, Commit), "Expected a Commit object"
        tree_id: ObjectID = current_commit.tree
        return tree_id
    except KeyError:
        # No HEAD yet (empty repo)
        return None


def _check_uncommitted_changes(
    repo: Repo, target_tree_id: ObjectID, force: bool = False
) -> None:
    """Check for uncommitted changes that would conflict with a checkout/switch.

    Args:
      repo: Repository object
      target_tree_id: Tree ID to check conflicts against
      force: If True, skip the check

    Raises:
      CheckoutError: If there are conflicting local changes
    """
    if force:
        return

    # Get current HEAD tree for comparison
    current_tree_id = _get_current_head_tree(repo)
    if current_tree_id is None:
        # No HEAD yet (empty repo)
        return

    status_report = status(repo)
    changes = []
    # staged is a dict with 'add', 'delete', 'modify' keys
    if isinstance(status_report.staged, dict):
        changes.extend(status_report.staged.get("add", []))
        changes.extend(status_report.staged.get("delete", []))
        changes.extend(status_report.staged.get("modify", []))
    # unstaged is a list
    changes.extend(status_report.unstaged)

    if changes:
        # Check if any changes would conflict with checkout
        target_tree_obj = repo[target_tree_id]
        assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
        target_tree = target_tree_obj
        for change in changes:
            if isinstance(change, str):
                change = change.encode(DEFAULT_ENCODING)
            try:
                target_tree.lookup_path(repo.object_store.__getitem__, change)
            except KeyError:
                # File doesn't exist in target tree - change can be preserved
                pass
            else:
                # File exists in target tree - would overwrite local changes
                raise CheckoutError(
                    f"Your local changes to '{change.decode()}' would be "
                    "overwritten. Please commit or stash before switching."
                )


def _get_worktree_update_config(
    repo: Repo,
) -> tuple[
    bool,
    Callable[[bytes], bool],
    Callable[[str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None],
]:
    """Get configuration for working tree updates.

    Args:
      repo: Repository object

    Returns:
      Tuple of (honor_filemode, validate_path_element, symlink_fn)
    """
    config = repo.get_config()
    honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")

    # Note: the key within the "core" section is the bare option name
    # ("protectNTFS"/"protectHFS"), not the dotted form.
    if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
        validate_path_element = validate_path_element_ntfs
    elif config.get_boolean(b"core", b"protectHFS", sys.platform == "darwin"):
        validate_path_element = validate_path_element_hfs
    else:
        validate_path_element = validate_path_element_default

    if config.get_boolean(b"core", b"symlinks", True):

        def symlink_wrapper(
            source: str | bytes | os.PathLike[str],
            target: str | bytes | os.PathLike[str],
        ) -> None:
            symlink(source, target)  # type: ignore[arg-type,unused-ignore]

        symlink_fn = symlink_wrapper
    else:

        def symlink_fallback(
            source: str | bytes | os.PathLike[str],
            target: str | bytes | os.PathLike[str],
        ) -> None:
            mode = "w" + ("b" if isinstance(source, bytes) else "")
            with open(target, mode) as f:
                f.write(source)

        symlink_fn = symlink_fallback

    return honor_filemode, validate_path_element, symlink_fn


def _perform_tree_switch(
    repo: Repo,
    current_tree_id: ObjectID | None,
    target_tree_id: ObjectID,
    force: bool = False,
) -> None:
    """Perform the actual working tree switch.

    Args:
      repo: Repository object
      current_tree_id: Current tree ID (or None for empty repo)
      target_tree_id: Target tree ID to switch to
      force: If True, force removal of untracked files and allow overwriting modified files
    """
    honor_filemode, validate_path_element, symlink_fn = _get_worktree_update_config(
        repo
    )

    # Get blob normalizer for line ending conversion
    blob_normalizer = repo.get_blob_normalizer()

    # Update working tree
    tree_change_iterator: Iterator[TreeChange] = tree_changes(
        repo.object_store, current_tree_id, target_tree_id
    )
    update_working_tree(
        repo,
        current_tree_id,
        target_tree_id,
        change_iterator=tree_change_iterator,
        honor_filemode=honor_filemode,
        validate_path_element=validate_path_element,
        symlink_fn=symlink_fn,
        force_remove_untracked=force,
        blob_normalizer=blob_normalizer,
        allow_overwrite_modified=force,
    )


def update_head(
    repo: RepoPath,
    target: str | bytes,
    detached: bool = False,
    new_branch: str | bytes | None = None,
) -> None:
    """Update HEAD to point at a new branch/commit.

    Note that this does not actually update the working tree.

    Args:
      repo: Path to the repository
      target: Branch or committish to switch to
      detached: Create a detached head
      new_branch: New branch to create
    """
    with open_repo_closing(repo) as r:
        if new_branch is not None:
            to_set = _make_branch_ref(new_branch)
        else:
            to_set = HEADREF
        if detached:
            # TODO(jelmer): Provide some way so that the actual ref gets
            # updated rather than what it points to, so the delete isn't
            # necessary.
            del r.refs[to_set]
            r.refs[to_set] = parse_commit(r, target).id
        else:
            r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
        if new_branch is not None:
            r.refs.set_symbolic_ref(HEADREF, to_set)


def checkout(
    repo: str | os.PathLike[str] | Repo,
    target: str | bytes | Commit | Tag | None = None,
    force: bool = False,
    new_branch: bytes | str | None = None,
    paths: list[bytes | str] | None = None,
) -> None:
    """Switch to a branch or commit, updating both HEAD and the working tree.

    This is similar to 'git checkout', allowing you to switch to a branch,
    tag, or specific commit. Unlike update_head, this function also updates
    the working tree to match the target.

    Args:
      repo: Path to repository or repository object
      target: Branch name, tag, or commit SHA to checkout. If None and paths
        is specified, restores files from HEAD
      force: Force checkout even if there are local changes
      new_branch: Create a new branch at target (like git checkout -b)
      paths: List of specific paths to checkout. If specified, only these
        paths are updated and HEAD is not changed

    Raises:
      CheckoutError: If checkout cannot be performed due to conflicts
      KeyError: If the target reference cannot be found
    """
    with open_repo_closing(repo) as r:
        # Store the original target for later reference checks
        original_target = target
        worktree = r.get_worktree()

        # Handle path-specific checkout (like git checkout -- <paths>)
        if paths is not None:
            # Convert paths to bytes
            byte_paths = []
            for path in paths:
                if isinstance(path, str):
                    byte_paths.append(path.encode(DEFAULT_ENCODING))
                else:
                    byte_paths.append(path)

            # If no target specified, use HEAD
            if target is None:
                try:
                    target = r.refs[HEADREF]
                except KeyError:
                    raise CheckoutError("No HEAD reference found")
            else:
                if isinstance(target, str):
                    target = target.encode(DEFAULT_ENCODING)

            # Get the target commit and tree
            target_tree = parse_tree(r, target)

            # Get blob normalizer for line ending conversion
            blob_normalizer = r.get_blob_normalizer()

            # Restore specified paths from target tree
            for path in byte_paths:
                try:
                    # Look up the path in the target tree
                    mode, sha = target_tree.lookup_path(
                        r.object_store.__getitem__, path
                    )
                    obj = r[sha]
                    assert isinstance(obj, Blob), "Expected a Blob object"
                except KeyError:
                    # Path doesn't exist in target tree
                    pass
                else:
                    # Create directories if needed
                    # Handle path as string
                    if isinstance(path, bytes):
                        path_str = path.decode(DEFAULT_ENCODING)
                    else:
                        path_str = path
                    file_path = os.path.join(r.path, path_str)
                    os.makedirs(os.path.dirname(file_path), exist_ok=True)

                    # Write the file content
                    if stat.S_ISREG(mode):
                        # Apply checkout filters (smudge)
                        if blob_normalizer:
                            obj = blob_normalizer.checkout_normalize(obj, path)

                        flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
                        if sys.platform == "win32":
                            flags |= os.O_BINARY

                        with os.fdopen(os.open(file_path, flags, mode), "wb") as f:
                            f.write(obj.data)

                    # Update the index
                    worktree.stage(path)
            return

        # Normal checkout (switching branches/commits)
        if target is None:
            raise ValueError("Target must be specified for branch/commit checkout")

        if isinstance(target, str):
            target_bytes = target.encode(DEFAULT_ENCODING)
        elif isinstance(target, bytes):
            target_bytes = target
        else:
            # For Commit/Tag objects, we'll use their SHA
            target_bytes = target.id

        if isinstance(new_branch, str):
            new_branch = new_branch.encode(DEFAULT_ENCODING)

        # Parse the target to get the commit
        assert (
            original_target is not None
        )  # Guaranteed by earlier check for normal checkout
        target_commit = parse_commit(r, original_target)
        target_tree_id = target_commit.tree

        # Get current HEAD tree for comparison
        current_tree_id = _get_current_head_tree(r)

        # Check for uncommitted changes if not forcing
        if current_tree_id is not None:
            _check_uncommitted_changes(r, target_tree_id, force)

        # Update working tree
        _perform_tree_switch(r, current_tree_id, target_tree_id, force)

        # Update HEAD
        if new_branch:
            # Create new branch and switch to it
            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
            update_head(r, new_branch)

            # Set up tracking if creating from a remote branch
            if isinstance(original_target, bytes) and target_bytes.startswith(
                LOCAL_REMOTE_PREFIX
            ):
                try:
                    remote_name, branch_name = parse_remote_ref(target_bytes)
                    # Set tracking to refs/heads/<branch> on the remote
                    set_branch_tracking(
                        r, new_branch, remote_name, local_branch_name(branch_name)
                    )
                except ValueError:
                    # Invalid remote ref format, skip tracking setup
                    pass
        else:
            # Check if target is a branch name (with or without refs/heads/ prefix)
            branch_ref = None
            if (
                isinstance(original_target, (str, bytes))
                and target_bytes in r.refs.keys()
            ):
                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
                    branch_ref = target_bytes
            else:
                # Try adding refs/heads/ prefix
                potential_branch = (
                    _make_branch_ref(target_bytes)
                    if isinstance(original_target, (str, bytes))
                    else None
                )
                if potential_branch in r.refs.keys():
                    branch_ref = potential_branch

            if branch_ref:
                # It's a branch - update HEAD symbolically
                update_head(r, branch_ref)
            else:
                # It's a tag, other ref, or commit SHA - detached HEAD
                update_head(r, target_commit.id.decode("ascii"), detached=True)
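
# Illustrative sketches (hypothetical names):
#
#     checkout("/tmp/repo", b"main")                       # switch branch
#     checkout("/tmp/repo", b"refs/remotes/origin/topic",
#              new_branch=b"topic")                        # branch off a remote ref
#     checkout("/tmp/repo", paths=[b"README.md"])          # restore one file from HEAD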


def restore(
    repo: str | os.PathLike[str] | Repo,
    paths: list[bytes | str],
    source: str | bytes | Commit | Tag | None = None,
    staged: bool = False,
    worktree: bool = True,
) -> None:
    """Restore working tree files.

    This is similar to 'git restore', allowing you to restore specific files
    from a commit or the index without changing HEAD.

    Args:
      repo: Path to repository or repository object
      paths: List of specific paths to restore
      source: Branch name, tag, or commit SHA to restore from. If None,
        restores staged files from HEAD, or worktree files from index
      staged: Restore files in the index (--staged)
      worktree: Restore files in the working tree (default: True)

    Raises:
      CheckoutError: If restore cannot be performed
      ValueError: If neither staged nor worktree is specified
      KeyError: If the source reference cannot be found
    """
    if not staged and not worktree:
        raise ValueError("At least one of staged or worktree must be True")

    with open_repo_closing(repo) as r:
        from .index import _fs_to_tree_path, build_file_from_blob

        # Determine the source tree
        if source is None:
            if staged:
                # Restoring staged files from HEAD
                try:
                    source = r.refs[HEADREF]
                except KeyError:
                    raise CheckoutError("No HEAD reference found")
            elif worktree:
                # Restoring worktree files from index
                from .index import ConflictedIndexEntry, IndexEntry

                index = r.open_index()
                for path in paths:
                    if isinstance(path, str):
                        tree_path = _fs_to_tree_path(path)
                    else:
                        tree_path = path
                    try:
                        index_entry = index[tree_path]
                        if isinstance(index_entry, ConflictedIndexEntry):
                            raise CheckoutError(
                                f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' has conflicts"
                            )
                        blob = r[index_entry.sha]
                        assert isinstance(blob, Blob), "Expected a Blob object"
                        full_path = os.path.join(os.fsencode(r.path), tree_path)
                        mode = index_entry.mode
                        # Use build_file_from_blob to write the file
                        build_file_from_blob(blob, mode, full_path)
                    except KeyError:
                        # Path doesn't exist in index
                        raise CheckoutError(
                            f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not in index"
                        )
                return

        # source is not None at this point
        assert source is not None

        # Get the source tree
        source_tree = parse_tree(r, treeish=source)

        # Restore specified paths from source tree
        for path in paths:
            if isinstance(path, str):
                tree_path = _fs_to_tree_path(path)
            else:
                tree_path = path

            try:
                # Look up the path in the source tree
                mode, sha = source_tree.lookup_path(
                    r.object_store.__getitem__, tree_path
                )
                blob = r[sha]
                assert isinstance(blob, Blob), "Expected a Blob object"
            except KeyError:
                # Path doesn't exist in source tree
                raise CheckoutError(
                    f"Path '{path if isinstance(path, str) else path.decode(DEFAULT_ENCODING)}' not found in source"
                )

            full_path = os.path.join(os.fsencode(r.path), tree_path)

            if worktree:
                # Use build_file_from_blob to restore to working tree
                build_file_from_blob(blob, mode, full_path)

            if staged:
                # Update the index with the blob from source
                from .index import IndexEntry

                index = r.open_index()
                # When only updating staged (not worktree), we want to reset the index
                # to the source, but invalidate the stat cache so Git knows to check
                # the worktree file. Use zeros for stat fields.
                if not worktree:
                    # Invalidate stat cache by using zeros
                    new_entry = IndexEntry(
                        ctime=(0, 0),
                        mtime=(0, 0),
                        dev=0,
                        ino=0,
                        mode=mode,
                        uid=0,
                        gid=0,
                        size=0,
                        sha=sha,
                    )
                else:
                    # If we also updated worktree, use actual stat
                    from .index import index_entry_from_stat

                    st = os.lstat(full_path)
                    new_entry = index_entry_from_stat(st, sha, mode)
                index[tree_path] = new_entry
                index.write()
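
# Illustrative sketches (hypothetical paths):
#
#     restore("/tmp/repo", ["src/main.py"])                 # worktree from index
#     restore("/tmp/repo", ["src/main.py"],
#             staged=True, worktree=False)                  # index from HEAD
#     restore("/tmp/repo", ["src/main.py"],
#             source="v1.0", staged=True)                   # index + worktree from tag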


def switch(
    repo: str | os.PathLike[str] | Repo,
    target: str | bytes | Commit | Tag,
    create: str | bytes | None = None,
    force: bool = False,
    detach: bool = False,
) -> None:
    """Switch branches.

    This is similar to 'git switch', allowing you to switch to a different
    branch or commit, updating both HEAD and the working tree.

    Args:
      repo: Path to repository or repository object
      target: Branch name, tag, or commit SHA to switch to
      create: Create a new branch at target before switching (like git switch -c)
      force: Force switch even if there are local changes
      detach: Switch to a commit in detached HEAD state (like git switch --detach)

    Raises:
      CheckoutError: If switch cannot be performed due to conflicts
      KeyError: If the target reference cannot be found
      ValueError: If both create and detach are specified
    """
    if create and detach:
        raise ValueError("Cannot use both create and detach options")

    with open_repo_closing(repo) as r:
        # Store the original target for later reference checks
        original_target = target

        if isinstance(target, str):
            target_bytes = target.encode(DEFAULT_ENCODING)
        elif isinstance(target, bytes):
            target_bytes = target
        else:
            # For Commit/Tag objects, we'll use their SHA
            target_bytes = target.id

        if isinstance(create, str):
            create = create.encode(DEFAULT_ENCODING)

        # Parse the target to get the commit
        target_commit = parse_commit(r, original_target)
        target_tree_id = target_commit.tree

        # Get current HEAD tree for comparison
        current_tree_id = _get_current_head_tree(r)

        # Check for uncommitted changes if not forcing
        if current_tree_id is not None:
            _check_uncommitted_changes(r, target_tree_id, force)

        # Update working tree
        _perform_tree_switch(r, current_tree_id, target_tree_id, force)

        # Update HEAD
        if create:
            # Create new branch and switch to it
            branch_create(r, create, objectish=target_commit.id.decode("ascii"))
            update_head(r, create)

            # Set up tracking if creating from a remote branch
            if isinstance(original_target, bytes) and target_bytes.startswith(
                LOCAL_REMOTE_PREFIX
            ):
                try:
                    remote_name, branch_name = parse_remote_ref(target_bytes)
                    # Set tracking to refs/heads/<branch> on the remote
                    set_branch_tracking(
                        r, create, remote_name, local_branch_name(branch_name)
                    )
                except ValueError:
                    # Invalid remote ref format, skip tracking setup
                    pass
        elif detach:
            # Detached HEAD mode
            update_head(r, target_commit.id.decode("ascii"), detached=True)
        else:
            # Check if target is a branch name (with or without refs/heads/ prefix)
            branch_ref = None
            if (
                isinstance(original_target, (str, bytes))
                and target_bytes in r.refs.keys()
            ):
                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
                    branch_ref = target_bytes
            else:
                # Try adding refs/heads/ prefix
                potential_branch = (
                    _make_branch_ref(target_bytes)
                    if isinstance(original_target, (str, bytes))
                    else None
                )
                if potential_branch in r.refs.keys():
                    branch_ref = potential_branch

            if branch_ref:
                # It's a branch - update HEAD symbolically
                update_head(r, branch_ref)
            else:
                # It's a tag, other ref, or commit SHA
                # In git switch, this would be an error unless --detach is used
                raise CheckoutError(
                    f"'{target_bytes.decode(DEFAULT_ENCODING)}' is not a branch. "
                    "Use detach=True to switch to a commit in detached HEAD state."
                )
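
# Illustrative sketches (hypothetical names): unlike checkout(), a bare
# tag or commit requires detach=True.
#
#     switch("/tmp/repo", "main")
#     switch("/tmp/repo", "v1.0", detach=True)
#     switch("/tmp/repo", b"refs/remotes/origin/topic", create=b"topic")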


def reset_file(
    repo: Repo,
    file_path: str,
    target: str | bytes | Commit | Tree | Tag = b"HEAD",
    symlink_fn: Callable[
        [str | bytes | os.PathLike[str], str | bytes | os.PathLike[str]], None
    ]
    | None = None,
) -> None:
    """Reset a file to a specific commit or branch.

    Args:
      repo: dulwich Repo object
      file_path: file to reset, relative to the repository path
      target: branch or commit or b'HEAD' to reset to
      symlink_fn: Function to use for creating symlinks
    """
    tree = parse_tree(repo, treeish=target)
    tree_path = _fs_to_tree_path(file_path)

    file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
    full_path = os.path.join(os.fsencode(repo.path), tree_path)
    blob = repo.object_store[file_entry[1]]
    assert isinstance(blob, Blob)
    mode = file_entry[0]
    build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)


@replace_me(since="0.22.9", remove_in="0.24.0")
def checkout_branch(
    repo: str | os.PathLike[str] | Repo,
    target: bytes | str,
    force: bool = False,
) -> None:
    """Switch branches or restore working tree files.

    This is now a wrapper around the general checkout() function.
    Preserved for backward compatibility.

    Args:
      repo: dulwich Repo object
      target: branch name or commit sha to checkout
      force: whether to force the checkout
    """
    # Simply delegate to the new checkout function
    return checkout(repo, target, force=force)
def sparse_checkout(
    repo: str | os.PathLike[str] | Repo,
    patterns: list[str] | None = None,
    force: bool = False,
    cone: bool | None = None,
) -> None:
    """Perform a sparse checkout in the repository (either 'full' or 'cone mode').

    Perform sparse checkout in either 'cone' (directory-based) mode or
    'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.
    If ``cone`` is ``None``, the mode is inferred from the repository's
    ``core.sparseCheckoutCone`` config setting.

    Steps:
      1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
      2) Determine which paths in the index are included vs. excluded.
         - If ``cone=True``, use "cone-compatible" directory-based logic.
         - If ``cone=False``, use standard .gitignore-style matching.
      3) Update the index's skip-worktree bits and add/remove files in
         the working tree accordingly.
      4) If ``force=False``, refuse to remove files that have local modifications.

    Args:
      repo: Path to the repository or a Repo object.
      patterns: Optional list of sparse-checkout patterns to write.
      force: Whether to force removal of locally modified files (default False).
      cone: Boolean indicating cone mode (True/False). If None, read from config.

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        # --- 0) Possibly infer 'cone' from config ---
        if cone is None:
            cone = repo_obj.get_worktree().infer_cone_mode()

        # --- 1) Read or write patterns ---
        if patterns is None:
            lines = repo_obj.get_worktree().get_sparse_checkout_patterns()
            if lines is None:
                raise Error("No sparse checkout patterns found.")
        else:
            lines = patterns
            repo_obj.get_worktree().set_sparse_checkout_patterns(patterns)

        # --- 2) Determine the set of included paths ---
        index = repo_obj.open_index()
        included_paths = determine_included_paths(index, lines, cone)

        # --- 3) Apply those results to the index & working tree ---
        try:
            apply_included_paths(repo_obj, included_paths, force=force)
        except SparseCheckoutConflictError as exc:
            raise CheckoutError(*exc.args) from exc


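# Illustrative usage sketch (comment only): restrict the working tree to
# top-level files plus "src/", using one plausible cone-style pattern set;
# the path and patterns below are placeholders.
#
#   sparse_checkout("/path/to/repo", patterns=["/*", "!/*/", "/src/"], cone=True)

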
def cone_mode_init(repo: str | os.PathLike[str] | Repo) -> None:
    """Initialize a repository to use sparse checkout in 'cone' mode.

    Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
    Writes an initial ``.git/info/sparse-checkout`` file that includes only
    top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
    Then performs a sparse checkout to update the working tree accordingly.

    If no directories are specified, then only top-level files are included:
    https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling

    Args:
      repo: Path to the repository or a Repo object.

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        patterns = ["/*", "!/*/"]  # root-level files only
        sparse_checkout(repo_obj, patterns, force=True, cone=True)


def cone_mode_set(
    repo: str | os.PathLike[str] | Repo, dirs: Sequence[str], force: bool = False
) -> None:
    """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.

    Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
    Writes new patterns so that only the specified directories (and top-level files)
    remain in the working tree, and applies the sparse checkout update.

    Args:
      repo: Path to the repository or a Repo object.
      dirs: List of directory names to include.
      force: Whether to forcibly discard local modifications (default False).

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        repo_obj.get_worktree().set_cone_mode_patterns(dirs=dirs)
        new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
        # Finally, apply the patterns and update the working tree
        sparse_checkout(repo_obj, new_patterns, force=force, cone=True)


def cone_mode_add(
    repo: str | os.PathLike[str] | Repo, dirs: Sequence[str], force: bool = False
) -> None:
    """Add new directories to the existing 'cone-mode' sparse-checkout patterns.

    Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
    lines to include the specified directories, and then performs a sparse
    checkout to update the working tree accordingly.

    Args:
      repo: Path to the repository or a Repo object.
      dirs: List of directory names to add to the sparse-checkout.
      force: Whether to forcibly discard local modifications (default False).

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        # Do not pass base patterns as dirs
        base_patterns = ["/*", "!/*/"]
        existing_dirs = [
            pat.strip("/")
            for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
            if pat not in base_patterns
        ]
        added_dirs = existing_dirs + list(dirs or [])
        repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
        new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
        sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)


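# Illustrative usage sketch (comment only): the three cone-mode helpers above
# compose as follows; the repository path and directory names are placeholders.
#
#   cone_mode_init("/path/to/repo")                       # top-level files only
#   cone_mode_set("/path/to/repo", dirs=["src", "docs"])  # replace pattern set
#   cone_mode_add("/path/to/repo", dirs=["tests"])        # extend pattern set

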
def check_mailmap(repo: RepoPath, contact: str | bytes) -> bytes:
    """Check canonical name and email of contact.

    Args:
      repo: Path to the repository
      contact: Contact name and/or email

    Returns: Canonical contact data
    """
    with open_repo_closing(repo) as r:
        from .mailmap import Mailmap

        try:
            mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
        except FileNotFoundError:
            mailmap = Mailmap()
        contact_bytes = (
            contact.encode(DEFAULT_ENCODING) if isinstance(contact, str) else contact
        )
        result = mailmap.lookup(contact_bytes)
        if isinstance(result, bytes):
            return result
        else:
            # Convert tuple back to bytes format
            name, email = result
            if name is None:
                name = b""
            if email is None:
                email = b""
            return name + b" <" + email + b">"


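# Illustrative usage sketch (comment only); the contact string is a placeholder:
#
#   canonical = check_mailmap("/path/to/repo", "Jane <jane@old.example.com>")
#   # -> canonical bytes such as b"Jane Doe <jane@example.com>" when a .mailmap
#   #    entry matches; otherwise the contact comes back unchanged

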
def fsck(repo: RepoPath) -> Iterator[tuple[bytes, Exception]]:
    """Check a repository.

    Args:
      repo: A path to the repository

    Returns: Iterator over errors/warnings
    """
    with open_repo_closing(repo) as r:
        # TODO(jelmer): check pack files
        # TODO(jelmer): check graph
        # TODO(jelmer): check refs
        for sha in r.object_store:
            o = r.object_store[sha]
            try:
                o.check()
            except Exception as e:
                yield (sha, e)


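# Illustrative usage sketch (comment only): print any corrupt objects found.
#
#   for sha, err in fsck("/path/to/repo"):
#       print(sha.decode("ascii"), err)

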
def stash_list(
    repo: str | os.PathLike[str] | Repo,
) -> Iterator[tuple[int, tuple[bytes, bytes]]]:
    """List all stashes in a repository."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        entries = stash.stashes()
        # Convert Entry objects to (old_sha, new_sha) tuples
        return enumerate([(entry.old_sha, entry.new_sha) for entry in entries])


def stash_push(repo: str | os.PathLike[str] | Repo) -> None:
    """Push a new stash onto the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.push()


def stash_pop(repo: str | os.PathLike[str] | Repo) -> None:
    """Pop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.pop(0)


def stash_drop(repo: str | os.PathLike[str] | Repo, index: int) -> None:
    """Drop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.drop(index)


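# Illustrative usage sketch (comment only): stash the current state, inspect
# the stack, then reapply the newest entry; the path is a placeholder.
#
#   stash_push("/path/to/repo")
#   for idx, (old_sha, new_sha) in stash_list("/path/to/repo"):
#       print(idx, new_sha.decode("ascii"))
#   stash_pop("/path/to/repo")

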
def ls_files(repo: RepoPath) -> list[bytes]:
    """List all files in an index."""
    with open_repo_closing(repo) as r:
        return sorted(r.open_index())


def find_unique_abbrev(
    object_store: BaseObjectStore, object_id: str | bytes, min_length: int = 7
) -> str:
    """Find the shortest unique abbreviation for an object ID.

    Args:
      object_store: Object store to search in
      object_id: The full object ID to abbreviate
      min_length: Minimum length of abbreviation (default 7)

    Returns:
      The shortest unique prefix of the object ID (at least min_length chars)
    """
    if isinstance(object_id, bytes):
        hex_id = object_id.decode("ascii")
    else:
        hex_id = object_id

    # Start with minimum length
    for length in range(min_length, len(hex_id) + 1):
        prefix = hex_id[:length]
        matches = 0

        # Check if this prefix is unique
        for obj_id in object_store:
            if obj_id.decode("ascii").startswith(prefix):
                matches += 1
                if matches > 1:
                    # Not unique, need more characters
                    break

        if matches == 1:
            # Found unique prefix
            return prefix

    # If we get here, return the full ID
    return hex_id


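# Illustrative usage sketch (comment only): abbreviate the current HEAD commit
# against the repository's own object store; the path is a placeholder.
#
#   from dulwich.repo import Repo
#   r = Repo("/path/to/repo")
#   short = find_unique_abbrev(r.object_store, r.head())

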
def describe(repo: str | os.PathLike[str] | Repo, abbrev: int | None = None) -> str:
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7

    Returns: a string description of the current git revision

    Examples: "gabcdef1", "v0.1" or "v0.1-5-gabcdef1".
    """
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key_str = key.decode()
            obj = r.get_object(value)
            if "tags" not in key_str:
                continue
            _, tag = key_str.rsplit("/", 1)

            if isinstance(obj, Tag):
                # Annotated tag case
                commit = r.get_object(obj.object[1])
            else:
                # Lightweight tag case - obj is already the commit
                commit = obj
            if not isinstance(commit, Commit):
                raise AssertionError(
                    f"Expected Commit object, got {type(commit).__name__}"
                )
            tag_info: list[Any] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]
            tags[tag] = tag_info

        # Sort tags by datetime (first element of the value list)
        sorted_tags = sorted(
            tags.items(), key=lambda tag_item: tag_item[1][0], reverse=True
        )

        # Get the latest commit
        latest_commit = r[r.head()]

        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"

        # We're now 0 commits from the top
        commit_count = 0

        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag_item in sorted_tags:
                tag_name = tag_item[0]
                tag_commit = tag_item[1][1]
                if commit_id == tag_commit:
                    if commit_count == 0:
                        return tag_name
                    else:
                        if abbrev is not None:
                            abbrev_hash = latest_commit.id.decode("ascii")[abbrev_slice]
                        else:
                            abbrev_hash = find_unique_abbrev(
                                r.object_store, latest_commit.id
                            )
                        return f"{tag_name}-{commit_count}-g{abbrev_hash}"
            commit_count += 1

        # Return plain commit if no parent tag can be found
        if abbrev is not None:
            return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
        return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"


def get_object_by_path(
    repo: RepoPath,
    path: str | bytes,
    committish: str | bytes | Commit | Tag | None = None,
) -> Blob | Tree | Commit | Tag:
    """Get an object by path.

    Args:
      repo: A path to the repository
      path: Path to look up
      committish: Commit to look up path in

    Returns: A `ShaFile` object
    """
    if committish is None:
        committish = "HEAD"
    # Get the repository
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        base_tree = commit.tree
        if not isinstance(path, bytes):
            path = commit_encode(commit, path)
        (_mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
        obj = r[sha]
        assert isinstance(obj, (Blob, Tree, Commit, Tag))
        return obj


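# Illustrative usage sketch (comment only): read a file's contents at a given
# commit without touching the working tree; path and committish are placeholders.
#
#   obj = get_object_by_path("/path/to/repo", "setup.py", committish="HEAD")
#   data = obj.data  # raw bytes, assuming the path resolves to a Blob

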
def write_tree(repo: RepoPath) -> bytes:
    """Write a tree object from the index.

    Args:
      repo: Repository for which to write tree

    Returns: tree id for the tree that was written
    """
    with open_repo_closing(repo) as r:
        return r.open_index().commit(r.object_store)


def _do_merge(
    r: Repo,
    merge_commit_id: ObjectID,
    no_commit: bool = False,
    no_ff: bool = False,
    message: bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> tuple[ObjectID | None, list[bytes]]:
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_merge_base
    from .merge import recursive_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[HEADREF]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"
    merge_commit = r[merge_commit_id]
    assert isinstance(merge_commit, Commit), "Expected a Commit object"

    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])
    if not merge_bases:
        raise Error("No common ancestor found")

    # Use the first merge base for fast-forward checks
    base_commit_id = merge_bases[0]

    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Check for fast-forward
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[HEADREF] = merge_commit_id

        # Update the working directory
        changes = tree_changes(r.object_store, head_commit.tree, merge_commit.tree)
        update_working_tree(
            r, head_commit.tree, merge_commit.tree, change_iterator=changes
        )
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Perform recursive merge (handles multiple merge bases automatically)
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = recursive_merge(
        r.object_store, merge_bases, head_commit, merge_commit, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n".encode()
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[HEADREF] = merge_commit_obj.id

    return (merge_commit_obj.id, [])


def _do_octopus_merge(
    r: Repo,
    merge_commit_ids: list[ObjectID],
    no_commit: bool = False,
    no_ff: bool = False,
    message: bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> tuple[ObjectID | None, list[bytes]]:
    """Internal octopus merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_ids: List of commit SHAs to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit (ignored for octopus)
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_octopus_base
    from .merge import octopus_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[HEADREF]
    except KeyError:
        raise Error("No HEAD reference found")

    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"

    # Get all commits to merge
    other_commits = []
    for merge_commit_id in merge_commit_ids:
        merge_commit = r[merge_commit_id]
        assert isinstance(merge_commit, Commit), "Expected a Commit object"
        # Check if we're trying to merge the same commit as HEAD
        if head_commit_id == merge_commit_id:
            # Skip this commit, it's already merged
            continue
        other_commits.append(merge_commit)

    # If no commits to merge after filtering, we're already up to date
    if not other_commits:
        return (None, [])

    # If only one commit to merge, use regular merge
    if len(other_commits) == 1:
        return _do_merge(
            r, other_commits[0].id, no_commit, no_ff, message, author, committer
        )

    # Find the octopus merge base
    all_commit_ids = [head_commit_id] + [c.id for c in other_commits]
    merge_bases = find_octopus_base(r, all_commit_ids)
    if not merge_bases:
        raise Error("No common ancestor found")

    # Check if this is a fast-forward (HEAD is the merge base)
    # For octopus merges, fast-forward doesn't really apply, so we always
    # create a merge commit

    # Perform octopus merge
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = octopus_merge(
        r.object_store, merge_bases, head_commit, other_commits, gitattributes, config
    )

    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts:
        # Don't create a commit if there are conflicts
        # Octopus merge refuses to proceed with conflicts
        return (None, conflicts)

    if no_commit:
        # Don't create a commit if no_commit is True
        return (None, [])

    # Create merge commit with multiple parents
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id] + [c.id for c in other_commits]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        # Generate default message for octopus merge
        branch_names = []
        for commit_id in merge_commit_ids:
            branch_names.append(commit_id.decode()[:7])
        message = f"Merge commits {', '.join(branch_names)}\n".encode()
    merge_commit_obj.message = message.encode() if isinstance(message, str) else message

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[HEADREF] = merge_commit_obj.id

    return (merge_commit_obj.id, [])


def merge(
    repo: str | os.PathLike[str] | Repo,
    committish: str | bytes | Commit | Tag | Sequence[str | bytes | Commit | Tag],
    no_commit: bool = False,
    no_ff: bool = False,
    message: bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> tuple[bytes | None, list[bytes]]:
    """Merge one or more commits into the current branch.

    Args:
      repo: Repository to merge into
      committish: Commit(s) to merge. Can be a single commit or a sequence of
        commits. When merging more than two heads, the octopus merge strategy
        is used.
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference or commit cannot be found
    """
    with open_repo_closing(repo) as r:
        # Handle both single commit and multiple commits
        if isinstance(committish, (list, tuple)):
            # Multiple commits - use octopus merge
            merge_commit_ids = []
            for c in committish:
                try:
                    merge_commit_ids.append(parse_commit(r, c).id)
                except KeyError:
                    raise Error(
                        f"Cannot find commit '{c.decode() if isinstance(c, bytes) else c}'"
                    )

            if len(merge_commit_ids) == 1:
                # Only one commit, use regular merge
                result = _do_merge(
                    r, merge_commit_ids[0], no_commit, no_ff, message, author, committer
                )
            else:
                # Multiple commits, use octopus merge
                result = _do_octopus_merge(
                    r, merge_commit_ids, no_commit, no_ff, message, author, committer
                )
        else:
            # Single commit - use regular merge
            # Type narrowing: committish is not a sequence in this branch
            single_committish = cast(str | bytes | Commit | Tag, committish)
            try:
                merge_commit_id = parse_commit(r, single_committish).id
            except KeyError:
                raise Error(
                    f"Cannot find commit '{single_committish.decode() if isinstance(single_committish, bytes) else single_committish}'"
                )

            result = _do_merge(
                r, merge_commit_id, no_commit, no_ff, message, author, committer
            )

        # Trigger auto GC if needed
        from .gc import maybe_auto_gc

        maybe_auto_gc(r)

        return result


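# Illustrative usage sketch (comment only): merge a branch into the current
# HEAD; passing a list of committishes selects the octopus strategy. The path
# and branch name are placeholders.
#
#   sha, conflicts = merge("/path/to/repo", "feature")
#   if conflicts:
#       ...  # resolve the listed paths, then commit manually

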
def unpack_objects(
    pack_path: str | os.PathLike[str], target: str | os.PathLike[str] = "."
) -> int:
    """Unpack objects from a pack file into the repository.

    Args:
      pack_path: Path to the pack file to unpack
      target: Path to the repository to unpack into

    Returns:
      Number of objects unpacked
    """
    from .pack import Pack

    with open_repo_closing(target) as r:
        pack_basename = os.path.splitext(pack_path)[0]
        with Pack(pack_basename) as pack:
            count = 0
            for unpacked in pack.iter_unpacked():
                obj = unpacked.sha_file()
                r.object_store.add_object(obj)
                count += 1
            return count


def merge_tree(
    repo: RepoPath,
    base_tree: str | bytes | Tree | Commit | Tag | None,
    our_tree: str | bytes | Tree | Commit | Tag,
    their_tree: str | bytes | Tree | Commit | Tag,
) -> tuple[bytes, list[bytes]]:
    """Perform a three-way tree merge without touching the working directory.

    This is similar to git merge-tree, performing a merge at the tree level
    without creating commits or updating any references.

    Args:
      repo: Repository containing the trees
      base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
      our_tree: Tree-ish of our side of the merge
      their_tree: Tree-ish of their side of the merge

    Returns:
      tuple: A tuple of (merged_tree_id, conflicts) where:
        - merged_tree_id is the SHA-1 of the merged tree
        - conflicts is a list of paths (as bytes) that had conflicts

    Raises:
      KeyError: If any of the tree-ish arguments cannot be resolved
    """
    from .merge import Merger

    with open_repo_closing(repo) as r:
        # Resolve tree-ish arguments to actual trees
        base = parse_tree(r, base_tree) if base_tree else None
        ours = parse_tree(r, our_tree)
        theirs = parse_tree(r, their_tree)

        # Perform the merge
        gitattributes = r.get_gitattributes()
        config = r.get_config()
        merger = Merger(r.object_store, gitattributes, config)
        merged_tree, conflicts = merger.merge_trees(base, ours, theirs)

        # Add the merged tree to the object store
        r.object_store.add_object(merged_tree)

        return merged_tree.id, conflicts


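# Illustrative usage sketch (comment only): a pure tree-level merge; all three
# refs below are placeholders, and the base would normally be the merge base
# of the other two sides.
#
#   tree_id, conflicts = merge_tree(
#       "/path/to/repo", b"merge-base", b"ours-branch", b"theirs-branch"
#   )

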
def cherry(
    repo: str | os.PathLike[str] | Repo,
    upstream: str | bytes | None = None,
    head: str | bytes | None = None,
    limit: str | bytes | None = None,
    verbose: bool = False,
) -> list[tuple[str, bytes, bytes | None]]:
    """Find commits not merged upstream.

    Args:
      repo: Repository path or object
      upstream: Upstream branch (default: tracking branch or @{upstream})
      head: Head branch (default: HEAD)
      limit: Limit commits to those after this ref
      verbose: Include commit messages in output

    Returns:
      List of tuples (status, commit_sha, message) where status is '+' or '-'.
      '+' means the commit is not in upstream; '-' means an equivalent patch
      exists upstream. message is None unless verbose=True.
    """
    from .patch import commit_patch_id

    with open_repo_closing(repo) as r:
        # Resolve upstream
        if upstream is None:
            # Try to find tracking branch
            upstream_found = False
            head_refs, _ = r.refs.follow(HEADREF)
            if head_refs:
                # The last name in the symref chain is the resolved branch ref
                head_ref = head_refs[-1]
                if head_ref.startswith(b"refs/heads/"):
                    config = r.get_config()
                    branch_name = head_ref[len(b"refs/heads/") :]
                    try:
                        upstream_ref = config.get((b"branch", branch_name), b"merge")
                    except KeyError:
                        upstream_ref = None
                    if upstream_ref:
                        try:
                            remote_name = config.get(
                                (b"branch", branch_name), b"remote"
                            )
                        except KeyError:
                            remote_name = None
                        if remote_name:
                            # Build the tracking branch ref
                            upstream_refname = Ref(
                                b"refs/remotes/"
                                + remote_name
                                + b"/"
                                + upstream_ref.split(b"/")[-1]
                            )
                            if upstream_refname in r.refs:
                                upstream = upstream_refname
                                upstream_found = True
            if not upstream_found:
                # Default to HEAD^ if no tracking branch found
                head_commit = r[HEADREF]
                if isinstance(head_commit, Commit) and head_commit.parents:
                    upstream = head_commit.parents[0]
                else:
                    raise ValueError("Could not determine upstream branch")

        # Resolve head
        if head is None:
            head = b"HEAD"

        # Convert strings to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(head, str):
            head = head.encode("utf-8")
        if limit is not None and isinstance(limit, str):
            limit = limit.encode("utf-8")

        # Resolve refs to commit IDs
        assert upstream is not None
        upstream_obj = r[upstream]
        head_obj = r[head]
        upstream_id = upstream_obj.id
        head_id = head_obj.id

        # Get limit commit ID if specified
        limit_id = None
        if limit is not None:
            limit_id = r[limit].id

        # Find all commits reachable from head but not from upstream
        # This is equivalent to: git rev-list ^upstream head
        walker = r.get_walker([head_id], exclude=[upstream_id])
        head_commits = []
        for entry in walker:
            commit = entry.commit
            # Apply limit if specified
            if limit_id is not None:
                # Stop when we reach the limit commit
                if commit.id == limit_id:
                    break
            head_commits.append(commit.id)

        # Compute patch IDs for upstream commits
        upstream_walker = r.get_walker([upstream_id])
        upstream_patch_ids = {}  # Maps patch_id -> commit_id for debugging
        for entry in upstream_walker:
            commit = entry.commit
            pid = commit_patch_id(r.object_store, commit.id)
            upstream_patch_ids[pid] = commit.id

        # For each head commit, check if equivalent patch exists in upstream
        results: list[tuple[str, bytes, bytes | None]] = []
        for commit_id in reversed(head_commits):  # Show oldest first
            obj = r.object_store[commit_id]
            assert isinstance(obj, Commit)
            commit = obj
            pid = commit_patch_id(r.object_store, commit_id)
            if pid in upstream_patch_ids:
                status = "-"
            else:
                status = "+"
            message = None
            if verbose:
                message = commit.message.split(b"\n")[0]  # First line only
            results.append((status, commit_id, message))

        return results


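# Illustrative usage sketch (comment only): list local commits missing from a
# remote-tracking branch ('+' = not upstream, '-' = equivalent patch upstream);
# the path and ref are placeholders.
#
#   for status, sha, msg in cherry(
#       "/path/to/repo", upstream=b"refs/remotes/origin/main"
#   ):
#       print(status, sha.decode("ascii")[:7])

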
def cherry_pick(  # noqa: D417
    repo: str | os.PathLike[str] | Repo,
    committish: str | bytes | Commit | Tag | None,
    no_commit: bool = False,
    continue_: bool = False,
    abort: bool = False,
) -> bytes | None:
    r"""Cherry-pick a commit onto the current branch.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick (can be None only when resuming or aborting)
      no_commit: If True, do not create a commit after applying changes
      continue_: Resume an in-progress cherry-pick after resolving conflicts if True
      abort: Abort an in-progress cherry-pick

    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there
      were conflicts

    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from .merge import three_way_merge

    # Validate that committish is provided when needed
    if not (continue_ or abort) and committish is None:
        raise ValueError("committish is required when not using --continue or --abort")

    with open_repo_closing(repo) as r:
        # Handle abort
        if abort:
            # Clean up any cherry-pick state
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass

            # Reset index to HEAD
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            r.get_worktree().reset_index(head_commit.tree)
            return None

        # Handle continue
        if continue_:
            # Check if there's a cherry-pick in progress
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")

            # Check for unresolved conflicts
            if r.open_index().has_conflicts():
                raise Error("Unresolved conflicts remain")

            # Create the commit
            tree_id = r.open_index().commit(r.object_store)

            # Read saved message if any
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                assert isinstance(cherry_pick_commit, Commit)
                message = cherry_pick_commit.message

            assert isinstance(cherry_pick_commit, Commit)
            new_commit = r.get_worktree().commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )

            # Clean up state files
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass

            return new_commit

        # Normal cherry-pick operation
        # Get current HEAD
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        # Parse the commit to cherry-pick
        # committish cannot be None here due to validation above
        assert committish is not None
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )

        # Check if commit has parents
        assert isinstance(cherry_pick_commit, Commit)
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")

        # Get parent of cherry-pick commit
        parent_commit = r[cherry_pick_commit.parents[0]]
        assert isinstance(parent_commit, Commit)

        # Perform three-way merge
        assert isinstance(head_commit, Commit)
        merged_tree, conflicts = three_way_merge(
            r.object_store, parent_commit, head_commit, cherry_pick_commit
        )

        # Add merged tree to object store
        r.object_store.add_object(merged_tree)

        # Update working tree and index
        # Reset index to match merged tree
        r.get_worktree().reset_index(merged_tree.id)

        # Update working tree from the new index
        # Allow overwriting because we're applying the merge result
        assert isinstance(head_commit, Commit)
        changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
        update_working_tree(
            r,
            head_commit.tree,
            merged_tree.id,
            change_iterator=changes,
            allow_overwrite_modified=True,
        )

        if conflicts:
            # Save state for later continuation
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")

            # Save commit message
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)

            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )

        if no_commit:
            return None

        # Create the commit
        new_commit = r.get_worktree().commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )
        return new_commit


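# Illustrative usage sketch (comment only): apply one commit onto HEAD and, on
# conflicts, resume after fixing the files; the committish is a placeholder.
#
#   try:
#       new_sha = cherry_pick("/path/to/repo", b"refs/heads/feature")
#   except Error:
#       # resolve conflicts in the working tree, then:
#       new_sha = cherry_pick("/path/to/repo", None, continue_=True)

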
def revert(
    repo: str | os.PathLike[str] | Repo,
    commits: str | bytes | Commit | Tag | Sequence[str | bytes | Commit | Tag],
    no_commit: bool = False,
    message: str | bytes | None = None,
    author: bytes | None = None,
    committer: bytes | None = None,
) -> bytes | None:
    """Revert one or more commits.

    This creates a new commit that undoes the changes introduced by the
    specified commits. Unlike reset, revert creates a new commit that
    preserves history.

    Args:
      repo: Path to repository or repository object
      commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
      no_commit: If True, apply changes to index/working tree but don't commit
      message: Optional commit message (default: "Revert <original subject>")
      author: Optional author for revert commit
      committer: Optional committer for revert commit

    Returns:
      SHA1 of the new revert commit, or None if no_commit=True

    Raises:
      Error: If revert fails due to conflicts or other issues
    """
    from .merge import three_way_merge

    # Normalize commits to a list
    if isinstance(commits, (str, bytes, Commit, Tag)):
        commits = [commits]

    with open_repo_closing(repo) as r:
        # Convert string refs to bytes
        commits_to_revert = []
        for commit_ref in commits:
            if isinstance(commit_ref, str):
                commit_ref = commit_ref.encode("utf-8")
            commit = parse_commit(r, commit_ref)
            commits_to_revert.append(commit)

        # Get current HEAD
        try:
            head_commit_id = r.refs[HEADREF]
        except KeyError:
            raise Error("No HEAD reference found")

        head_commit = r[head_commit_id]
        assert isinstance(head_commit, Commit)
        current_tree = head_commit.tree

        # Process commits in order
        for commit_to_revert in commits_to_revert:
            # For revert, we want to apply the inverse of the commit
            # This means using the commit's tree as "base" and its parent as "theirs"
            if not commit_to_revert.parents:
                raise Error(
                    f"Cannot revert commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - it has no parents"
                )

            # For simplicity, we only handle commits with one parent (no merge commits)
            if len(commit_to_revert.parents) > 1:
                raise Error(
                    f"Cannot revert merge commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - not yet implemented"
                )

            parent_commit = r[commit_to_revert.parents[0]]
            assert isinstance(parent_commit, Commit)

            # Perform three-way merge:
            # - base: the commit we're reverting (what we want to remove)
            # - ours: current HEAD (what we have now)
            # - theirs: parent of commit being reverted (what we want to go back to)
            assert isinstance(commit_to_revert, Commit)
            head_for_merge = r[head_commit_id]
            assert isinstance(head_for_merge, Commit)
            merged_tree, conflicts = three_way_merge(
                r.object_store,
                commit_to_revert,  # base
                head_for_merge,  # ours
                parent_commit,  # theirs
            )

            if conflicts:
                # Update working tree with conflicts
                changes = tree_changes(r.object_store, current_tree, merged_tree.id)
                update_working_tree(
                    r, current_tree, merged_tree.id, change_iterator=changes
                )
                conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
                raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")

            # Add merged tree to object store
            r.object_store.add_object(merged_tree)

            # Update working tree
            changes = tree_changes(r.object_store, current_tree, merged_tree.id)
            update_working_tree(
                r, current_tree, merged_tree.id, change_iterator=changes
            )
            current_tree = merged_tree.id

            if not no_commit:
                # Create revert commit
                revert_commit = Commit()
                revert_commit.tree = merged_tree.id
                revert_commit.parents = [head_commit_id]

                # Set author/committer
                if author is None:
                    author = get_user_identity(r.get_config_stack())
                if committer is None:
                    committer = author
                revert_commit.author = author
                revert_commit.committer = committer

                # Set timestamps
                timestamp = int(time.time())
                timezone = 0  # UTC
                revert_commit.author_time = timestamp
                revert_commit.author_timezone = timezone
                revert_commit.commit_time = timestamp
                revert_commit.commit_timezone = timezone

                # Set message
                if message is None:
                    # Extract original commit subject
                    original_message = commit_to_revert.message
                    if isinstance(original_message, bytes):
                        original_message = original_message.decode("utf-8", "replace")
                    subject = original_message.split("\n")[0]
                    message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
                elif isinstance(message, str):
                    message = message.encode("utf-8")
                revert_commit.message = message

                # Add commit to object store
                r.object_store.add_object(revert_commit)

                # Update HEAD
                r.refs[HEADREF] = revert_commit.id
                head_commit_id = revert_commit.id

        return head_commit_id if not no_commit else None


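# Illustrative usage sketch (comment only): undo the most recent commit with a
# new "Revert ..." commit on top of HEAD; the path is a placeholder.
#
#   new_sha = revert("/path/to/repo", "HEAD")

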
def gc(
    repo: RepoPath,
    auto: bool = False,
    aggressive: bool = False,
    prune: bool = True,
    grace_period: int | None = 1209600,  # 2 weeks default
    dry_run: bool = False,
    progress: Callable[[str], None] | None = None,
) -> "GCStats":
    """Run garbage collection on a repository.

    Args:
      repo: Path to the repository or a Repo object
      auto: If True, only run gc if needed
      aggressive: If True, use more aggressive settings
      prune: If True, prune unreachable objects
      grace_period: Grace period in seconds for pruning (default 2 weeks)
      dry_run: If True, only report what would be done
      progress: Optional progress callback

    Returns:
      GCStats object with garbage collection statistics
    """
    from .gc import garbage_collect

    with open_repo_closing(repo) as r:
        return garbage_collect(
            r,
            auto=auto,
            aggressive=aggressive,
            prune=prune,
            grace_period=grace_period,
            dry_run=dry_run,
            progress=progress,
        )


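# Illustrative usage sketch (comment only): preview a collection first, then
# run it with the default two-week grace period; the path is a placeholder.
#
#   stats = gc("/path/to/repo", dry_run=True)
#   stats = gc("/path/to/repo")

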
def prune(
    repo: RepoPath,
    grace_period: int | None = None,
    dry_run: bool = False,
    progress: Callable[[str], None] | None = None,
) -> None:
    """Prune/clean up a repository's object store.

    This removes temporary files that were left behind by interrupted
    pack operations.

    Args:
      repo: Path to the repository or a Repo object
      grace_period: Grace period in seconds for removing temporary files
        (default 2 weeks)
      dry_run: If True, only report what would be done
      progress: Optional progress callback
    """
    with open_repo_closing(repo) as r:
        if progress:
            progress("Pruning temporary files")
        if not dry_run:
            r.object_store.prune(grace_period=grace_period)


def maintenance_run(
    repo: RepoPath,
    tasks: list[str] | None = None,
    auto: bool = False,
    progress: Callable[[str], None] | None = None,
) -> "MaintenanceResult":
    """Run maintenance tasks on a repository.

    Args:
      repo: Path to the repository or a Repo object
      tasks: Optional list of specific task names to run
        (e.g., ['gc', 'commit-graph', 'pack-refs'])
      auto: If True, only run tasks if needed
      progress: Optional progress callback

    Returns:
      MaintenanceResult object with task execution results
    """
    from .maintenance import run_maintenance

    with open_repo_closing(repo) as r:
        return run_maintenance(r, tasks=tasks, auto=auto, progress=progress)


def maintenance_register(repo: RepoPath) -> None:
    """Register a repository for background maintenance.

    This adds the repository to the global maintenance.repo config and sets
    up recommended configuration for scheduled maintenance.

    Args:
      repo: Path to the repository or repository object
    """
    from .maintenance import register_repository

    with open_repo_closing(repo) as r:
        register_repository(r)


def maintenance_unregister(repo: RepoPath, force: bool = False) -> None:
    """Unregister a repository from background maintenance.

    This removes the repository from the global maintenance.repo config.

    Args:
      repo: Path to the repository or repository object
      force: If True, don't error if repository is not registered
    """
    from .maintenance import unregister_repository

    with open_repo_closing(repo) as r:
        unregister_repository(r, force=force)


def count_objects(repo: RepoPath = ".", verbose: bool = False) -> CountObjectsResult:
    """Count unpacked objects and their disk usage.

    Args:
      repo: Path to repository or repository object
      verbose: Whether to return verbose information

    Returns:
      CountObjectsResult object with detailed statistics
    """
    with open_repo_closing(repo) as r:
        object_store = r.object_store

        # Count loose objects
        loose_count = 0
        loose_size = 0
        from .object_store import DiskObjectStore

        assert isinstance(object_store, DiskObjectStore)
        for sha in object_store._iter_loose_objects():
            loose_count += 1
            path = object_store._get_shafile_path(sha)
            try:
                stat_info = os.stat(path)
                # Git uses disk usage, not file size. st_blocks is always in
                # 512-byte blocks per POSIX standard
                st_blocks = getattr(stat_info, "st_blocks", None)
                if st_blocks is not None:
                    # Available on Linux and macOS
                    loose_size += st_blocks * 512
                else:
                    # Fallback for Windows
                    loose_size += stat_info.st_size
            except FileNotFoundError:
                # Object may have been removed between iteration and stat
                pass

        if not verbose:
            return CountObjectsResult(count=loose_count, size=loose_size)

        # Count pack information
        pack_count = len(object_store.packs)
        in_pack_count = 0
        pack_size = 0
        for pack in object_store.packs:
            in_pack_count += len(pack)
            # Get pack file size
            pack_path = pack._data_path
            try:
                pack_size += os.path.getsize(pack_path)
            except FileNotFoundError:
                pass
            # Get index file size
            idx_path = pack._idx_path
            try:
                pack_size += os.path.getsize(idx_path)
            except FileNotFoundError:
                pass

        return CountObjectsResult(
            count=loose_count,
            size=loose_size,
            in_pack=in_pack_count,
            packs=pack_count,
            size_pack=pack_size,
        )


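# Illustrative usage sketch (comment only); the path is a placeholder:
#
#   result = count_objects("/path/to/repo", verbose=True)
#   print(result.count, result.size, result.packs, result.size_pack)

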
def is_interactive_rebase(repo: Repo | str) -> bool:
    """Check if an interactive rebase is in progress.

    Args:
      repo: Repository to check

    Returns:
      True if interactive rebase is in progress, False otherwise
    """
    with open_repo_closing(repo) as r:
        state_manager = r.get_rebase_state_manager()
        if not state_manager.exists():
            return False
        # Check if todo file exists
        todo = state_manager.load_todo()
        return todo is not None


def rebase(
    repo: Repo | str,
    upstream: bytes | str,
    onto: bytes | str | None = None,
    branch: bytes | str | None = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
    interactive: bool = False,
    edit_todo: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
      repo: Repository to rebase in
      upstream: Upstream branch/commit to rebase onto
      onto: Specific commit to rebase onto (defaults to upstream)
      branch: Branch to rebase (defaults to current branch)
      abort: Abort an in-progress rebase
      continue_rebase: Continue an in-progress rebase
      skip: Skip current commit and continue rebase
      interactive: Start an interactive rebase
      edit_todo: Edit the todo list of an interactive rebase

    Returns:
      List of new commit SHAs created by rebase

    Raises:
      Error: If rebase fails or conflicts occur
    """
    # TODO: Avoid importing from .cli
    from .cli import launch_editor
    from .rebase import (
        RebaseConflict,
        RebaseError,
        Rebaser,
        process_interactive_rebase,
        start_interactive,
    )
    from .rebase import (
        edit_todo as edit_todo_func,
    )

    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)

        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))

        if edit_todo:
            # Edit the todo list of an interactive rebase
            try:
                edit_todo_func(r, launch_editor)
                print("Todo list updated. Continue with 'rebase --continue'")
                return []
            except RebaseError as e:
                raise Error(str(e))

        if continue_rebase:
            try:
                if interactive:
                    # Continue interactive rebase
                    is_complete, pause_reason = process_interactive_rebase(
                        r, editor_callback=launch_editor
                    )
                    if is_complete:
                        return [c.id for c in rebaser._done]
                    else:
                        if pause_reason == "conflict":
                            raise Error("Conflicts detected. Resolve and continue.")
                        elif pause_reason == "edit":
                            print("Stopped for editing. Make changes and continue.")
                        elif pause_reason == "break":
                            print("Rebase paused at break. Continue when ready.")
                        else:
                            print(f"Rebase paused: {pause_reason}")
                        return []
                else:
                    # Continue regular rebase
                    result = rebaser.continue_()
                    if result is None:
                        # Rebase complete
                        return [c.id for c in rebaser._done]
                    elif isinstance(result, tuple) and result[1]:
                        # Still have conflicts
                        raise Error(
                            f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                        )
            except RebaseError as e:
                raise Error(str(e))

        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None

        try:
            if interactive:
                # Start interactive rebase
                todo = start_interactive(r, upstream, onto, branch, launch_editor)

                # Process the todo list
                is_complete, pause_reason = process_interactive_rebase(
                    r, todo, editor_callback=launch_editor
                )

                if is_complete:
                    return [c.id for c in rebaser._done]
                else:
                    if pause_reason == "conflict":
                        raise Error("Conflicts detected. Resolve and continue.")
                    elif pause_reason == "edit":
                        print("Stopped for editing. Make changes and continue.")
                    elif pause_reason == "break":
                        print("Rebase paused at break. Continue when ready.")
                    else:
                        print(f"Rebase paused: {pause_reason}")
                    return []
            else:
                # Regular rebase
                rebaser.start(upstream, onto, branch)

                # Continue rebase automatically
                result = rebaser.continue_()
                if result is not None:
                    # Conflicts
                    raise RebaseConflict(result[1])

                # Return the SHAs of the rebased commits
                return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))


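# Illustrative usage sketch (comment only): rebase the current branch onto a
# remote-tracking ref, aborting cleanly if conflicts stop the rebase; the path
# and ref are placeholders.
#
#   try:
#       new_shas = rebase("/path/to/repo", b"refs/remotes/origin/main")
#   except Error:
#       rebase("/path/to/repo", b"refs/remotes/origin/main", abort=True)

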
def annotate(
    repo: RepoPath,
    path: str | bytes,
    committish: str | bytes | Commit | Tag | None = None,
) -> list[tuple[tuple[Commit, TreeEntry], bytes]]:
    """Annotate the history of a file.

    Args:
      repo: Path to the repository
      path: Path to annotate
      committish: Commit id to find path in

    Returns: List of ((Commit, TreeEntry), line) tuples
    """
    if committish is None:
        committish = "HEAD"
    from .annotate import annotate_lines

    with open_repo_closing(repo) as r:
        commit_id = parse_commit(r, committish).id
        # Ensure path is bytes
        if isinstance(path, str):
            path = path.encode()
        return annotate_lines(r.object_store, commit_id, path)


blame = annotate


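# Illustrative usage sketch (comment only): print a blame-style listing for a
# file; the path and file name are placeholders.
#
#   for (commit, entry), line in annotate("/path/to/repo", "setup.py"):
#       print(commit.id.decode("ascii")[:7], line.decode("utf-8", "replace"))

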
def filter_branch(
    repo: RepoPath = ".",
    branch: str | bytes = "HEAD",
    *,
    filter_fn: Callable[[Commit], "CommitData | None"] | None = None,
    filter_author: Callable[[bytes], bytes | None] | None = None,
    filter_committer: Callable[[bytes], bytes | None] | None = None,
    filter_message: Callable[[bytes], bytes | None] | None = None,
    tree_filter: Callable[[ObjectID, str], ObjectID | None] | None = None,
    index_filter: Callable[[ObjectID, str], ObjectID | None] | None = None,
    parent_filter: Callable[[Sequence[ObjectID]], list[ObjectID]] | None = None,
    commit_filter: Callable[[Commit, ObjectID], ObjectID | None] | None = None,
    subdirectory_filter: str | bytes | None = None,
    prune_empty: bool = False,
    tag_name_filter: Callable[[bytes], bytes | None] | None = None,
    force: bool = False,
    keep_original: bool = True,
    refs: list[bytes] | None = None,
) -> dict[ObjectID, ObjectID]:
    """Rewrite branch history by creating new commits with filtered properties.

    This is similar to git filter-branch, allowing you to rewrite commit
    history by modifying trees, parents, author, committer, or commit messages.

    Args:
      repo: Path to repository
      branch: Branch to rewrite (defaults to HEAD)
      filter_fn: Optional callable that takes a Commit object and returns
        a dict of updated fields (author, committer, message, etc.)
      filter_author: Optional callable that takes author bytes and returns
        updated author bytes or None to keep unchanged
      filter_committer: Optional callable that takes committer bytes and returns
        updated committer bytes or None to keep unchanged
      filter_message: Optional callable that takes commit message bytes
        and returns updated message bytes
      tree_filter: Optional callable that takes (tree_sha, temp_dir) and returns
        new tree SHA after modifying working directory
      index_filter: Optional callable that takes (tree_sha, temp_index_path) and
        returns new tree SHA after modifying index
      parent_filter: Optional callable that takes parent list and returns
        modified parent list
      commit_filter: Optional callable that takes (Commit, tree_sha) and returns
        new commit SHA or None to skip commit
      subdirectory_filter: Optional subdirectory path to extract as new root
      prune_empty: Whether to prune commits that become empty
      tag_name_filter: Optional callable to rename tags
      force: Force operation even if branch has been filtered before
      keep_original: Keep original refs under refs/original/
      refs: List of refs to rewrite (defaults to [branch])

    Returns:
      Dict mapping old commit SHAs to new commit SHAs

    Raises:
      Error: If branch is already filtered and force is False
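
    Example (a sketch; the branch and author values are placeholders):
        >>> def fix_author(author: bytes) -> bytes:
        ...     return author.replace(b"Old Name", b"New Name")
        >>> filter_branch(".", "master", filter_author=fix_author)  # doctest: +SKIP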
  6271. """
  6272. from .filter_branch import CommitFilter, filter_refs
  6273. with open_repo_closing(repo) as r:
  6274. # Parse branch/committish
  6275. if isinstance(branch, str):
  6276. branch = branch.encode()
  6277. # Determine which refs to process
  6278. if refs is None:
  6279. if branch == b"HEAD":
  6280. # Resolve HEAD to actual branch
  6281. try:
  6282. resolved = r.refs.follow(HEADREF)
  6283. if resolved and resolved[0]:
  6284. # resolved is a list of (refname, sha) tuples
  6285. resolved_ref = resolved[0][-1]
  6286. if resolved_ref and resolved_ref != b"HEAD":
  6287. refs = [resolved_ref]
  6288. else:
  6289. # HEAD points directly to a commit
  6290. refs = [b"HEAD"]
  6291. else:
  6292. refs = [b"HEAD"]
  6293. except SymrefLoop:
  6294. refs = [b"HEAD"]
  6295. else:
  6296. # Convert branch name to full ref if needed
  6297. if not branch.startswith(b"refs/"):
  6298. branch = local_branch_name(branch)
  6299. refs = [branch]
  6300. # Convert subdirectory filter to bytes if needed
  6301. if subdirectory_filter:
  6302. if isinstance(subdirectory_filter, str):
  6303. subdirectory_filter = subdirectory_filter.encode()
  6304. else:
  6305. subdirectory_filter = None
  6306. # Create commit filter
  6307. filter_obj = CommitFilter(
  6308. r.object_store,
  6309. filter_fn=filter_fn,
  6310. filter_author=filter_author,
  6311. filter_committer=filter_committer,
  6312. filter_message=filter_message,
  6313. tree_filter=tree_filter,
  6314. index_filter=index_filter,
  6315. parent_filter=parent_filter,
  6316. commit_filter=commit_filter,
  6317. subdirectory_filter=subdirectory_filter,
  6318. prune_empty=prune_empty,
  6319. tag_name_filter=tag_name_filter,
  6320. )
  6321. # Tag callback for renaming tags
  6322. def rename_tag(old_ref: Ref, new_ref: Ref) -> None:
  6323. # Copy tag to new name
  6324. r.refs[new_ref] = r.refs[old_ref]
  6325. # Delete old tag
  6326. del r.refs[old_ref]
  6327. # Filter refs
  6328. try:
  6329. return filter_refs(
  6330. r.refs,
  6331. r.object_store,
  6332. refs,
  6333. filter_obj,
  6334. keep_original=keep_original,
  6335. force=force,
  6336. tag_callback=rename_tag if tag_name_filter else None,
  6337. )
  6338. except ValueError as e:
  6339. raise Error(str(e)) from e
def format_patch(
    repo: RepoPath = ".",
    committish: ObjectID | tuple[ObjectID, ObjectID] | None = None,
    outstream: TextIO = sys.stdout,
    outdir: str | os.PathLike[str] | None = None,
    n: int = 1,
    stdout: bool = False,
    version: str | None = None,
) -> list[str]:
    """Generate patches suitable for git am.

    Args:
      repo: Path to repository
      committish: Commit-ish or commit range to generate patches for.
        Can be a single commit id, or a tuple of (start, end) commit ids
        for a range. If None, formats the last n commits from HEAD.
      outstream: Stream to write to if stdout=True
      outdir: Directory to write patch files to (default: current directory)
      n: Number of patches to generate if committish is None
      stdout: Write patches to stdout instead of files
      version: Version string to include in patches (default: Dulwich version)

    Returns:
      List of patch filenames that were created (empty if stdout=True)
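
    Example (illustrative; writes ``0001-*.patch`` files into ``patches/``):
        >>> format_patch(".", n=2, outdir="patches")  # doctest: +SKIP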
  6362. """
  6363. if outdir is None:
  6364. outdir = "."
  6365. filenames = []
  6366. with open_repo_closing(repo) as r:
  6367. # Determine which commits to format
  6368. commits_to_format = []
  6369. if committish is None:
  6370. # Get the last n commits from HEAD
  6371. try:
  6372. walker = r.get_walker()
  6373. for entry in walker:
  6374. commits_to_format.append(entry.commit)
  6375. if len(commits_to_format) >= n:
  6376. break
  6377. commits_to_format.reverse()
  6378. except KeyError:
  6379. # No HEAD or empty repository
  6380. pass
  6381. elif isinstance(committish, tuple):
  6382. # Handle commit range (start, end)
  6383. start_commit, end_commit = committish
  6384. # Extract commit IDs from commit objects if needed
  6385. start_id = (
  6386. start_commit.id if isinstance(start_commit, Commit) else start_commit
  6387. )
  6388. end_id = end_commit.id if isinstance(end_commit, Commit) else end_commit
  6389. # Walk from end back to start
  6390. walker = r.get_walker(include=[end_id], exclude=[start_id])
  6391. for entry in walker:
  6392. commits_to_format.append(entry.commit)
  6393. commits_to_format.reverse()
  6394. else:
  6395. # Single commit
  6396. commit = r.object_store[committish]
  6397. assert isinstance(commit, Commit)
  6398. commits_to_format.append(commit)
  6399. # Generate patches
  6400. total = len(commits_to_format)
  6401. for i, commit in enumerate(commits_to_format, 1):
  6402. assert isinstance(commit, Commit)
  6403. # Get the parent
  6404. if commit.parents:
  6405. parent_id = commit.parents[0]
  6406. parent = r.object_store[parent_id]
  6407. assert isinstance(parent, Commit)
  6408. else:
  6409. parent = None
  6410. # Generate the diff
  6411. from io import BytesIO
  6412. diff_content = BytesIO()
  6413. if parent:
  6414. write_tree_diff(
  6415. diff_content,
  6416. r.object_store,
  6417. parent.tree,
  6418. commit.tree,
  6419. )
  6420. else:
  6421. # Initial commit - diff against empty tree
  6422. write_tree_diff(
  6423. diff_content,
  6424. r.object_store,
  6425. None,
  6426. commit.tree,
  6427. )
  6428. # Generate patch with commit metadata
  6429. if stdout:
  6430. # Get binary stream from TextIO
  6431. if hasattr(outstream, "buffer"):
  6432. binary_out: IO[bytes] = outstream.buffer
  6433. else:
  6434. # Fallback for non-text streams
  6435. binary_out = outstream # type: ignore[assignment]
  6436. write_commit_patch(
  6437. binary_out,
  6438. commit,
  6439. diff_content.getvalue(),
  6440. (i, total),
  6441. version=version,
  6442. )
  6443. else:
  6444. # Generate filename
  6445. summary = get_summary(commit)
  6446. filename = os.path.join(outdir, f"{i:04d}-{summary}.patch")
  6447. with open(filename, "wb") as f:
  6448. write_commit_patch(
  6449. f,
  6450. commit,
  6451. diff_content.getvalue(),
  6452. (i, total),
  6453. version=version,
  6454. )
  6455. filenames.append(filename)
  6456. return filenames
def bisect_start(
    repo: str | os.PathLike[str] | Repo = ".",
    bad: str | bytes | Commit | Tag | None = None,
    good: str
    | bytes
    | Commit
    | Tag
    | Sequence[str | bytes | Commit | Tag]
    | None = None,
    paths: Sequence[bytes] | None = None,
    no_checkout: bool = False,
    term_bad: str = "bad",
    term_good: str = "good",
) -> bytes | None:
    """Start a new bisect session.

    Args:
      repo: Path to repository or a Repo object
      bad: The bad commit (defaults to HEAD)
      good: List of good commits or a single good commit
      paths: Optional paths to limit bisect to
      no_checkout: If True, don't checkout commits during bisect
      term_bad: Term to use for bad commits (default: "bad")
      term_good: Term to use for good commits (default: "good")

    Returns:
      The SHA of the next commit to test if both bad and good were given,
      otherwise None
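
    Example (a sketch of a session; ``v1.0`` is a placeholder tag):
        >>> next_sha = bisect_start(".", bad="HEAD", good="v1.0")  # doctest: +SKIP
        >>> bisect_good(".")  # doctest: +SKIP
        >>> bisect_reset(".")  # doctest: +SKIP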
  6480. """
  6481. with open_repo_closing(repo) as r:
  6482. state = BisectState(r)
  6483. # Convert single good commit to sequence
  6484. if good is not None and isinstance(good, (str, bytes, Commit, Tag)):
  6485. good = [good]
  6486. # Parse commits
  6487. bad_sha = parse_commit(r, bad).id if bad else None
  6488. good_shas = [parse_commit(r, g).id for g in good] if good else None
  6489. state.start(bad_sha, good_shas, paths, no_checkout, term_bad, term_good)
  6490. # Return the next commit to test if we have both good and bad
  6491. if bad_sha and good_shas:
  6492. next_sha = state._find_next_commit()
  6493. if next_sha and not no_checkout:
  6494. # Checkout the next commit
  6495. old_commit = r[r.head()]
  6496. assert isinstance(old_commit, Commit)
  6497. old_tree = old_commit.tree if r.head() else None
  6498. r.refs[HEADREF] = next_sha
  6499. commit = r[next_sha]
  6500. assert isinstance(commit, Commit)
  6501. changes = tree_changes(r.object_store, old_tree, commit.tree)
  6502. update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
  6503. return next_sha
  6504. return None
def bisect_bad(
    repo: str | os.PathLike[str] | Repo = ".",
    rev: str | bytes | Commit | Tag | None = None,
) -> bytes | None:
    """Mark a commit as bad.

    Args:
      repo: Path to repository or a Repo object
      rev: Commit to mark as bad (defaults to HEAD)

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_bad(rev_sha)
        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[HEADREF] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
        return next_sha


def bisect_good(
    repo: str | os.PathLike[str] | Repo = ".",
    rev: str | bytes | Commit | Tag | None = None,
) -> bytes | None:
    """Mark a commit as good.

    Args:
      repo: Path to repository or a Repo object
      rev: Commit to mark as good (defaults to HEAD)

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_good(rev_sha)
        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[HEADREF] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
        return next_sha


def bisect_skip(
    repo: str | os.PathLike[str] | Repo = ".",
    revs: str
    | bytes
    | Commit
    | Tag
    | Sequence[str | bytes | Commit | Tag]
    | None = None,
) -> bytes | None:
    """Skip one or more commits.

    Args:
      repo: Path to repository or a Repo object
      revs: List of commits to skip (defaults to [HEAD])

    Returns:
      The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        if revs is None:
            rev_shas = None
        else:
            # Convert single rev to sequence
            if isinstance(revs, (str, bytes, Commit, Tag)):
                revs = [revs]
            rev_shas = [parse_commit(r, rev).id for rev in revs]
        next_sha = state.skip(rev_shas)
        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[HEADREF] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
        return next_sha


def bisect_reset(
    repo: str | os.PathLike[str] | Repo = ".",
    commit: str | bytes | Commit | Tag | None = None,
) -> None:
    """Reset bisect state and return to original branch/commit.

    Args:
      repo: Path to repository or a Repo object
      commit: Optional commit to reset to (defaults to original branch/commit)
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Get old tree before reset
        try:
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree
        except KeyError:
            old_tree = None

        commit_sha = parse_commit(r, commit).id if commit else None
        state.reset(commit_sha)

        # Update working tree to new HEAD
        try:
            new_head = r.head()
            if new_head:
                new_commit = r[new_head]
                assert isinstance(new_commit, Commit)
                changes = tree_changes(r.object_store, old_tree, new_commit.tree)
                update_working_tree(
                    r, old_tree, new_commit.tree, change_iterator=changes
                )
        except KeyError:
            # No HEAD after reset
            pass


def bisect_log(repo: str | os.PathLike[str] | Repo = ".") -> str:
    """Get the bisect log.

    Args:
      repo: Path to repository or a Repo object

    Returns:
      The bisect log as a string
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        return state.get_log()


def bisect_replay(
    repo: str | os.PathLike[str] | Repo,
    log_file: str | os.PathLike[str] | BinaryIO,
) -> None:
    """Replay a bisect log.

    Args:
      repo: Path to repository or a Repo object
      log_file: Path to the log file or file-like object
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        if isinstance(log_file, (str, os.PathLike)):
            with open(log_file) as f:
                log_content = f.read()
        else:
            content = log_file.read()
            log_content = content.decode() if isinstance(content, bytes) else content

        state.replay(log_content)


def reflog(
    repo: RepoPath = ".", ref: str | bytes = b"HEAD", all: bool = False
) -> Iterator[Any | tuple[bytes, Any]]:
    """Show reflog entries for a reference or all references.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name (defaults to HEAD)
      all: If True, show reflogs for all refs (ignores ref parameter)

    Yields:
      If all=False: ReflogEntry objects
      If all=True: Tuples of (ref_name, ReflogEntry) for all refs with reflogs
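
    Example (illustrative; iterates the reflog of HEAD):
        >>> for entry in reflog("."):  # doctest: +SKIP
        ...     print(entry.new_sha, entry.message)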
  6666. """
  6667. import os
  6668. from .reflog import iter_reflogs
  6669. if isinstance(ref, str):
  6670. ref = ref.encode("utf-8")
  6671. with open_repo_closing(repo) as r:
  6672. if not all:
  6673. yield from r.read_reflog(ref)
  6674. else:
  6675. logs_dir = os.path.join(r.controldir(), "logs")
  6676. # Use iter_reflogs to discover all reflogs
  6677. for ref_bytes in iter_reflogs(logs_dir):
  6678. # Read the reflog entries for this ref
  6679. for entry in r.read_reflog(ref_bytes):
  6680. yield (ref_bytes, entry)
def reflog_expire(
    repo: RepoPath = ".",
    ref: str | bytes | None = None,
    all: bool = False,
    expire_time: int | None = None,
    expire_unreachable_time: int | None = None,
    dry_run: bool = False,
) -> dict[bytes, int]:
    """Expire reflog entries based on age and reachability.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name (if not using --all)
      all: If True, expire reflogs for all refs
      expire_time: Expire entries older than this timestamp (seconds since epoch)
      expire_unreachable_time: Expire unreachable entries older than this timestamp
      dry_run: If True, show what would be expired without making changes

    Returns:
      Dictionary mapping ref names to number of expired entries
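
    Example (a sketch; counts HEAD entries older than 30 days without deleting):
        >>> import time
        >>> cutoff = int(time.time()) - 30 * 24 * 60 * 60
        >>> reflog_expire(".", ref=b"HEAD", expire_time=cutoff, dry_run=True)  # doctest: +SKIP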
  6699. """
  6700. import os
  6701. import time
  6702. from .reflog import expire_reflog, iter_reflogs
  6703. if not all and ref is None:
  6704. raise ValueError("Must specify either ref or all=True")
  6705. if isinstance(ref, str):
  6706. ref = ref.encode("utf-8")
  6707. # Default expire times if not specified
  6708. if expire_time is None and expire_unreachable_time is None:
  6709. # Default: expire entries older than 90 days, unreachable older than 30 days
  6710. now = int(time.time())
  6711. expire_time = now - (90 * 24 * 60 * 60)
  6712. expire_unreachable_time = now - (30 * 24 * 60 * 60)
  6713. result = {}
  6714. with open_repo_closing(repo) as r:
  6715. # Determine which refs to process
  6716. refs_to_process: list[bytes] = []
  6717. if all:
  6718. logs_dir = os.path.join(r.controldir(), "logs")
  6719. refs_to_process = list(iter_reflogs(logs_dir))
  6720. else:
  6721. assert ref is not None # Already checked above
  6722. refs_to_process = [ref]
  6723. # Build set of reachable objects if we have unreachable expiration time
  6724. reachable_objects: set[ObjectID] | None = None
  6725. if expire_unreachable_time is not None:
  6726. from .gc import find_reachable_objects
  6727. reachable_objects = find_reachable_objects(
  6728. r.object_store, r.refs, include_reflogs=False
  6729. )
  6730. # Process each ref
  6731. for ref_name in refs_to_process:
  6732. reflog_path = r._reflog_path(ref_name)
  6733. if not os.path.exists(reflog_path):
  6734. continue
  6735. # Create reachability checker
  6736. def is_reachable(sha: bytes) -> bool:
  6737. if reachable_objects is None:
  6738. # No unreachable expiration, so assume everything is reachable
  6739. return True
  6740. return sha in reachable_objects
  6741. # Open the reflog file
  6742. if dry_run:
  6743. # For dry run, just read and count what would be expired
  6744. with open(reflog_path, "rb") as f:
  6745. from .reflog import read_reflog
  6746. count = 0
  6747. for entry in read_reflog(f):
  6748. is_obj_reachable = is_reachable(entry.new_sha)
  6749. should_expire = False
  6750. if is_obj_reachable and expire_time is not None:
  6751. if entry.timestamp < expire_time:
  6752. should_expire = True
  6753. elif (
  6754. not is_obj_reachable and expire_unreachable_time is not None
  6755. ):
  6756. if entry.timestamp < expire_unreachable_time:
  6757. should_expire = True
  6758. if should_expire:
  6759. count += 1
  6760. result[ref_name] = count
  6761. else:
  6762. # Actually expire entries
  6763. with open(reflog_path, "r+b") as f: # type: ignore[assignment]
  6764. count = expire_reflog(
  6765. f,
  6766. expire_time=expire_time,
  6767. expire_unreachable_time=expire_unreachable_time,
  6768. reachable_checker=is_reachable,
  6769. )
  6770. result[ref_name] = count
  6771. return result
def reflog_delete(
    repo: RepoPath = ".",
    ref: str | bytes = b"HEAD",
    index: int = 0,
    rewrite: bool = False,
) -> None:
    """Delete a specific reflog entry.

    Args:
      repo: Path to repository or a Repo object
      ref: Reference name
      index: Reflog entry index (0 = newest, in Git reflog order)
      rewrite: If True, rewrite old_sha of subsequent entries to maintain consistency
    """
    import os

    from .reflog import drop_reflog_entry

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    with open_repo_closing(repo) as r:
        reflog_path = r._reflog_path(ref)
        if not os.path.exists(reflog_path):
            raise ValueError(f"No reflog for ref {ref.decode()}")

        with open(reflog_path, "r+b") as f:
            drop_reflog_entry(f, index, rewrite=rewrite)


def lfs_track(
    repo: str | os.PathLike[str] | Repo = ".",
    patterns: Sequence[str] | None = None,
) -> list[str]:
    """Track file patterns with Git LFS.

    Args:
      repo: Path to repository
      patterns: List of file patterns to track (e.g., ["*.bin", "*.pdf"])
        If None, returns current tracked patterns

    Returns:
      List of tracked patterns
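
    Example (illustrative patterns):
        >>> lfs_init(".")  # doctest: +SKIP
        >>> lfs_track(".", ["*.bin", "*.pdf"])  # doctest: +SKIP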
  6806. """
  6807. from .attrs import GitAttributes
  6808. with open_repo_closing(repo) as r:
  6809. gitattributes_path = os.path.join(r.path, ".gitattributes")
  6810. # Load existing GitAttributes
  6811. if os.path.exists(gitattributes_path):
  6812. gitattributes = GitAttributes.from_file(gitattributes_path)
  6813. else:
  6814. gitattributes = GitAttributes()
  6815. if patterns is None:
  6816. # Return current LFS tracked patterns
  6817. tracked = []
  6818. for pattern_obj, attrs in gitattributes:
  6819. if attrs.get(b"filter") == b"lfs":
  6820. tracked.append(pattern_obj.pattern.decode())
  6821. return tracked
  6822. # Add new patterns
  6823. for pattern in patterns:
  6824. # Ensure pattern is bytes
  6825. pattern_bytes = pattern.encode() if isinstance(pattern, str) else pattern
  6826. # Set LFS attributes for the pattern
  6827. gitattributes.set_attribute(pattern_bytes, b"filter", b"lfs")
  6828. gitattributes.set_attribute(pattern_bytes, b"diff", b"lfs")
  6829. gitattributes.set_attribute(pattern_bytes, b"merge", b"lfs")
  6830. gitattributes.set_attribute(pattern_bytes, b"text", False)
  6831. # Write updated attributes
  6832. gitattributes.write_to_file(gitattributes_path)
  6833. # Stage the .gitattributes file
  6834. add(r, [".gitattributes"])
  6835. return lfs_track(r) # Return updated list
def lfs_untrack(
    repo: str | os.PathLike[str] | Repo = ".",
    patterns: Sequence[str] | None = None,
) -> list[str]:
    """Untrack file patterns from Git LFS.

    Args:
      repo: Path to repository
      patterns: List of file patterns to untrack

    Returns:
      List of remaining tracked patterns
    """
    from .attrs import GitAttributes

    if not patterns:
        return lfs_track(repo)

    with open_repo_closing(repo) as r:
        gitattributes_path = os.path.join(r.path, ".gitattributes")
        if not os.path.exists(gitattributes_path):
            return []

        # Load existing GitAttributes
        gitattributes = GitAttributes.from_file(gitattributes_path)

        # Remove specified patterns
        for pattern in patterns:
            pattern_bytes = pattern.encode() if isinstance(pattern, str) else pattern

            # Check if pattern is tracked by LFS
            for pattern_obj, attrs in list(gitattributes):
                if (
                    pattern_obj.pattern == pattern_bytes
                    and attrs.get(b"filter") == b"lfs"
                ):
                    gitattributes.remove_pattern(pattern_bytes)
                    break

        # Write updated attributes
        gitattributes.write_to_file(gitattributes_path)

        # Stage the .gitattributes file
        add(r, [".gitattributes"])

        return lfs_track(r)  # Return updated list


def lfs_init(repo: str | os.PathLike[str] | Repo = ".") -> None:
    """Initialize Git LFS in a repository.

    Args:
      repo: Path to repository

    Returns:
      None
    """
    from .lfs import LFSStore

    with open_repo_closing(repo) as r:
        # Create LFS store
        LFSStore.from_repo(r, create=True)

        # Set up Git config for LFS
        config = r.get_config()
        config.set((b"filter", b"lfs"), b"process", b"git-lfs filter-process")
        config.set((b"filter", b"lfs"), b"required", b"true")
        config.set((b"filter", b"lfs"), b"clean", b"git-lfs clean -- %f")
        config.set((b"filter", b"lfs"), b"smudge", b"git-lfs smudge -- %f")
        config.write_to_path()


def lfs_clean(
    repo: str | os.PathLike[str] | Repo = ".",
    path: str | os.PathLike[str] | None = None,
) -> bytes:
    """Clean a file by converting it to an LFS pointer.

    Args:
      repo: Path to repository
      path: Path to file to clean (relative to repo root)

    Returns:
      LFS pointer content as bytes
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        if path is None:
            raise ValueError("Path must be specified")

        # Get LFS store
        lfs_store = LFSStore.from_repo(r)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Read file content
        full_path = os.path.join(r.path, path)
        with open(full_path, "rb") as f:
            content = f.read()

        # Clean the content (convert to LFS pointer)
        return filter_driver.clean(content)


def lfs_smudge(
    repo: str | os.PathLike[str] | Repo = ".",
    pointer_content: bytes | None = None,
) -> bytes:
    """Smudge an LFS pointer by retrieving the actual content.

    Args:
      repo: Path to repository
      pointer_content: LFS pointer content as bytes

    Returns:
      Actual file content as bytes
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        if pointer_content is None:
            raise ValueError("Pointer content must be specified")

        # Get LFS store
        lfs_store = LFSStore.from_repo(r)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Smudge the pointer (retrieve actual content)
        return filter_driver.smudge(pointer_content)


def lfs_ls_files(
    repo: str | os.PathLike[str] | Repo = ".",
    ref: str | bytes | None = None,
) -> list[tuple[bytes, str, int]]:
    """List files tracked by Git LFS.

    Args:
      repo: Path to repository
      ref: Git ref to check (defaults to HEAD)

    Returns:
      List of (path, oid, size) tuples for LFS files
    """
    from .lfs import LFSPointer
    from .object_store import iter_tree_contents

    with open_repo_closing(repo) as r:
        if ref is None:
            ref = b"HEAD"
        elif isinstance(ref, str):
            ref = ref.encode()

        # Get the commit and tree
        try:
            commit = r[ref]
            assert isinstance(commit, Commit)
            tree = r[commit.tree]
            assert isinstance(tree, Tree)
        except KeyError:
            return []

        lfs_files = []

        # Walk the tree
        for path, mode, sha in iter_tree_contents(r.object_store, tree.id):
            assert path is not None
            assert mode is not None
            assert sha is not None
            if not stat.S_ISREG(mode):
                continue

            # Check if it's an LFS pointer
            obj = r.object_store[sha]
            if not isinstance(obj, Blob):
                raise AssertionError(f"Expected Blob object, got {type(obj).__name__}")
            pointer = LFSPointer.from_bytes(obj.data)
            if pointer is not None:
                lfs_files.append((path, pointer.oid, pointer.size))

        return lfs_files


def lfs_migrate(
    repo: str | os.PathLike[str] | Repo = ".",
    include: list[str] | None = None,
    exclude: list[str] | None = None,
    everything: bool = False,
) -> int:
    """Migrate files to Git LFS.

    Args:
      repo: Path to repository
      include: Patterns of files to include
      exclude: Patterns of files to exclude
      everything: Migrate all files larger than 100MB, ignoring patterns

    Returns:
      Number of migrated files
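
    Example (a sketch; migrates indexed ``*.bin`` files):
        >>> lfs_migrate(".", include=["*.bin"])  # doctest: +SKIP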
  6990. """
  6991. from .lfs import LFSFilterDriver, LFSStore
  6992. with open_repo_closing(repo) as r:
  6993. # Initialize LFS if needed
  6994. lfs_store = LFSStore.from_repo(r, create=True)
  6995. filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())
  6996. # Get current index
  6997. index = r.open_index()
  6998. migrated = 0
  6999. # Determine files to migrate
  7000. files_to_migrate = []
  7001. if everything:
  7002. # Migrate all files above 100MB
  7003. for path, entry in index.items():
  7004. full_path = os.path.join(r.path, path.decode())
  7005. if os.path.exists(full_path):
  7006. size = os.path.getsize(full_path)
  7007. if size > 100 * 1024 * 1024: # 100MB
  7008. files_to_migrate.append(path.decode())
  7009. else:
  7010. # Use include/exclude patterns
  7011. for path, entry in index.items():
  7012. path_str = path.decode()
  7013. # Check include patterns
  7014. if include:
  7015. matched = any(
  7016. fnmatch.fnmatch(path_str, pattern) for pattern in include
  7017. )
  7018. if not matched:
  7019. continue
  7020. # Check exclude patterns
  7021. if exclude:
  7022. excluded = any(
  7023. fnmatch.fnmatch(path_str, pattern) for pattern in exclude
  7024. )
  7025. if excluded:
  7026. continue
  7027. files_to_migrate.append(path_str)
  7028. # Migrate files
  7029. for path_str in files_to_migrate:
  7030. full_path = os.path.join(r.path, path_str)
  7031. if not os.path.exists(full_path):
  7032. continue
  7033. # Read file content
  7034. with open(full_path, "rb") as f:
  7035. content = f.read()
  7036. # Convert to LFS pointer
  7037. pointer_content = filter_driver.clean(content)
  7038. # Write pointer back to file
  7039. with open(full_path, "wb") as f:
  7040. f.write(pointer_content)
  7041. # Create blob for pointer content and update index
  7042. blob = Blob()
  7043. blob.data = pointer_content
  7044. r.object_store.add_object(blob)
  7045. st = os.stat(full_path)
  7046. index_entry = index_entry_from_stat(st, blob.id, 0)
  7047. path_bytes = path_str.encode() if isinstance(path_str, str) else path_str
  7048. index[path_bytes] = index_entry
  7049. migrated += 1
  7050. # Write updated index
  7051. index.write()
  7052. # Track patterns if include was specified
  7053. if include:
  7054. lfs_track(r, include)
  7055. return migrated
def lfs_pointer_check(
    repo: str | os.PathLike[str] | Repo = ".",
    paths: Sequence[str] | None = None,
) -> dict[str, Any | None]:
    """Check if files are valid LFS pointers.

    Args:
      repo: Path to repository
      paths: List of file paths to check (if None, check all files)

    Returns:
      Dict mapping paths to LFSPointer objects (or None if not a pointer)
    """
    from .lfs import LFSPointer

    with open_repo_closing(repo) as r:
        results = {}

        if paths is None:
            # Check all files in index
            index = r.open_index()
            paths = [path.decode() for path in index]

        for path in paths:
            full_path = os.path.join(r.path, path)
            if os.path.exists(full_path):
                try:
                    with open(full_path, "rb") as f:
                        content = f.read()
                    pointer = LFSPointer.from_bytes(content)
                    results[path] = pointer
                except OSError:
                    results[path] = None
            else:
                results[path] = None

        return results


def lfs_fetch(
    repo: str | os.PathLike[str] | Repo = ".",
    remote: str = "origin",
    refs: list[str | bytes] | None = None,
) -> int:
    """Fetch LFS objects from remote.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)
      refs: Specific refs to fetch LFS objects for (default: all refs)

    Returns:
      Number of objects fetched
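
    Example (assumes ``lfs.url`` or the remote URL is configured):
        >>> lfs_fetch(".", "origin")  # doctest: +SKIP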
  7099. """
  7100. from .lfs import LFSClient, LFSPointer, LFSStore
  7101. with open_repo_closing(repo) as r:
  7102. # Get LFS server URL from config
  7103. config = r.get_config()
  7104. lfs_url_bytes = config.get((b"lfs",), b"url")
  7105. if not lfs_url_bytes:
  7106. # Try remote URL
  7107. remote_url = config.get((b"remote", remote.encode()), b"url")
  7108. if remote_url:
  7109. # Append /info/lfs to remote URL
  7110. remote_url_str = remote_url.decode()
  7111. if remote_url_str.endswith(".git"):
  7112. remote_url_str = remote_url_str[:-4]
  7113. lfs_url = f"{remote_url_str}/info/lfs"
  7114. else:
  7115. raise ValueError(f"No LFS URL configured for remote {remote}")
  7116. else:
  7117. lfs_url = lfs_url_bytes.decode()
  7118. # Get authentication
  7119. auth = None
  7120. # TODO: Support credential helpers and other auth methods
  7121. # Create LFS client and store
  7122. client = LFSClient(lfs_url, auth)
  7123. store = LFSStore.from_repo(r)
  7124. # Find all LFS pointers in the refs
  7125. pointers_to_fetch = []
  7126. if refs is None:
  7127. # Get all refs
  7128. refs = list(r.refs.keys())
  7129. for ref in refs:
  7130. if isinstance(ref, str):
  7131. ref_key = Ref(ref.encode())
  7132. elif isinstance(ref, bytes):
  7133. ref_key = Ref(ref)
  7134. else:
  7135. ref_key = ref
  7136. try:
  7137. commit = r[r.refs[ref_key]]
  7138. except KeyError:
  7139. continue
  7140. # Walk the commit tree
  7141. assert isinstance(commit, Commit)
  7142. for path, mode, sha in r.object_store.iter_tree_contents(commit.tree):
  7143. assert sha is not None
  7144. try:
  7145. obj = r.object_store[sha]
  7146. except KeyError:
  7147. pass
  7148. else:
  7149. if isinstance(obj, Blob):
  7150. pointer = LFSPointer.from_bytes(obj.data)
  7151. if pointer and pointer.is_valid_oid():
  7152. # Check if we already have it
  7153. try:
  7154. with store.open_object(pointer.oid):
  7155. pass # Object exists, no need to fetch
  7156. except KeyError:
  7157. pointers_to_fetch.append((pointer.oid, pointer.size))
  7158. # Fetch missing objects
  7159. fetched = 0
  7160. for oid, size in pointers_to_fetch:
  7161. content = client.download(oid, size)
  7162. store.write_object([content])
  7163. fetched += 1
  7164. return fetched
def lfs_pull(repo: str | os.PathLike[str] | Repo = ".", remote: str = "origin") -> int:
    """Pull LFS objects for current checkout.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)

    Returns:
      Number of objects fetched
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # First do a fetch for HEAD
        fetched = lfs_fetch(repo, remote, [b"HEAD"])

        # Then checkout LFS files in working directory
        store = LFSStore.from_repo(r)
        index = r.open_index()

        for path, entry in index.items():
            full_path = os.path.join(r.path, path.decode())
            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()

                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    try:
                        # Replace pointer with actual content
                        with store.open_object(pointer.oid) as lfs_file:
                            lfs_content = lfs_file.read()
                        with open(full_path, "wb") as f:
                            f.write(lfs_content)
                    except KeyError:
                        # Object not available
                        pass

        return fetched


def lfs_push(
    repo: str | os.PathLike[str] | Repo = ".",
    remote: str = "origin",
    refs: list[str | bytes] | None = None,
) -> int:
    """Push LFS objects to remote.

    Args:
      repo: Path to repository
      remote: Remote name (default: origin)
      refs: Specific refs to push LFS objects for (default: current branch)

    Returns:
      Number of objects pushed
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config
        config = r.get_config()
        lfs_url_bytes = config.get((b"lfs",), b"url")
        if not lfs_url_bytes:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL
                remote_url_str = remote_url.decode()
                if remote_url_str.endswith(".git"):
                    remote_url_str = remote_url_str[:-4]
                lfs_url = f"{remote_url_str}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url_bytes.decode()

        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods

        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)

        # Find all LFS objects to push
        if refs is None:
            # Push current branch
            head_ref = r.refs.read_ref(HEADREF)
            refs = [head_ref] if head_ref else []

        objects_to_push = set()

        for ref in refs:
            if isinstance(ref, str):
                ref_bytes = ref.encode()
            else:
                ref_bytes = ref
            try:
                if ref_bytes.startswith(b"refs/"):
                    commit = r[r.refs[Ref(ref_bytes)]]
                else:
                    commit = r[ref_bytes]
            except KeyError:
                continue

            # Walk the commit tree
            assert isinstance(commit, Commit)
            for path, mode, sha in r.object_store.iter_tree_contents(commit.tree):
                assert sha is not None
                try:
                    obj = r.object_store[sha]
                except KeyError:
                    pass
                else:
                    if isinstance(obj, Blob):
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            objects_to_push.add((pointer.oid, pointer.size))

        # Push objects
        pushed = 0
        for oid, size in objects_to_push:
            try:
                with store.open_object(oid) as f:
                    content = f.read()
            except KeyError:
                # Object not in local store
                logging.warning("LFS object %s not found locally", oid)
            else:
                client.upload(oid, size, content)
                pushed += 1

        return pushed


def lfs_status(repo: str | os.PathLike[str] | Repo = ".") -> dict[str, list[str]]:
    """Show status of LFS files.

    Args:
      repo: Path to repository

    Returns:
      Dict with status information
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        store = LFSStore.from_repo(r)
        index = r.open_index()

        status: dict[str, list[str]] = {
            "tracked": [],
            "not_staged": [],
            "not_committed": [],
            "not_pushed": [],
            "missing": [],
        }

        # Check working directory files
        for path, entry in index.items():
            path_str = path.decode()
            full_path = os.path.join(r.path, path_str)

            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()

                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    status["tracked"].append(path_str)

                    # Check if object exists locally
                    try:
                        with store.open_object(pointer.oid):
                            pass  # Object exists locally
                    except KeyError:
                        status["missing"].append(path_str)

                    # Check if file has been modified
                    if isinstance(entry, ConflictedIndexEntry):
                        continue  # Skip conflicted entries
                    try:
                        staged_obj = r.object_store[entry.sha]
                    except KeyError:
                        pass
                    else:
                        if not isinstance(staged_obj, Blob):
                            raise AssertionError(
                                f"Expected Blob object, got {type(staged_obj).__name__}"
                            )
                        staged_pointer = LFSPointer.from_bytes(staged_obj.data)
                        if staged_pointer and staged_pointer.oid != pointer.oid:
                            status["not_staged"].append(path_str)

        # TODO: Check for not committed and not pushed files

        return status


def worktree_list(repo: RepoPath = ".") -> list[Any]:
    """List all worktrees for a repository.

    Args:
      repo: Path to repository

    Returns:
      List of WorkTreeInfo objects
    """
    from .worktree import list_worktrees

    with open_repo_closing(repo) as r:
        return list_worktrees(r)


def worktree_add(
    repo: RepoPath = ".",
    path: str | os.PathLike[str] | None = None,
    branch: str | bytes | None = None,
    commit: str | bytes | None = None,
    detach: bool = False,
    force: bool = False,
) -> str:
    """Add a new worktree.

    Args:
      repo: Path to repository
      path: Path for new worktree
      branch: Branch to checkout (creates if doesn't exist)
      commit: Specific commit to checkout
      detach: Create with detached HEAD
      force: Force creation even if branch is already checked out

    Returns:
      Path to the newly created worktree
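
    Example (illustrative paths and branch name):
        >>> worktree_add(".", "../feature-wt", branch="feature")  # doctest: +SKIP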
  7357. """
  7358. from .worktree import add_worktree
  7359. if path is None:
  7360. raise ValueError("Path is required for worktree add")
  7361. with open_repo_closing(repo) as r:
  7362. commit_bytes = commit.encode() if isinstance(commit, str) else commit
  7363. commit_id = ObjectID(commit_bytes) if commit_bytes is not None else None
  7364. wt_repo = add_worktree(
  7365. r, path, branch=branch, commit=commit_id, detach=detach, force=force
  7366. )
  7367. return wt_repo.path
  7368. def worktree_remove(
  7369. repo: RepoPath = ".",
  7370. path: str | os.PathLike[str] | None = None,
  7371. force: bool = False,
  7372. ) -> None:
  7373. """Remove a worktree.
  7374. Args:
  7375. repo: Path to repository
  7376. path: Path to worktree to remove
  7377. force: Force removal even if there are local changes
  7378. """
  7379. from .worktree import remove_worktree
  7380. if path is None:
  7381. raise ValueError("Path is required for worktree remove")
  7382. with open_repo_closing(repo) as r:
  7383. remove_worktree(r, path, force=force)
  7384. def worktree_prune(
  7385. repo: RepoPath = ".", dry_run: bool = False, expire: int | None = None
  7386. ) -> list[str]:
  7387. """Prune worktree administrative files.
  7388. Args:
  7389. repo: Path to repository
  7390. dry_run: Only show what would be removed
  7391. expire: Only prune worktrees older than this many seconds
  7392. Returns:
  7393. List of pruned worktree names
  7394. """
  7395. from .worktree import prune_worktrees
  7396. with open_repo_closing(repo) as r:
  7397. return prune_worktrees(r, expire=expire, dry_run=dry_run)
  7398. def worktree_lock(
  7399. repo: RepoPath = ".",
  7400. path: str | os.PathLike[str] | None = None,
  7401. reason: str | None = None,
  7402. ) -> None:
  7403. """Lock a worktree to prevent it from being pruned.
  7404. Args:
  7405. repo: Path to repository
  7406. path: Path to worktree to lock
  7407. reason: Optional reason for locking
  7408. """
  7409. from .worktree import lock_worktree
  7410. if path is None:
  7411. raise ValueError("Path is required for worktree lock")
  7412. with open_repo_closing(repo) as r:
  7413. lock_worktree(r, path, reason=reason)
  7414. def worktree_unlock(
  7415. repo: RepoPath = ".", path: str | os.PathLike[str] | None = None
  7416. ) -> None:
  7417. """Unlock a worktree.
  7418. Args:
  7419. repo: Path to repository
  7420. path: Path to worktree to unlock
  7421. """
  7422. from .worktree import unlock_worktree
  7423. if path is None:
  7424. raise ValueError("Path is required for worktree unlock")
  7425. with open_repo_closing(repo) as r:
  7426. unlock_worktree(r, path)
  7427. def worktree_move(
  7428. repo: RepoPath = ".",
  7429. old_path: str | os.PathLike[str] | None = None,
  7430. new_path: str | os.PathLike[str] | None = None,
  7431. ) -> None:
  7432. """Move a worktree to a new location.
  7433. Args:
  7434. repo: Path to repository
  7435. old_path: Current path of worktree
  7436. new_path: New path for worktree
  7437. """
  7438. from .worktree import move_worktree
  7439. if old_path is None or new_path is None:
  7440. raise ValueError("Both old_path and new_path are required for worktree move")
  7441. with open_repo_closing(repo) as r:
  7442. move_worktree(r, old_path, new_path)
  7443. def worktree_repair(
  7444. repo: RepoPath = ".",
  7445. paths: list[str | os.PathLike[str]] | None = None,
  7446. ) -> list[str]:
  7447. """Repair worktree administrative files.
  7448. Args:
  7449. repo: Path to repository
  7450. paths: Optional list of worktree paths to repair. If None, repairs
  7451. connections from the main repository to all linked worktrees.
  7452. Returns:
  7453. List of repaired worktree paths
  7454. """
  7455. from .worktree import repair_worktree
  7456. with open_repo_closing(repo) as r:
  7457. return repair_worktree(r, paths=paths)
def merge_base(
    repo: RepoPath = ".",
    committishes: Sequence[str | bytes] | None = None,
    all: bool = False,
    octopus: bool = False,
) -> list[ObjectID]:
    """Find the best common ancestor(s) between commits.

    Args:
      repo: Path to repository
      committishes: List of commit references (branches, tags, commit IDs)
      all: If True, return all merge bases, not just one
      octopus: If True, find merge base of all commits (n-way merge)

    Returns:
      List of commit IDs that are merge bases
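
    Example (illustrative branch names):
        >>> merge_base(".", ["master", "feature"])  # doctest: +SKIP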
  7472. """
  7473. from .graph import find_merge_base, find_octopus_base
  7474. from .objectspec import parse_object
  7475. if committishes is None or len(committishes) < 2:
  7476. raise ValueError("At least two commits are required")
  7477. with open_repo_closing(repo) as r:
  7478. # Resolve committish references to commit IDs
  7479. commit_ids = []
  7480. for committish in committishes:
  7481. obj = parse_object(r, committish)
  7482. if not isinstance(obj, Commit):
  7483. raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
  7484. commit_ids.append(obj.id)
  7485. # Find merge base
  7486. if octopus:
  7487. result = find_octopus_base(r, commit_ids)
  7488. else:
  7489. result = find_merge_base(r, commit_ids)
  7490. # Return first result only if all=False
  7491. if not all and result:
  7492. return [result[0]]
  7493. return result
def is_ancestor(
    repo: RepoPath = ".",
    ancestor: str | bytes | None = None,
    descendant: str | bytes | None = None,
) -> bool:
    """Check if one commit is an ancestor of another.

    Args:
      repo: Path to repository
      ancestor: Commit that might be the ancestor
      descendant: Commit that might be the descendant

    Returns:
      True if ancestor is an ancestor of descendant, False otherwise
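
    Example (illustrative refs; ``v1.0`` is a placeholder tag):
        >>> is_ancestor(".", ancestor="v1.0", descendant="HEAD")  # doctest: +SKIP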
  7506. """
  7507. from .graph import find_merge_base
  7508. from .objectspec import parse_object
  7509. if ancestor is None or descendant is None:
  7510. raise ValueError("Both ancestor and descendant are required")
  7511. with open_repo_closing(repo) as r:
  7512. # Resolve committish references to commit IDs
  7513. ancestor_obj = parse_object(r, ancestor)
  7514. if not isinstance(ancestor_obj, Commit):
  7515. raise ValueError(f"Expected commit, got {ancestor_obj.type_name.decode()}")
  7516. descendant_obj = parse_object(r, descendant)
  7517. if not isinstance(descendant_obj, Commit):
  7518. raise ValueError(
  7519. f"Expected commit, got {descendant_obj.type_name.decode()}"
  7520. )
  7521. # If ancestor is the merge base of (ancestor, descendant), then it's an ancestor
  7522. merge_bases = find_merge_base(r, [ancestor_obj.id, descendant_obj.id])
  7523. return merge_bases == [ancestor_obj.id]
def independent_commits(
    repo: RepoPath = ".",
    committishes: Sequence[str | bytes] | None = None,
) -> list[ObjectID]:
    """Filter commits to only those that are not reachable from others.

    Args:
      repo: Path to repository
      committishes: List of commit references to filter

    Returns:
      List of commit IDs that are not ancestors of any other commits in the list
    """
    from .graph import independent
    from .objectspec import parse_object

    if committishes is None or len(committishes) == 0:
        return []

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Filter to independent commits
        return independent(r, commit_ids)


def mailsplit(
    input_path: str | os.PathLike[str] | IO[bytes] | None = None,
    output_dir: str | os.PathLike[str] = ".",
    start_number: int = 1,
    precision: int = 4,
    keep_cr: bool = False,
    mboxrd: bool = False,
    is_maildir: bool = False,
) -> list[str]:
    r"""Split an mbox file or Maildir into individual message files.

    This is similar to git mailsplit.

    Args:
      input_path: Path to mbox file, Maildir, or file-like object. If None, reads from stdin.
      output_dir: Directory where individual messages will be written
      start_number: Starting number for output files (default: 1)
      precision: Number of digits for output filenames (default: 4)
      keep_cr: If True, preserve \r in lines ending with \r\n (default: False)
      mboxrd: If True, treat input as mboxrd format and reverse escaping (default: False)
      is_maildir: If True, treat input_path as a Maildir (default: False)

    Returns:
      List of output file paths that were created

    Raises:
      ValueError: If output_dir doesn't exist or input is invalid
      OSError: If there are issues reading/writing files
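
    Example (illustrative paths; assumes ``out/`` exists):
        >>> mailsplit("patches.mbox", output_dir="out")  # doctest: +SKIP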
  7573. """
  7574. from .mbox import split_maildir, split_mbox
  7575. if is_maildir:
  7576. if input_path is None:
  7577. raise ValueError("input_path is required for Maildir splitting")
  7578. if not isinstance(input_path, (str, bytes, os.PathLike)):
  7579. raise ValueError("Maildir splitting requires a path, not a file object")
  7580. # Convert PathLike to str for split_maildir
  7581. maildir_path: str | bytes = (
  7582. os.fspath(input_path) if isinstance(input_path, os.PathLike) else input_path
  7583. )
  7584. out_dir: str | bytes = (
  7585. os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
  7586. )
  7587. return split_maildir(
  7588. maildir_path,
  7589. out_dir,
  7590. start_number=start_number,
  7591. precision=precision,
  7592. keep_cr=keep_cr,
  7593. )
  7594. else:
  7595. if input_path is None:
  7596. # Read from stdin
  7597. input_file: str | bytes | BinaryIO = sys.stdin.buffer
  7598. else:
  7599. # Convert PathLike to str if needed
  7600. if isinstance(input_path, os.PathLike):
  7601. input_file = os.fspath(input_path)
  7602. else:
  7603. # input_path is either str or IO[bytes] here
  7604. input_file = cast(str | BinaryIO, input_path)
  7605. out_dir = (
  7606. os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
  7607. )
  7608. return split_mbox(
  7609. input_file,
  7610. out_dir,
  7611. start_number=start_number,
  7612. precision=precision,
  7613. keep_cr=keep_cr,
  7614. mboxrd=mboxrd,
  7615. )
def mailinfo(
    input_path: str | os.PathLike[str] | IO[bytes] | IO[str] | None = None,
    msg_file: str | os.PathLike[str] | None = None,
    patch_file: str | os.PathLike[str] | None = None,
    keep_subject: bool = False,
    keep_non_patch: bool = False,
    encoding: str | None = None,
    scissors: bool = False,
    message_id: bool = False,
) -> MailinfoResult:
    """Extract patch information from an email message.

    This is similar to git mailinfo.

    Args:
      input_path: Path to email file or file-like object. If None, reads from stdin.
      msg_file: Path to write commit message. If None, message not written to file.
      patch_file: Path to write patch content. If None, patch not written to file.
      keep_subject: If True, keep subject intact without munging (-k)
      keep_non_patch: If True, only strip [PATCH] from brackets (-b)
      encoding: Character encoding to use (default: detect from message)
      scissors: If True, remove everything before scissors line
      message_id: If True, include Message-ID in commit message (-m)

    Returns:
      MailinfoResult with parsed information

    Raises:
      ValueError: If message is malformed or missing required fields
      OSError: If there are issues reading/writing files

    Example:
        >>> result = mailinfo("patch.eml", "msg", "patch")
        >>> print(f"Author: {result.author_name} <{result.author_email}>")
        >>> print(f"Subject: {result.subject}")
    """
    from .mbox import mailinfo as mbox_mailinfo

    if input_path is None:
        # Read from stdin
        input_file: str | bytes | BinaryIO | TextIO = sys.stdin.buffer
    else:
        # Convert PathLike to str if needed
        if isinstance(input_path, os.PathLike):
            input_file = os.fspath(input_path)
        else:
            # input_path is either str or IO[bytes] or IO[str] here
            input_file = cast(str | BinaryIO | TextIO, input_path)

    result = mbox_mailinfo(
        input_file,
        keep_subject=keep_subject,
        keep_non_patch=keep_non_patch,
        encoding=encoding,
        scissors=scissors,
        message_id=message_id,
    )

    # Write message to file if requested
    if msg_file is not None:
        msg_path = (
            os.fspath(msg_file) if isinstance(msg_file, os.PathLike) else msg_file
        )
        with open(msg_path, "w", encoding=encoding or "utf-8") as f:
            f.write(result.message)
            if not result.message.endswith("\n"):
                f.write("\n")

    # Write patch to file if requested
    if patch_file is not None:
        patch_path = (
            os.fspath(patch_file) if isinstance(patch_file, os.PathLike) else patch_file
        )
        with open(patch_path, "w", encoding=encoding or "utf-8") as f:
            f.write(result.patch)

    return result


def rerere(repo: RepoPath = ".") -> tuple[list[tuple[bytes, str]], list[bytes]]:
    """Record current conflict resolutions and apply known resolutions.

    This reads conflicted files from the working tree and records them
    in the rerere cache. If rerere.autoupdate is enabled and a known
    resolution exists, it will be automatically applied.

    Args:
      repo: Path to the repository

    Returns:
      Tuple of:
        - List of tuples (path, conflict_id) for recorded conflicts
        - List of paths where resolutions were automatically applied
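
    Example (a sketch; run after a merge that left conflict markers):
        >>> recorded, applied = rerere(".")  # doctest: +SKIP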
  7694. """
  7695. from .rerere import _has_conflict_markers, rerere_auto
  7696. with open_repo_closing(repo) as r:
  7697. # Get conflicts from the index (if available)
  7698. index = r.open_index()
  7699. conflicts = []
  7700. for path, entry in index.items():
  7701. if isinstance(entry, ConflictedIndexEntry):
  7702. conflicts.append(path)
  7703. # Also scan working tree for files with conflict markers
  7704. # This is needed because merge() doesn't always create ConflictedIndexEntry
  7705. if not conflicts:
  7706. working_tree = r.path
  7707. for path in index:
  7708. file_path = os.path.join(working_tree, os.fsdecode(path))
  7709. try:
  7710. with open(file_path, "rb") as f:
  7711. content = f.read()
  7712. if _has_conflict_markers(content):
  7713. conflicts.append(path)
  7714. except (FileNotFoundError, IsADirectoryError, PermissionError):
  7715. pass
  7716. # Record conflicts and apply known resolutions
  7717. working_tree = r.path
  7718. return rerere_auto(r, working_tree, conflicts)
def rerere_status(repo: RepoPath = ".") -> list[tuple[str, bool]]:
    """Get the status of all conflicts in the rerere cache.

    Args:
      repo: Path to the repository

    Returns:
      List of tuples (conflict_id, has_resolution)
    """
    from .rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        return cache.status()


def rerere_diff(
    repo: RepoPath = ".", conflict_id: str | None = None
) -> list[tuple[str, bytes, bytes | None]]:
    """Show differences for recorded rerere conflicts.

    Args:
      repo: Path to the repository
      conflict_id: Optional specific conflict ID to show

    Returns:
      List of tuples (conflict_id, preimage, postimage)
    """
    from .rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)

        if conflict_id:
            preimage, postimage = cache.diff(conflict_id)
            if preimage is not None:
                return [(conflict_id, preimage, postimage)]
            return []

        # Show all conflicts
        results = []
        for cid, _has_res in cache.status():
            preimage, postimage = cache.diff(cid)
            if preimage is not None:
                results.append((cid, preimage, postimage))
        return results


def rerere_forget(repo: RepoPath = ".", pathspec: str | bytes | None = None) -> None:
    """Forget recorded rerere resolutions for a pathspec.

    Args:
      repo: Path to the repository
      pathspec: Path to forget (currently not implemented; forgets all)
    """
    from .rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        if pathspec:
            # TODO: Implement pathspec matching
            # For now, we need to track which conflict IDs correspond to which paths
            raise NotImplementedError("Pathspec matching not yet implemented")
        # Forget all conflicts (this is when called with no pathspec after resolving)
        cache.clear()


def rerere_clear(repo: RepoPath = ".") -> None:
    """Clear all recorded rerere resolutions.

    Args:
      repo: Path to the repository
    """
    from .rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        cache.clear()


def rerere_gc(repo: RepoPath = ".", max_age_days: int = 60) -> None:
    """Garbage collect old rerere resolutions.

    Args:
      repo: Path to the repository
      max_age_days: Maximum age in days for keeping resolutions
    """
    from .rerere import RerereCache

    with open_repo_closing(repo) as r:
        cache = RerereCache.from_repo(r)
        cache.gc(max_age_days)