# porcelain.py -- Porcelain-like layer on top of Dulwich
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
  21. """Simple wrapper that provides porcelain-like functions on top of Dulwich.
  22. Currently implemented:
  23. * archive
  24. * add
  25. * bisect{_start,_bad,_good,_skip,_reset,_log,_replay}
  26. * branch{_create,_delete,_list}
  27. * check_ignore
  28. * checkout
  29. * checkout_branch
  30. * clone
  31. * cone mode{_init, _set, _add}
  32. * commit
  33. * commit_tree
  34. * daemon
  35. * describe
  36. * diff_tree
  37. * fetch
  38. * filter_branch
  39. * for_each_ref
  40. * init
  41. * ls_files
  42. * ls_remote
  43. * ls_tree
  44. * mailsplit
  45. * merge
  46. * merge_tree
  47. * mv/move
  48. * prune
  49. * pull
  50. * push
  51. * rm
  52. * remote{_add}
  53. * receive_pack
  54. * reset
  55. * revert
  56. * sparse_checkout
  57. * submodule_add
  58. * submodule_init
  59. * submodule_list
  60. * rev_list
  61. * tag{_create,_delete,_list}
  62. * upload_pack
  63. * update_server_info
  64. * var
  65. * write_commit_graph
  66. * status
  67. * shortlog
  68. * symbolic_ref
  69. * worktree{_add,_list,_remove,_prune,_lock,_unlock,_move}
  70. These functions are meant to behave similarly to the git subcommands.
  71. Differences in behaviour are considered bugs.
  72. Note: one of the consequences of this is that paths tend to be
  73. interpreted relative to the current working directory rather than relative
  74. to the repository root.
  75. Functions should generally accept both unicode strings and bytestrings
  76. """

import datetime
import fnmatch
import logging
import os
import posixpath
import stat
import sys
import time
from collections import namedtuple
from collections.abc import Iterable, Iterator, Sequence
from collections.abc import Set as AbstractSet
from contextlib import AbstractContextManager, closing, contextmanager
from dataclasses import dataclass
from io import BytesIO, RawIOBase
from pathlib import Path
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    BinaryIO,
    Callable,
    Optional,
    TextIO,
    TypeVar,
    Union,
    cast,
    overload,
)

if sys.version_info >= (3, 12):
    from collections.abc import Buffer
    from typing import override
else:
    from typing_extensions import Buffer, override

if TYPE_CHECKING:
    from .filter_branch import CommitData
    from .gc import GCStats

from . import replace_me
from .archive import tar_stream
from .bisect import BisectState
from .client import (
    FetchPackResult,
    LsRemoteResult,
    SendPackResult,
    get_transport_and_path,
)
from .config import Config, ConfigFile, StackedConfig, read_submodules
from .diff_tree import (
    CHANGE_ADD,
    CHANGE_COPY,
    CHANGE_DELETE,
    CHANGE_MODIFY,
    CHANGE_RENAME,
    RENAME_CHANGE_TYPES,
    TreeChange,
    tree_changes,
)
from .errors import SendPackError
from .graph import can_fast_forward
from .ignore import IgnoreFilterManager
from .index import (
    ConflictedIndexEntry,
    Index,
    IndexEntry,
    _fs_to_tree_path,
    blob_from_path_and_stat,
    build_file_from_blob,
    build_index_from_tree,
    get_unstaged_changes,
    index_entry_from_stat,
    symlink,
    update_working_tree,
    validate_path_element_default,
    validate_path_element_hfs,
    validate_path_element_ntfs,
)
from .object_store import BaseObjectStore, tree_lookup_path
from .objects import (
    Blob,
    Commit,
    Tag,
    Tree,
    TreeEntry,
    format_timezone,
    parse_timezone,
    pretty_format_tree_entry,
)
from .objectspec import (
    parse_commit,
    parse_object,
    parse_ref,
    parse_reftuples,
    parse_tree,
)
from .pack import UnpackedObject, write_pack_from_container, write_pack_index
from .patch import (
    get_summary,
    write_commit_patch,
    write_object_diff,
    write_tree_diff,
)
from .protocol import ZERO_SHA, Protocol
from .refs import (
    LOCAL_BRANCH_PREFIX,
    LOCAL_NOTES_PREFIX,
    LOCAL_REMOTE_PREFIX,
    LOCAL_TAG_PREFIX,
    Ref,
    SymrefLoop,
    _import_remote_refs,
    filter_ref_prefix,
    shorten_ref_name,
)
from .repo import BaseRepo, Repo, get_user_identity
from .server import (
    FileSystemBackend,
    ReceivePackHandler,
    TCPGitServer,
    UploadPackHandler,
)
from .server import update_server_info as server_update_server_info
from .sparse_patterns import (
    SparseCheckoutConflictError,
    apply_included_paths,
    determine_included_paths,
)

# Module level tuple definition for status output
GitStatus = namedtuple("GitStatus", "staged unstaged untracked")

# TypeVar for preserving BaseRepo subclass types
T = TypeVar("T", bound="BaseRepo")

# Type alias for common repository parameter pattern
RepoPath = Union[str, os.PathLike[str], Repo]


@dataclass
class CountObjectsResult:
    """Result of counting objects in a repository.

    Attributes:
      count: Number of loose objects
      size: Total size of loose objects in bytes
      in_pack: Number of objects in pack files
      packs: Number of pack files
      size_pack: Total size of pack files in bytes
    """

    count: int
    size: int
    in_pack: Optional[int] = None
    packs: Optional[int] = None
    size_pack: Optional[int] = None


class NoneStream(RawIOBase):
    """Fallback if stdout or stderr are unavailable, does nothing."""

    def read(self, size: int = -1) -> None:
        """Read from stream (returns None as this is a null stream)."""
        return None

    def readall(self) -> bytes:
        """Read all bytes (returns empty bytes).

        Returns:
          Empty bytes object
        """
        return b""

    @override
    def readinto(self, b: Buffer) -> Optional[int]:
        """Read into buffer (no-op, reports zero bytes read)."""
        return 0

    @override
    def write(self, b: Buffer) -> Optional[int]:
        """Write to stream (discards data, reports full length as written)."""
        # All Buffer implementations (bytes, bytearray, memoryview) support len()
        return len(b) if b else 0  # type: ignore[arg-type]


default_bytes_out_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stdout, "buffer", None) or NoneStream()
)
default_bytes_err_stream: BinaryIO = cast(
    BinaryIO, getattr(sys.stderr, "buffer", None) or NoneStream()
)

DEFAULT_ENCODING = "utf-8"


class Error(Exception):
    """Porcelain-based error."""

    def __init__(self, msg: str) -> None:
        """Initialize Error with message."""
        super().__init__(msg)


class RemoteExists(Error):
    """Raised when the remote already exists."""


class TimezoneFormatError(Error):
    """Raised when the timezone cannot be determined from a given string."""


class CheckoutError(Error):
    """Indicates that a checkout cannot be performed."""


def parse_timezone_format(tz_str: str) -> int:
    """Parse given string and attempt to return a timezone offset.

    Different formats are considered in the following order:

     - Git internal format: <unix timestamp> <timezone offset>
     - RFC 2822: e.g. Mon, 20 Nov 1995 19:12:08 -0500
     - ISO 8601: e.g. 1995-11-20T19:12:08-0500

    Args:
      tz_str: datetime string
    Returns: Timezone offset as integer
    Raises:
      TimezoneFormatError: if timezone information cannot be extracted
    """
    import re

    # Git internal format
    internal_format_pattern = re.compile("^[0-9]+ [+-][0-9]{,4}$")
    if re.match(internal_format_pattern, tz_str):
        try:
            tz_internal = parse_timezone(tz_str.split(" ")[1].encode(DEFAULT_ENCODING))
            return tz_internal[0]
        except ValueError:
            pass

    # RFC 2822
    import email.utils

    rfc_2822 = email.utils.parsedate_tz(tz_str)
    if rfc_2822 and rfc_2822[9] is not None:
        return rfc_2822[9]

    # ISO 8601
    # Supported offsets:
    # sHHMM, sHH:MM, sHH
    iso_8601_pattern = re.compile(
        "[0-9] ?([+-])([0-9]{2})(?::(?=[0-9]{2}))?([0-9]{2})?$"
    )
    match = re.search(iso_8601_pattern, tz_str)
    total_secs = 0
    if match:
        sign, hours, minutes = match.groups()
        total_secs += int(hours) * 3600
        if minutes:
            total_secs += int(minutes) * 60
        total_secs = -total_secs if sign == "-" else total_secs
        return total_secs

    # YYYY.MM.DD, MM/DD/YYYY, DD.MM.YYYY contain no timezone information
    raise TimezoneFormatError(tz_str)
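
# Usage sketch (illustrative, not executed): all three supported input
# formats normalize to an offset in seconds east of UTC.
#
#   >>> parse_timezone_format("1658306030 +0500")                # Git internal
#   18000
#   >>> parse_timezone_format("Mon, 20 Nov 1995 19:12:08 -0500")  # RFC 2822
#   -18000
#   >>> parse_timezone_format("1995-11-20T19:12:08-0500")         # ISO 8601
#   -18000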


def get_user_timezones() -> tuple[int, int]:
    """Retrieve local timezone as described in git documentation.

    https://raw.githubusercontent.com/git/git/v2.3.0/Documentation/date-formats.txt

    Returns: A tuple containing author timezone, committer timezone.
    """
    local_timezone = time.localtime().tm_gmtoff
    if os.environ.get("GIT_AUTHOR_DATE"):
        author_timezone = parse_timezone_format(os.environ["GIT_AUTHOR_DATE"])
    else:
        author_timezone = local_timezone
    if os.environ.get("GIT_COMMITTER_DATE"):
        commit_timezone = parse_timezone_format(os.environ["GIT_COMMITTER_DATE"])
    else:
        commit_timezone = local_timezone
    return author_timezone, commit_timezone
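
# Usage sketch (illustrative, not executed): with GIT_AUTHOR_DATE set and
# GIT_COMMITTER_DATE unset, the author offset comes from the environment
# variable while the committer offset falls back to the local timezone.
#
#   >>> os.environ["GIT_AUTHOR_DATE"] = "1658306030 +0200"
#   >>> get_user_timezones()  # e.g. (7200, <local tm_gmtoff>)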


@overload
def open_repo(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo(
    path_or_repo: Union[str, os.PathLike[str]],
) -> AbstractContextManager[Repo]: ...


def open_repo(
    path_or_repo: Union[str, os.PathLike[str], T],
) -> AbstractContextManager[Union[T, Repo]]:
    """Open an argument that can be a repository or a path for a repository."""
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return Repo(path_or_repo)


@contextmanager
def _noop_context_manager(obj: T) -> Iterator[T]:
    """Context manager that has the same api as closing but does nothing."""
    yield obj


@overload
def open_repo_closing(path_or_repo: T) -> AbstractContextManager[T]: ...


@overload
def open_repo_closing(
    path_or_repo: Union[str, bytes, os.PathLike[str]],
) -> AbstractContextManager[Repo]: ...


def open_repo_closing(
    path_or_repo: Union[str, bytes, os.PathLike[str], T],
) -> AbstractContextManager[Union[T, Repo]]:
    """Open an argument that can be a repository or a path for a repository.

    Returns a context manager that will close the repo on exit if the argument
    is a path; if the argument is already a repo, it does nothing on exit.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
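
# Usage sketch (illustrative, not executed; the path is hypothetical):
# open_repo_closing is what lets every porcelain function accept either a
# path or an already-open Repo. A path is opened and closed on exit; a Repo
# instance is passed through untouched and left open for the caller.
#
#   with open_repo_closing("/tmp/example-repo") as r:
#       print(r.head())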


def path_to_tree_path(
    repopath: Union[str, bytes, os.PathLike[str]],
    path: Union[str, bytes, os.PathLike[str]],
    tree_encoding: str = DEFAULT_ENCODING,
) -> bytes:
    """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
      tree_encoding: Encoding to use for tree paths
    Returns: A path formatted for use in e.g. an index
    """
    # resolve() might return a relative path on Windows
    # https://bugs.python.org/issue38671
    if sys.platform == "win32":
        path = os.path.abspath(path)

    # Convert bytes paths to str for Path
    if isinstance(path, bytes):
        path = os.fsdecode(path)
    path = Path(path)
    resolved_path = path.resolve()

    # resolve() and abspath() seem to behave differently regarding symlinks;
    # as we are doing abspath on the file path, we need to do the same on
    # the repo path or they might not match
    if sys.platform == "win32":
        repopath = os.path.abspath(repopath)

    # Convert bytes paths to str for Path
    if isinstance(repopath, bytes):
        repopath = os.fsdecode(repopath)
    repopath = Path(repopath).resolve()

    try:
        relpath = resolved_path.relative_to(repopath)
    except ValueError:
        # If path is a symlink that points to a file outside the repo, we
        # want the relpath for the link itself, not the resolved target
        if path.is_symlink():
            parent = path.parent.resolve()
            relpath = (parent / path.name).relative_to(repopath)
        else:
            raise

    if sys.platform == "win32":
        return str(relpath).replace(os.path.sep, "/").encode(tree_encoding)
    else:
        return bytes(relpath)
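
# Usage sketch (illustrative, not executed; the paths are hypothetical):
# absolute or cwd-relative paths become repo-relative, slash-separated bytes
# suitable for index and tree lookups.
#
#   >>> path_to_tree_path("/home/user/project", "/home/user/project/src/main.py")
#   b'src/main.py'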


class DivergedBranches(Error):
    """Branches have diverged and fast-forward is not possible."""

    def __init__(self, current_sha: bytes, new_sha: bytes) -> None:
        """Initialize DivergedBranches error with current and new SHA values."""
        self.current_sha = current_sha
        self.new_sha = new_sha


def check_diverged(repo: BaseRepo, current_sha: bytes, new_sha: bytes) -> None:
    """Check if updating to a sha can be done with fast-forwarding.

    Args:
      repo: Repository object
      current_sha: Current head sha
      new_sha: New head sha
    Raises:
      DivergedBranches: if the update is not a fast-forward
    """
    try:
        can = can_fast_forward(repo, current_sha, new_sha)
    except KeyError:
        can = False
    if not can:
        raise DivergedBranches(current_sha, new_sha)


def archive(
    repo: Union[str, BaseRepo],
    committish: Optional[Union[str, bytes, Commit, Tag]] = None,
    outstream: Union[BinaryIO, RawIOBase] = default_bytes_out_stream,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
) -> None:
    """Create an archive.

    Args:
      repo: Path of repository for which to generate an archive.
      committish: Commit SHA1 or ref to use
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
    """
    if committish is None:
        committish = "HEAD"
    with open_repo_closing(repo) as repo_obj:
        c = parse_commit(repo_obj, committish)
        tree = repo_obj.object_store[c.tree]
        assert isinstance(tree, Tree)
        for chunk in tar_stream(repo_obj.object_store, tree, c.commit_time):
            outstream.write(chunk)
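
# Usage sketch (illustrative, not executed; the paths are hypothetical):
# stream a tar archive of HEAD into a file by passing any binary file object
# as outstream instead of the default stdout buffer.
#
#   with open("/tmp/snapshot.tar", "wb") as f:
#       archive("/path/to/repo", committish="HEAD", outstream=f)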


def update_server_info(repo: RepoPath = ".") -> None:
    """Update server info files for a repository.

    Args:
      repo: path to the repository
    """
    with open_repo_closing(repo) as r:
        server_update_server_info(r)


def write_commit_graph(repo: RepoPath = ".", reachable: bool = True) -> None:
    """Write a commit graph file for a repository.

    Args:
      repo: path to the repository or a Repo object
      reachable: if True, include all commits reachable from refs.
        if False, only include direct ref targets.
    """
    with open_repo_closing(repo) as r:
        # Get all refs
        refs = list(r.refs.as_dict().values())
        if refs:
            r.object_store.write_commit_graph(refs, reachable=reachable)


def symbolic_ref(
    repo: RepoPath, ref_name: Union[str, bytes], force: bool = False
) -> None:
    """Set git symbolic ref into HEAD.

    Args:
      repo: path to the repository
      ref_name: short name of the new ref
      force: force settings without checking if it exists in refs/heads
    """
    with open_repo_closing(repo) as repo_obj:
        ref_path = _make_branch_ref(ref_name)
        if not force and ref_path not in repo_obj.refs.keys():
            ref_name_str = (
                ref_name.decode("utf-8", "replace")
                if isinstance(ref_name, bytes)
                else ref_name
            )
            raise Error(f"fatal: ref `{ref_name_str}` is not a ref")
        repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path)
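
# Usage sketch (illustrative, not executed; the path is hypothetical): point
# HEAD at an existing local branch, or use force=True to skip the refs/heads
# existence check, e.g. for a branch that has no commits yet.
#
#   symbolic_ref("/path/to/repo", "develop")
#   symbolic_ref("/path/to/repo", "new-branch", force=True)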


def pack_refs(repo: RepoPath, all: bool = False) -> None:
    """Pack loose references into packed-refs file.

    Args:
      repo: path to the repository
      all: as in ``git pack-refs --all``, pack all refs instead of the
        default subset
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.refs.pack_refs(all=all)


def _get_variables(repo: RepoPath = ".") -> dict[str, str]:
    """Internal function to get all Git logical variables.

    Args:
      repo: Path to the repository

    Returns:
      A dictionary of all logical variables with values
    """
    from .repo import get_user_identity

    with open_repo_closing(repo) as repo_obj:
        config = repo_obj.get_config_stack()

        # Define callbacks for each logical variable
        def get_author_ident() -> Optional[str]:
            """Get GIT_AUTHOR_IDENT."""
            try:
                author_identity = get_user_identity(config, kind="AUTHOR")
                author_tz, _ = get_user_timezones()
                timestamp = int(time.time())
                return f"{author_identity.decode('utf-8', 'replace')} {timestamp} {author_tz:+05d}"
            except Exception:
                return None

        def get_committer_ident() -> Optional[str]:
            """Get GIT_COMMITTER_IDENT."""
            try:
                committer_identity = get_user_identity(config, kind="COMMITTER")
                _, committer_tz = get_user_timezones()
                timestamp = int(time.time())
                return f"{committer_identity.decode('utf-8', 'replace')} {timestamp} {committer_tz:+05d}"
            except Exception:
                return None

        def get_editor() -> Optional[str]:
            """Get GIT_EDITOR."""
            editor = os.environ.get("GIT_EDITOR")
            if editor is None:
                try:
                    editor_bytes = config.get(("core",), "editor")
                    editor = editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    editor = os.environ.get("VISUAL") or os.environ.get("EDITOR")
            return editor

        def get_sequence_editor() -> Optional[str]:
            """Get GIT_SEQUENCE_EDITOR."""
            sequence_editor = os.environ.get("GIT_SEQUENCE_EDITOR")
            if sequence_editor is None:
                try:
                    seq_editor_bytes = config.get(("sequence",), "editor")
                    sequence_editor = seq_editor_bytes.decode("utf-8", "replace")
                except KeyError:
                    # Falls back to GIT_EDITOR if not set
                    sequence_editor = get_editor()
            return sequence_editor

        def get_pager() -> Optional[str]:
            """Get GIT_PAGER."""
            pager = os.environ.get("GIT_PAGER")
            if pager is None:
                try:
                    pager_bytes = config.get(("core",), "pager")
                    pager = pager_bytes.decode("utf-8", "replace")
                except KeyError:
                    pager = os.environ.get("PAGER")
            return pager

        def get_default_branch() -> str:
            """Get GIT_DEFAULT_BRANCH."""
            try:
                default_branch_bytes = config.get(("init",), "defaultBranch")
                return default_branch_bytes.decode("utf-8", "replace")
            except KeyError:
                # Git's default is "master"
                return "master"

        # Dictionary mapping variable names to their getter callbacks
        variable_callbacks: dict[str, Callable[[], Optional[str]]] = {
            "GIT_AUTHOR_IDENT": get_author_ident,
            "GIT_COMMITTER_IDENT": get_committer_ident,
            "GIT_EDITOR": get_editor,
            "GIT_SEQUENCE_EDITOR": get_sequence_editor,
            "GIT_PAGER": get_pager,
            "GIT_DEFAULT_BRANCH": get_default_branch,
        }

        # Build the variables dictionary by calling callbacks
        variables: dict[str, str] = {}
        for var_name, callback in variable_callbacks.items():
            value = callback()
            if value is not None:
                variables[var_name] = value

        return variables


def var_list(repo: RepoPath = ".") -> dict[str, str]:
    """List all Git logical variables.

    Args:
      repo: Path to the repository

    Returns:
      A dictionary of all logical variables with their values
    """
    return _get_variables(repo)


def var(repo: RepoPath = ".", variable: str = "GIT_AUTHOR_IDENT") -> str:
    """Get the value of a specific Git logical variable.

    Args:
      repo: Path to the repository
      variable: The variable to query (e.g., 'GIT_AUTHOR_IDENT')

    Returns:
      The value of the requested variable as a string

    Raises:
      KeyError: If the requested variable has no value
    """
    variables = _get_variables(repo)
    if variable in variables:
        return variables[variable]
    else:
        raise KeyError(f"Variable {variable} has no value")
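
# Usage sketch (illustrative, not executed): var() mirrors `git var NAME`,
# while var_list() returns every variable that currently has a value.
#
#   >>> var(".", "GIT_DEFAULT_BRANCH")   # falls back to "master" if unset
#   'master'
#   >>> "GIT_AUTHOR_IDENT" in var_list(".")  # assuming identity is configured
#   True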


def commit(
    repo: RepoPath = ".",
    message: Optional[Union[str, bytes, Callable[[Any, Commit], bytes]]] = None,
    author: Optional[bytes] = None,
    author_timezone: Optional[int] = None,
    committer: Optional[bytes] = None,
    commit_timezone: Optional[int] = None,
    encoding: Optional[bytes] = None,
    no_verify: bool = False,
    signoff: Optional[bool] = None,
    all: bool = False,
    amend: bool = False,
    sign: Optional[bool] = None,
) -> bytes:
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message (string/bytes or callable that takes
        (repo, commit) and returns bytes)
      author: Optional author name and email
      author_timezone: Author timestamp timezone
      committer: Optional committer name and email
      commit_timezone: Commit timestamp timezone
      encoding: Encoding to use for commit message
      no_verify: Skip pre-commit and commit-msg hooks
      signoff: Add Signed-off-by line to commit message. If None, uses format.signoff config.
      all: Automatically stage all tracked files that have been modified
      amend: Replace the tip of the current branch by creating a new commit
      sign: GPG sign the commit. If None, uses commit.gpgsign config.
        If True, signs with default GPG key. If False, does not sign.
    Returns: SHA1 of the new commit
    """
    encoding_str = encoding.decode("ascii") if encoding else DEFAULT_ENCODING
    if isinstance(message, str):
        message = message.encode(encoding_str)
    if isinstance(author, str):
        author = author.encode(encoding_str)
    if isinstance(committer, str):
        committer = committer.encode(encoding_str)
    local_timezone = get_user_timezones()
    if author_timezone is None:
        author_timezone = local_timezone[0]
    if commit_timezone is None:
        commit_timezone = local_timezone[1]

    with open_repo_closing(repo) as r:
        # Handle amend logic
        merge_heads = None
        if amend:
            try:
                head_commit = r[r.head()]
                assert isinstance(head_commit, Commit)
            except KeyError:
                raise ValueError("Cannot amend: no existing commit found")

            # If message not provided, use the message from the current HEAD
            if message is None:
                message = head_commit.message
            # If author not provided, use the author from the current HEAD
            if author is None:
                author = head_commit.author
            if author_timezone is None:
                author_timezone = head_commit.author_timezone
            # Use the parent(s) of the current HEAD as our parent(s)
            merge_heads = list(head_commit.parents)

        # If -a flag is used, stage all modified tracked files
        if all:
            index = r.open_index()
            normalizer = r.get_blob_normalizer()

            # Create a wrapper that handles the bytes -> Blob conversion
            if normalizer is not None:

                def filter_callback(data: bytes, path: bytes) -> bytes:
                    from dulwich.objects import Blob

                    blob = Blob()
                    blob.data = data
                    normalized_blob = normalizer.checkin_normalize(blob, path)
                    data_bytes: bytes = normalized_blob.data
                    return data_bytes

            else:
                filter_callback = None

            unstaged_changes = list(
                get_unstaged_changes(index, r.path, filter_callback)
            )
            if unstaged_changes:
                # Convert bytes paths to strings for add function
                modified_files: list[Union[str, bytes, os.PathLike[str]]] = []
                for path in unstaged_changes:
                    if isinstance(path, bytes):
                        modified_files.append(path.decode())
                    else:
                        modified_files.append(path)
                add(r, paths=modified_files)

        # For amend, create dangling commit to avoid adding current HEAD as parent
        if amend:
            commit_sha = r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                merge_heads=merge_heads,
                ref=None,
            )
            # Update HEAD to point to the new commit
            r.refs[b"HEAD"] = commit_sha
            return commit_sha
        else:
            return r.get_worktree().commit(
                message=message,
                author=author,
                author_timezone=author_timezone,
                committer=committer,
                commit_timezone=commit_timezone,
                encoding=encoding,
                no_verify=no_verify,
                sign=sign,
                merge_heads=merge_heads,
            )
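
# Usage sketch (illustrative, not executed; identity strings are
# hypothetical): a plain commit, a `git commit -a` style commit, and an
# amend that reuses HEAD's message and author while replacing the tip.
#
#   commit(".", message="Fix bug", author=b"A U Thor <author@example.com>")
#   commit(".", message="Stage and commit tracked changes", all=True)
#   commit(".", amend=True)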


def commit_tree(
    repo: RepoPath,
    tree: bytes,
    message: Optional[Union[str, bytes]] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> bytes:
    """Create a new commit object.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Commit message
      author: Optional author name and email
      committer: Optional committer name and email
    """
    with open_repo_closing(repo) as r:
        if isinstance(message, str):
            message = message.encode(DEFAULT_ENCODING)
        return r.get_worktree().commit(
            message=message, tree=tree, committer=committer, author=author
        )
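
# Illustrative usage sketch for commit_tree(); ``tree_sha`` is a hypothetical
# tree id obtained elsewhere:
#
#     sha = commit_tree(".", tree_sha, message=b"Import snapshot")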


def init(
    path: Union[str, os.PathLike[str]] = ".",
    *,
    bare: bool = False,
    symlinks: Optional[bool] = None,
) -> Repo:
    """Create a new git repository.

    Args:
      path: Path to repository.
      bare: Whether to create a bare repository.
      symlinks: Whether to create actual symlinks (defaults to autodetect)

    Returns: A Repo instance
    """
    if not os.path.exists(path):
        os.mkdir(path)

    if bare:
        return Repo.init_bare(path)
    else:
        return Repo.init(path, symlinks=symlinks)
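
# Illustrative usage sketch for init(); paths are hypothetical:
#
#     repo = init("/tmp/example")                      # repository with worktree
#     bare = init("/srv/git/example.git", bare=True)   # bare repository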


def clone(
    source: Union[str, bytes, Repo],
    target: Optional[Union[str, os.PathLike[str]]] = None,
    bare: bool = False,
    checkout: Optional[bool] = None,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    outstream: Optional[BinaryIO] = None,
    origin: Optional[str] = "origin",
    depth: Optional[int] = None,
    branch: Optional[Union[str, bytes]] = None,
    config: Optional[Config] = None,
    filter_spec: Optional[str] = None,
    protocol_version: Optional[int] = None,
    recurse_submodules: bool = False,
    **kwargs: Union[Union[str, bytes], Sequence[Union[str, bytes]]],
) -> Repo:
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
      branch: Optional branch or tag to be used as HEAD in the new repository
        instead of the cloned repository's HEAD.
      config: Configuration to use
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used.
      recurse_submodules: Whether to initialize and clone submodules
      **kwargs: Additional keyword arguments including refspecs to fetch.
        Can be a bytestring, a string, or a list of bytestring/string.

    Returns: The new repository
    """
    if outstream is not None:
        import warnings

        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning,
            stacklevel=3,
        )
    # TODO(jelmer): Capture logging output and stream to errstream

    if config is None:
        config = StackedConfig.default()

    if checkout is None:
        checkout = not bare
    if checkout and bare:
        raise Error("checkout and bare are incompatible")

    if target is None:
        if isinstance(source, Repo):
            raise ValueError("target must be specified when cloning from a Repo object")
        elif isinstance(source, bytes):
            target = source.split(b"/")[-1].decode()
        else:
            target = source.split("/")[-1]

    if isinstance(branch, str):
        branch = branch.encode(DEFAULT_ENCODING)

    mkdir = not os.path.exists(target)

    if isinstance(source, Repo):
        # For direct repo cloning, use LocalGitClient
        from .client import GitClient, LocalGitClient

        client: GitClient = LocalGitClient(config=config)
        path = source.path
    else:
        source_str = source.decode() if isinstance(source, bytes) else source
        (client, path) = get_transport_and_path(source_str, config=config, **kwargs)  # type: ignore[arg-type]

    filter_spec_bytes: Optional[bytes] = None
    if filter_spec:
        filter_spec_bytes = filter_spec.encode("ascii")

    repo = client.clone(
        path,
        str(target),  # Convert PathLike to str
        mkdir=mkdir,
        bare=bare,
        origin=origin,
        checkout=checkout,
        branch=branch.decode() if branch else None,  # Convert bytes to str
        progress=lambda data: (errstream.write(data), None)[1],
        depth=depth,
        filter_spec=filter_spec_bytes,
        protocol_version=protocol_version,
    )

    # Initialize and update submodules if requested
    if recurse_submodules and not bare:
        try:
            submodule_init(repo)
            submodule_update(repo, init=True)
        except FileNotFoundError as e:
            # .gitmodules file doesn't exist - no submodules to process
            logging.debug("No .gitmodules file found: %s", e)
        except KeyError as e:
            # Submodule configuration missing
            logging.warning("Submodule configuration error: %s", e)
            if errstream:
                errstream.write(
                    f"Warning: Submodule configuration error: {e}\n".encode()
                )

    return repo
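
# Illustrative usage sketch for clone(); the URL and options are hypothetical:
#
#     repo = clone(
#         "https://example.com/project.git", "project", depth=1, branch="main"
#     )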


def add(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    paths: Optional[
        Union[
            Sequence[Union[str, bytes, os.PathLike[str]]], str, bytes, os.PathLike[str]
        ]
    ] = None,
) -> tuple[list[str], set[str]]:
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. If None, stages all untracked and modified files from the
        current working directory (mimicking 'git add .' behavior).

    Returns: Tuple with set of added files and ignored files

      If the repository contains ignored directories, the returned set will
      contain the path to an ignored directory (with trailing slash). Individual
      files within ignored directories will not be returned.

    Note: When paths=None, this function adds all untracked and modified files
      from the entire repository, mimicking 'git add -A' behavior.
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        repo_path = Path(r.path).resolve()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        # Get unstaged changes once for the entire operation
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        if normalizer is not None:

            def filter_callback(data: bytes, path: bytes) -> bytes:
                from dulwich.objects import Blob

                blob = Blob()
                blob.data = data
                normalized_blob = normalizer.checkin_normalize(blob, path)
                data_bytes: bytes = normalized_blob.data
                return data_bytes

        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)
        all_unstaged_paths = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        if not paths:
            # When no paths specified, add all untracked and modified files from repo root
            paths = [str(repo_path)]

        relpaths = []
        if isinstance(paths, (str, bytes, os.PathLike)):
            paths = [paths]
        for p in paths:
            # Handle bytes paths by decoding them
            if isinstance(p, bytes):
                p = p.decode("utf-8")
            path = Path(p)
            if not path.is_absolute():
                # Make relative paths relative to the repo directory
                path = repo_path / path
            # Don't resolve symlinks completely - only resolve the parent directory
            # to avoid issues when symlinks point outside the repository
            if path.is_symlink():
                # For symlinks, resolve only the parent directory
                parent_resolved = path.parent.resolve()
                resolved_path = parent_resolved / path.name
            else:
                # For regular files/dirs, resolve normally
                resolved_path = path.resolve()
            try:
                relpath = str(resolved_path.relative_to(repo_path)).replace(os.sep, "/")
            except ValueError as e:
                # Path is not within the repository
                p_str = p.decode() if isinstance(p, bytes) else str(p)
                raise ValueError(
                    f"Path {p_str} is not within repository {repo_path}"
                ) from e

            # Handle directories by scanning their contents
            if resolved_path.is_dir():
                # Check if the directory itself is ignored
                dir_relpath = posixpath.join(relpath, "") if relpath != "." else ""
                if dir_relpath and ignore_manager.is_ignored(dir_relpath):
                    ignored.add(dir_relpath)
                    continue

                # When adding a directory, add all untracked files within it
                current_untracked = list(
                    get_untracked_paths(
                        str(resolved_path),
                        str(repo_path),
                        index,
                    )
                )
                for untracked_path in current_untracked:
                    # If we're scanning a subdirectory, adjust the path
                    if relpath != ".":
                        untracked_path = posixpath.join(relpath, untracked_path)

                    if not ignore_manager.is_ignored(untracked_path):
                        relpaths.append(untracked_path)
                    else:
                        ignored.add(untracked_path)

                # Also add unstaged (modified) files within this directory
                for unstaged_path in all_unstaged_paths:
                    if isinstance(unstaged_path, bytes):
                        unstaged_path_str = unstaged_path.decode("utf-8")
                    else:
                        unstaged_path_str = unstaged_path

                    # Check if this unstaged file is within the directory we're processing
                    unstaged_full_path = repo_path / unstaged_path_str
                    try:
                        unstaged_full_path.relative_to(resolved_path)
                        # File is within this directory, add it
                        if not ignore_manager.is_ignored(unstaged_path_str):
                            relpaths.append(unstaged_path_str)
                        else:
                            ignored.add(unstaged_path_str)
                    except ValueError:
                        # File is not within this directory, skip it
                        continue
                continue

            # FIXME: Support patterns
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)
        r.get_worktree().stage(relpaths)
        return (relpaths, ignored)
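
# Illustrative usage sketch for add(); paths are hypothetical:
#
#     added, ignored = add(".", paths=["README.md", "src/"])
#     added, ignored = add(".")  # stage everything, like `git add -A`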


def _is_subdir(
    subdir: Union[str, os.PathLike[str]], parentdir: Union[str, os.PathLike[str]]
) -> bool:
    """Check whether subdir is parentdir or a subdir of parentdir.

    If parentdir or subdir is a relative path, it will be disambiguated
    relative to the pwd.
    """
    parentdir_abs = os.path.realpath(parentdir) + os.path.sep
    subdir_abs = os.path.realpath(subdir) + os.path.sep
    return subdir_abs.startswith(parentdir_abs)


# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    target_dir: Optional[Union[str, os.PathLike[str]]] = None,
) -> None:
    """Remove any untracked files from the target directory recursively.

    Equivalent to running ``git clean -fd`` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise Error("target_dir must be in the repo's working dir")

        config = r.get_config_stack()
        config.get_boolean((b"clean",), b"requireForce", True)

        # TODO(jelmer): if require_force is set, then make sure that -f, -i or
        # -n is specified.

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            # target_dir and r.path are both str, so ap must be str
            assert isinstance(ap, str)
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                if not is_tracked and not is_ignored:
                    os.remove(ap)


def remove(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    paths: Sequence[Union[str, bytes, os.PathLike[str]]] = [],
    cached: bool = False,
) -> None:
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove. Can be absolute or relative to the repository root.
      cached: Only remove from index, not from working directory
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        blob_normalizer = r.get_blob_normalizer()
        for p in paths:
            # If path is absolute, use it as-is. Otherwise, treat it as relative to repo
            p_str = os.fsdecode(p) if isinstance(p, bytes) else str(p)
            if os.path.isabs(p_str):
                full_path = p_str
            else:
                # Treat relative paths as relative to the repository root
                full_path = os.path.join(r.path, p_str)
            tree_path = path_to_tree_path(r.path, full_path)
            # Convert to bytes for file operations
            full_path_bytes = os.fsencode(full_path)
            try:
                entry = index[tree_path]
                if isinstance(entry, ConflictedIndexEntry):
                    raise Error(f"{p_str} has conflicts in the index")
                index_sha = entry.sha
            except KeyError as exc:
                raise Error(f"{p_str} did not match any files") from exc

            if not cached:
                try:
                    st = os.lstat(full_path_bytes)
                except OSError:
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path_bytes, st)
                        # Apply checkin normalization to compare apples to apples
                        if blob_normalizer is not None:
                            blob = blob_normalizer.checkin_normalize(blob, tree_path)
                    except OSError:
                        pass
                    else:
                        try:
                            head_commit = r[r.head()]
                            assert isinstance(head_commit, Commit)
                            committed_sha = tree_lookup_path(
                                r.__getitem__, head_commit.tree, tree_path
                            )[1]
                        except KeyError:
                            committed_sha = None

                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Error(
                                "file has staged content differing "
                                f"from both the file and head: {p_str}"
                            )

                        if index_sha != committed_sha:
                            raise Error(f"file has staged changes: {p_str}")
                        os.remove(full_path_bytes)
            del index[tree_path]
        index.write()


rm = remove
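
# Illustrative usage sketch for remove(); paths are hypothetical:
#
#     remove(".", paths=["obsolete.txt"])           # unstage and delete
#     remove(".", paths=["keep.txt"], cached=True)  # unstage, keep on disk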


def mv(
    repo: Union[str, os.PathLike[str], Repo],
    source: Union[str, bytes, os.PathLike[str]],
    destination: Union[str, bytes, os.PathLike[str]],
    force: bool = False,
) -> None:
    """Move or rename a file, directory, or symlink.

    Args:
      repo: Path to the repository
      source: Path to move from
      destination: Path to move to
      force: Force move even if destination exists

    Raises:
      Error: If source doesn't exist, is not tracked, or destination already exists (without force)
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Handle paths - convert to string if necessary
        if isinstance(source, bytes):
            source = source.decode(sys.getfilesystemencoding())
        elif hasattr(source, "__fspath__"):
            source = os.fspath(source)
        else:
            source = str(source)
        if isinstance(destination, bytes):
            destination = destination.decode(sys.getfilesystemencoding())
        elif hasattr(destination, "__fspath__"):
            destination = os.fspath(destination)
        else:
            destination = str(destination)

        # Get full paths
        if os.path.isabs(source):
            source_full_path = source
        else:
            # Treat relative paths as relative to the repository root
            source_full_path = os.path.join(r.path, source)

        if os.path.isabs(destination):
            destination_full_path = destination
        else:
            # Treat relative paths as relative to the repository root
            destination_full_path = os.path.join(r.path, destination)

        # Check if destination is a directory
        if os.path.isdir(destination_full_path):
            # Move source into destination directory
            basename = os.path.basename(source_full_path)
            destination_full_path = os.path.join(destination_full_path, basename)

        # Convert to tree paths for index
        source_tree_path = path_to_tree_path(r.path, source_full_path)
        destination_tree_path = path_to_tree_path(r.path, destination_full_path)

        # Check if source exists in index
        if source_tree_path not in index:
            raise Error(f"source '{source}' is not under version control")

        # Check if source exists in filesystem
        if not os.path.exists(source_full_path):
            raise Error(f"source '{source}' does not exist")

        # Check if destination already exists
        if os.path.exists(destination_full_path) and not force:
            raise Error(f"destination '{destination}' already exists (use -f to force)")

        # Check if destination is already in index
        if destination_tree_path in index and not force:
            raise Error(
                f"destination '{destination}' already exists in index (use -f to force)"
            )

        # Get the index entry for the source
        source_entry = index[source_tree_path]

        # Convert to bytes for file operations
        source_full_path_bytes = os.fsencode(source_full_path)
        destination_full_path_bytes = os.fsencode(destination_full_path)

        # Create parent directory for destination if needed
        dest_dir = os.path.dirname(destination_full_path_bytes)
        if dest_dir and not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Move the file in the filesystem
        if os.path.exists(destination_full_path_bytes) and force:
            os.remove(destination_full_path_bytes)
        os.rename(source_full_path_bytes, destination_full_path_bytes)

        # Update the index
        del index[source_tree_path]
        index[destination_tree_path] = source_entry
        index.write()


move = mv
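
# Illustrative usage sketch for mv(); paths are hypothetical:
#
#     mv(".", "docs/old.rst", "docs/new.rst")
#     mv(".", "notes.txt", "docs/", force=True)  # move into a directory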


def commit_decode(
    commit: Commit, contents: bytes, default_encoding: str = DEFAULT_ENCODING
) -> str:
    """Decode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.decode(encoding, "replace")


def commit_encode(
    commit: Commit, contents: str, default_encoding: str = DEFAULT_ENCODING
) -> bytes:
    """Encode commit contents using the commit's encoding or default."""
    if commit.encoding:
        encoding = commit.encoding.decode("ascii")
    else:
        encoding = default_encoding
    return contents.encode(encoding)


def print_commit(
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a human-readable commit log entry.

    Args:
      commit: A `Commit` object
      decode: Function to decode commit data
      outstream: A stream file to write to
    """
    outstream.write("-" * 50 + "\n")
    outstream.write("commit: " + commit.id.decode("ascii") + "\n")
    if len(commit.parents) > 1:
        outstream.write(
            "merge: "
            + "...".join([c.decode("ascii") for c in commit.parents[1:]])
            + "\n"
        )
    outstream.write("Author: " + decode(commit.author) + "\n")
    if commit.author != commit.committer:
        outstream.write("Committer: " + decode(commit.committer) + "\n")

    time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(commit.author_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    if commit.message:
        outstream.write("\n")
        outstream.write(decode(commit.message) + "\n")
    outstream.write("\n")


def print_tag(
    tag: Tag, decode: Callable[[bytes], str], outstream: TextIO = sys.stdout
) -> None:
    """Write a human-readable tag.

    Args:
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream to write to
    """
    outstream.write("Tagger: " + decode(tag.tagger) + "\n")
    time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(tag.tag_timezone).decode("ascii")
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(tag.message))
    outstream.write("\n")


def show_blob(
    repo: RepoPath,
    blob: Blob,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Write a blob to a stream.

    Args:
      repo: A `Repo` object
      blob: A `Blob` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write(decode(blob.data))


def show_commit(
    repo: RepoPath,
    commit: Commit,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    from .diff import ColorizedDiffStream

    # Create a wrapper for ColorizedDiffStream to handle string/bytes conversion
    class _StreamWrapper:
        def __init__(self, stream: "ColorizedDiffStream") -> None:
            self.stream = stream

        def write(self, data: Union[str, bytes]) -> None:
            if isinstance(data, str):
                # Convert string to bytes for ColorizedDiffStream
                self.stream.write(data.encode("utf-8"))
            else:
                self.stream.write(data)

    with open_repo_closing(repo) as r:
        # Use wrapper for ColorizedDiffStream, direct stream for others
        if isinstance(outstream, ColorizedDiffStream):
            wrapped_stream = _StreamWrapper(outstream)
            print_commit(commit, decode=decode, outstream=wrapped_stream)
            # Write diff directly to the ColorizedDiffStream as bytes
            write_tree_diff(
                outstream,
                r.object_store,
                commit.parents[0] if commit.parents else None,
                commit.tree,
            )
        else:
            print_commit(commit, decode=decode, outstream=outstream)
            if commit.parents:
                parent_commit = r[commit.parents[0]]
                assert isinstance(parent_commit, Commit)
                base_tree = parent_commit.tree
            else:
                base_tree = None
            # Traditional path: buffer diff and write as decoded text
            diffstream = BytesIO()
            write_tree_diff(diffstream, r.object_store, base_tree, commit.tree)
            diffstream.seek(0)
            outstream.write(commit_decode(commit, diffstream.getvalue()))


def show_tree(
    repo: RepoPath,
    tree: Tree,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tree to a stream.

    Args:
      repo: A `Repo` object
      tree: A `Tree` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    for n in tree:
        outstream.write(decode(n) + "\n")


def show_tag(
    repo: RepoPath,
    tag: Tag,
    decode: Callable[[bytes], str],
    outstream: TextIO = sys.stdout,
) -> None:
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        print_tag(tag, decode, outstream)
        obj = r[tag.object[1]]
        assert isinstance(obj, (Tree, Blob, Commit, Tag))
        show_object(repo, obj, decode, outstream)


def show_object(
    repo: RepoPath,
    obj: Union[Tree, Blob, Commit, Tag],
    decode: Callable[[bytes], str],
    outstream: TextIO,
) -> None:
    """Show details of a git object."""
    handlers: dict[bytes, Callable[[RepoPath, Any, Any, TextIO], None]] = {
        b"tree": show_tree,
        b"blob": show_blob,
        b"commit": show_commit,
        b"tag": show_tag,
    }
    handler = handlers.get(obj.type_name)
    if handler is None:
        raise ValueError(f"Unknown object type: {obj.type_name.decode()}")
    handler(repo, obj, decode, outstream)


def print_name_status(changes: Iterator[TreeChange]) -> Iterator[str]:
    """Print a simple status summary, listing changed files."""
    for change in changes:
        if not change:
            continue
        if isinstance(change, list):
            change = change[0]
        if change.type == CHANGE_ADD:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "A"
        elif change.type == CHANGE_DELETE:
            assert change.old is not None
            path1 = change.old.path
            assert path1 is not None
            path2 = b""
            kind = "D"
        elif change.type == CHANGE_MODIFY:
            assert change.new is not None
            path1 = change.new.path
            assert path1 is not None
            path2 = b""
            kind = "M"
        elif change.type in RENAME_CHANGE_TYPES:
            assert change.old is not None and change.new is not None
            path1 = change.old.path
            assert path1 is not None
            path2_opt = change.new.path
            assert path2_opt is not None
            path2 = path2_opt
            if change.type == CHANGE_RENAME:
                kind = "R"
            elif change.type == CHANGE_COPY:
                kind = "C"
        path1_str = (
            path1.decode("utf-8", errors="replace")
            if isinstance(path1, bytes)
            else path1
        )
        path2_str = (
            path2.decode("utf-8", errors="replace")
            if isinstance(path2, bytes)
            else path2
        )
        yield f"{kind:<8}{path1_str:<20}{path2_str:<20}"


def log(
    repo: RepoPath = ".",
    paths: Optional[Sequence[Union[str, bytes]]] = None,
    outstream: TextIO = sys.stdout,
    max_entries: Optional[int] = None,
    reverse: bool = False,
    name_status: bool = False,
) -> None:
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      max_entries: Optional maximum number of entries to display
      reverse: Reverse order in which entries are printed
      name_status: Print name status
    """
    with open_repo_closing(repo) as r:
        try:
            include = [r.head()]
        except KeyError:
            include = []
        # Convert paths to bytes if needed
        paths_bytes = None
        if paths:
            paths_bytes = [p.encode() if isinstance(p, str) else p for p in paths]
        walker = r.get_walker(
            include=include, max_entries=max_entries, paths=paths_bytes, reverse=reverse
        )
        for entry in walker:

            def decode_wrapper(x: bytes) -> str:
                return commit_decode(entry.commit, x)

            print_commit(entry.commit, decode_wrapper, outstream)
            if name_status:
                outstream.writelines(
                    [
                        line + "\n"
                        for line in print_name_status(
                            cast(Iterator[TreeChange], entry.changes())
                        )
                    ]
                )
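
# Illustrative usage sketch for log(); values are hypothetical:
#
#     log(".", max_entries=5)                 # last five commits
#     log(".", paths=["src/"], reverse=True)  # oldest first, limited to a path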


# TODO(jelmer): better default for encoding?
def show(
    repo: RepoPath = ".",
    objects: Optional[Sequence[Union[str, bytes]]] = None,
    outstream: TextIO = sys.stdout,
    default_encoding: str = DEFAULT_ENCODING,
) -> None:
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the
        commit
    """
    if objects is None:
        objects = ["HEAD"]
    if isinstance(objects, (str, bytes)):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):

                def decode(x: bytes) -> str:
                    return commit_decode(o, x, default_encoding)

            else:

                def decode(x: bytes) -> str:
                    return x.decode(default_encoding)

            assert isinstance(o, (Tree, Blob, Commit, Tag))
            show_object(r, o, decode, outstream)


def diff_tree(
    repo: RepoPath,
    old_tree: Union[str, bytes, Tree],
    new_tree: Union[str, bytes, Tree],
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Compares the content and mode of blobs found via two tree objects.

    Args:
      repo: Path to repository
      old_tree: Id of old tree
      new_tree: Id of new tree
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        if isinstance(old_tree, Tree):
            old_tree_id: Optional[bytes] = old_tree.id
        elif isinstance(old_tree, str):
            old_tree_id = old_tree.encode()
        else:
            old_tree_id = old_tree
        if isinstance(new_tree, Tree):
            new_tree_id: Optional[bytes] = new_tree.id
        elif isinstance(new_tree, str):
            new_tree_id = new_tree.encode()
        else:
            new_tree_id = new_tree
        write_tree_diff(outstream, r.object_store, old_tree_id, new_tree_id)


def diff(
    repo: RepoPath = ".",
    commit: Optional[Union[str, bytes, Commit]] = None,
    commit2: Optional[Union[str, bytes, Commit]] = None,
    staged: bool = False,
    paths: Optional[Sequence[Union[str, bytes]]] = None,
    outstream: BinaryIO = default_bytes_out_stream,
    diff_algorithm: Optional[str] = None,
) -> None:
    """Show diff.

    Args:
      repo: Path to repository
      commit: First commit to compare. If staged is True, compare
        index to this commit. If staged is False, compare working tree
        to this commit. If None, defaults to HEAD for staged and index
        for unstaged.
      commit2: Second commit to compare against first commit. If provided,
        show diff between commit and commit2 (ignoring staged flag).
      staged: If True, show staged changes (index vs commit).
        If False, show unstaged changes (working tree vs commit/index).
        Ignored if commit2 is provided.
      paths: Optional list of paths to limit diff
      outstream: Stream to write to
      diff_algorithm: Algorithm to use for diffing ("myers" or "patience"),
        defaults to the underlying function's default if None
    """
    from . import diff as diff_module

    with open_repo_closing(repo) as r:
        # Normalize paths to bytes; an empty sequence means no path filter
        byte_paths: Optional[list[bytes]] = None
        if paths:
            byte_paths = [
                p.encode("utf-8") if isinstance(p, str) else p for p in paths
            ]

        # Resolve commit refs to SHAs if provided
        if commit is not None:
            if isinstance(commit, Commit):
                # Already a Commit object
                commit_sha = commit.id
                commit_obj = commit
            else:
                # parse_commit handles both refs and SHAs, and always returns a Commit object
                commit_obj = parse_commit(r, commit)
                commit_sha = commit_obj.id
        else:
            commit_sha = None
            commit_obj = None

        if commit2 is not None:
            # Compare two commits
            if isinstance(commit2, Commit):
                commit2_obj = commit2
            else:
                commit2_obj = parse_commit(r, commit2)

            # Get trees from commits
            old_tree = commit_obj.tree if commit_obj else None
            new_tree = commit2_obj.tree

            # Use tree_changes to get the changes and apply path filtering
            changes = r.object_store.tree_changes(old_tree, new_tree)
            for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
                # Skip if paths are specified and this change doesn't match
                if byte_paths:
                    path_to_check = newpath or oldpath
                    assert path_to_check is not None
                    if not any(
                        path_to_check == p or path_to_check.startswith(p + b"/")
                        for p in byte_paths
                    ):
                        continue

                write_object_diff(
                    outstream,
                    r.object_store,
                    (oldpath, oldmode, oldsha),
                    (newpath, newmode, newsha),
                    diff_algorithm=diff_algorithm,
                )
        elif staged:
            # Show staged changes (index vs commit)
            diff_module.diff_index_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        elif commit is not None:
            # Compare working tree to a specific commit
            assert (
                commit_sha is not None
            )  # mypy: commit_sha is set when commit is not None
            diff_module.diff_working_tree_to_tree(
                r, outstream, commit_sha, byte_paths, diff_algorithm=diff_algorithm
            )
        else:
            # Compare working tree to index
            diff_module.diff_working_tree_to_index(
                r, outstream, byte_paths, diff_algorithm=diff_algorithm
            )
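
# Illustrative usage sketch for diff(); the buffer is hypothetical:
#
#     buf = BytesIO()
#     diff(".", commit="HEAD", outstream=buf)  # working tree vs. HEAD
#     diff(".", staged=True, outstream=buf)    # index vs. HEAD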


def rev_list(
    repo: RepoPath,
    commits: Sequence[Union[str, bytes]],
    outstream: BinaryIO = default_bytes_out_stream,
) -> None:
    """Lists commit objects in reverse chronological order.

    Args:
      repo: Path to repository
      commits: Commits over which to iterate
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        for entry in r.get_walker(
            include=[r[c if isinstance(c, bytes) else c.encode()].id for c in commits]
        ):
            outstream.write(entry.commit.id + b"\n")


def _canonical_part(url: str) -> str:
    name = url.rsplit("/", 1)[-1]
    if name.endswith(".git"):
        name = name[:-4]
    return name


def submodule_add(
    repo: Union[str, os.PathLike[str], Repo],
    url: str,
    path: Optional[Union[str, os.PathLike[str]]] = None,
    name: Optional[str] = None,
) -> None:
    """Add a new submodule.

    Args:
      repo: Path to repository
      url: URL of repository to add as submodule
      path: Path where submodule should live
      name: Name for the submodule
    """
    with open_repo_closing(repo) as r:
        if path is None:
            path = os.path.relpath(_canonical_part(url), r.path)
        if name is None:
            name = os.fsdecode(path) if path is not None else None
        if name is None:
            raise Error("Submodule name must be specified or derivable from path")

        # TODO(jelmer): Move this logic to dulwich.submodule
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        try:
            config = ConfigFile.from_path(gitmodules_path)
        except FileNotFoundError:
            config = ConfigFile()
            config.path = gitmodules_path
        config.set(("submodule", name), "url", url)
        config.set(("submodule", name), "path", os.fsdecode(path))
        config.write_to_path()


def submodule_init(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Initialize submodules.

    Args:
      repo: Path to repository
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")
        for path, url, name in read_submodules(gitmodules_path):
            config.set((b"submodule", name), b"active", True)
            config.set((b"submodule", name), b"url", url)
        config.write_to_path()


def submodule_list(repo: RepoPath) -> Iterator[tuple[str, str]]:
    """List submodules.

    Args:
      repo: Path to repository
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        head_commit = r[r.head()]
        assert isinstance(head_commit, Commit)
        for path, sha in iter_cached_submodules(r.object_store, head_commit.tree):
            yield path.decode(DEFAULT_ENCODING), sha.decode(DEFAULT_ENCODING)


def submodule_update(
    repo: Union[str, os.PathLike[str], Repo],
    paths: Optional[Sequence[Union[str, bytes, os.PathLike[str]]]] = None,
    init: bool = False,
    force: bool = False,
    errstream: Optional[BinaryIO] = None,
) -> None:
    """Update submodules.

    Args:
      repo: Path to repository
      paths: Optional list of specific submodule paths to update. If None, updates all.
      init: If True, initialize submodules first
      force: Force update even if local changes exist
      errstream: Error stream for error messages
    """
    from .submodule import iter_cached_submodules

    with open_repo_closing(repo) as r:
        if init:
            submodule_init(r)

        config = r.get_config()
        gitmodules_path = os.path.join(r.path, ".gitmodules")

        # Get list of submodules to update
        submodules_to_update = []
        head_commit = r[r.head()]
        assert isinstance(head_commit, Commit)
        for path, sha in iter_cached_submodules(r.object_store, head_commit.tree):
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )
            if paths is None or path_str in paths:
                submodules_to_update.append((path, sha))

        # Read submodule configuration
        for path, target_sha in submodules_to_update:
            path_str = (
                path.decode(DEFAULT_ENCODING) if isinstance(path, bytes) else path
            )

            # Find the submodule name from .gitmodules
            submodule_name: Optional[bytes] = None
            for sm_path, sm_url, sm_name in read_submodules(gitmodules_path):
                if sm_path == path:
                    submodule_name = sm_name
                    break

            if not submodule_name:
                continue

            # Get the URL from config
            section = (
                b"submodule",
                submodule_name
                if isinstance(submodule_name, bytes)
                else submodule_name.encode(),
            )
            try:
                url_value = config.get(section, b"url")
                if isinstance(url_value, bytes):
                    url = url_value.decode(DEFAULT_ENCODING)
                else:
                    url = url_value
            except KeyError:
                # URL not in config, skip this submodule
                continue

            # Get or create the submodule repository paths
            submodule_path = os.path.join(r.path, path_str)
            submodule_git_dir = os.path.join(r.path, ".git", "modules", path_str)

            # Clone or fetch the submodule
            if not os.path.exists(submodule_git_dir):
                # Clone the submodule as bare repository
                os.makedirs(os.path.dirname(submodule_git_dir), exist_ok=True)

                # Clone to the git directory
                sub_repo = clone(url, submodule_git_dir, bare=True, checkout=False)
                sub_repo.close()

                # Create the submodule directory if it doesn't exist
                if not os.path.exists(submodule_path):
                    os.makedirs(submodule_path)

                # Create .git file in the submodule directory
                depth = path_str.count("/") + 1
                relative_git_dir = "../" * depth + ".git/modules/" + path_str
                git_file_path = os.path.join(submodule_path, ".git")
                with open(git_file_path, "w") as f:
                    f.write(f"gitdir: {relative_git_dir}\n")

                # Set up working directory configuration
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    sub_config = sub_repo.get_config()
                    sub_config.set(
                        (b"core",),
                        b"worktree",
                        os.path.abspath(submodule_path).encode(),
                    )
                    sub_config.write_to_path()

                    # Checkout the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Build the index and checkout files
                    tree = sub_repo[target_sha]
                    if hasattr(tree, "tree"):  # If it's a commit, get the tree
                        tree_id = tree.tree
                    else:
                        tree_id = target_sha

                    build_index_from_tree(
                        submodule_path,
                        sub_repo.index_path(),
                        sub_repo.object_store,
                        tree_id,
                    )
            else:
                # Fetch and checkout in existing submodule
                with open_repo_closing(submodule_git_dir) as sub_repo:
                    # Fetch from remote
                    client, path_segments = get_transport_and_path(url)
                    client.fetch(path_segments.encode(), sub_repo)

                    # Update to the target commit
                    sub_repo.refs[b"HEAD"] = target_sha

                    # Reset the working directory
                    reset(sub_repo, "hard", target_sha)


def tag_create(
    repo: RepoPath,
    tag: Union[str, bytes],
    author: Optional[Union[str, bytes]] = None,
    message: Optional[Union[str, bytes]] = None,
    annotated: bool = False,
    objectish: Union[str, bytes] = "HEAD",
    tag_time: Optional[int] = None,
    tag_timezone: Optional[int] = None,
    sign: Optional[bool] = None,
    encoding: str = DEFAULT_ENCODING,
) -> None:
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG Sign the tag (bool, defaults to False,
        pass True to use default GPG key,
        pass a str containing Key ID to use a specific GPG key)
      encoding: Encoding to use for tag messages
    """
    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)
        if isinstance(tag, str):
            tag = tag.encode(encoding)
        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                author = get_user_identity(r.get_config_stack())
            elif isinstance(author, str):
                author = author.encode(encoding)
            else:
                assert isinstance(author, bytes)
            tag_obj.tagger = author
            if isinstance(message, str):
                message = message.encode(encoding)
            elif isinstance(message, bytes):
                pass
            else:
                message = b""
            tag_obj.message = message + "\n".encode(encoding)
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                tag_timezone = get_user_timezones()[1]
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone.encode())
            tag_obj.tag_timezone = tag_timezone

            # Check if we should sign the tag
            config = r.get_config_stack()
            if sign is None:
                # Check tag.gpgSign configuration when sign is not explicitly set
                try:
                    should_sign = config.get_boolean(
                        (b"tag",), b"gpgsign", default=False
                    )
                except KeyError:
                    should_sign = False  # Default to not signing if no config
            else:
                should_sign = sign

            # Get the signing key from config if signing is enabled
            keyid = None
            if should_sign:
                try:
                    keyid_bytes = config.get((b"user",), b"signingkey")
                    keyid = keyid_bytes.decode() if keyid_bytes else None
                except KeyError:
                    keyid = None
                tag_obj.sign(keyid)

            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id
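
# Illustrative usage sketch for tag_create(); tag names and message are
# hypothetical:
#
#     tag_create(".", "v1.0")  # lightweight tag pointing at HEAD
#     tag_create(".", "v1.0", message="Release 1.0", annotated=True)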


def verify_commit(
    repo: RepoPath,
    committish: Union[str, bytes] = "HEAD",
    keyids: Optional[list[str]] = None,
) -> None:
    """Verify GPG signature on a commit.

    Args:
      repo: Path to repository
      committish: Commit to verify (defaults to HEAD)
      keyids: Optional list of trusted key IDs. If provided, the commit
        must be signed by one of these keys. If not provided, just verifies
        that the commit has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if commit was not signed by a key
        specified in keyids
    """
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        commit.verify(keyids)


def verify_tag(
    repo: RepoPath,
    tagname: Union[str, bytes],
    keyids: Optional[list[str]] = None,
) -> None:
    """Verify GPG signature on a tag.

    Args:
      repo: Path to repository
      tagname: Name of tag to verify
      keyids: Optional list of trusted key IDs. If provided, the tag
        must be signed by one of these keys. If not provided, just verifies
        that the tag has a valid signature.

    Raises:
      gpg.errors.BadSignatures: if GPG signature verification fails
      gpg.errors.MissingSignatures: if tag was not signed by a key
        specified in keyids
    """
    with open_repo_closing(repo) as r:
        if isinstance(tagname, str):
            tagname = tagname.encode()
        tag_ref = _make_tag_ref(tagname)
        tag_id = r.refs[tag_ref]
        tag_obj = r[tag_id]
        if not isinstance(tag_obj, Tag):
            raise Error(f"{tagname!r} does not point to a tag object")
        tag_obj.verify(keyids)


def tag_list(repo: RepoPath, outstream: TextIO = sys.stdout) -> list[bytes]:
    """List all tags.

    Args:
      repo: Path to repository
      outstream: Stream to write tags to

    Returns: Sorted list of tag names as bytes
    """
    with open_repo_closing(repo) as r:
        tags = sorted(r.refs.as_dict(b"refs/tags"))
        return tags


def tag_delete(repo: RepoPath, name: Union[str, bytes]) -> None:
    """Remove a tag.

    Args:
      repo: Path to repository
      name: Name of tag to remove
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, str):
            # Encode str names so they match the bytes-based refs container
            names = [name.encode(DEFAULT_ENCODING)]
        elif isinstance(name, bytes):
            names = [name]
        elif isinstance(name, list):
            names = name
        else:
            raise Error(f"Unexpected tag name type {name!r}")
        for name in names:
            del r.refs[_make_tag_ref(name)]


def _make_notes_ref(name: bytes) -> bytes:
    """Make a notes ref name."""
    if name.startswith(b"refs/notes/"):
        return name
    return LOCAL_NOTES_PREFIX + name


def notes_add(
    repo: RepoPath,
    object_sha: bytes,
    note: bytes,
    ref: bytes = b"commits",
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
    message: Optional[bytes] = None,
) -> bytes:
    """Add or update a note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to annotate
      note: Note content
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
      author: Author identity (defaults to committer)
      committer: Committer identity (defaults to config)
      message: Commit message for the notes update

    Returns:
      SHA of the new notes commit
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(note, str):
            note = note.encode(DEFAULT_ENCODING)
        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()

        return r.notes.set_note(
            object_sha,
            note,
            notes_ref,
            author=author,
            committer=committer,
            message=message,
            config=config,
        )


def notes_remove(
    repo: RepoPath,
    object_sha: bytes,
    ref: bytes = b"commits",
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
    message: Optional[bytes] = None,
) -> Optional[bytes]:
    """Remove a note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object to remove notes from
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)
      author: Author identity (defaults to committer)
      committer: Committer identity (defaults to config)
      message: Commit message for the notes removal

    Returns:
      SHA of the new notes commit, or None if no note existed
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()

        return r.notes.remove_note(
            object_sha,
            notes_ref,
            author=author,
            committer=committer,
            message=message,
            config=config,
        )


def notes_show(
    repo: Union[str, os.PathLike[str], Repo], object_sha: bytes, ref: bytes = b"commits"
) -> Optional[bytes]:
    """Show the note for an object.

    Args:
      repo: Path to repository
      object_sha: SHA of the object
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)

    Returns:
      Note content as bytes, or None if no note exists
    """
    with open_repo_closing(repo) as r:
        # Parse the object to get its SHA
        obj = parse_object(r, object_sha)
        object_sha = obj.id

        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()

        return r.notes.get_note(object_sha, notes_ref, config=config)


def notes_list(repo: RepoPath, ref: bytes = b"commits") -> list[tuple[bytes, bytes]]:
    """List all notes in a notes ref.

    Args:
      repo: Path to repository
      ref: Notes ref to use (defaults to "commits" for refs/notes/commits)

    Returns:
      List of tuples of (object_sha, note_content)
    """
    with open_repo_closing(repo) as r:
        if isinstance(ref, str):
            ref = ref.encode(DEFAULT_ENCODING)

        notes_ref = _make_notes_ref(ref)
        config = r.get_config_stack()

        return r.notes.list_notes(notes_ref, config=config)
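
# Illustrative usage sketch for the notes_* helpers; the note text is
# hypothetical:
#
#     notes_add(".", b"HEAD", b"Reviewed-by: someone")
#     notes_show(".", b"HEAD")  # -> b"Reviewed-by: someone", or None if absent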


def reset(
    repo: Union[str, os.PathLike[str], Repo],
    mode: str,
    treeish: Union[str, bytes, Commit, Tree, Tag] = "HEAD",
) -> None:
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to
    """
    with open_repo_closing(repo) as r:
        # Parse the target tree
        tree = parse_tree(r, treeish)

        # Only parse as commit if treeish is not a Tree object
        if isinstance(treeish, Tree):
            # For Tree objects, we can't determine the commit, skip updating HEAD
            target_commit = None
        else:
            target_commit = parse_commit(r, treeish)

        # Update HEAD to point to the target commit
        if target_commit is not None:
            r.refs[b"HEAD"] = target_commit.id

        if mode == "soft":
            # Soft reset: only update HEAD, leave index and working tree unchanged
            return

        elif mode == "mixed":
            # Mixed reset: update HEAD and index, but leave working tree unchanged
            from .object_store import iter_tree_contents

            # Open the index
            index = r.open_index()

            # Clear the current index
            index.clear()

            # Populate index from the target tree
            for entry in iter_tree_contents(r.object_store, tree.id):
                # Create an IndexEntry from the tree entry. Use zeros for
                # filesystem-specific fields since we're not touching the
                # working tree.
                assert (
                    entry.mode is not None
                    and entry.sha is not None
                    and entry.path is not None
                )
                index_entry = IndexEntry(
                    ctime=(0, 0),
                    mtime=(0, 0),
                    dev=0,
                    ino=0,
                    mode=entry.mode,
                    uid=0,
                    gid=0,
                    size=0,  # Size will be 0 since we're not reading from disk
                    sha=entry.sha,
                    flags=0,
                )
                index[entry.path] = index_entry

            # Write the updated index
            index.write()

        elif mode == "hard":
            # Hard reset: update HEAD, index, and working tree
            # Get configuration for working directory update
            config = r.get_config()
            honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")

            if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
                validate_path_element = validate_path_element_ntfs
            elif config.get_boolean(b"core", b"protectHFS", sys.platform == "darwin"):
                validate_path_element = validate_path_element_hfs
            else:
                validate_path_element = validate_path_element_default

            if config.get_boolean(b"core", b"symlinks", True):

                def symlink_wrapper(
                    source: Union[str, bytes, os.PathLike[str]],
                    target: Union[str, bytes, os.PathLike[str]],
                ) -> None:
                    symlink(source, target)  # type: ignore[arg-type,unused-ignore]

                symlink_fn = symlink_wrapper
            else:

                def symlink_fallback(
                    source: Union[str, bytes, os.PathLike[str]],
                    target: Union[str, bytes, os.PathLike[str]],
                ) -> None:
                    mode = "w" + ("b" if isinstance(source, bytes) else "")
                    with open(target, mode) as f:
                        f.write(source)

                symlink_fn = symlink_fallback

            # Update working tree and index
            blob_normalizer = r.get_blob_normalizer()
            # For reset --hard, use current index tree as old tree to get proper deletions
            index = r.open_index()
            if len(index) > 0:
                index_tree_id = index.commit(r.object_store)
            else:
                # Empty index
                index_tree_id = None
            changes = tree_changes(
                r.object_store, index_tree_id, tree.id, want_unchanged=True
            )
            update_working_tree(
                r,
                index_tree_id,
                tree.id,
                change_iterator=changes,
                honor_filemode=honor_filemode,
                validate_path_element=validate_path_element,
                symlink_fn=symlink_fn,
                force_remove_untracked=True,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=True,  # Allow overwriting modified files
            )
        else:
            raise Error(f"Invalid reset mode: {mode}")
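
# Illustrative usage sketch for reset(); all three modes target HEAD here:
#
#     reset(".", "soft", "HEAD")   # move HEAD only
#     reset(".", "mixed", "HEAD")  # ...and rebuild the index
#     reset(".", "hard", "HEAD")   # ...and rewrite the working tree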


def get_remote_repo(
    repo: Repo, remote_location: Optional[Union[str, bytes]] = None
) -> tuple[Optional[str], str]:
    """Get the remote repository information.

    Args:
      repo: Local repository object
      remote_location: Optional remote name or URL; defaults to branch remote

    Returns:
      Tuple of (remote_name, remote_url) where remote_name may be None
      if remote_location is a URL rather than a configured remote
    """
    config = repo.get_config()
    if remote_location is None:
        remote_location = get_branch_remote(repo)
    if isinstance(remote_location, str):
        encoded_location = remote_location.encode()
    else:
        encoded_location = remote_location

    section = (b"remote", encoded_location)

    remote_name: Optional[str] = None
    if config.has_section(section):
        remote_name = encoded_location.decode()
        encoded_location = config.get(section, "url")

    return (remote_name, encoded_location.decode())
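

# Illustrative usage sketch for get_remote_repo(), kept as a comment so that
# importing this module has no side effects; the remote name "origin" is an
# assumption, not something this module guarantees to exist:
#
#   with open_repo_closing(".") as r:
#       remote_name, url = get_remote_repo(r, "origin")
#       # remote_name is "origin" if configured, or None for a bare URL;
#       # url is the resolved fetch/push URL either way.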


def push(
    repo: RepoPath,
    remote_location: Optional[Union[str, bytes]] = None,
    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    force: bool = False,
    **kwargs: object,
) -> SendPackResult:
    """Remote push with dulwich via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
      force: Force overwriting refs
      **kwargs: Additional keyword arguments for the client
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)

        # Check if mirror mode is enabled
        mirror_mode = False
        if remote_name:
            try:
                mirror_mode_val = r.get_config_stack().get_boolean(
                    (b"remote", remote_name.encode()), b"mirror"
                )
                if mirror_mode_val is not None:
                    mirror_mode = mirror_mode_val
            except KeyError:
                pass

        if mirror_mode:
            # Mirror mode: push all refs and delete non-existent ones
            refspecs = []
            for ref in r.refs.keys():
                # Push all refs to the same name on remote
                refspecs.append(ref + b":" + ref)
        elif refspecs is None:
            refspecs = [active_branch(r)]

        # Normalize refspecs to bytes
        if isinstance(refspecs, str):
            refspecs_bytes: Union[bytes, list[bytes]] = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_bytes = refspecs
        else:
            refspecs_bytes = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_bytes.append(spec.encode())
                else:
                    refspecs_bytes.append(spec)

        # Get the client and path
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **kwargs,  # type: ignore[arg-type]
        )

        selected_refs = []
        remote_changed_refs: dict[bytes, Optional[bytes]] = {}

        def update_refs(refs: dict[bytes, bytes]) -> dict[bytes, bytes]:
            from .refs import DictRefsContainer

            remote_refs = DictRefsContainer(refs)
            selected_refs.extend(
                parse_reftuples(r.refs, remote_refs, refspecs_bytes, force=force)
            )
            new_refs = {}

            # In mirror mode, delete remote refs that don't exist locally
            if mirror_mode:
                local_refs = set(r.refs.keys())
                for remote_ref in refs.keys():
                    if remote_ref not in local_refs:
                        new_refs[remote_ref] = ZERO_SHA
                        remote_changed_refs[remote_ref] = None

            # TODO: Handle selected_refs == {None: None}
            for lh, rh, force_ref in selected_refs:
                if lh is None:
                    assert rh is not None
                    new_refs[rh] = ZERO_SHA
                    remote_changed_refs[rh] = None
                else:
                    try:
                        localsha = r.refs[lh]
                    except KeyError as exc:
                        raise Error(
                            f"No valid ref {lh.decode() if isinstance(lh, bytes) else lh} in local repository"
                        ) from exc
                    assert rh is not None
                    if not force_ref and rh in refs:
                        check_diverged(r, refs[rh], localsha)
                    new_refs[rh] = localsha
                    remote_changed_refs[rh] = localsha
            return new_refs

        err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING
        remote_location = client.get_url(path)
        try:

            def generate_pack_data_wrapper(
                have: AbstractSet[bytes],
                want: AbstractSet[bytes],
                ofs_delta: bool = False,
            ) -> tuple[int, Iterator[UnpackedObject]]:
                # Wrap to match the expected signature
                # Convert AbstractSet to set since generate_pack_data expects set
                return r.generate_pack_data(
                    set(have), set(want), progress=None, ofs_delta=ofs_delta
                )

            result = client.send_pack(
                path.encode(),
                update_refs,
                generate_pack_data=generate_pack_data_wrapper,
                progress=lambda data: (errstream.write(data), None)[1],
            )
        except SendPackError as exc:
            raise Error(
                "Push to " + remote_location + " failed -> " + exc.args[0].decode(),
            ) from exc
        else:
            errstream.write(
                b"Push to " + remote_location.encode(err_encoding) + b" successful.\n"
            )

        for ref, error in (result.ref_status or {}).items():
            if error is not None:
                errstream.write(
                    b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding))
                )
            else:
                errstream.write(b"Ref %s updated\n" % ref)

        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, remote_changed_refs)

    # Trigger auto GC if needed. This must happen before returning; a return
    # statement placed ahead of this block would make it unreachable.
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return result
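

# Illustrative usage sketch for push() (comment only; the remote and branch
# names are assumptions). ref_status maps each ref to None on success or an
# error message on failure:
#
#   result = push(".", "origin", refspecs=[b"refs/heads/main:refs/heads/main"])
#   for ref, err in (result.ref_status or {}).items():
#       if err is not None:
#           print(f"failed to push {ref!r}: {err}")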


def pull(
    repo: RepoPath,
    remote_location: Optional[Union[str, bytes]] = None,
    refspecs: Optional[Union[Union[str, bytes], Sequence[Union[str, bytes]]]] = None,
    outstream: BinaryIO = default_bytes_out_stream,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    fast_forward: bool = True,
    ff_only: bool = False,
    force: bool = False,
    filter_spec: Optional[str] = None,
    protocol_version: Optional[int] = None,
    **kwargs: object,
) -> None:
    """Pull from remote via dulwich.client.

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch. Can be a bytestring, a string, or a list of
        bytestring/string.
      outstream: A stream file to write to output
      errstream: A stream file to write to errors
      fast_forward: If True, raise an exception when fast-forward is not possible
      ff_only: If True, only allow fast-forward merges. Raises DivergedBranches
        when branches have diverged rather than performing a merge.
      force: If True, allow overwriting local changes in the working tree.
        If False, pull will abort if it would overwrite uncommitted changes.
      filter_spec: A git-rev-list-style object filter spec, as an ASCII string.
        Only used if the server supports the Git protocol-v2 'filter'
        feature, and ignored otherwise.
      protocol_version: desired Git protocol version. By default the highest
        mutually supported protocol version will be used
      **kwargs: Additional keyword arguments for the client
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        selected_refs = []

        if refspecs is None:
            refspecs_normalized: Union[bytes, list[bytes]] = [b"HEAD"]
        elif isinstance(refspecs, str):
            refspecs_normalized = refspecs.encode()
        elif isinstance(refspecs, bytes):
            refspecs_normalized = refspecs
        else:
            refspecs_normalized = []
            for spec in refspecs:
                if isinstance(spec, str):
                    refspecs_normalized.append(spec.encode())
                else:
                    refspecs_normalized.append(spec)

        def determine_wants(
            remote_refs: dict[bytes, bytes], depth: Optional[int] = None
        ) -> list[bytes]:
            from .refs import DictRefsContainer

            remote_refs_container = DictRefsContainer(remote_refs)
            selected_refs.extend(
                parse_reftuples(
                    remote_refs_container, r.refs, refspecs_normalized, force=force
                )
            )
            return [
                remote_refs[lh]
                for (lh, rh, force_ref) in selected_refs
                if lh is not None
                and lh in remote_refs
                and remote_refs[lh] not in r.object_store
            ]

        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            **kwargs,  # type: ignore[arg-type]
        )

        if filter_spec:
            filter_spec_bytes: Optional[bytes] = filter_spec.encode("ascii")
        else:
            filter_spec_bytes = None

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(
            path.encode(),
            r,
            progress=progress,
            determine_wants=determine_wants,  # type: ignore[arg-type] # Function matches protocol but mypy can't verify
            filter_spec=filter_spec_bytes,
            protocol_version=protocol_version,
        )

        # Store the old HEAD tree before making changes
        try:
            old_head = r.refs[b"HEAD"]
            old_commit = r[old_head]
            assert isinstance(old_commit, Commit)
            old_tree_id = old_commit.tree
        except KeyError:
            old_tree_id = None

        merged = False
        for lh, rh, force_ref in selected_refs:
            if not force_ref and rh is not None and rh in r.refs:
                try:
                    assert lh is not None
                    followed_ref = r.refs.follow(rh)[1]
                    assert followed_ref is not None
                    lh_ref = fetch_result.refs[lh]
                    assert lh_ref is not None
                    check_diverged(r, followed_ref, lh_ref)
                except DivergedBranches as exc:
                    if ff_only or fast_forward:
                        raise
                    else:
                        # Perform merge
                        assert lh is not None
                        merge_ref = fetch_result.refs[lh]
                        assert merge_ref is not None
                        _merge_result, conflicts = _do_merge(r, merge_ref)
                        if conflicts:
                            raise Error(
                                f"Merge conflicts occurred: {conflicts}"
                            ) from exc
                        merged = True
                        # Skip updating ref since merge already updated HEAD
                        continue
            if rh is not None and lh is not None:
                lh_value = fetch_result.refs[lh]
                if lh_value is not None:
                    r.refs[rh] = lh_value

        # Only update HEAD if we didn't perform a merge
        if selected_refs and not merged:
            lh, rh, _ = selected_refs[0]
            if lh is not None:
                ref_value = fetch_result.refs[lh]
                if ref_value is not None:
                    r[b"HEAD"] = ref_value

        # Update working tree to match the new HEAD
        # Skip if merge was performed as merge already updates the working tree
        if not merged and old_tree_id is not None:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            new_tree_id = head_commit.tree
            blob_normalizer = r.get_blob_normalizer()
            changes = tree_changes(r.object_store, old_tree_id, new_tree_id)
            update_working_tree(
                r,
                old_tree_id,
                new_tree_id,
                change_iterator=changes,
                blob_normalizer=blob_normalizer,
                allow_overwrite_modified=force,
            )

        if remote_name is not None:
            _import_remote_refs(r.refs, remote_name, fetch_result.refs)

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)
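

# Illustrative usage sketch for pull() (comment only; "origin" is an
# assumption). With ff_only=True a DivergedBranches exception is raised
# instead of attempting a merge:
#
#   pull(".", "origin", refspecs=[b"HEAD"], ff_only=True)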


def status(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    ignored: bool = False,
    untracked_files: str = "normal",
) -> GitStatus:
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in untracked
      untracked_files: How to handle untracked files, defaults to "normal":
        "no": do not return untracked files
        "normal": return untracked directories, not their contents
        "all": include all files in untracked directories

    Using untracked_files="no" can be faster than "all" when the worktree
    contains many untracked files/directories.

    Using untracked_files="normal" provides a good balance, only showing
    directories that are entirely untracked without listing all their contents.

    Returns: GitStatus tuple,
      staged - dict with lists of staged paths (diff index/HEAD)
      unstaged - list of unstaged paths (diff index/working-tree)
      untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)

        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()

        # Create a wrapper that handles the bytes -> Blob conversion
        if normalizer is not None:

            def filter_callback(data: bytes, path: bytes) -> bytes:
                from dulwich.objects import Blob

                blob = Blob()
                blob.data = data
                normalized_blob = normalizer.checkin_normalize(blob, path)
                result_data: bytes = normalized_blob.data
                return result_data

        else:
            filter_callback = None

        # Check if core.preloadIndex is enabled
        config = r.get_config_stack()
        preload_index = config.get_boolean(b"core", b"preloadIndex", False)

        unstaged_changes = list(
            get_unstaged_changes(index, r.path, filter_callback, preload_index)
        )

        untracked_paths = get_untracked_paths(
            r.path,
            r.path,
            index,
            exclude_ignored=not ignored,
            untracked_files=untracked_files,
        )
        if sys.platform == "win32":
            untracked_changes = [
                path.replace(os.path.sep, "/") for path in untracked_paths
            ]
        else:
            untracked_changes = list(untracked_paths)

        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)
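

# Illustrative usage sketch for status() (comment only; the repo path is an
# assumption). GitStatus is a tuple of (staged, unstaged, untracked):
#
#   st = status(".", untracked_files="normal")
#   staged, unstaged, untracked = st
#   print(staged["modify"], unstaged, untracked)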


def shortlog(
    repo: Union[str, os.PathLike[str], Repo],
    summary_only: bool = False,
    sort_by_commits: bool = False,
) -> list[dict[str, str]]:
    """Summarize commits by author, like git shortlog.

    Args:
      repo: Path to repository or Repo object.
      summary_only: If True, only show counts per author.
      sort_by_commits: If True, sort authors by number of commits.

    Returns:
      A list of dictionaries, each containing:
        - "author": the author's name as a string
        - "messages": all commit messages concatenated into a single string
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker()
        authors: dict[str, list[str]] = {}

        for entry in walker:
            commit = entry.commit
            author = commit.author.decode(commit.encoding or "utf-8")
            message = commit.message.decode(commit.encoding or "utf-8").strip()
            authors.setdefault(author, []).append(message)

        # Convert messages to single string per author
        items: list[dict[str, str]] = [
            {"author": author, "messages": "\n".join(msgs)}
            for author, msgs in authors.items()
        ]

        if sort_by_commits:
            # Approximate the commit count by the number of message lines;
            # multi-line commit messages inflate this count.
            items.sort(key=lambda x: len(x["messages"].splitlines()), reverse=True)

        return items
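

# Illustrative usage sketch for shortlog() (comment only):
#
#   for item in shortlog(".", sort_by_commits=True):
#       print(item["author"], len(item["messages"].splitlines()))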


def _walk_working_dir_paths(
    frompath: Union[str, bytes, os.PathLike[str]],
    basepath: Union[str, bytes, os.PathLike[str]],
    prune_dirnames: Optional[Callable[[str, list[str]], list[str]]] = None,
) -> Iterator[tuple[Union[str, bytes], bool]]:
    """Get path, is_dir for files in working dir from frompath.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
      prune_dirnames: Optional callback to prune dirnames during os.walk
        dirnames will be set to result of prune_dirnames(dirpath, dirnames)
    """
    # Convert paths to strings for os.walk compatibility
    for dirpath, dirnames, filenames in os.walk(frompath):  # type: ignore[type-var]
        # Skip .git and below.
        if ".git" in dirnames:
            dirnames.remove(".git")
            if dirpath != basepath:
                continue

        if ".git" in filenames:
            filenames.remove(".git")
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True  # type: ignore[misc]

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)  # type: ignore[call-overload]
            yield filepath, False

        if prune_dirnames:
            dirnames[:] = prune_dirnames(dirpath, dirnames)  # type: ignore[arg-type]


def get_untracked_paths(
    frompath: Union[str, bytes, os.PathLike[str]],
    basepath: Union[str, bytes, os.PathLike[str]],
    index: Index,
    exclude_ignored: bool = False,
    untracked_files: str = "all",
) -> Iterator[str]:
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
      exclude_ignored: Whether to exclude ignored paths
      untracked_files: How to handle untracked files:
        - "no": return an empty list
        - "all": return all files in untracked directories
        - "normal": return untracked directories without listing their contents

    Note: ignored directories will never be walked for performance reasons.
      If exclude_ignored is False, only the path to an ignored directory will
      be yielded, no files inside the directory will be returned
    """
    if untracked_files not in ("no", "all", "normal"):
        raise ValueError("untracked_files must be one of (no, all, normal)")

    if untracked_files == "no":
        return

    # Normalize paths to str
    frompath_str = os.fsdecode(os.fspath(frompath))
    basepath_str = os.fsdecode(os.fspath(basepath))

    with open_repo_closing(basepath_str) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)

    ignored_dirs = []
    # List to store untracked directories found during traversal
    untracked_dir_list = []

    def directory_has_non_ignored_files(dir_path: str, base_rel_path: str) -> bool:
        """Recursively check if directory contains any non-ignored files."""
        try:
            for entry in os.listdir(dir_path):
                entry_path = os.path.join(dir_path, entry)
                rel_entry = os.path.join(base_rel_path, entry)
                if os.path.isfile(entry_path):
                    if ignore_manager.is_ignored(rel_entry) is not True:
                        return True
                elif os.path.isdir(entry_path):
                    if directory_has_non_ignored_files(entry_path, rel_entry):
                        return True
            return False
        except OSError:
            # If we can't read the directory, assume it has non-ignored files
            return True

    def prune_dirnames(dirpath: str, dirnames: list[str]) -> list[str]:
        for i in range(len(dirnames) - 1, -1, -1):
            path = os.path.join(dirpath, dirnames[i])
            ip = os.path.join(os.path.relpath(path, basepath_str), "")

            # Check if directory is ignored
            if ignore_manager.is_ignored(ip) is True:
                if not exclude_ignored:
                    ignored_dirs.append(
                        os.path.join(os.path.relpath(path, frompath_str), "")
                    )
                del dirnames[i]
                continue

            # For "normal" mode, check if the directory is entirely untracked
            if untracked_files == "normal":
                # Convert directory path to tree path for index lookup
                dir_tree_path = path_to_tree_path(basepath_str, path)
                # Check if any file in this directory is tracked
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)

                if not has_tracked_files:
                    # This directory is entirely untracked
                    rel_path_base = os.path.relpath(path, basepath_str)
                    rel_path_from = os.path.join(
                        os.path.relpath(path, frompath_str), ""
                    )

                    # If excluding ignored, check if directory contains any non-ignored files
                    if exclude_ignored:
                        if not directory_has_non_ignored_files(path, rel_path_base):
                            # Directory only contains ignored files, skip it
                            del dirnames[i]
                            continue

                    # Check if it should be excluded due to ignore rules
                    is_ignored = ignore_manager.is_ignored(rel_path_base)
                    if not exclude_ignored or not is_ignored:
                        untracked_dir_list.append(rel_path_from)
                    del dirnames[i]

        return dirnames

    # For "all" mode, use the original behavior
    if untracked_files == "all":
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            if not is_dir:
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)
    else:  # "normal" mode
        # Walk directories, handling both files and directories
        for ap, is_dir in _walk_working_dir_paths(
            frompath_str, basepath_str, prune_dirnames=prune_dirnames
        ):
            # frompath_str and basepath_str are both str, so ap must be str
            assert isinstance(ap, str)
            # This part won't be reached for pruned directories
            if is_dir:
                # Check if this directory is entirely untracked
                dir_tree_path = path_to_tree_path(basepath_str, ap)
                dir_prefix = dir_tree_path + b"/" if dir_tree_path else b""
                has_tracked_files = any(name.startswith(dir_prefix) for name in index)
                if not has_tracked_files:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.join(os.path.relpath(ap, frompath_str), "")
            else:
                # Check individual files in directories that contain tracked files
                ip = path_to_tree_path(basepath_str, ap)
                if ip not in index:
                    if not exclude_ignored or not ignore_manager.is_ignored(
                        os.path.relpath(ap, basepath_str)
                    ):
                        yield os.path.relpath(ap, frompath_str)

    # Yield any untracked directories found during pruning
    yield from untracked_dir_list
    yield from ignored_dirs
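

# Illustrative usage sketch for get_untracked_paths() (comment only; the
# paths are assumptions). Passing the repository root for both frompath and
# basepath yields repo-relative paths:
#
#   index = Repo(".").open_index()
#   for p in get_untracked_paths(".", ".", index, untracked_files="normal"):
#       print(p)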


def get_tree_changes(repo: RepoPath) -> dict[str, list[Union[str, bytes]]]:
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object

    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes: dict[str, list[Union[str, bytes]]] = {
            "add": [],
            "delete": [],
            "modify": [],
        }
        try:
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            tree_id = head_commit.tree
        except KeyError:
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            if not change[0][0]:
                assert change[0][1] is not None
                tracked_changes["add"].append(change[0][1])
            elif not change[0][1]:
                assert change[0][0] is not None
                tracked_changes["delete"].append(change[0][0])
            elif change[0][0] == change[0][1]:
                assert change[0][0] is not None
                tracked_changes["modify"].append(change[0][0])
            else:
                raise NotImplementedError("git mv ops not yet supported")
        return tracked_changes
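

# Illustrative usage sketch for get_tree_changes() (comment only; the example
# paths are assumptions):
#
#   changes = get_tree_changes(".")
#   # e.g. {"add": [b"new.txt"], "delete": [], "modify": [b"README.md"]}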


def daemon(
    path: Union[str, os.PathLike[str]] = ".",
    address: Optional[str] = None,
    port: Optional[int] = None,
) -> None:
    """Run a daemon serving Git requests over TCP/IP.

    Args:
      path: Path to the directory to serve.
      address: Optional address to listen on (defaults to localhost)
      port: Optional port to listen on (defaults to 9418, the standard git port)
    """
    # TODO(jelmer): Support git-daemon-export-ok and --export-all.
    backend = FileSystemBackend(os.fspath(path))
    server = TCPGitServer(backend, address or "localhost", port or 9418)
    server.serve_forever()


def web_daemon(
    path: Union[str, os.PathLike[str]] = ".",
    address: Optional[str] = None,
    port: Optional[int] = None,
) -> None:
    """Run a daemon serving Git requests over HTTP.

    Args:
      path: Path to the directory to serve
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to 80)
    """
    from .web import (
        WSGIRequestHandlerLogger,
        WSGIServerLogger,
        make_server,
        make_wsgi_chain,
    )

    backend = FileSystemBackend(os.fspath(path))
    app = make_wsgi_chain(backend)
    server = make_server(
        address or "::",
        port or 80,
        app,
        handler_class=WSGIRequestHandlerLogger,
        server_class=WSGIServerLogger,
    )
    server.serve_forever()
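

# Illustrative usage sketch for daemon()/web_daemon() (comment only; the
# serve path "/srv/repos" and port 8000 are assumptions). Both calls block
# the calling thread, so run them in a worker thread when embedding:
#
#   import threading
#   t = threading.Thread(
#       target=web_daemon, args=("/srv/repos",), kwargs={"port": 8000}
#   )
#   t.daemon = True
#   t.start()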


def upload_pack(
    path: Union[str, os.PathLike[str]] = ".",
    inf: Optional[BinaryIO] = None,
    outf: Optional[BinaryIO] = None,
) -> int:
    """Upload a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = UploadPackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def receive_pack(
    path: Union[str, os.PathLike[str]] = ".",
    inf: Optional[BinaryIO] = None,
    outf: Optional[BinaryIO] = None,
) -> int:
    """Receive a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = sys.stdout.buffer
    if inf is None:
        inf = sys.stdin.buffer
    assert outf is not None
    assert inf is not None
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data: bytes) -> None:
        outf.write(data)
        outf.flush()

    proto = Protocol(inf.read, send_fn)
    handler = ReceivePackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def _make_branch_ref(name: Union[str, bytes]) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return LOCAL_BRANCH_PREFIX + name


def _make_tag_ref(name: Union[str, bytes]) -> Ref:
    if isinstance(name, str):
        name = name.encode(DEFAULT_ENCODING)
    return LOCAL_TAG_PREFIX + name


def branch_delete(
    repo: RepoPath, name: Union[str, bytes, Sequence[Union[str, bytes]]]
) -> None:
    """Delete a branch.

    Args:
      repo: Path to the repository
      name: Name of the branch
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, (list, tuple)):
            names = name
        else:
            names = [name]
        for branch_name in names:
            del r.refs[_make_branch_ref(branch_name)]


def branch_create(
    repo: Union[str, os.PathLike[str], Repo],
    name: Union[str, bytes],
    objectish: Optional[Union[str, bytes]] = None,
    force: bool = False,
) -> None:
    """Create a branch.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"

        # Try to expand branch shorthand before parsing
        original_objectish = objectish
        objectish_bytes = (
            objectish.encode(DEFAULT_ENCODING)
            if isinstance(objectish, str)
            else objectish
        )
        if b"refs/remotes/" + objectish_bytes in r.refs:
            objectish = b"refs/remotes/" + objectish_bytes
        elif b"refs/heads/" + objectish_bytes in r.refs:
            objectish = b"refs/heads/" + objectish_bytes

        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        ref_message = (
            b"branch: Created from " + original_objectish.encode(DEFAULT_ENCODING)
            if isinstance(original_objectish, str)
            else b"branch: Created from " + original_objectish
        )
        if force:
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                name_str = name.decode() if isinstance(name, bytes) else name
                raise Error(f"Branch with name {name_str} already exists.")

        # Check if we should set up tracking
        config = r.get_config_stack()
        try:
            auto_setup_merge = config.get((b"branch",), b"autoSetupMerge").decode()
        except KeyError:
            auto_setup_merge = "true"  # Default value

        # Determine if the objectish refers to a remote-tracking branch
        objectish_ref = None
        if original_objectish != "HEAD":
            # Try to resolve objectish as a ref
            objectish_bytes = (
                original_objectish.encode(DEFAULT_ENCODING)
                if isinstance(original_objectish, str)
                else original_objectish
            )
            if objectish_bytes in r.refs:
                objectish_ref = objectish_bytes
            elif b"refs/remotes/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/remotes/" + objectish_bytes
            elif b"refs/heads/" + objectish_bytes in r.refs:
                objectish_ref = b"refs/heads/" + objectish_bytes
        else:
            # HEAD might point to a remote-tracking branch
            head_ref = r.refs.follow(b"HEAD")[0][1]
            if head_ref.startswith(b"refs/remotes/"):
                objectish_ref = head_ref

        # Set up tracking if appropriate
        if objectish_ref and (
            (auto_setup_merge == "always")
            or (
                auto_setup_merge == "true"
                and objectish_ref.startswith(b"refs/remotes/")
            )
        ):
            # Extract remote name and branch from the ref
            if objectish_ref.startswith(b"refs/remotes/"):
                parts = objectish_ref[len(b"refs/remotes/") :].split(b"/", 1)
                if len(parts) == 2:
                    remote_name = parts[0]
                    remote_branch = b"refs/heads/" + parts[1]

                    # Set up tracking
                    repo_config = r.get_config()
                    branch_name_bytes = (
                        name.encode(DEFAULT_ENCODING) if isinstance(name, str) else name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"remote", remote_name
                    )
                    repo_config.set(
                        (b"branch", branch_name_bytes), b"merge", remote_branch
                    )
                    repo_config.write_to_path()
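

# Illustrative usage sketch for branch_create() (comment only; the branch and
# remote names are assumptions). Creating a branch from a remote-tracking ref
# sets up branch.<name>.remote/merge when branch.autoSetupMerge permits:
#
#   branch_create(".", "feature-x", objectish="origin/main")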


def filter_branches_by_pattern(branches: Iterable[bytes], pattern: str) -> list[bytes]:
    """Filter branches by fnmatch pattern.

    Args:
      branches: Iterable of branch names as bytes
      pattern: Pattern to match against

    Returns:
      List of filtered branch names
    """
    return [
        branch for branch in branches if fnmatch.fnmatchcase(branch.decode(), pattern)
    ]


def branch_list(repo: RepoPath) -> list[bytes]:
    """List all branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names (without refs/heads/ prefix)
    """
    with open_repo_closing(repo) as r:
        branches = list(r.refs.keys(base=LOCAL_BRANCH_PREFIX))

        # Check for branch.sort configuration
        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = LOCAL_BRANCH_PREFIX + branch_name
                sha = r.refs[ref]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date
            # Note: Python's sort naturally orders smaller values first (ascending)
            # For dates, this means oldest first by default
            # Use a stable sort with branch name as secondary key for consistent ordering
            if reverse:
                # For reverse sort, we want newest dates first but alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key, fall back to default
            branches.sort()

        return branches
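

# Illustrative usage sketch for branch_list() (comment only). The sort order
# honors the branch.sort config key, e.g. "-committerdate" for newest first:
#
#   for name in branch_list("."):
#       print(name.decode())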


def branch_remotes_list(repo: RepoPath) -> list[bytes]:
    """List all remote-tracking branches.

    Args:
      repo: Path to the repository

    Returns:
      List of branch names without the refs/remotes/ prefix but including
      the remote name (e.g. b'origin/main')
    """
    with open_repo_closing(repo) as r:
        branches = list(r.refs.keys(base=LOCAL_REMOTE_PREFIX))

        config = r.get_config_stack()
        try:
            sort_key = config.get((b"branch",), b"sort").decode()
        except KeyError:
            # Default is refname (alphabetical)
            sort_key = "refname"

        # Parse sort key
        reverse = False
        if sort_key.startswith("-"):
            reverse = True
            sort_key = sort_key[1:]

        # Apply sorting
        if sort_key == "refname":
            # Simple alphabetical sort (default)
            branches.sort(reverse=reverse)
        elif sort_key in ("committerdate", "authordate"):
            # Sort by date
            def get_commit_date(branch_name: bytes) -> int:
                ref = LOCAL_REMOTE_PREFIX + branch_name
                sha = r.refs[ref]
                commit = r.object_store[sha]
                assert isinstance(commit, Commit)
                if sort_key == "committerdate":
                    return cast(int, commit.commit_time)
                else:  # authordate
                    return cast(int, commit.author_time)

            # Sort branches by date
            # Note: Python's sort naturally orders smaller values first (ascending)
            # For dates, this means oldest first by default
            # Use a stable sort with branch name as secondary key for consistent ordering
            if reverse:
                # For reverse sort, we want newest dates first but alphabetical names second
                branches.sort(key=lambda b: (-get_commit_date(b), b))
            else:
                branches.sort(key=lambda b: (get_commit_date(b), b))
        else:
            # Unknown sort key
            raise ValueError(f"Unknown sort key: {sort_key}")

        return branches


def _get_branch_merge_status(repo: RepoPath) -> Iterator[tuple[bytes, bool]]:
    """Get merge status for all branches relative to current HEAD.

    Args:
      repo: Path to the repository

    Yields:
      Tuple of (branch_name, is_merged) where:
        - branch_name: Branch name without refs/heads/ prefix
        - is_merged: True if branch is merged into HEAD, False otherwise
    """
    with open_repo_closing(repo) as r:
        current_sha = r.refs[b"HEAD"]

        for branch_ref, branch_sha in r.refs.as_dict(base=b"refs/heads/").items():
            # Check if branch is an ancestor of HEAD (fully merged)
            is_merged = can_fast_forward(r, branch_sha, current_sha)
            yield branch_ref, is_merged


def merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without refs/heads/ prefix) that are merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if is_merged:
            yield branch_name


def no_merged_branches(repo: RepoPath) -> Iterator[bytes]:
    """List branches that have not been merged into the current branch.

    Args:
      repo: Path to the repository

    Yields:
      Branch names (without refs/heads/ prefix) that are not merged
      into the current HEAD
    """
    for branch_name, is_merged in _get_branch_merge_status(repo):
        if not is_merged:
            yield branch_name


def branches_containing(repo: RepoPath, commit: str) -> Iterator[bytes]:
    """List branches that contain the specified commit.

    Args:
      repo: Path to the repository
      commit: Commit-ish string (SHA, branch name, tag, etc.)

    Yields:
      Branch names (without refs/heads/ prefix) that contain the commit

    Raises:
      ValueError: If the commit reference is malformed
      KeyError: If the commit reference does not exist
    """
    with open_repo_closing(repo) as r:
        commit_obj = parse_commit(r, commit)
        commit_sha = commit_obj.id
        for branch_ref, branch_sha in r.refs.as_dict(base=LOCAL_BRANCH_PREFIX).items():
            if can_fast_forward(r, commit_sha, branch_sha):
                yield branch_ref
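

# Illustrative usage sketch for the merge-status helpers (comment only):
#
#   stale = list(merged_branches("."))            # candidates for deletion
#   holders = list(branches_containing(".", "HEAD"))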


def active_branch(repo: RepoPath) -> bytes:
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open

    Returns:
      branch name

    Raises:
      KeyError: if the repository does not have a working tree
      ValueError: if HEAD does not point to a local branch (detached HEAD)
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(b"HEAD")[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        return active_ref[len(LOCAL_BRANCH_PREFIX) :]


def get_branch_remote(repo: Union[str, os.PathLike[str], Repo]) -> bytes:
    """Return the active branch's remote name, if any.

    Args:
      repo: Repository to open

    Returns:
      remote name

    Raises:
      KeyError: if the repository does not have a working tree
    """
    with open_repo_closing(repo) as r:
        branch_name = active_branch(r.path)
        config = r.get_config()
        try:
            remote_name = config.get((b"branch", branch_name), b"remote")
        except KeyError:
            remote_name = b"origin"
        return remote_name


def get_branch_merge(repo: RepoPath, branch_name: Optional[bytes] = None) -> bytes:
    """Return the branch's merge reference (upstream branch), if any.

    Args:
      repo: Repository to open
      branch_name: Name of the branch (defaults to active branch)

    Returns:
      merge reference name (e.g. b"refs/heads/main")

    Raises:
      KeyError: if the branch does not have a merge configuration
    """
    with open_repo_closing(repo) as r:
        if branch_name is None:
            branch_name = active_branch(r.path)
        config = r.get_config()
        return config.get((b"branch", branch_name), b"merge")


def set_branch_tracking(
    repo: Union[str, os.PathLike[str], Repo],
    branch_name: bytes,
    remote_name: bytes,
    remote_ref: bytes,
) -> None:
    """Set up branch tracking configuration.

    Args:
      repo: Repository to open
      branch_name: Name of the local branch
      remote_name: Name of the remote (e.g. b"origin")
      remote_ref: Remote reference to track (e.g. b"refs/heads/main")
    """
    with open_repo_closing(repo) as r:
        config = r.get_config()
        config.set((b"branch", branch_name), b"remote", remote_name)
        config.set((b"branch", branch_name), b"merge", remote_ref)
        config.write_to_path()
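

# Illustrative usage sketch for set_branch_tracking() (comment only; the
# branch and remote names are assumptions). This is the explicit form of the
# tracking setup that branch_create() performs for remote-tracking sources:
#
#   set_branch_tracking(".", b"feature-x", b"origin", b"refs/heads/main")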


def fetch(
    repo: RepoPath,
    remote_location: Optional[Union[str, bytes]] = None,
    outstream: TextIO = sys.stdout,
    errstream: Union[BinaryIO, RawIOBase] = default_bytes_err_stream,
    message: Optional[bytes] = None,
    depth: Optional[int] = None,
    prune: bool = False,
    prune_tags: bool = False,
    force: bool = False,
    operation: Optional[str] = None,
    thin_packs: bool = True,
    report_activity: Optional[Callable[[int, str], None]] = None,
    quiet: bool = False,
    include_tags: bool = False,
    username: Optional[str] = None,
    password: Optional[str] = None,
    key_filename: Optional[str] = None,
    ssh_command: Optional[str] = None,
) -> FetchPackResult:
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags
      force: Force fetching even if it would overwrite local changes
      operation: Git operation for authentication (e.g., "fetch")
      thin_packs: Whether to use thin packs
      report_activity: Optional callback for reporting transport activity
      quiet: Whether to suppress progress output
      include_tags: Whether to include tags
      username: Username for authentication
      password: Password for authentication
      key_filename: SSH key filename
      ssh_command: SSH command to use

    Returns:
      FetchPackResult with the refs on the remote
    """
    with open_repo_closing(repo) as r:
        (remote_name, remote_location) = get_remote_repo(r, remote_location)
        if message is None:
            message = b"fetch: from " + remote_location.encode(DEFAULT_ENCODING)
        client, path = get_transport_and_path(
            remote_location,
            config=r.get_config_stack(),
            operation=operation,
            thin_packs=thin_packs,
            report_activity=report_activity,
            quiet=quiet,
            include_tags=include_tags,
            username=username,
            password=password,
            key_filename=key_filename,
            ssh_command=ssh_command,
        )

        def progress(data: bytes) -> None:
            errstream.write(data)

        fetch_result = client.fetch(path.encode(), r, progress=progress, depth=depth)
        if remote_name is not None:
            _import_remote_refs(
                r.refs,
                remote_name,
                fetch_result.refs,
                message,
                prune=prune,
                prune_tags=prune_tags,
            )

    # Trigger auto GC if needed
    from .gc import maybe_auto_gc

    with open_repo_closing(repo) as r:
        maybe_auto_gc(r)

    return fetch_result
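

# Illustrative usage sketch for fetch() (comment only; "origin" is an
# assumption). Unlike pull(), fetch() never touches the working tree:
#
#   result = fetch(".", "origin", prune=True)
#   print(sorted(result.refs))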


def for_each_ref(
    repo: Union[Repo, str] = ".",
    pattern: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, bytes, bytes]]:
    """Iterate over all refs that match the (optional) pattern.

    Args:
      repo: Path to the repository
      pattern: Optional glob (7) patterns to filter the refs with

    Returns: List of bytes tuples with: (sha, object_type, ref_name)
    """
    if isinstance(pattern, str):
        pattern = os.fsencode(pattern)

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        if pattern:
            matching_refs: dict[bytes, bytes] = {}
            pattern_parts = pattern.split(b"/")
            for ref, sha in refs.items():
                matches = False

                # git for-each-ref uses glob (7) style patterns, but fnmatch
                # is greedy and also matches slashes, unlike glob.glob.
                # We have to check parts of the pattern individually.
                # See https://github.com/python/cpython/issues/72904
                ref_parts = ref.split(b"/")
                if len(ref_parts) > len(pattern_parts):
                    continue

                for pat, ref_part in zip(pattern_parts, ref_parts):
                    matches = fnmatch.fnmatchcase(ref_part, pat)
                    if not matches:
                        break

                if matches:
                    matching_refs[ref] = sha

            refs = matching_refs

        ret: list[tuple[bytes, bytes, bytes]] = [
            (sha, r.get_object(sha).type_name, ref)
            for ref, sha in sorted(
                refs.items(),
                key=lambda ref_sha: ref_sha[0],
            )
            if ref != b"HEAD"
        ]
        return ret
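

# Illustrative usage sketch for for_each_ref() (comment only):
#
#   for sha, obj_type, ref in for_each_ref(".", "refs/tags/*"):
#       print(sha.decode(), obj_type.decode(), ref.decode())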


def show_ref(
    repo: Union[Repo, str] = ".",
    patterns: Optional[list[Union[str, bytes]]] = None,
    head: bool = False,
    branches: bool = False,
    tags: bool = False,
    dereference: bool = False,
    verify: bool = False,
) -> list[tuple[bytes, bytes]]:
    """List references in a local repository.

    Args:
      repo: Path to the repository
      patterns: Optional list of patterns to filter refs (matched from the end)
      head: Show the HEAD reference
      branches: Limit to local branches (refs/heads/)
      tags: Limit to local tags (refs/tags/)
      dereference: Dereference tags into object IDs
      verify: Enable stricter reference checking (exact path match)

    Returns: List of tuples with (sha, ref_name) or (sha, ref_name^{}) for
      dereferenced tags
    """
    # Convert string patterns to bytes
    byte_patterns: Optional[list[bytes]] = None
    if patterns:
        byte_patterns = [os.fsencode(p) if isinstance(p, str) else p for p in patterns]

    with open_repo_closing(repo) as r:
        refs = r.get_refs()

        # Filter by branches/tags if specified
        if branches or tags:
            prefixes = []
            if branches:
                prefixes.append(LOCAL_BRANCH_PREFIX)
            if tags:
                prefixes.append(LOCAL_TAG_PREFIX)
            filtered_refs = filter_ref_prefix(refs, prefixes)
        else:
            # By default, show tags, heads, and remote refs (but not HEAD)
            filtered_refs = filter_ref_prefix(refs, [b"refs/"])

        # Add HEAD if requested
        if head and b"HEAD" in refs:
            filtered_refs[b"HEAD"] = refs[b"HEAD"]

        # Filter by patterns if specified
        if byte_patterns:
            matching_refs: dict[bytes, bytes] = {}
            for ref, sha in filtered_refs.items():
                for pattern in byte_patterns:
                    if verify:
                        # Verify mode requires exact match
                        if ref == pattern:
                            matching_refs[ref] = sha
                            break
                    else:
                        # Pattern matching from the end of the full name
                        # Only complete parts are matched
                        # E.g., "master" matches "refs/heads/master" but not "refs/heads/mymaster"
                        pattern_parts = pattern.split(b"/")
                        ref_parts = ref.split(b"/")

                        # Try to match from the end
                        if len(pattern_parts) <= len(ref_parts):
                            # Check if the end of ref matches the pattern
                            matches = True
                            for i in range(len(pattern_parts)):
                                if (
                                    ref_parts[-(len(pattern_parts) - i)]
                                    != pattern_parts[i]
                                ):
                                    matches = False
                                    break
                            if matches:
                                matching_refs[ref] = sha
                                break

            filtered_refs = matching_refs

        # Sort by ref name
        sorted_refs = sorted(filtered_refs.items(), key=lambda x: x[0])

        # Build result list
        result: list[tuple[bytes, bytes]] = []
        for ref, sha in sorted_refs:
            result.append((sha, ref))

            # Dereference tags if requested
            if dereference and ref.startswith(LOCAL_TAG_PREFIX):
                try:
                    obj = r.get_object(sha)
                    # Peel tag objects to get the underlying commit/object
                    from .objects import Tag

                    while obj.type_name == b"tag":
                        assert isinstance(obj, Tag)
                        _obj_class, sha = obj.object
                        obj = r.get_object(sha)
                    result.append((sha, ref + b"^{}"))
                except KeyError:
                    # Object not found, skip dereferencing
                    pass

        return result
  3403. def show_branch(
  3404. repo: Union[Repo, str] = ".",
  3405. branches: Optional[list[Union[str, bytes]]] = None,
  3406. all_branches: bool = False,
  3407. remotes: bool = False,
  3408. current: bool = False,
  3409. topo_order: bool = False,
  3410. more: Optional[int] = None,
  3411. list_branches: bool = False,
  3412. independent_branches: bool = False,
  3413. merge_base: bool = False,
  3414. ) -> list[str]:
  3415. """Display branches and their commits.
  3416. Args:
  3417. repo: Path to the repository
  3418. branches: List of specific branches to show (default: all local branches)
  3419. all_branches: Show both local and remote branches
  3420. remotes: Show only remote branches
  3421. current: Include current branch if not specified
  3422. topo_order: Show in topological order instead of chronological
  3423. more: Show N more commits beyond common ancestor (negative to show only headers)
  3424. list_branches: Synonym for more=-1 (show only branch headers)
  3425. independent_branches: Show only branches not reachable from others
  3426. merge_base: Show merge bases instead of commit list
  3427. Returns:
  3428. List of output lines
  3429. """
  3430. from .graph import find_octopus_base, independent
  3431. output_lines: list[str] = []
  3432. with open_repo_closing(repo) as r:
  3433. refs = r.get_refs()
  3434. # Determine which branches to show
  3435. branch_refs: dict[bytes, bytes] = {}
  3436. if branches:
  3437. # Specific branches requested
  3438. for branch in branches:
  3439. branch_bytes = (
  3440. os.fsencode(branch) if isinstance(branch, str) else branch
  3441. )
  3442. # Try as full ref name first
  3443. if branch_bytes in refs:
  3444. branch_refs[branch_bytes] = refs[branch_bytes]
  3445. # Try as branch name
  3446. elif LOCAL_BRANCH_PREFIX + branch_bytes in refs:
  3447. branch_refs[LOCAL_BRANCH_PREFIX + branch_bytes] = refs[
  3448. LOCAL_BRANCH_PREFIX + branch_bytes
  3449. ]
  3450. # Try as remote branch
  3451. elif LOCAL_REMOTE_PREFIX + branch_bytes in refs:
  3452. branch_refs[LOCAL_REMOTE_PREFIX + branch_bytes] = refs[
  3453. LOCAL_REMOTE_PREFIX + branch_bytes
  3454. ]
  3455. else:
  3456. # Default behavior: show local branches
  3457. if all_branches:
  3458. # Show both local and remote branches
  3459. branch_refs = filter_ref_prefix(
  3460. refs, [LOCAL_BRANCH_PREFIX, LOCAL_REMOTE_PREFIX]
  3461. )
  3462. elif remotes:
  3463. # Show only remote branches
  3464. branch_refs = filter_ref_prefix(refs, [LOCAL_REMOTE_PREFIX])
  3465. else:
  3466. # Show only local branches
  3467. branch_refs = filter_ref_prefix(refs, [LOCAL_BRANCH_PREFIX])
  3468. # Add current branch if requested and not already included
  3469. if current:
  3470. try:
  3471. head_refs, _ = r.refs.follow(b"HEAD")
  3472. if head_refs:
  3473. head_ref = head_refs[0]
  3474. if head_ref not in branch_refs and head_ref in refs:
  3475. branch_refs[head_ref] = refs[head_ref]
  3476. except (KeyError, TypeError):
  3477. # HEAD doesn't point to a branch or doesn't exist
  3478. pass
  3479. if not branch_refs:
  3480. return output_lines
  3481. # Sort branches for consistent output
  3482. sorted_branches = sorted(branch_refs.items(), key=lambda x: x[0])
  3483. branch_sha_list = [sha for _, sha in sorted_branches]
  3484. # Handle --independent flag
  3485. if independent_branches:
            independent_shas = independent(r, branch_sha_list)
            for ref_name, sha in sorted_branches:
                if sha in independent_shas:
                    ref_str = os.fsdecode(shorten_ref_name(ref_name))
                    output_lines.append(ref_str)
            return output_lines

        # Handle --merge-base flag
        if merge_base:
            if len(branch_sha_list) < 2:
                # Need at least 2 branches for merge base
                return output_lines
            merge_bases = find_octopus_base(r, branch_sha_list)
            for sha in merge_bases:
                output_lines.append(sha.decode("ascii"))
            return output_lines

        # Get current branch for marking
        current_branch: Optional[bytes] = None
        try:
            head_refs, _ = r.refs.follow(b"HEAD")
            if head_refs:
                current_branch = head_refs[0]
        except (KeyError, TypeError):
            pass

        # Collect commit information for each branch
        branch_commits: list[tuple[bytes, str]] = []  # (sha, message)
        for ref_name, sha in sorted_branches:
            try:
                commit = r[sha]
                if hasattr(commit, "message"):
                    message = commit.message.decode("utf-8", errors="replace").split(
                        "\n"
                    )[0]
                else:
                    message = ""
                branch_commits.append((sha, message))
            except KeyError:
                branch_commits.append((sha, ""))

        # Handle --list flag (show only branch headers)
        if list_branches or (more is not None and more < 0):
            # Just show the branch headers
            for i, (ref_name, sha) in enumerate(sorted_branches):
                is_current = ref_name == current_branch
                marker = "*" if is_current else "!"
                # Create spacing for alignment
                prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
                ref_str = os.fsdecode(shorten_ref_name(ref_name))
                _, message = branch_commits[i]
                output_lines.append(f"{prefix}[{ref_str}] {message}")
            return output_lines

        # Build commit history for visualization
        # Collect all commits reachable from any branch
        all_commits: dict[
            bytes, tuple[int, list[bytes], str]
        ] = {}  # sha -> (timestamp, parents, message)

        def collect_commits(sha: bytes, branch_idx: int, visited: set[bytes]) -> None:
            """Recursively collect commits."""
            if sha in visited:
                return
            visited.add(sha)
            try:
                commit = r[sha]
                if not hasattr(commit, "commit_time"):
                    return
                timestamp = commit.commit_time
                parents = commit.parents if hasattr(commit, "parents") else []
                message = (
                    commit.message.decode("utf-8", errors="replace").split("\n")[0]
                    if hasattr(commit, "message")
                    else ""
                )
                if sha not in all_commits:
                    all_commits[sha] = (timestamp, parents, message)
                # Recurse to parents
                for parent in parents:
                    collect_commits(parent, branch_idx, visited)
            except KeyError:
                # Commit not found, stop traversal
                pass

        # Collect commits from all branches
        for i, (_, sha) in enumerate(sorted_branches):
            collect_commits(sha, i, set())

        # Find common ancestor
        common_ancestor_sha = None
        if len(branch_sha_list) >= 2:
            try:
                merge_bases = find_octopus_base(r, branch_sha_list)
                if merge_bases:
                    common_ancestor_sha = merge_bases[0]
            except (KeyError, IndexError):
                pass

        # Sort commits (chronological by default, or topological if requested)
        if topo_order:
            # TODO: Implement proper topological ordering; for now --topo-order
            # falls back to reverse chronological order as well.
            sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])
        else:
            # Reverse chronological order (newest first)
            sorted_commits = sorted(all_commits.items(), key=lambda x: -x[1][0])

        # Determine how many commits to show
        if more is not None:
            # Find index of common ancestor
            if common_ancestor_sha and common_ancestor_sha in all_commits:
                ancestor_idx = next(
                    (
                        i
                        for i, (sha, _) in enumerate(sorted_commits)
                        if sha == common_ancestor_sha
                    ),
                    None,
                )
                if ancestor_idx is not None:
                    # Show commits up to ancestor + more
                    sorted_commits = sorted_commits[: ancestor_idx + 1 + more]

        # Determine which branches contain which commits
        branch_contains: list[set[bytes]] = []
        for ref_name, sha in sorted_branches:
            reachable = set()

            def mark_reachable(commit_sha: bytes) -> None:
                if commit_sha in reachable:
                    return
                reachable.add(commit_sha)
                if commit_sha in all_commits:
                    _, parents, _ = all_commits[commit_sha]
                    for parent in parents:
                        mark_reachable(parent)

            mark_reachable(sha)
            branch_contains.append(reachable)

        # Output branch headers
        for i, (ref_name, sha) in enumerate(sorted_branches):
            is_current = ref_name == current_branch
            marker = "*" if is_current else "!"
            # Create spacing for alignment
            prefix = " " * i + marker + " " * (len(sorted_branches) - i - 1)
            ref_str = os.fsdecode(shorten_ref_name(ref_name))
            _, message = branch_commits[i]
            output_lines.append(f"{prefix}[{ref_str}] {message}")

        # Output separator
        output_lines.append("-" * (len(sorted_branches) + 2))

        # Output commits
        for commit_sha, (_, _, message) in sorted_commits:
            # Build marker string
            markers = []
            for i, (ref_name, branch_sha) in enumerate(sorted_branches):
                if commit_sha == branch_sha:
                    # This is the tip of the branch
                    markers.append("*")
                elif commit_sha in branch_contains[i]:
                    # This commit is in the branch
                    markers.append("+")
                else:
                    # This commit is not in the branch
                    markers.append(" ")
            marker_str = "".join(markers)
            output_lines.append(f"{marker_str} [{message}]")
            # Limit output to 26 branches (git show-branch limitation)
            if len(sorted_branches) > 26:
                break

        return output_lines
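
# Illustrative output shape produced by the code above, for two branches with
# "main" checked out (branch names and messages are placeholders): a header
# block with one marker column per branch ("*" current branch, "!" others),
# a dashed separator, then one row per commit ("*" branch tip, "+" contained):
#
#   * [main] change on main
#    ! [topic] change on topic
#   ----
#   *  [change on main]
#    * [change on topic]
#   ++ [common ancestor]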
  3644. def ls_remote(
  3645. remote: Union[str, bytes],
  3646. config: Optional[Config] = None,
  3647. operation: Optional[str] = None,
  3648. thin_packs: bool = True,
  3649. report_activity: Optional[Callable[[int, str], None]] = None,
  3650. quiet: bool = False,
  3651. include_tags: bool = False,
  3652. username: Optional[str] = None,
  3653. password: Optional[str] = None,
  3654. key_filename: Optional[str] = None,
  3655. ssh_command: Optional[str] = None,
  3656. ) -> LsRemoteResult:
  3657. """List the refs in a remote.
  3658. Args:
  3659. remote: Remote repository location
  3660. config: Configuration to use
  3661. operation: Operation type
  3662. thin_packs: Whether to use thin packs
  3663. report_activity: Function to report activity
  3664. quiet: Whether to suppress output
  3665. include_tags: Whether to include tags
  3666. username: Username for authentication
  3667. password: Password for authentication
  3668. key_filename: SSH key filename
  3669. ssh_command: SSH command to use
  3670. Returns:
  3671. LsRemoteResult object with refs and symrefs
  3672. """
  3673. if config is None:
  3674. config = StackedConfig.default()
  3675. remote_str = remote.decode() if isinstance(remote, bytes) else remote
  3676. client, host_path = get_transport_and_path(
  3677. remote_str,
  3678. config=config,
  3679. operation=operation,
  3680. thin_packs=thin_packs,
  3681. report_activity=report_activity,
  3682. quiet=quiet,
  3683. include_tags=include_tags,
  3684. username=username,
  3685. password=password,
  3686. key_filename=key_filename,
  3687. ssh_command=ssh_command,
  3688. )
  3689. return client.get_refs(
  3690. host_path.encode() if isinstance(host_path, str) else host_path
  3691. )
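
# Usage sketch for ls_remote (illustrative only; the URL is a placeholder, and
# this assumes LsRemoteResult exposes its refs as a mapping of ref name to SHA
# per the docstring above):
#
#   result = ls_remote("https://example.com/repo.git")
#   for ref, sha in result.refs.items():
#       print(sha.decode("ascii"), ref.decode("utf-8"))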


def repack(repo: RepoPath) -> None:
    """Repack loose files in a repository.

    Currently this only packs loose objects.

    Args:
      repo: Path to the repository
    """
    with open_repo_closing(repo) as r:
        r.object_store.pack_loose_objects()


def pack_objects(
    repo: RepoPath,
    object_ids: Sequence[bytes],
    packf: BinaryIO,
    idxf: Optional[BinaryIO],
    delta_window_size: Optional[int] = None,
    deltify: Optional[bool] = None,
    reuse_deltas: bool = True,
    pack_index_version: Optional[int] = None,
) -> None:
    """Pack objects into a file.

    Args:
      repo: Path to the repository
      object_ids: List of object ids to write
      packf: File-like object to write to
      idxf: File-like object to write to (can be None)
      delta_window_size: Sliding window size for searching for deltas;
        set to None for the default window size.
      deltify: Whether to deltify objects
      reuse_deltas: Allow reuse of existing deltas while deltifying
      pack_index_version: Pack index version to use (1, 2, or 3). If None,
        uses the default version.
    """
    with open_repo_closing(repo) as r:
        entries, data_sum = write_pack_from_container(
            packf.write,
            r.object_store,
            [(oid, None) for oid in object_ids],
            deltify=deltify,
            delta_window_size=delta_window_size,
            reuse_deltas=reuse_deltas,
        )
        if idxf is not None:
            index_entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
            write_pack_index(idxf, index_entries, data_sum, version=pack_index_version)
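
# Usage sketch (file names and the object id are placeholders):
#
#   with open("out.pack", "wb") as packf, open("out.idx", "wb") as idxf:
#       pack_objects(".", [b"<40-hex-object-id>"], packf, idxf)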


def ls_tree(
    repo: RepoPath,
    treeish: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
    outstream: Union[TextIO, BinaryIO] = sys.stdout,
    recursive: bool = False,
    name_only: bool = False,
) -> None:
    """List contents of a tree.

    Args:
      repo: Path to the repository
      treeish: Tree id to list
      outstream: Output stream (defaults to stdout)
      recursive: Whether to recursively list files
      name_only: Only print item name
    """

    def list_tree(store: BaseObjectStore, treeid: bytes, base: bytes) -> None:
        tree = store[treeid]
        assert isinstance(tree, Tree)
        for name, mode, sha in tree.iteritems():
            assert name is not None
            assert mode is not None
            assert sha is not None
            if base:
                name = posixpath.join(base, name)
            if name_only:
                if isinstance(outstream, BinaryIO):
                    outstream.write(name + b"\n")
                else:
                    outstream.write(name.decode("utf-8", "replace") + "\n")
            else:
                formatted = pretty_format_tree_entry(name, mode, sha)
                if isinstance(outstream, BinaryIO):
                    outstream.write(formatted.encode("utf-8"))
                else:
                    outstream.write(formatted)
            if stat.S_ISDIR(mode) and recursive:
                list_tree(store, sha, name)

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        list_tree(r.object_store, tree.id, b"")
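
# Usage sketch: list the HEAD tree recursively, printing names only.
#
#   ls_tree(".", b"HEAD", recursive=True, name_only=True)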


def remote_add(
    repo: RepoPath,
    name: Union[bytes, str],
    url: Union[bytes, str],
) -> None:
    """Add a remote.

    Args:
      repo: Path to the repository
      name: Remote name
      url: Remote URL
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    if not isinstance(url, bytes):
        url = url.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b"remote", name)
        if c.has_section(section):
            raise RemoteExists(f"Remote {name.decode()} already exists")
        c.set(section, b"url", url)
        c.write_to_path()


def remote_remove(repo: Repo, name: Union[bytes, str]) -> None:
    """Remove a remote.

    Args:
      repo: Path to the repository
      name: Remote name
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b"remote", name)
        del c[section]
        c.write_to_path()
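
# Usage sketch (remote name and URL are placeholders):
#
#   remote_add(".", "upstream", "https://example.com/repo.git")
#   remote_remove(".", "upstream")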


def _quote_path(path: str) -> str:
    """Quote a path using C-style quoting similar to git's core.quotePath.

    Args:
      path: Path to quote

    Returns:
      Quoted path string
    """
    # Check if path needs quoting (non-ASCII or special characters)
    needs_quoting = False
    for char in path:
        if ord(char) > 127 or char in '"\\':
            needs_quoting = True
            break
    if not needs_quoting:
        return path

    # Apply C-style quoting
    quoted = '"'
    for char in path:
        if ord(char) > 127:
            # Non-ASCII character, encode as octal escape
            utf8_bytes = char.encode("utf-8")
            for byte in utf8_bytes:
                quoted += f"\\{byte:03o}"
        elif char == '"':
            quoted += '\\"'
        elif char == "\\":
            quoted += "\\\\"
        else:
            quoted += char
    quoted += '"'
    return quoted
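
# Illustrative behavior of _quote_path (results shown as Python literals):
#
#   _quote_path("plain.txt")  -> 'plain.txt'
#   _quote_path('a"b.txt')    -> '"a\\"b.txt"'
#   _quote_path("тест.txt")   -> '"\\321\\202\\320\\265\\321\\201\\321\\202.txt"'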


def check_ignore(
    repo: RepoPath,
    paths: Sequence[Union[str, bytes, os.PathLike[str]]],
    no_index: bool = False,
    quote_path: bool = True,
) -> Iterator[str]:
    r"""Debug gitignore files.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
      quote_path: If True, quote non-ASCII characters in returned paths using
        C-style octal escapes (e.g. "тест.txt" becomes
        "\\321\\202\\320\\265\\321\\201\\321\\202.txt"). If False, return
        raw unicode paths.

    Returns: Iterator over the paths that are ignored
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for original_path in paths:
            # Convert path to string for consistent handling
            original_path_fspath = os.fspath(original_path)
            # Normalize to str
            original_path_str = os.fsdecode(original_path_fspath)
            if not no_index and path_to_tree_path(r.path, original_path_str) in index:
                continue
            # Preserve whether the original path had a trailing slash
            had_trailing_slash = original_path_str.endswith(("/", os.path.sep))
            if os.path.isabs(original_path_str):
                path = os.path.relpath(original_path_str, r.path)
                # Normalize Windows paths to use forward slashes
                if os.path.sep != "/":
                    path = path.replace(os.path.sep, "/")
            else:
                path = original_path_str
            # Restore trailing slash if it was in the original
            if had_trailing_slash and not path.endswith("/"):
                path = path + "/"
            # For directories, check with trailing slash to get correct ignore behavior
            test_path = path
            path_without_slash = path.rstrip("/")
            is_directory = os.path.isdir(os.path.join(r.path, path_without_slash))
            # If this is a directory path, ensure we test it correctly
            if is_directory and not path.endswith("/"):
                test_path = path + "/"
            if ignore_manager.is_ignored(test_path):
                # Return relative path (like git does) when absolute path was provided
                if os.path.isabs(original_path):
                    output_path = path
                else:
                    output_path = original_path  # type: ignore[assignment]
                yield _quote_path(output_path) if quote_path else output_path
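
# Usage sketch (paths are placeholders):
#
#   for ignored in check_ignore(".", ["build/", "src/main.c"]):
#       print(ignored)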


def update_head(
    repo: RepoPath,
    target: Union[str, bytes],
    detached: bool = False,
    new_branch: Optional[Union[str, bytes]] = None,
) -> None:
    """Update HEAD to point at a new branch/commit.

    Note that this does not actually update the working tree.

    Args:
      repo: Path to the repository
      target: Branch or committish to switch to
      detached: Create a detached head
      new_branch: New branch to create
    """
    with open_repo_closing(repo) as r:
        if new_branch is not None:
            to_set = _make_branch_ref(new_branch)
        else:
            to_set = b"HEAD"
        if detached:
            # TODO(jelmer): Provide some way so that the actual ref gets
            # updated rather than what it points to, so the delete isn't
            # necessary.
            del r.refs[to_set]
            r.refs[to_set] = parse_commit(r, target).id
        else:
            r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
        if new_branch is not None:
            r.refs.set_symbolic_ref(b"HEAD", to_set)
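
# Usage sketch (ref and tag names are placeholders):
#
#   update_head(".", "refs/heads/main")       # symbolic HEAD update
#   update_head(".", "v1.0", detached=True)   # detached HEAD at a committish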


def checkout(
    repo: Union[str, os.PathLike[str], Repo],
    target: Optional[Union[str, bytes, Commit, Tag]] = None,
    force: bool = False,
    new_branch: Optional[Union[bytes, str]] = None,
    paths: Optional[list[Union[bytes, str]]] = None,
) -> None:
    """Switch to a branch or commit, updating both HEAD and the working tree.

    This is similar to 'git checkout', allowing you to switch to a branch,
    tag, or specific commit. Unlike update_head, this function also updates
    the working tree to match the target.

    Args:
      repo: Path to repository or repository object
      target: Branch name, tag, or commit SHA to checkout. If None and paths
        is specified, restores files from HEAD
      force: Force checkout even if there are local changes
      new_branch: Create a new branch at target (like git checkout -b)
      paths: List of specific paths to checkout. If specified, only these
        paths are updated and HEAD is not changed

    Raises:
      CheckoutError: If checkout cannot be performed due to conflicts
      KeyError: If the target reference cannot be found
    """
    with open_repo_closing(repo) as r:
        # Store the original target for later reference checks
        original_target = target
        worktree = r.get_worktree()
        # Handle path-specific checkout (like git checkout -- <paths>)
        if paths is not None:
            # Convert paths to bytes
            byte_paths = []
            for path in paths:
                if isinstance(path, str):
                    byte_paths.append(path.encode(DEFAULT_ENCODING))
                else:
                    byte_paths.append(path)
            # If no target specified, use HEAD
            if target is None:
                try:
                    target = r.refs[b"HEAD"]
                except KeyError:
                    raise CheckoutError("No HEAD reference found")
            else:
                if isinstance(target, str):
                    target = target.encode(DEFAULT_ENCODING)
            # Get the target commit and tree
            target_tree = parse_tree(r, target)
            # Get blob normalizer for line ending conversion
            blob_normalizer = r.get_blob_normalizer()
            # Restore specified paths from target tree
            for path in byte_paths:
                try:
                    # Look up the path in the target tree
                    mode, sha = target_tree.lookup_path(
                        r.object_store.__getitem__, path
                    )
                    obj = r[sha]
                    assert isinstance(obj, Blob), "Expected a Blob object"
                except KeyError:
                    # Path doesn't exist in target tree
                    pass
                else:
                    # Create directories if needed
                    # Handle path as string
                    if isinstance(path, bytes):
                        path_str = path.decode(DEFAULT_ENCODING)
                    else:
                        path_str = path
                    file_path = os.path.join(r.path, path_str)
                    os.makedirs(os.path.dirname(file_path), exist_ok=True)
                    # Write the file content
                    if stat.S_ISREG(mode):
                        # Apply checkout filters (smudge)
                        if blob_normalizer:
                            obj = blob_normalizer.checkout_normalize(obj, path)
                        flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
                        if sys.platform == "win32":
                            flags |= os.O_BINARY
                        with os.fdopen(os.open(file_path, flags, mode), "wb") as f:
                            f.write(obj.data)
                    # Update the index
                    worktree.stage(path)
            return
        # Normal checkout (switching branches/commits)
        if target is None:
            raise ValueError("Target must be specified for branch/commit checkout")
        if isinstance(target, str):
            target_bytes = target.encode(DEFAULT_ENCODING)
        elif isinstance(target, bytes):
            target_bytes = target
        else:
            # For Commit/Tag objects, we'll use their SHA
            target_bytes = target.id
        if isinstance(new_branch, str):
            new_branch = new_branch.encode(DEFAULT_ENCODING)

        # Parse the target to get the commit
        assert (
            original_target is not None
        )  # Guaranteed by earlier check for normal checkout
        target_commit = parse_commit(r, original_target)
        target_tree_id = target_commit.tree

        # Get current HEAD tree for comparison
        try:
            current_head = r.refs[b"HEAD"]
            current_commit = r[current_head]
            assert isinstance(current_commit, Commit), "Expected a Commit object"
            current_tree_id = current_commit.tree
        except KeyError:
            # No HEAD yet (empty repo)
            current_tree_id = None

        # Check for uncommitted changes if not forcing
        if not force and current_tree_id is not None:
            status_report = status(r)
            changes = []
            # staged is a dict with 'add', 'delete', 'modify' keys
            if isinstance(status_report.staged, dict):
                changes.extend(status_report.staged.get("add", []))
                changes.extend(status_report.staged.get("delete", []))
                changes.extend(status_report.staged.get("modify", []))
            # unstaged is a list
            changes.extend(status_report.unstaged)
            if changes:
                # Check if any changes would conflict with checkout
                target_tree_obj = r[target_tree_id]
                assert isinstance(target_tree_obj, Tree), "Expected a Tree object"
                target_tree = target_tree_obj
                for change in changes:
                    if isinstance(change, str):
                        change = change.encode(DEFAULT_ENCODING)
                    try:
                        target_tree.lookup_path(r.object_store.__getitem__, change)
                    except KeyError:
                        # File doesn't exist in target tree - change can be preserved
                        pass
                    else:
                        # File exists in target tree - would overwrite local changes
                        raise CheckoutError(
                            f"Your local changes to '{change.decode()}' would be "
                            "overwritten by checkout. Please commit or stash before switching."
                        )
        # Get configuration for working directory update
        config = r.get_config()
        honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt")
        # The key within the [core] section is "protectNTFS".
        if config.get_boolean(b"core", b"protectNTFS", os.name == "nt"):
            validate_path_element = validate_path_element_ntfs
        else:
            validate_path_element = validate_path_element_default
  4068. if config.get_boolean(b"core", b"symlinks", True):
  4069. def symlink_wrapper(
  4070. source: Union[str, bytes, os.PathLike[str]],
  4071. target: Union[str, bytes, os.PathLike[str]],
  4072. ) -> None:
  4073. symlink(source, target) # type: ignore[arg-type,unused-ignore]
  4074. symlink_fn = symlink_wrapper
  4075. else:
  4076. def symlink_fallback(
  4077. source: Union[str, bytes, os.PathLike[str]],
  4078. target: Union[str, bytes, os.PathLike[str]],
  4079. ) -> None:
  4080. mode = "w" + ("b" if isinstance(source, bytes) else "")
  4081. with open(target, mode) as f:
  4082. f.write(source)
  4083. symlink_fn = symlink_fallback
        # Get blob normalizer for line ending conversion
        blob_normalizer = r.get_blob_normalizer()

        # Update working tree
        tree_change_iterator: Iterator[TreeChange] = tree_changes(
            r.object_store, current_tree_id, target_tree_id
        )
        update_working_tree(
            r,
            current_tree_id,
            target_tree_id,
            change_iterator=tree_change_iterator,
            honor_filemode=honor_filemode,
            validate_path_element=validate_path_element,
            symlink_fn=symlink_fn,
            force_remove_untracked=force,
            blob_normalizer=blob_normalizer,
            allow_overwrite_modified=force,
        )

        # Update HEAD
        if new_branch:
            # Create new branch and switch to it
            branch_create(r, new_branch, objectish=target_commit.id.decode("ascii"))
            update_head(r, new_branch)

            # Set up tracking if creating from a remote branch
            from .refs import LOCAL_REMOTE_PREFIX, parse_remote_ref

            if isinstance(original_target, bytes) and target_bytes.startswith(
                LOCAL_REMOTE_PREFIX
            ):
                try:
                    remote_name, branch_name = parse_remote_ref(target_bytes)
                    # Set tracking to refs/heads/<branch> on the remote
                    set_branch_tracking(
                        r, new_branch, remote_name, b"refs/heads/" + branch_name
                    )
                except ValueError:
                    # Invalid remote ref format, skip tracking setup
                    pass
        else:
            # Check if target is a branch name (with or without refs/heads/ prefix)
            branch_ref = None
            if (
                isinstance(original_target, (str, bytes))
                and target_bytes in r.refs.keys()
            ):
                if target_bytes.startswith(LOCAL_BRANCH_PREFIX):
                    branch_ref = target_bytes
            else:
                # Try adding refs/heads/ prefix
                potential_branch = (
                    _make_branch_ref(target_bytes)
                    if isinstance(original_target, (str, bytes))
                    else None
                )
                if potential_branch in r.refs.keys():
                    branch_ref = potential_branch
            if branch_ref:
                # It's a branch - update HEAD symbolically
                update_head(r, branch_ref)
            else:
                # It's a tag, other ref, or commit SHA - detached HEAD
                update_head(r, target_commit.id.decode("ascii"), detached=True)
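
# Usage sketch (branch and path names are placeholders):
#
#   checkout(".", "main")                    # switch to a local branch
#   checkout(".", paths=["README.md"])       # restore a file from HEAD
#   # creating a tracking branch from a remote-tracking ref:
#   checkout(".", b"refs/remotes/origin/topic", new_branch="topic")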


def reset_file(
    repo: Repo,
    file_path: str,
    target: Union[str, bytes, Commit, Tree, Tag] = b"HEAD",
    symlink_fn: Optional[
        Callable[
            [Union[str, bytes, os.PathLike[str]], Union[str, bytes, os.PathLike[str]]],
            None,
        ]
    ] = None,
) -> None:
    """Reset the file to specific commit or branch.

    Args:
      repo: dulwich Repo object
      file_path: file to reset, relative to the repository path
      target: branch or commit or b'HEAD' to reset
      symlink_fn: Function to use for creating symlinks
    """
    tree = parse_tree(repo, treeish=target)
    tree_path = _fs_to_tree_path(file_path)
    file_entry = tree.lookup_path(repo.object_store.__getitem__, tree_path)
    full_path = os.path.join(os.fsencode(repo.path), tree_path)
    blob = repo.object_store[file_entry[1]]
    assert isinstance(blob, Blob)
    mode = file_entry[0]
    build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
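
# Usage sketch (assumes an open Repo object; the path is a placeholder):
#
#   with open_repo_closing(".") as r:
#       reset_file(r, "docs/index.rst", target=b"HEAD")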


@replace_me(since="0.22.9", remove_in="0.24.0")
def checkout_branch(
    repo: Union[str, os.PathLike[str], Repo],
    target: Union[bytes, str],
    force: bool = False,
) -> None:
    """Switch branches or restore working tree files.

    This is now a wrapper around the general checkout() function.
    Preserved for backward compatibility.

    Args:
      repo: dulwich Repo object
      target: branch name or commit sha to checkout
      force: Whether to force the checkout, discarding local changes
    """
    # Simply delegate to the new checkout function
    return checkout(repo, target, force=force)


def sparse_checkout(
    repo: Union[str, os.PathLike[str], Repo],
    patterns: Optional[list[str]] = None,
    force: bool = False,
    cone: Optional[bool] = None,
) -> None:
    """Perform a sparse checkout in the repository (either 'full' or 'cone mode').

    Perform sparse checkout in either 'cone' (directory-based) mode or
    'full pattern' (.gitignore) mode, depending on the ``cone`` parameter.
    If ``cone`` is ``None``, the mode is inferred from the repository's
    ``core.sparseCheckoutCone`` config setting.

    Steps:
      1) If ``patterns`` is provided, write them to ``.git/info/sparse-checkout``.
      2) Determine which paths in the index are included vs. excluded.
         - If ``cone=True``, use "cone-compatible" directory-based logic.
         - If ``cone=False``, use standard .gitignore-style matching.
      3) Update the index's skip-worktree bits and add/remove files in
         the working tree accordingly.
      4) If ``force=False``, refuse to remove files that have local modifications.

    Args:
      repo: Path to the repository or a Repo object.
      patterns: Optional list of sparse-checkout patterns to write.
      force: Whether to force removal of locally modified files (default False).
      cone: Boolean indicating cone mode (True/False). If None, read from config.

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        # --- 0) Possibly infer 'cone' from config ---
        if cone is None:
            cone = repo_obj.get_worktree().infer_cone_mode()

        # --- 1) Read or write patterns ---
        if patterns is None:
            lines = repo_obj.get_worktree().get_sparse_checkout_patterns()
            if lines is None:
                raise Error("No sparse checkout patterns found.")
        else:
            lines = patterns
            repo_obj.get_worktree().set_sparse_checkout_patterns(patterns)

        # --- 2) Determine the set of included paths ---
        index = repo_obj.open_index()
        included_paths = determine_included_paths(index, lines, cone)

        # --- 3) Apply those results to the index & working tree ---
        try:
            apply_included_paths(repo_obj, included_paths, force=force)
        except SparseCheckoutConflictError as exc:
            raise CheckoutError(*exc.args) from exc
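
# Usage sketch (patterns are placeholders; full-pattern mode shown):
#
#   sparse_checkout(".", patterns=["/*", "!/docs/"], cone=False)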


def cone_mode_init(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Initialize a repository to use sparse checkout in 'cone' mode.

    Sets ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` in the config.
    Writes an initial ``.git/info/sparse-checkout`` file that includes only
    top-level files (and excludes all subdirectories), e.g. ``["/*", "!/*/"]``.
    Then performs a sparse checkout to update the working tree accordingly.

    If no directories are specified, then only top-level files are included:
    https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling

    Args:
      repo: Path to the repository or a Repo object.

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        patterns = ["/*", "!/*/"]  # root-level files only
        sparse_checkout(repo_obj, patterns, force=True, cone=True)


def cone_mode_set(
    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
) -> None:
    """Overwrite the existing 'cone-mode' sparse patterns with a new set of directories.

    Ensures ``core.sparseCheckout`` and ``core.sparseCheckoutCone`` are enabled.
    Writes new patterns so that only the specified directories (and top-level files)
    remain in the working tree, and applies the sparse checkout update.

    Args:
      repo: Path to the repository or a Repo object.
      dirs: List of directory names to include.
      force: Whether to forcibly discard local modifications (default False).

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        repo_obj.get_worktree().set_cone_mode_patterns(dirs=dirs)
        new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
        # Finally, apply the patterns and update the working tree
        sparse_checkout(repo_obj, new_patterns, force=force, cone=True)


def cone_mode_add(
    repo: Union[str, os.PathLike[str], Repo], dirs: Sequence[str], force: bool = False
) -> None:
    """Add new directories to the existing 'cone-mode' sparse-checkout patterns.

    Reads the current patterns from ``.git/info/sparse-checkout``, adds pattern
    lines to include the specified directories, and then performs a sparse
    checkout to update the working tree accordingly.

    Args:
      repo: Path to the repository or a Repo object.
      dirs: List of directory names to add to the sparse-checkout.
      force: Whether to forcibly discard local modifications (default False).

    Returns:
      None
    """
    with open_repo_closing(repo) as repo_obj:
        repo_obj.get_worktree().configure_for_cone_mode()
        # Do not pass base patterns as dirs
        base_patterns = ["/*", "!/*/"]
        existing_dirs = [
            pat.strip("/")
            for pat in repo_obj.get_worktree().get_sparse_checkout_patterns()
            if pat not in base_patterns
        ]
        added_dirs = existing_dirs + list(dirs or [])
        repo_obj.get_worktree().set_cone_mode_patterns(dirs=added_dirs)
        new_patterns = repo_obj.get_worktree().get_sparse_checkout_patterns()
        sparse_checkout(repo_obj, patterns=new_patterns, force=force, cone=True)
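
# Cone-mode workflow sketch (directory names are placeholders):
#
#   cone_mode_init(".")            # only top-level files
#   cone_mode_set(".", ["src"])    # include src/ (plus top-level files)
#   cone_mode_add(".", ["docs"])   # additionally include docs/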


def check_mailmap(repo: RepoPath, contact: Union[str, bytes]) -> bytes:
    """Check canonical name and email of contact.

    Args:
      repo: Path to the repository
      contact: Contact name and/or email

    Returns: Canonical contact data
    """
    with open_repo_closing(repo) as r:
        from .mailmap import Mailmap

        try:
            mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap"))
        except FileNotFoundError:
            mailmap = Mailmap()
        contact_bytes = (
            contact.encode(DEFAULT_ENCODING) if isinstance(contact, str) else contact
        )
        result = mailmap.lookup(contact_bytes)
        if isinstance(result, bytes):
            return result
        else:
            # Convert tuple back to bytes format
            name, email = result
            if name is None:
                name = b""
            if email is None:
                email = b""
            return name + b" <" + email + b">"
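
# Usage sketch (the identity is a placeholder):
#
#   canonical = check_mailmap(".", "Jane Doe <jane@example.com>")
#   print(canonical.decode("utf-8"))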


def fsck(repo: RepoPath) -> Iterator[tuple[bytes, Exception]]:
    """Check a repository.

    Args:
      repo: A path to the repository

    Returns: Iterator over errors/warnings
    """
    with open_repo_closing(repo) as r:
        # TODO(jelmer): check pack files
        # TODO(jelmer): check graph
        # TODO(jelmer): check refs
        for sha in r.object_store:
            o = r.object_store[sha]
            try:
                o.check()
            except Exception as e:
                yield (sha, e)
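
# Usage sketch: report objects that fail their integrity check.
#
#   for sha, err in fsck("."):
#       print(sha.decode("ascii"), err)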


def stash_list(
    repo: Union[str, os.PathLike[str], Repo],
) -> Iterator[tuple[int, tuple[bytes, bytes]]]:
    """List all stashes in a repository."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        entries = stash.stashes()
        # Convert Entry objects to (old_sha, new_sha) tuples
        return enumerate([(entry.old_sha, entry.new_sha) for entry in entries])


def stash_push(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Push a new stash onto the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.push()


def stash_pop(repo: Union[str, os.PathLike[str], Repo]) -> None:
    """Pop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.pop(0)


def stash_drop(repo: Union[str, os.PathLike[str], Repo], index: int) -> None:
    """Drop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from .stash import Stash

        stash = Stash.from_repo(r)
        stash.drop(index)
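
# Stash workflow sketch:
#
#   stash_push(".")                      # save working tree changes
#   for i, (old_sha, new_sha) in stash_list("."):
#       print(i, new_sha.decode("ascii"))
#   stash_pop(".")                       # re-apply the most recent stash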


def ls_files(repo: RepoPath) -> list[bytes]:
    """List all files in an index."""
    with open_repo_closing(repo) as r:
        return sorted(r.open_index())


def find_unique_abbrev(
    object_store: BaseObjectStore, object_id: Union[str, bytes], min_length: int = 7
) -> str:
    """Find the shortest unique abbreviation for an object ID.

    Args:
      object_store: Object store to search in
      object_id: The full object ID to abbreviate
      min_length: Minimum length of abbreviation (default 7)

    Returns:
      The shortest unique prefix of the object ID (at least min_length chars)
    """
    if isinstance(object_id, bytes):
        hex_id = object_id.decode("ascii")
    else:
        hex_id = object_id

    # Start with minimum length
    for length in range(min_length, len(hex_id) + 1):
        prefix = hex_id[:length]
        matches = 0
        # Check if this prefix is unique
        for obj_id in object_store:
            if obj_id.decode("ascii").startswith(prefix):
                matches += 1
                if matches > 1:
                    # Not unique, need more characters
                    break
        if matches == 1:
            # Found unique prefix
            return prefix
    # If we get here, return the full ID
    return hex_id
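
# Usage sketch (the SHA is a placeholder). Note that each candidate length
# scans the whole object store, so this is linear in repository size:
#
#   with open_repo_closing(".") as r:
#       short = find_unique_abbrev(r.object_store, "<full-40-hex-sha>")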


def describe(
    repo: Union[str, os.PathLike[str], Repo], abbrev: Optional[int] = None
) -> str:
    """Describe the repository version.

    Args:
      repo: git repository
      abbrev: number of characters of commit to take, default is 7

    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    abbrev_slice = slice(0, abbrev if abbrev is not None else 7)
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key_str = key.decode()
            obj = r.get_object(value)
            if "tags" not in key_str:
                continue
            _, tag = key_str.rsplit("/", 1)
            if isinstance(obj, Tag):
                # Annotated tag case
                commit = r.get_object(obj.object[1])
            else:
                # Lightweight tag case - obj is already the commit
                commit = obj
            if not isinstance(commit, Commit):
                raise AssertionError(
                    f"Expected Commit object, got {type(commit).__name__}"
                )
            tag_info: list[Any] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode("ascii"),
            ]
            tags[tag] = tag_info

        # Sort tags by datetime (first element of the value list)
        sorted_tags = sorted(
            tags.items(), key=lambda tag_item: tag_item[1][0], reverse=True
        )

        # Get the latest commit
        latest_commit = r[r.head()]

        # If there are no tags, return the latest commit
        if len(sorted_tags) == 0:
            if abbrev is not None:
                return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
            return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"

        # We're now 0 commits from the top
        commit_count = 0

        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode("ascii")
            for tag_item in sorted_tags:
                tag_name = tag_item[0]
                tag_commit = tag_item[1][1]
                if commit_id == tag_commit:
                    if commit_count == 0:
                        return tag_name
                    else:
                        if abbrev is not None:
                            abbrev_hash = latest_commit.id.decode("ascii")[abbrev_slice]
                        else:
                            abbrev_hash = find_unique_abbrev(
                                r.object_store, latest_commit.id
                            )
                        return f"{tag_name}-{commit_count}-g{abbrev_hash}"
            commit_count += 1

        # Return plain commit if no parent tag can be found
        if abbrev is not None:
            return "g{}".format(latest_commit.id.decode("ascii")[abbrev_slice])
        return f"g{find_unique_abbrev(r.object_store, latest_commit.id)}"
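
# Usage sketch: output follows the docstring examples, e.g. "v0.1" when HEAD
# is tagged, "v0.1-5-gabcdefh" when HEAD is 5 commits past the tag.
#
#   print(describe("."))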


def get_object_by_path(
    repo: RepoPath,
    path: Union[str, bytes],
    committish: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> Union[Blob, Tree, Commit, Tag]:
    """Get an object by path.

    Args:
      repo: A path to the repository
      path: Path to look up
      committish: Commit to look up path in

    Returns: A `ShaFile` object
    """
    if committish is None:
        committish = "HEAD"
    # Get the repository
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        base_tree = commit.tree
        if not isinstance(path, bytes):
            path = commit_encode(commit, path)
        (_mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path)
        obj = r[sha]
        assert isinstance(obj, (Blob, Tree, Commit, Tag))
        return obj
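
# Usage sketch (assumes the placeholder path names a regular file, so the
# returned object is a Blob):
#
#   blob = get_object_by_path(".", "README.md")
#   print(blob.data.decode("utf-8", "replace"))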


def write_tree(repo: RepoPath) -> bytes:
    """Write a tree object from the index.

    Args:
      repo: Repository for which to write tree

    Returns: tree id for the tree that was written
    """
    with open_repo_closing(repo) as r:
        return r.open_index().commit(r.object_store)


def _do_merge(
    r: Repo,
    merge_commit_id: bytes,
    no_commit: bool = False,
    no_ff: bool = False,
    message: Optional[bytes] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> tuple[Optional[bytes], list[bytes]]:
    """Internal merge implementation that operates on an open repository.

    Args:
      r: Open repository object
      merge_commit_id: SHA of commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts
    """
    from .graph import find_merge_base
    from .merge import three_way_merge

    # Get HEAD commit
    try:
        head_commit_id = r.refs[b"HEAD"]
    except KeyError:
        raise Error("No HEAD reference found")
    head_commit = r[head_commit_id]
    assert isinstance(head_commit, Commit), "Expected a Commit object"
    merge_commit = r[merge_commit_id]
    assert isinstance(merge_commit, Commit), "Expected a Commit object"

    # Check if fast-forward is possible
    merge_bases = find_merge_base(r, [head_commit_id, merge_commit_id])
    if not merge_bases:
        raise Error("No common ancestor found")
    # Use the first merge base
    base_commit_id = merge_bases[0]

    # Check if we're trying to merge the same commit
    if head_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Check for fast-forward
    if base_commit_id == head_commit_id and not no_ff:
        # Fast-forward merge
        r.refs[b"HEAD"] = merge_commit_id
        # Update the working directory
        changes = tree_changes(r.object_store, head_commit.tree, merge_commit.tree)
        update_working_tree(
            r, head_commit.tree, merge_commit.tree, change_iterator=changes
        )
        return (merge_commit_id, [])

    if base_commit_id == merge_commit_id:
        # Already up to date
        return (None, [])

    # Perform three-way merge
    base_commit = r[base_commit_id]
    assert isinstance(base_commit, Commit), "Expected a Commit object"
    gitattributes = r.get_gitattributes()
    config = r.get_config()
    merged_tree, conflicts = three_way_merge(
        r.object_store, base_commit, head_commit, merge_commit, gitattributes, config
    )
    # Add merged tree to object store
    r.object_store.add_object(merged_tree)

    # Update index and working directory
    changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
    update_working_tree(r, head_commit.tree, merged_tree.id, change_iterator=changes)

    if conflicts or no_commit:
        # Don't create a commit if there are conflicts or no_commit is True
        return (None, conflicts)

    # Create merge commit
    merge_commit_obj = Commit()
    merge_commit_obj.tree = merged_tree.id
    merge_commit_obj.parents = [head_commit_id, merge_commit_id]

    # Set author/committer
    if author is None:
        author = get_user_identity(r.get_config_stack())
    if committer is None:
        committer = author
    merge_commit_obj.author = author
    merge_commit_obj.committer = committer

    # Set timestamps
    timestamp = int(time.time())
    timezone = 0  # UTC
    merge_commit_obj.author_time = timestamp
    merge_commit_obj.author_timezone = timezone
    merge_commit_obj.commit_time = timestamp
    merge_commit_obj.commit_timezone = timezone

    # Set commit message
    if message is None:
        message = f"Merge commit '{merge_commit_id.decode()[:7]}'\n".encode()
    merge_commit_obj.message = (
        message.encode() if isinstance(message, str) else message
    )

    # Add commit to object store
    r.object_store.add_object(merge_commit_obj)

    # Update HEAD
    r.refs[b"HEAD"] = merge_commit_obj.id
    return (merge_commit_obj.id, [])


def merge(
    repo: Union[str, os.PathLike[str], Repo],
    committish: Union[str, bytes, Commit, Tag],
    no_commit: bool = False,
    no_ff: bool = False,
    message: Optional[bytes] = None,
    author: Optional[bytes] = None,
    committer: Optional[bytes] = None,
) -> tuple[Optional[bytes], list[bytes]]:
    """Merge a commit into the current branch.

    Args:
      repo: Repository to merge into
      committish: Commit to merge
      no_commit: If True, do not create a merge commit
      no_ff: If True, force creation of a merge commit
      message: Optional merge commit message
      author: Optional author for merge commit
      committer: Optional committer for merge commit

    Returns:
      Tuple of (merge_commit_sha, conflicts) where merge_commit_sha is None
      if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference or commit cannot be found
    """
    with open_repo_closing(repo) as r:
        # Parse the commit to merge
        try:
            merge_commit_id = parse_commit(r, committish).id
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )

        result = _do_merge(
            r, merge_commit_id, no_commit, no_ff, message, author, committer
        )

        # Trigger auto GC if needed
        from .gc import maybe_auto_gc

        maybe_auto_gc(r)

        return result
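
# Usage sketch (the branch name is a placeholder):
#
#   merge_sha, conflicts = merge(".", "feature")
#   if conflicts:
#       print("conflicts in:", [c.decode("utf-8", "replace") for c in conflicts])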


def unpack_objects(
    pack_path: Union[str, os.PathLike[str]], target: Union[str, os.PathLike[str]] = "."
) -> int:
    """Unpack objects from a pack file into the repository.

    Args:
      pack_path: Path to the pack file to unpack
      target: Path to the repository to unpack into

    Returns:
      Number of objects unpacked
    """
    from .pack import Pack

    with open_repo_closing(target) as r:
        pack_basename = os.path.splitext(pack_path)[0]
        with Pack(pack_basename) as pack:
            count = 0
            for unpacked in pack.iter_unpacked():
                obj = unpacked.sha_file()
                r.object_store.add_object(obj)
                count += 1
            return count


def merge_tree(
    repo: RepoPath,
    base_tree: Optional[Union[str, bytes, Tree, Commit, Tag]],
    our_tree: Union[str, bytes, Tree, Commit, Tag],
    their_tree: Union[str, bytes, Tree, Commit, Tag],
) -> tuple[bytes, list[bytes]]:
    """Perform a three-way tree merge without touching the working directory.

    This is similar to git merge-tree, performing a merge at the tree level
    without creating commits or updating any references.

    Args:
      repo: Repository containing the trees
      base_tree: Tree-ish of the common ancestor (or None for no common ancestor)
      our_tree: Tree-ish of our side of the merge
      their_tree: Tree-ish of their side of the merge

    Returns:
      tuple: A tuple of (merged_tree_id, conflicts) where:
        - merged_tree_id is the SHA-1 of the merged tree
        - conflicts is a list of paths (as bytes) that had conflicts

    Raises:
      KeyError: If any of the tree-ish arguments cannot be resolved
    """
    from .merge import Merger

    with open_repo_closing(repo) as r:
        # Resolve tree-ish arguments to actual trees
        base = parse_tree(r, base_tree) if base_tree else None
        ours = parse_tree(r, our_tree)
        theirs = parse_tree(r, their_tree)

        # Perform the merge
        gitattributes = r.get_gitattributes()
        config = r.get_config()
        merger = Merger(r.object_store, gitattributes, config)
        merged_tree, conflicts = merger.merge_trees(base, ours, theirs)

        # Add the merged tree to the object store
        r.object_store.add_object(merged_tree)

        return merged_tree.id, conflicts
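
# Usage sketch (tree-ish values are placeholders):
#
#   tree_id, conflicts = merge_tree(".", b"<base>", b"<ours>", b"<theirs>")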


def cherry_pick(  # noqa: D417
    repo: Union[str, os.PathLike[str], Repo],
    committish: Union[str, bytes, Commit, Tag, None],
    no_commit: bool = False,
    continue_: bool = False,
    abort: bool = False,
) -> Optional[bytes]:
    r"""Cherry-pick a commit onto the current branch.

    Args:
      repo: Repository to cherry-pick into
      committish: Commit to cherry-pick (can be None only when resuming or aborting)
      no_commit: If True, do not create a commit after applying changes
      continue_: If True, resume an in-progress cherry-pick after conflicts
        have been resolved
      abort: If True, abort an in-progress cherry-pick

    Returns:
      The SHA of the newly created commit, or None if no_commit=True or there were conflicts

    Raises:
      Error: If there is no HEAD reference, commit cannot be found, or operation fails
    """
    from .merge import three_way_merge

    # Validate that committish is provided when needed
    if not (continue_ or abort) and committish is None:
        raise ValueError("committish is required when not using --continue or --abort")

    with open_repo_closing(repo) as r:
        # Handle abort
        if abort:
            # Clean up any cherry-pick state
            try:
                os.remove(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"))
            except FileNotFoundError:
                pass
            try:
                os.remove(os.path.join(r.controldir(), "MERGE_MSG"))
            except FileNotFoundError:
                pass
            # Reset index to HEAD
            head_commit = r[b"HEAD"]
            assert isinstance(head_commit, Commit)
            r.get_worktree().reset_index(head_commit.tree)
            return None

        # Handle continue
        if continue_:
            # Check if there's a cherry-pick in progress
            cherry_pick_head_path = os.path.join(r.controldir(), "CHERRY_PICK_HEAD")
            try:
                with open(cherry_pick_head_path, "rb") as f:
                    cherry_pick_commit_id = f.read().strip()
                cherry_pick_commit = r[cherry_pick_commit_id]
            except FileNotFoundError:
                raise Error("No cherry-pick in progress")

            # Check for unresolved conflicts
            if r.open_index().has_conflicts():
                raise Error("Unresolved conflicts remain")

            # Create the commit
            tree_id = r.open_index().commit(r.object_store)

            # Read saved message if any
            merge_msg_path = os.path.join(r.controldir(), "MERGE_MSG")
            try:
                with open(merge_msg_path, "rb") as f:
                    message = f.read()
            except FileNotFoundError:
                assert isinstance(cherry_pick_commit, Commit)
                message = cherry_pick_commit.message

            assert isinstance(cherry_pick_commit, Commit)
            new_commit = r.get_worktree().commit(
                message=message,
                tree=tree_id,
                author=cherry_pick_commit.author,
                author_timestamp=cherry_pick_commit.author_time,
                author_timezone=cherry_pick_commit.author_timezone,
            )

            # Clean up state files
            try:
                os.remove(cherry_pick_head_path)
            except FileNotFoundError:
                pass
            try:
                os.remove(merge_msg_path)
            except FileNotFoundError:
                pass

            return new_commit

        # Normal cherry-pick operation
        # Get current HEAD
        try:
            head_commit = r[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        # Parse the commit to cherry-pick
        # committish cannot be None here due to validation above
        assert committish is not None
        try:
            cherry_pick_commit = parse_commit(r, committish)
        except KeyError:
            raise Error(
                f"Cannot find commit '{committish.decode() if isinstance(committish, bytes) else committish}'"
            )

        # Check if commit has parents
        assert isinstance(cherry_pick_commit, Commit)
        if not cherry_pick_commit.parents:
            raise Error("Cannot cherry-pick root commit")

        # Get parent of cherry-pick commit
        parent_commit = r[cherry_pick_commit.parents[0]]
        assert isinstance(parent_commit, Commit)

        # Perform three-way merge
        assert isinstance(head_commit, Commit)
        merged_tree, conflicts = three_way_merge(
            r.object_store, parent_commit, head_commit, cherry_pick_commit
        )

        # Add merged tree to object store
        r.object_store.add_object(merged_tree)

        # Update working tree and index
        # Reset index to match merged tree
        r.get_worktree().reset_index(merged_tree.id)

        # Update working tree from the new index
        # Allow overwriting because we're applying the merge result
        assert isinstance(head_commit, Commit)
        changes = tree_changes(r.object_store, head_commit.tree, merged_tree.id)
        update_working_tree(
            r,
            head_commit.tree,
            merged_tree.id,
            change_iterator=changes,
            allow_overwrite_modified=True,
        )

        if conflicts:
            # Save state for later continuation
            with open(os.path.join(r.controldir(), "CHERRY_PICK_HEAD"), "wb") as f:
                f.write(cherry_pick_commit.id + b"\n")
            # Save commit message
            with open(os.path.join(r.controldir(), "MERGE_MSG"), "wb") as f:
                f.write(cherry_pick_commit.message)
            raise Error(
                f"Conflicts in: {', '.join(c.decode('utf-8', 'replace') for c in conflicts)}\n"
                f"Fix conflicts and run 'dulwich cherry-pick --continue'"
            )

        if no_commit:
            return None

        # Create the commit
        new_commit = r.get_worktree().commit(
            message=cherry_pick_commit.message,
            tree=merged_tree.id,
            author=cherry_pick_commit.author,
            author_timestamp=cherry_pick_commit.author_time,
            author_timezone=cherry_pick_commit.author_timezone,
        )
        return new_commit
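
# Usage sketch (the SHA is a placeholder):
#
#   try:
#       new_sha = cherry_pick(".", "<commit-sha>")
#   except Error:
#       # fix the reported conflicts, stage the results, then:
#       new_sha = cherry_pick(".", None, continue_=True)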
  4849. def revert(
  4850. repo: Union[str, os.PathLike[str], Repo],
  4851. commits: Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]],
  4852. no_commit: bool = False,
  4853. message: Optional[Union[str, bytes]] = None,
  4854. author: Optional[bytes] = None,
  4855. committer: Optional[bytes] = None,
  4856. ) -> Optional[bytes]:
  4857. """Revert one or more commits.
  4858. This creates a new commit that undoes the changes introduced by the
  4859. specified commits. Unlike reset, revert creates a new commit that
  4860. preserves history.
  4861. Args:
  4862. repo: Path to repository or repository object
  4863. commits: List of commit-ish (SHA, ref, etc.) to revert, or a single commit-ish
  4864. no_commit: If True, apply changes to index/working tree but don't commit
  4865. message: Optional commit message (default: "Revert <original subject>")
  4866. author: Optional author for revert commit
  4867. committer: Optional committer for revert commit
  4868. Returns:
  4869. SHA1 of the new revert commit, or None if no_commit=True
  4870. Raises:
  4871. Error: If revert fails due to conflicts or other issues
  4872. """
    from .merge import three_way_merge

    # Normalize commits to a list
    if isinstance(commits, (str, bytes, Commit, Tag)):
        commits = [commits]

    with open_repo_closing(repo) as r:
        # Convert string refs to bytes
        commits_to_revert = []
        for commit_ref in commits:
            if isinstance(commit_ref, str):
                commit_ref = commit_ref.encode("utf-8")
            commit = parse_commit(r, commit_ref)
            commits_to_revert.append(commit)

        # Get current HEAD
        try:
            head_commit_id = r.refs[b"HEAD"]
        except KeyError:
            raise Error("No HEAD reference found")

        head_commit = r[head_commit_id]
        assert isinstance(head_commit, Commit)
        current_tree = head_commit.tree

        # Process commits in order
        for commit_to_revert in commits_to_revert:
            # For revert, we want to apply the inverse of the commit. This
            # means using the commit's tree as "base" and its parent as
            # "theirs".
            if not commit_to_revert.parents:
                raise Error(
                    f"Cannot revert commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - it has no parents"
                )

            # For simplicity, we only handle commits with one parent
            # (no merge commits).
            if len(commit_to_revert.parents) > 1:
                raise Error(
                    f"Cannot revert merge commit {commit_to_revert.id.decode() if isinstance(commit_to_revert.id, bytes) else commit_to_revert.id} - not yet implemented"
                )

            parent_commit = r[commit_to_revert.parents[0]]
            assert isinstance(parent_commit, Commit)

            # Perform three-way merge:
            # - base: the commit we're reverting (what we want to remove)
            # - ours: current HEAD (what we have now)
            # - theirs: parent of commit being reverted (what we want to go back to)
            assert isinstance(commit_to_revert, Commit)
            head_for_merge = r[head_commit_id]
            assert isinstance(head_for_merge, Commit)
            merged_tree, conflicts = three_way_merge(
                r.object_store,
                commit_to_revert,  # base
                head_for_merge,  # ours
                parent_commit,  # theirs
            )

            if conflicts:
                # Update working tree with conflicts
                changes = tree_changes(r.object_store, current_tree, merged_tree.id)
                update_working_tree(
                    r, current_tree, merged_tree.id, change_iterator=changes
                )
                conflicted_paths = [c.decode("utf-8", "replace") for c in conflicts]
                raise Error(f"Conflicts while reverting: {', '.join(conflicted_paths)}")

            # Add merged tree to object store
            r.object_store.add_object(merged_tree)

            # Update working tree
            changes = tree_changes(r.object_store, current_tree, merged_tree.id)
            update_working_tree(
                r, current_tree, merged_tree.id, change_iterator=changes
            )
            current_tree = merged_tree.id

            if not no_commit:
                # Create revert commit
                revert_commit = Commit()
                revert_commit.tree = merged_tree.id
                revert_commit.parents = [head_commit_id]

                # Set author/committer
                if author is None:
                    author = get_user_identity(r.get_config_stack())
                if committer is None:
                    committer = author
                revert_commit.author = author
                revert_commit.committer = committer

                # Set timestamps
                timestamp = int(time.time())
                timezone = 0  # UTC
                revert_commit.author_time = timestamp
                revert_commit.author_timezone = timezone
                revert_commit.commit_time = timestamp
                revert_commit.commit_timezone = timezone

                # Set message
                if message is None:
                    # Extract original commit subject
                    original_message = commit_to_revert.message
                    if isinstance(original_message, bytes):
                        original_message = original_message.decode("utf-8", "replace")
                    subject = original_message.split("\n")[0]
                    message = f'Revert "{subject}"\n\nThis reverts commit {commit_to_revert.id.decode("ascii")}.'.encode()
                elif isinstance(message, str):
                    message = message.encode("utf-8")
                revert_commit.message = message

                # Add commit to object store
                r.object_store.add_object(revert_commit)

                # Update HEAD
                r.refs[b"HEAD"] = revert_commit.id
                head_commit_id = revert_commit.id

        return head_commit_id if not no_commit else None
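

# Illustrative sketch (not part of the original source): reverting the commit
# currently at HEAD, assuming the enclosing function above is revert() and
# that "/tmp/example-repo" is a placeholder repository path.
def _example_revert_head(repo_path: str = "/tmp/example-repo") -> None:
    """Revert the commit at HEAD and print the new revert commit's SHA."""
    new_sha = revert(repo_path, "HEAD")
    if new_sha is not None:
        print("Created revert commit", new_sha.decode("ascii"))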


def gc(
    repo: RepoPath,
    auto: bool = False,
    aggressive: bool = False,
    prune: bool = True,
    grace_period: Optional[int] = 1209600,  # 2 weeks default
    dry_run: bool = False,
    progress: Optional[Callable[[str], None]] = None,
) -> "GCStats":
    """Run garbage collection on a repository.

    Args:
        repo: Path to the repository or a Repo object
        auto: If True, only run gc if needed
        aggressive: If True, use more aggressive settings
        prune: If True, prune unreachable objects
        grace_period: Grace period in seconds for pruning (default 2 weeks)
        dry_run: If True, only report what would be done
        progress: Optional progress callback

    Returns:
        GCStats object with garbage collection statistics
    """
    from .gc import garbage_collect

    with open_repo_closing(repo) as r:
        return garbage_collect(
            r,
            auto=auto,
            aggressive=aggressive,
            prune=prune,
            grace_period=grace_period,
            dry_run=dry_run,
            progress=progress,
        )
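

# Illustrative sketch (not part of the original source): a dry-run garbage
# collection that reports what would be done without touching the object
# store. The repository path is a placeholder.
def _example_gc_dry_run(repo_path: str = "/tmp/example-repo") -> "GCStats":
    """Run gc() in dry-run mode, forwarding progress messages to stdout."""
    return gc(repo_path, prune=True, dry_run=True, progress=print)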


def prune(
    repo: RepoPath,
    grace_period: Optional[int] = None,
    dry_run: bool = False,
    progress: Optional[Callable[[str], None]] = None,
) -> None:
    """Prune/clean up a repository's object store.

    This removes temporary files that were left behind by interrupted
    pack operations.

    Args:
        repo: Path to the repository or a Repo object
        grace_period: Grace period in seconds for removing temporary files
            (default 2 weeks)
        dry_run: If True, only report what would be done
        progress: Optional progress callback
    """
    with open_repo_closing(repo) as r:
        if progress:
            progress("Pruning temporary files")
        if not dry_run:
            r.object_store.prune(grace_period=grace_period)


def count_objects(repo: RepoPath = ".", verbose: bool = False) -> CountObjectsResult:
    """Count unpacked objects and their disk usage.

    Args:
        repo: Path to repository or repository object
        verbose: Whether to return verbose information

    Returns:
        CountObjectsResult object with detailed statistics
    """
    from .object_store import DiskObjectStore

    with open_repo_closing(repo) as r:
        object_store = r.object_store
        assert isinstance(object_store, DiskObjectStore)

        # Count loose objects
        loose_count = 0
        loose_size = 0
        for sha in object_store._iter_loose_objects():
            loose_count += 1
            path = object_store._get_shafile_path(sha)
            try:
                stat_info = os.stat(path)
                # Git uses disk usage, not file size. st_blocks is always in
                # 512-byte blocks per POSIX standard.
                st_blocks = getattr(stat_info, "st_blocks", None)
                if st_blocks is not None:
                    # Available on Linux and macOS
                    loose_size += st_blocks * 512
                else:
                    # Fallback for Windows
                    loose_size += stat_info.st_size
            except FileNotFoundError:
                # Object may have been removed between iteration and stat
                pass

        if not verbose:
            return CountObjectsResult(count=loose_count, size=loose_size)

        # Count pack information
        pack_count = len(object_store.packs)
        in_pack_count = 0
        pack_size = 0
        for pack in object_store.packs:
            in_pack_count += len(pack)
            # Get pack file size
            pack_path = pack._data_path
            try:
                pack_size += os.path.getsize(pack_path)
            except FileNotFoundError:
                pass
            # Get index file size
            idx_path = pack._idx_path
            try:
                pack_size += os.path.getsize(idx_path)
            except FileNotFoundError:
                pass

        return CountObjectsResult(
            count=loose_count,
            size=loose_size,
            in_pack=in_pack_count,
            packs=pack_count,
            size_pack=pack_size,
        )
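

# Illustrative sketch (not part of the original source): printing the same
# statistics that `git count-objects -v` reports, using the verbose result
# fields constructed above. The repository path is a placeholder.
def _example_count_objects(repo_path: str = "/tmp/example-repo") -> None:
    """Print loose and packed object statistics for a repository."""
    result = count_objects(repo_path, verbose=True)
    print("count:", result.count)
    print("size:", result.size)
    print("in-pack:", result.in_pack)
    print("packs:", result.packs)
    print("size-pack:", result.size_pack)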


def is_interactive_rebase(repo: Union[Repo, str]) -> bool:
    """Check if an interactive rebase is in progress.

    Args:
        repo: Repository to check

    Returns:
        True if interactive rebase is in progress, False otherwise
    """
    with open_repo_closing(repo) as r:
        state_manager = r.get_rebase_state_manager()
        if not state_manager.exists():
            return False
        # Check if todo file exists
        todo = state_manager.load_todo()
        return todo is not None


def rebase(
    repo: Union[Repo, str],
    upstream: Union[bytes, str],
    onto: Optional[Union[bytes, str]] = None,
    branch: Optional[Union[bytes, str]] = None,
    abort: bool = False,
    continue_rebase: bool = False,
    skip: bool = False,
    interactive: bool = False,
    edit_todo: bool = False,
) -> list[bytes]:
    """Rebase commits onto another branch.

    Args:
        repo: Repository to rebase in
        upstream: Upstream branch/commit to rebase onto
        onto: Specific commit to rebase onto (defaults to upstream)
        branch: Branch to rebase (defaults to current branch)
        abort: Abort an in-progress rebase
        continue_rebase: Continue an in-progress rebase
        skip: Skip current commit and continue rebase
        interactive: Start an interactive rebase
        edit_todo: Edit the todo list of an interactive rebase

    Returns:
        List of new commit SHAs created by rebase

    Raises:
        Error: If rebase fails or conflicts occur
    """
    from .cli import launch_editor
    from .rebase import (
        RebaseConflict,
        RebaseError,
        Rebaser,
        process_interactive_rebase,
        start_interactive,
    )
    from .rebase import (
        edit_todo as edit_todo_func,
    )

    with open_repo_closing(repo) as r:
        rebaser = Rebaser(r)

        if abort:
            try:
                rebaser.abort()
                return []
            except RebaseError as e:
                raise Error(str(e))

        if edit_todo:
            # Edit the todo list of an interactive rebase
            try:
                edit_todo_func(r, launch_editor)
                print("Todo list updated. Continue with 'rebase --continue'")
                return []
            except RebaseError as e:
                raise Error(str(e))

        if continue_rebase:
            try:
                if interactive:
                    # Continue interactive rebase
                    is_complete, pause_reason = process_interactive_rebase(
                        r, editor_callback=launch_editor
                    )
                    if is_complete:
                        return [c.id for c in rebaser._done]
                    else:
                        if pause_reason == "conflict":
                            raise Error("Conflicts detected. Resolve and continue.")
                        elif pause_reason == "edit":
                            print("Stopped for editing. Make changes and continue.")
                        elif pause_reason == "break":
                            print("Rebase paused at break. Continue when ready.")
                        else:
                            print(f"Rebase paused: {pause_reason}")
                        return []
                else:
                    # Continue regular rebase
                    result = rebaser.continue_()
                    if result is None:
                        # Rebase complete
                        return [c.id for c in rebaser._done]
                    elif isinstance(result, tuple) and result[1]:
                        # Still have conflicts
                        raise Error(
                            f"Conflicts in: {', '.join(f.decode('utf-8', 'replace') for f in result[1])}"
                        )
            except RebaseError as e:
                raise Error(str(e))

        # Convert string refs to bytes
        if isinstance(upstream, str):
            upstream = upstream.encode("utf-8")
        if isinstance(onto, str):
            onto = onto.encode("utf-8") if onto else None
        if isinstance(branch, str):
            branch = branch.encode("utf-8") if branch else None

        try:
            if interactive:
                # Start interactive rebase
                todo = start_interactive(r, upstream, onto, branch, launch_editor)
                # Process the todo list
                is_complete, pause_reason = process_interactive_rebase(
                    r, todo, editor_callback=launch_editor
                )
                if is_complete:
                    return [c.id for c in rebaser._done]
                else:
                    if pause_reason == "conflict":
                        raise Error("Conflicts detected. Resolve and continue.")
                    elif pause_reason == "edit":
                        print("Stopped for editing. Make changes and continue.")
                    elif pause_reason == "break":
                        print("Rebase paused at break. Continue when ready.")
                    else:
                        print(f"Rebase paused: {pause_reason}")
                    return []
            else:
                # Regular rebase
                rebaser.start(upstream, onto, branch)
                # Continue rebase automatically
                result = rebaser.continue_()
                if result is not None:
                    # Conflicts
                    raise RebaseConflict(result[1])
                # Return the SHAs of the rebased commits
                return [c.id for c in rebaser._done]
        except RebaseConflict as e:
            raise Error(str(e))
        except RebaseError as e:
            raise Error(str(e))
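

# Illustrative sketch (not part of the original source): rebasing the current
# branch onto a hypothetical "refs/remotes/upstream/main" ref and rolling the
# rebase back if conflicts are reported. The ref name is an assumption.
def _example_rebase_onto_upstream(repo_path: str = "/tmp/example-repo") -> list[bytes]:
    """Rebase onto upstream/main, aborting the rebase on conflict."""
    try:
        return rebase(repo_path, upstream=b"refs/remotes/upstream/main")
    except Error:
        # Conflicts (or another rebase failure): abort to restore the branch.
        rebase(repo_path, upstream=b"refs/remotes/upstream/main", abort=True)
        raise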


def annotate(
    repo: RepoPath,
    path: Union[str, bytes],
    committish: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> list[tuple[tuple[Commit, TreeEntry], bytes]]:
    """Annotate the history of a file.

    Args:
        repo: Path to the repository
        path: Path to annotate
        committish: Commit id to find path in (defaults to HEAD)

    Returns:
        List of ((Commit, TreeEntry), line) tuples
    """
    if committish is None:
        committish = "HEAD"
    from dulwich.annotate import annotate_lines

    with open_repo_closing(repo) as r:
        commit_id = parse_commit(r, committish).id
        # Ensure path is bytes
        if isinstance(path, str):
            path = path.encode()
        return annotate_lines(r.object_store, commit_id, path)


blame = annotate
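

# Illustrative sketch (not part of the original source): a minimal blame view
# for a file at HEAD, printing each line with an abbreviated commit SHA. The
# repository path and file name are placeholders.
def _example_annotate(
    repo_path: str = "/tmp/example-repo", path: str = "README"
) -> None:
    """Print each line of a file prefixed with the commit that introduced it."""
    for (commit, _entry), line in annotate(repo_path, path):
        sha = commit.id[:8].decode("ascii")
        print(sha, line.decode("utf-8", "replace").rstrip("\n"))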


def filter_branch(
    repo: RepoPath = ".",
    branch: Union[str, bytes] = "HEAD",
    *,
    filter_fn: Optional[Callable[[Commit], Optional["CommitData"]]] = None,
    filter_author: Optional[Callable[[bytes], Optional[bytes]]] = None,
    filter_committer: Optional[Callable[[bytes], Optional[bytes]]] = None,
    filter_message: Optional[Callable[[bytes], Optional[bytes]]] = None,
    tree_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
    index_filter: Optional[Callable[[bytes, str], Optional[bytes]]] = None,
    parent_filter: Optional[Callable[[Sequence[bytes]], list[bytes]]] = None,
    commit_filter: Optional[Callable[[Commit, bytes], Optional[bytes]]] = None,
    subdirectory_filter: Optional[Union[str, bytes]] = None,
    prune_empty: bool = False,
    tag_name_filter: Optional[Callable[[bytes], Optional[bytes]]] = None,
    force: bool = False,
    keep_original: bool = True,
    refs: Optional[list[bytes]] = None,
) -> dict[bytes, bytes]:
    """Rewrite branch history by creating new commits with filtered properties.

    This is similar to git filter-branch, allowing you to rewrite commit
    history by modifying trees, parents, author, committer, or commit messages.

    Args:
        repo: Path to repository
        branch: Branch to rewrite (defaults to HEAD)
        filter_fn: Optional callable that takes a Commit object and returns
            a dict of updated fields (author, committer, message, etc.)
        filter_author: Optional callable that takes author bytes and returns
            updated author bytes or None to keep unchanged
        filter_committer: Optional callable that takes committer bytes and returns
            updated committer bytes or None to keep unchanged
        filter_message: Optional callable that takes commit message bytes
            and returns updated message bytes
        tree_filter: Optional callable that takes (tree_sha, temp_dir) and returns
            new tree SHA after modifying working directory
        index_filter: Optional callable that takes (tree_sha, temp_index_path) and
            returns new tree SHA after modifying index
        parent_filter: Optional callable that takes parent list and returns
            modified parent list
        commit_filter: Optional callable that takes (Commit, tree_sha) and returns
            new commit SHA or None to skip commit
        subdirectory_filter: Optional subdirectory path to extract as new root
        prune_empty: Whether to prune commits that become empty
        tag_name_filter: Optional callable to rename tags
        force: Force operation even if branch has been filtered before
        keep_original: Keep original refs under refs/original/
        refs: List of refs to rewrite (defaults to [branch])

    Returns:
        Dict mapping old commit SHAs to new commit SHAs

    Raises:
        Error: If branch is already filtered and force is False
    """
    from .filter_branch import CommitFilter, filter_refs

    with open_repo_closing(repo) as r:
        # Parse branch/committish
        if isinstance(branch, str):
            branch = branch.encode()

        # Determine which refs to process
        if refs is None:
            if branch == b"HEAD":
                # Resolve HEAD to actual branch
                try:
                    resolved = r.refs.follow(b"HEAD")
                    if resolved and resolved[0]:
                        # resolved is a list of (refname, sha) tuples
                        resolved_ref = resolved[0][-1]
                        if resolved_ref and resolved_ref != b"HEAD":
                            refs = [resolved_ref]
                        else:
                            # HEAD points directly to a commit
                            refs = [b"HEAD"]
                    else:
                        refs = [b"HEAD"]
                except SymrefLoop:
                    refs = [b"HEAD"]
            else:
                # Convert branch name to full ref if needed
                if not branch.startswith(b"refs/"):
                    branch = b"refs/heads/" + branch
                refs = [branch]

        # Convert subdirectory filter to bytes if needed
        if subdirectory_filter:
            if isinstance(subdirectory_filter, str):
                subdirectory_filter = subdirectory_filter.encode()
        else:
            subdirectory_filter = None

        # Create commit filter
        filter_obj = CommitFilter(
            r.object_store,
            filter_fn=filter_fn,
            filter_author=filter_author,
            filter_committer=filter_committer,
            filter_message=filter_message,
            tree_filter=tree_filter,
            index_filter=index_filter,
            parent_filter=parent_filter,
            commit_filter=commit_filter,
            subdirectory_filter=subdirectory_filter,
            prune_empty=prune_empty,
            tag_name_filter=tag_name_filter,
        )

        # Tag callback for renaming tags
        def rename_tag(old_ref: bytes, new_ref: bytes) -> None:
            # Copy tag to new name
            r.refs[new_ref] = r.refs[old_ref]
            # Delete old tag
            del r.refs[old_ref]

        # Filter refs
        try:
            return filter_refs(
                r.refs,
                r.object_store,
                refs,
                filter_obj,
                keep_original=keep_original,
                force=force,
                tag_callback=rename_tag if tag_name_filter else None,
            )
        except ValueError as e:
            raise Error(str(e)) from e
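

# Illustrative sketch (not part of the original source): rewriting every
# commit message on a branch to prepend a marker, leaving authorship intact.
# The branch name "master" and the marker text are assumptions.
def _example_filter_branch_messages(
    repo_path: str = "/tmp/example-repo",
) -> dict[bytes, bytes]:
    """Prefix all commit messages on master with "[archived] "."""

    def _prefix_message(message: bytes) -> bytes:
        # Return the updated message; filter_message receives and returns bytes.
        return b"[archived] " + message

    return filter_branch(repo_path, "master", filter_message=_prefix_message)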


def format_patch(
    repo: RepoPath = ".",
    committish: Optional[Union[bytes, tuple[bytes, bytes]]] = None,
    outstream: TextIO = sys.stdout,
    outdir: Optional[Union[str, os.PathLike[str]]] = None,
    n: int = 1,
    stdout: bool = False,
    version: Optional[str] = None,
) -> list[str]:
    """Generate patches suitable for git am.

    Args:
        repo: Path to repository
        committish: Commit-ish or commit range to generate patches for.
            Can be a single commit id, or a tuple of (start, end) commit ids
            for a range. If None, formats the last n commits from HEAD.
        outstream: Stream to write to if stdout=True
        outdir: Directory to write patch files to (default: current directory)
        n: Number of patches to generate if committish is None
        stdout: Write patches to stdout instead of files
        version: Version string to include in patches (default: Dulwich version)

    Returns:
        List of patch filenames that were created (empty if stdout=True)
    """
    from io import BytesIO

    if outdir is None:
        outdir = "."

    filenames = []
    with open_repo_closing(repo) as r:
        # Determine which commits to format
        commits_to_format = []

        if committish is None:
            # Get the last n commits from HEAD
            try:
                walker = r.get_walker()
                for entry in walker:
                    commits_to_format.append(entry.commit)
                    if len(commits_to_format) >= n:
                        break
                commits_to_format.reverse()
            except KeyError:
                # No HEAD or empty repository
                pass
        elif isinstance(committish, tuple):
            # Handle commit range (start, end)
            start_commit, end_commit = committish
            # Extract commit IDs from commit objects if needed
            start_id = (
                start_commit.id if isinstance(start_commit, Commit) else start_commit
            )
            end_id = end_commit.id if isinstance(end_commit, Commit) else end_commit
            # Walk from end back to start
            walker = r.get_walker(include=[end_id], exclude=[start_id])
            for entry in walker:
                commits_to_format.append(entry.commit)
            commits_to_format.reverse()
        else:
            # Single commit
            commit = r.object_store[committish]
            assert isinstance(commit, Commit)
            commits_to_format.append(commit)

        # Generate patches
        total = len(commits_to_format)
        for i, commit in enumerate(commits_to_format, 1):
            assert isinstance(commit, Commit)

            # Get the parent
            if commit.parents:
                parent_id = commit.parents[0]
                parent = r.object_store[parent_id]
                assert isinstance(parent, Commit)
            else:
                parent = None

            # Generate the diff
            diff_content = BytesIO()
            if parent:
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    parent.tree,
                    commit.tree,
                )
            else:
                # Initial commit - diff against empty tree
                write_tree_diff(
                    diff_content,
                    r.object_store,
                    None,
                    commit.tree,
                )

            # Generate patch with commit metadata
            if stdout:
                # Get binary stream from TextIO
                if hasattr(outstream, "buffer"):
                    binary_out: IO[bytes] = outstream.buffer
                else:
                    # Fallback for non-text streams
                    binary_out = outstream  # type: ignore[assignment]
                write_commit_patch(
                    binary_out,
                    commit,
                    diff_content.getvalue(),
                    (i, total),
                    version=version,
                )
            else:
                # Generate filename
                summary = get_summary(commit)
                filename = os.path.join(outdir, f"{i:04d}-{summary}.patch")
                with open(filename, "wb") as f:
                    write_commit_patch(
                        f,
                        commit,
                        diff_content.getvalue(),
                        (i, total),
                        version=version,
                    )
                filenames.append(filename)

    return filenames
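

# Illustrative sketch (not part of the original source): writing the last
# three commits from HEAD as mailbox-style patch files into a scratch
# directory created with tempfile.
def _example_format_patch(repo_path: str = "/tmp/example-repo") -> list[str]:
    """Generate 0001-*.patch through 0003-*.patch for the last 3 commits."""
    import tempfile

    outdir = tempfile.mkdtemp(prefix="patches-")
    return format_patch(repo_path, outdir=outdir, n=3)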


def bisect_start(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    bad: Optional[Union[str, bytes, Commit, Tag]] = None,
    good: Optional[
        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
    ] = None,
    paths: Optional[Sequence[bytes]] = None,
    no_checkout: bool = False,
    term_bad: str = "bad",
    term_good: str = "good",
) -> Optional[bytes]:
    """Start a new bisect session.

    Args:
        repo: Path to repository or a Repo object
        bad: The bad commit (defaults to HEAD)
        good: List of good commits or a single good commit
        paths: Optional paths to limit bisect to
        no_checkout: If True, don't checkout commits during bisect
        term_bad: Term to use for bad commits (default: "bad")
        term_good: Term to use for good commits (default: "good")

    Returns:
        The SHA of the next commit to test if both bad and good commits
        were provided, otherwise None
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Convert single good commit to sequence
        if good is not None and isinstance(good, (str, bytes, Commit, Tag)):
            good = [good]

        # Parse commits
        bad_sha = parse_commit(r, bad).id if bad else None
        good_shas = [parse_commit(r, g).id for g in good] if good else None

        state.start(bad_sha, good_shas, paths, no_checkout, term_bad, term_good)

        # Return the next commit to test if we have both good and bad
        if bad_sha and good_shas:
            next_sha = state._find_next_commit()
            if next_sha and not no_checkout:
                # Checkout the next commit
                old_commit = r[r.head()]
                assert isinstance(old_commit, Commit)
                old_tree = old_commit.tree if r.head() else None
                r.refs[b"HEAD"] = next_sha
                commit = r[next_sha]
                assert isinstance(commit, Commit)
                changes = tree_changes(r.object_store, old_tree, commit.tree)
                update_working_tree(r, old_tree, commit.tree, change_iterator=changes)
            return next_sha
        return None


def bisect_bad(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    rev: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> Optional[bytes]:
    """Mark a commit as bad.

    Args:
        repo: Path to repository or a Repo object
        rev: Commit to mark as bad (defaults to HEAD)

    Returns:
        The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_bad(rev_sha)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[b"HEAD"] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_good(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    rev: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> Optional[bytes]:
    """Mark a commit as good.

    Args:
        repo: Path to repository or a Repo object
        rev: Commit to mark as good (defaults to HEAD)

    Returns:
        The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        rev_sha = parse_commit(r, rev).id if rev else None
        next_sha = state.mark_good(rev_sha)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[b"HEAD"] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha
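

# Illustrative sketch (not part of the original source): driving a full bisect
# session with a caller-supplied test callable. The choice of good/bad refs
# and the test itself are assumptions supplied by the caller.
def _example_bisect_run(
    repo_path: str,
    bad: str,
    good: str,
    test: Callable[[], bool],
) -> Optional[bytes]:
    """Bisect between good and bad, returning the last commit tested."""
    current = bisect_start(repo_path, bad=bad, good=good)
    last: Optional[bytes] = None
    while current is not None:
        last = current
        # Mark the checked-out commit according to the test result; each
        # call checks out and returns the next candidate, or None when done.
        current = bisect_good(repo_path) if test() else bisect_bad(repo_path)
    return last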


def bisect_skip(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    revs: Optional[
        Union[str, bytes, Commit, Tag, Sequence[Union[str, bytes, Commit, Tag]]]
    ] = None,
) -> Optional[bytes]:
    """Skip one or more commits.

    Args:
        repo: Path to repository or a Repo object
        revs: List of commits to skip (defaults to [HEAD])

    Returns:
        The SHA of the next commit to test, or None if bisect is complete
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        if revs is None:
            rev_shas = None
        else:
            # Convert single rev to sequence
            if isinstance(revs, (str, bytes, Commit, Tag)):
                revs = [revs]
            rev_shas = [parse_commit(r, rev).id for rev in revs]
        next_sha = state.skip(rev_shas)

        if next_sha:
            # Checkout the next commit
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree if r.head() else None
            r.refs[b"HEAD"] = next_sha
            commit = r[next_sha]
            assert isinstance(commit, Commit)
            changes = tree_changes(r.object_store, old_tree, commit.tree)
            update_working_tree(r, old_tree, commit.tree, change_iterator=changes)

        return next_sha


def bisect_reset(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    commit: Optional[Union[str, bytes, Commit, Tag]] = None,
) -> None:
    """Reset bisect state and return to original branch/commit.

    Args:
        repo: Path to repository or a Repo object
        commit: Optional commit to reset to (defaults to original branch/commit)
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        # Get old tree before reset
        try:
            old_commit = r[r.head()]
            assert isinstance(old_commit, Commit)
            old_tree = old_commit.tree
        except KeyError:
            old_tree = None

        commit_sha = parse_commit(r, commit).id if commit else None
        state.reset(commit_sha)

        # Update working tree to new HEAD
        try:
            new_head = r.head()
            if new_head:
                new_commit = r[new_head]
                assert isinstance(new_commit, Commit)
                changes = tree_changes(r.object_store, old_tree, new_commit.tree)
                update_working_tree(
                    r, old_tree, new_commit.tree, change_iterator=changes
                )
        except KeyError:
            # No HEAD after reset
            pass


def bisect_log(repo: Union[str, os.PathLike[str], Repo] = ".") -> str:
    """Get the bisect log.

    Args:
        repo: Path to repository or a Repo object

    Returns:
        The bisect log as a string
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)
        return state.get_log()


def bisect_replay(
    repo: Union[str, os.PathLike[str], Repo],
    log_file: Union[str, os.PathLike[str], BinaryIO],
) -> None:
    """Replay a bisect log.

    Args:
        repo: Path to repository or a Repo object
        log_file: Path to the log file or file-like object
    """
    with open_repo_closing(repo) as r:
        state = BisectState(r)

        if isinstance(log_file, (str, os.PathLike)):
            with open(log_file) as f:
                log_content = f.read()
        else:
            content = log_file.read()
            log_content = content.decode() if isinstance(content, bytes) else content

        state.replay(log_content)


def reflog(
    repo: RepoPath = ".", ref: Union[str, bytes] = b"HEAD", all: bool = False
) -> Iterator[Union[Any, tuple[bytes, Any]]]:
    """Show reflog entries for a reference or all references.

    Args:
        repo: Path to repository or a Repo object
        ref: Reference name (defaults to HEAD)
        all: If True, show reflogs for all refs (ignores ref parameter)

    Yields:
        If all=False: ReflogEntry objects
        If all=True: Tuples of (ref_name, ReflogEntry) for all refs with reflogs
    """
    from .reflog import iter_reflogs

    if isinstance(ref, str):
        ref = ref.encode("utf-8")

    with open_repo_closing(repo) as r:
        if not all:
            yield from r.read_reflog(ref)
        else:
            logs_dir = os.path.join(r.controldir(), "logs")
            # Use iter_reflogs to discover all reflogs
            for ref_bytes in iter_reflogs(logs_dir):
                # Read the reflog entries for this ref
                for entry in r.read_reflog(ref_bytes):
                    yield (ref_bytes, entry)
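

# Illustrative sketch (not part of the original source): printing the HEAD
# reflog. The entry attribute names (old_sha, new_sha, message) are an
# assumption about the reflog entry type yielded above.
def _example_print_head_reflog(repo_path: str = "/tmp/example-repo") -> None:
    """Print old SHA, new SHA, and message for each HEAD reflog entry."""
    for entry in reflog(repo_path, b"HEAD"):
        print(
            entry.old_sha.decode("ascii"),
            entry.new_sha.decode("ascii"),
            entry.message.decode("utf-8", "replace").strip(),
        )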


def lfs_track(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    patterns: Optional[Sequence[str]] = None,
) -> list[str]:
    """Track file patterns with Git LFS.

    Args:
        repo: Path to repository
        patterns: List of file patterns to track (e.g., ["*.bin", "*.pdf"])
            If None, returns current tracked patterns

    Returns:
        List of tracked patterns
    """
    from .attrs import GitAttributes

    with open_repo_closing(repo) as r:
        gitattributes_path = os.path.join(r.path, ".gitattributes")

        # Load existing GitAttributes
        if os.path.exists(gitattributes_path):
            gitattributes = GitAttributes.from_file(gitattributes_path)
        else:
            gitattributes = GitAttributes()

        if patterns is None:
            # Return current LFS tracked patterns
            tracked = []
            for pattern_obj, attrs in gitattributes:
                if attrs.get(b"filter") == b"lfs":
                    tracked.append(pattern_obj.pattern.decode())
            return tracked

        # Add new patterns
        for pattern in patterns:
            # Ensure pattern is bytes
            pattern_bytes = pattern.encode() if isinstance(pattern, str) else pattern

            # Set LFS attributes for the pattern
            gitattributes.set_attribute(pattern_bytes, b"filter", b"lfs")
            gitattributes.set_attribute(pattern_bytes, b"diff", b"lfs")
            gitattributes.set_attribute(pattern_bytes, b"merge", b"lfs")
            gitattributes.set_attribute(pattern_bytes, b"text", False)

        # Write updated attributes
        gitattributes.write_to_file(gitattributes_path)

        # Stage the .gitattributes file
        add(r, [".gitattributes"])

        return lfs_track(r)  # Return updated list
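

# Illustrative sketch (not part of the original source): initializing LFS and
# tracking binary assets in one step. The patterns are placeholders; lfs_init
# is defined later in this module and resolved at call time.
def _example_lfs_setup(repo_path: str = "/tmp/example-repo") -> list[str]:
    """Initialize LFS and track *.bin and *.pdf, returning tracked patterns."""
    lfs_init(repo_path)
    return lfs_track(repo_path, ["*.bin", "*.pdf"])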


def lfs_untrack(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    patterns: Optional[Sequence[str]] = None,
) -> list[str]:
    """Untrack file patterns from Git LFS.

    Args:
        repo: Path to repository
        patterns: List of file patterns to untrack

    Returns:
        List of remaining tracked patterns
    """
    from .attrs import GitAttributes

    if not patterns:
        return lfs_track(repo)

    with open_repo_closing(repo) as r:
        gitattributes_path = os.path.join(r.path, ".gitattributes")

        if not os.path.exists(gitattributes_path):
            return []

        # Load existing GitAttributes
        gitattributes = GitAttributes.from_file(gitattributes_path)

        # Remove specified patterns
        for pattern in patterns:
            pattern_bytes = pattern.encode() if isinstance(pattern, str) else pattern

            # Check if pattern is tracked by LFS
            for pattern_obj, attrs in list(gitattributes):
                if (
                    pattern_obj.pattern == pattern_bytes
                    and attrs.get(b"filter") == b"lfs"
                ):
                    gitattributes.remove_pattern(pattern_bytes)
                    break

        # Write updated attributes
        gitattributes.write_to_file(gitattributes_path)

        # Stage the .gitattributes file
        add(r, [".gitattributes"])

        return lfs_track(r)  # Return updated list


def lfs_init(repo: Union[str, os.PathLike[str], Repo] = ".") -> None:
    """Initialize Git LFS in a repository.

    Args:
        repo: Path to repository

    Returns:
        None
    """
    from .lfs import LFSStore

    with open_repo_closing(repo) as r:
        # Create LFS store
        LFSStore.from_repo(r, create=True)

        # Set up Git config for LFS
        config = r.get_config()
        config.set((b"filter", b"lfs"), b"process", b"git-lfs filter-process")
        config.set((b"filter", b"lfs"), b"required", b"true")
        config.set((b"filter", b"lfs"), b"clean", b"git-lfs clean -- %f")
        config.set((b"filter", b"lfs"), b"smudge", b"git-lfs smudge -- %f")
        config.write_to_path()


def lfs_clean(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
) -> bytes:
    """Clean a file by converting it to an LFS pointer.

    Args:
        repo: Path to repository
        path: Path to file to clean (relative to repo root)

    Returns:
        LFS pointer content as bytes
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        if path is None:
            raise ValueError("Path must be specified")

        # Get LFS store
        lfs_store = LFSStore.from_repo(r)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Read file content
        full_path = os.path.join(r.path, path)
        with open(full_path, "rb") as f:
            content = f.read()

        # Clean the content (convert to LFS pointer)
        return filter_driver.clean(content)


def lfs_smudge(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    pointer_content: Optional[bytes] = None,
) -> bytes:
    """Smudge an LFS pointer by retrieving the actual content.

    Args:
        repo: Path to repository
        pointer_content: LFS pointer content as bytes

    Returns:
        Actual file content as bytes
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        if pointer_content is None:
            raise ValueError("Pointer content must be specified")

        # Get LFS store
        lfs_store = LFSStore.from_repo(r)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Smudge the pointer (retrieve actual content)
        return filter_driver.smudge(pointer_content)
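

# Illustrative sketch (not part of the original source): a clean/smudge round
# trip. lfs_clean() turns file content into pointer bytes backed by the local
# LFS store, and lfs_smudge() resolves the pointer back to the original
# content. The paths are caller-supplied placeholders.
def _example_lfs_roundtrip(repo_path: str, path: str) -> bool:
    """Return True if smudging the cleaned pointer reproduces the file."""
    with open(os.path.join(repo_path, path), "rb") as f:
        original = f.read()
    pointer = lfs_clean(repo_path, path)
    return lfs_smudge(repo_path, pointer) == original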


def lfs_ls_files(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    ref: Optional[Union[str, bytes]] = None,
) -> list[tuple[bytes, str, int]]:
    """List files tracked by Git LFS.

    Args:
        repo: Path to repository
        ref: Git ref to check (defaults to HEAD)

    Returns:
        List of (path, oid, size) tuples for LFS files
    """
    from .lfs import LFSPointer
    from .object_store import iter_tree_contents

    with open_repo_closing(repo) as r:
        if ref is None:
            ref = b"HEAD"
        elif isinstance(ref, str):
            ref = ref.encode()

        # Get the commit and tree
        try:
            commit = r[ref]
            assert isinstance(commit, Commit)
            tree = r[commit.tree]
            assert isinstance(tree, Tree)
        except KeyError:
            return []

        lfs_files = []

        # Walk the tree
        for path, mode, sha in iter_tree_contents(r.object_store, tree.id):
            assert path is not None
            assert mode is not None
            assert sha is not None
            if not stat.S_ISREG(mode):
                continue

            # Check if it's an LFS pointer
            obj = r.object_store[sha]
            if not isinstance(obj, Blob):
                raise AssertionError(f"Expected Blob object, got {type(obj).__name__}")
            pointer = LFSPointer.from_bytes(obj.data)
            if pointer is not None:
                lfs_files.append((path, pointer.oid, pointer.size))

        return lfs_files


def lfs_migrate(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    include: Optional[list[str]] = None,
    exclude: Optional[list[str]] = None,
    everything: bool = False,
) -> int:
    """Migrate files to Git LFS.

    Args:
        repo: Path to repository
        include: Patterns of files to include
        exclude: Patterns of files to exclude
        everything: Migrate all files above a certain size

    Returns:
        Number of migrated files
    """
    from .lfs import LFSFilterDriver, LFSStore

    with open_repo_closing(repo) as r:
        # Initialize LFS if needed
        lfs_store = LFSStore.from_repo(r, create=True)
        filter_driver = LFSFilterDriver(lfs_store, config=r.get_config())

        # Get current index
        index = r.open_index()

        migrated = 0

        # Determine files to migrate
        files_to_migrate = []

        if everything:
            # Migrate all files above 100MB
            for path, entry in index.items():
                full_path = os.path.join(r.path, path.decode())
                if os.path.exists(full_path):
                    size = os.path.getsize(full_path)
                    if size > 100 * 1024 * 1024:  # 100MB
                        files_to_migrate.append(path.decode())
        else:
            # Use include/exclude patterns
            for path, entry in index.items():
                path_str = path.decode()

                # Check include patterns
                if include:
                    matched = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in include
                    )
                    if not matched:
                        continue

                # Check exclude patterns
                if exclude:
                    excluded = any(
                        fnmatch.fnmatch(path_str, pattern) for pattern in exclude
                    )
                    if excluded:
                        continue

                files_to_migrate.append(path_str)

        # Migrate files
        for path_str in files_to_migrate:
            full_path = os.path.join(r.path, path_str)
            if not os.path.exists(full_path):
                continue

            # Read file content
            with open(full_path, "rb") as f:
                content = f.read()

            # Convert to LFS pointer
            pointer_content = filter_driver.clean(content)

            # Write pointer back to file
            with open(full_path, "wb") as f:
                f.write(pointer_content)

            # Create blob for pointer content and update index
            blob = Blob()
            blob.data = pointer_content
            r.object_store.add_object(blob)

            st = os.stat(full_path)
            index_entry = index_entry_from_stat(st, blob.id, 0)
            path_bytes = path_str.encode() if isinstance(path_str, str) else path_str
            index[path_bytes] = index_entry

            migrated += 1

        # Write updated index
        index.write()

        # Track patterns if include was specified
        if include:
            lfs_track(r, include)

        return migrated
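

# Illustrative sketch (not part of the original source): migrating image files
# to LFS pointers while leaving one directory alone. The include and exclude
# patterns are placeholders matched with fnmatch, as in lfs_migrate() above.
def _example_lfs_migrate_images(repo_path: str = "/tmp/example-repo") -> int:
    """Migrate *.png files (except tests/fixtures) to LFS, return the count."""
    return lfs_migrate(repo_path, include=["*.png"], exclude=["tests/fixtures/*"])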


def lfs_pointer_check(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    paths: Optional[Sequence[str]] = None,
) -> dict[str, Optional[Any]]:
    """Check if files are valid LFS pointers.

    Args:
        repo: Path to repository
        paths: List of file paths to check (if None, check all files)

    Returns:
        Dict mapping paths to LFSPointer objects (or None if not a pointer)
    """
    from .lfs import LFSPointer

    with open_repo_closing(repo) as r:
        results = {}

        if paths is None:
            # Check all files in index
            index = r.open_index()
            paths = [path.decode() for path in index]

        for path in paths:
            full_path = os.path.join(r.path, path)
            if os.path.exists(full_path):
                try:
                    with open(full_path, "rb") as f:
                        content = f.read()
                    pointer = LFSPointer.from_bytes(content)
                    results[path] = pointer
                except OSError:
                    results[path] = None
            else:
                results[path] = None

        return results


def lfs_fetch(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    remote: str = "origin",
    refs: Optional[list[Union[str, bytes]]] = None,
) -> int:
    """Fetch LFS objects from remote.

    Args:
        repo: Path to repository
        remote: Remote name (default: origin)
        refs: Specific refs to fetch LFS objects for (default: all refs)

    Returns:
        Number of objects fetched
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config
        config = r.get_config()
        lfs_url_bytes = config.get((b"lfs",), b"url")
        if not lfs_url_bytes:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL
                remote_url_str = remote_url.decode()
                if remote_url_str.endswith(".git"):
                    remote_url_str = remote_url_str[:-4]
                lfs_url = f"{remote_url_str}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url_bytes.decode()

        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods

        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)

        # Find all LFS pointers in the refs
        pointers_to_fetch = []

        if refs is None:
            # Get all refs
            refs = list(r.refs.keys())

        for ref in refs:
            if isinstance(ref, str):
                ref = ref.encode()
            try:
                commit = r[r.refs[ref]]
            except KeyError:
                continue

            # Walk the commit tree
            assert isinstance(commit, Commit)
            for path, mode, sha in r.object_store.iter_tree_contents(commit.tree):
                assert sha is not None
                try:
                    obj = r.object_store[sha]
                except KeyError:
                    pass
                else:
                    if isinstance(obj, Blob):
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            # Check if we already have it
                            try:
                                store.open_object(pointer.oid)
                            except KeyError:
                                pointers_to_fetch.append((pointer.oid, pointer.size))

        # Fetch missing objects
        fetched = 0
        for oid, size in pointers_to_fetch:
            content = client.download(oid, size)
            store.write_object([content])
            fetched += 1

        return fetched


def lfs_pull(
    repo: Union[str, os.PathLike[str], Repo] = ".", remote: str = "origin"
) -> int:
    """Pull LFS objects for current checkout.

    Args:
        repo: Path to repository
        remote: Remote name (default: origin)

    Returns:
        Number of objects fetched
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # First do a fetch for HEAD
        fetched = lfs_fetch(repo, remote, [b"HEAD"])

        # Then checkout LFS files in working directory
        store = LFSStore.from_repo(r)
        index = r.open_index()

        for path, entry in index.items():
            full_path = os.path.join(r.path, path.decode())
            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()
                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    try:
                        # Replace pointer with actual content
                        with store.open_object(pointer.oid) as lfs_file:
                            lfs_content = lfs_file.read()
                        with open(full_path, "wb") as f:
                            f.write(lfs_content)
                    except KeyError:
                        # Object not available
                        pass

        return fetched


def lfs_push(
    repo: Union[str, os.PathLike[str], Repo] = ".",
    remote: str = "origin",
    refs: Optional[list[Union[str, bytes]]] = None,
) -> int:
    """Push LFS objects to remote.

    Args:
        repo: Path to repository
        remote: Remote name (default: origin)
        refs: Specific refs to push LFS objects for (default: current branch)

    Returns:
        Number of objects pushed
    """
    from .lfs import LFSClient, LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        # Get LFS server URL from config
        config = r.get_config()
        lfs_url_bytes = config.get((b"lfs",), b"url")
        if not lfs_url_bytes:
            # Try remote URL
            remote_url = config.get((b"remote", remote.encode()), b"url")
            if remote_url:
                # Append /info/lfs to remote URL
                remote_url_str = remote_url.decode()
                if remote_url_str.endswith(".git"):
                    remote_url_str = remote_url_str[:-4]
                lfs_url = f"{remote_url_str}/info/lfs"
            else:
                raise ValueError(f"No LFS URL configured for remote {remote}")
        else:
            lfs_url = lfs_url_bytes.decode()

        # Get authentication
        auth = None
        # TODO: Support credential helpers and other auth methods

        # Create LFS client and store
        client = LFSClient(lfs_url, auth)
        store = LFSStore.from_repo(r)

        # Find all LFS objects to push
        if refs is None:
            # Push current branch
            head_ref = r.refs.read_ref(b"HEAD")
            refs = [head_ref] if head_ref else []

        objects_to_push = set()

        for ref in refs:
            if isinstance(ref, str):
                ref = ref.encode()
            try:
                if ref.startswith(b"refs/"):
                    commit = r[r.refs[ref]]
                else:
                    commit = r[ref]
            except KeyError:
                continue

            # Walk the commit tree
            assert isinstance(commit, Commit)
            for path, mode, sha in r.object_store.iter_tree_contents(commit.tree):
                assert sha is not None
                try:
                    obj = r.object_store[sha]
                except KeyError:
                    pass
                else:
                    if isinstance(obj, Blob):
                        pointer = LFSPointer.from_bytes(obj.data)
                        if pointer and pointer.is_valid_oid():
                            objects_to_push.add((pointer.oid, pointer.size))

        # Push objects
        pushed = 0
        for oid, size in objects_to_push:
            try:
                with store.open_object(oid) as f:
                    content = f.read()
            except KeyError:
                # Object not in local store
                logging.warning("LFS object %s not found locally", oid)
            else:
                client.upload(oid, size, content)
                pushed += 1

        return pushed


def lfs_status(repo: Union[str, os.PathLike[str], Repo] = ".") -> dict[str, list[str]]:
    """Show status of LFS files.

    Args:
        repo: Path to repository

    Returns:
        Dict with status information
    """
    from .lfs import LFSPointer, LFSStore

    with open_repo_closing(repo) as r:
        store = LFSStore.from_repo(r)
        index = r.open_index()

        status: dict[str, list[str]] = {
            "tracked": [],
            "not_staged": [],
            "not_committed": [],
            "not_pushed": [],
            "missing": [],
        }

        # Check working directory files
        for path, entry in index.items():
            path_str = path.decode()
            full_path = os.path.join(r.path, path_str)
            if os.path.exists(full_path):
                with open(full_path, "rb") as f:
                    content = f.read()
                pointer = LFSPointer.from_bytes(content)
                if pointer and pointer.is_valid_oid():
                    status["tracked"].append(path_str)

                    # Check if object exists locally
                    try:
                        store.open_object(pointer.oid)
                    except KeyError:
                        status["missing"].append(path_str)

                    # Check if file has been modified
                    if isinstance(entry, ConflictedIndexEntry):
                        continue  # Skip conflicted entries
                    try:
                        staged_obj = r.object_store[entry.sha]
                    except KeyError:
                        pass
                    else:
                        if not isinstance(staged_obj, Blob):
                            raise AssertionError(
                                f"Expected Blob object, got {type(staged_obj).__name__}"
                            )
                        staged_pointer = LFSPointer.from_bytes(staged_obj.data)
                        if staged_pointer and staged_pointer.oid != pointer.oid:
                            status["not_staged"].append(path_str)

        # TODO: Check for not committed and not pushed files
        return status


def worktree_list(repo: RepoPath = ".") -> list[Any]:
    """List all worktrees for a repository.

    Args:
        repo: Path to repository

    Returns:
        List of WorkTreeInfo objects
    """
    from .worktree import list_worktrees

    with open_repo_closing(repo) as r:
        return list_worktrees(r)


def worktree_add(
    repo: RepoPath = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
    branch: Optional[Union[str, bytes]] = None,
    commit: Optional[Union[str, bytes]] = None,
    detach: bool = False,
    force: bool = False,
) -> str:
    """Add a new worktree.

    Args:
        repo: Path to repository
        path: Path for new worktree
        branch: Branch to checkout (creates if doesn't exist)
        commit: Specific commit to checkout
        detach: Create with detached HEAD
        force: Force creation even if branch is already checked out

    Returns:
        Path to the newly created worktree
    """
    from .worktree import add_worktree

    if path is None:
        raise ValueError("Path is required for worktree add")

    with open_repo_closing(repo) as r:
        commit_bytes = commit.encode() if isinstance(commit, str) else commit
        wt_repo = add_worktree(
            r, path, branch=branch, commit=commit_bytes, detach=detach, force=force
        )
        return wt_repo.path


def worktree_remove(
    repo: RepoPath = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
    force: bool = False,
) -> None:
    """Remove a worktree.

    Args:
        repo: Path to repository
        path: Path to worktree to remove
        force: Force removal even if there are local changes
    """
    from .worktree import remove_worktree

    if path is None:
        raise ValueError("Path is required for worktree remove")

    with open_repo_closing(repo) as r:
        remove_worktree(r, path, force=force)


def worktree_prune(
    repo: RepoPath = ".", dry_run: bool = False, expire: Optional[int] = None
) -> list[str]:
    """Prune worktree administrative files.

    Args:
        repo: Path to repository
        dry_run: Only show what would be removed
        expire: Only prune worktrees older than this many seconds

    Returns:
        List of pruned worktree names
    """
    from .worktree import prune_worktrees

    with open_repo_closing(repo) as r:
        return prune_worktrees(r, expire=expire, dry_run=dry_run)


def worktree_lock(
    repo: RepoPath = ".",
    path: Optional[Union[str, os.PathLike[str]]] = None,
    reason: Optional[str] = None,
) -> None:
    """Lock a worktree to prevent it from being pruned.

    Args:
        repo: Path to repository
        path: Path to worktree to lock
        reason: Optional reason for locking
    """
    from .worktree import lock_worktree

    if path is None:
        raise ValueError("Path is required for worktree lock")

    with open_repo_closing(repo) as r:
        lock_worktree(r, path, reason=reason)


def worktree_unlock(
    repo: RepoPath = ".", path: Optional[Union[str, os.PathLike[str]]] = None
) -> None:
    """Unlock a worktree.

    Args:
        repo: Path to repository
        path: Path to worktree to unlock
    """
    from .worktree import unlock_worktree

    if path is None:
        raise ValueError("Path is required for worktree unlock")

    with open_repo_closing(repo) as r:
        unlock_worktree(r, path)


def worktree_move(
    repo: RepoPath = ".",
    old_path: Optional[Union[str, os.PathLike[str]]] = None,
    new_path: Optional[Union[str, os.PathLike[str]]] = None,
) -> None:
    """Move a worktree to a new location.

    Args:
        repo: Path to repository
        old_path: Current path of worktree
        new_path: New path for worktree
    """
    from .worktree import move_worktree

    if old_path is None or new_path is None:
        raise ValueError("Both old_path and new_path are required for worktree move")

    with open_repo_closing(repo) as r:
        move_worktree(r, old_path, new_path)


def worktree_repair(
    repo: RepoPath = ".",
    paths: Optional[list[Union[str, os.PathLike[str]]]] = None,
) -> list[str]:
    """Repair worktree administrative files.

    Args:
        repo: Path to repository
        paths: Optional list of worktree paths to repair. If None, repairs
            connections from the main repository to all linked worktrees.

    Returns:
        List of repaired worktree paths
    """
    from .worktree import repair_worktree

    with open_repo_closing(repo) as r:
        return repair_worktree(r, paths=paths)
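

# Illustrative sketch (not part of the original source): creating a second
# worktree on a new branch, listing all worktrees, then removing it again.
# The paths and branch name are placeholders.
def _example_worktree_cycle(repo_path: str = "/tmp/example-repo") -> None:
    """Add, list, and remove a linked worktree."""
    wt_path = worktree_add(repo_path, "/tmp/example-wt", branch=b"feature")
    for info in worktree_list(repo_path):
        print(info)
    worktree_remove(repo_path, wt_path)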


def merge_base(
    repo: RepoPath = ".",
    committishes: Optional[Sequence[Union[str, bytes]]] = None,
    all: bool = False,
    octopus: bool = False,
) -> list[bytes]:
    """Find the best common ancestor(s) between commits.

    Args:
        repo: Path to repository
        committishes: List of commit references (branches, tags, commit IDs)
        all: If True, return all merge bases, not just one
        octopus: If True, find merge base of all commits (n-way merge)

    Returns:
        List of commit IDs that are merge bases
    """
    from .graph import find_merge_base, find_octopus_base
    from .objects import Commit
    from .objectspec import parse_object

    if committishes is None or len(committishes) < 2:
        raise ValueError("At least two commits are required")

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Find merge base
        if octopus:
            result = find_octopus_base(r, commit_ids)
        else:
            result = find_merge_base(r, commit_ids)

        # Return first result only if all=False
        if not all and result:
            return [result[0]]
        return result


def is_ancestor(
    repo: RepoPath = ".",
    ancestor: Optional[Union[str, bytes]] = None,
    descendant: Optional[Union[str, bytes]] = None,
) -> bool:
    """Check if one commit is an ancestor of another.

    Args:
        repo: Path to repository
        ancestor: Commit that might be the ancestor
        descendant: Commit that might be the descendant

    Returns:
        True if ancestor is an ancestor of descendant, False otherwise
    """
    from .graph import find_merge_base
    from .objects import Commit
    from .objectspec import parse_object

    if ancestor is None or descendant is None:
        raise ValueError("Both ancestor and descendant are required")

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        ancestor_obj = parse_object(r, ancestor)
        if not isinstance(ancestor_obj, Commit):
            raise ValueError(f"Expected commit, got {ancestor_obj.type_name.decode()}")

        descendant_obj = parse_object(r, descendant)
        if not isinstance(descendant_obj, Commit):
            raise ValueError(
                f"Expected commit, got {descendant_obj.type_name.decode()}"
            )

        # If ancestor is the merge base of (ancestor, descendant), then it's an ancestor
        merge_bases = find_merge_base(r, [ancestor_obj.id, descendant_obj.id])
        return merge_bases == [ancestor_obj.id]
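

# Illustrative sketch (not part of the original source): checking whether a
# release tag is already contained in the current branch. The ref names
# "v1.0" and "HEAD" are placeholders.
def _example_contains(repo_path: str = "/tmp/example-repo") -> None:
    """Report the merge base of two refs and whether one contains the other."""
    bases = merge_base(repo_path, ["v1.0", "HEAD"])
    print("merge base:", bases[0].decode("ascii") if bases else None)
    print("v1.0 reachable from HEAD:", is_ancestor(repo_path, "v1.0", "HEAD"))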


def independent_commits(
    repo: RepoPath = ".",
    committishes: Optional[Sequence[Union[str, bytes]]] = None,
) -> list[bytes]:
    """Filter commits to only those that are not reachable from others.

    Args:
        repo: Path to repository
        committishes: List of commit references to filter

    Returns:
        List of commit IDs that are not ancestors of any other commits in the list
    """
    from .graph import independent
    from .objects import Commit
    from .objectspec import parse_object

    if committishes is None or len(committishes) == 0:
        return []

    with open_repo_closing(repo) as r:
        # Resolve committish references to commit IDs
        commit_ids = []
        for committish in committishes:
            obj = parse_object(r, committish)
            if not isinstance(obj, Commit):
                raise ValueError(f"Expected commit, got {obj.type_name.decode()}")
            commit_ids.append(obj.id)

        # Filter to independent commits
        return independent(r, commit_ids)


def mailsplit(
    input_path: Optional[Union[str, os.PathLike[str], IO[bytes]]] = None,
    output_dir: Union[str, os.PathLike[str]] = ".",
    start_number: int = 1,
    precision: int = 4,
    keep_cr: bool = False,
    mboxrd: bool = False,
    is_maildir: bool = False,
) -> list[str]:
    r"""Split an mbox file or Maildir into individual message files.

    This is similar to git mailsplit.

    Args:
        input_path: Path to mbox file, Maildir, or file-like object. If None, reads from stdin.
        output_dir: Directory where individual messages will be written
        start_number: Starting number for output files (default: 1)
        precision: Number of digits for output filenames (default: 4)
        keep_cr: If True, preserve \r in lines ending with \r\n (default: False)
        mboxrd: If True, treat input as mboxrd format and reverse escaping (default: False)
        is_maildir: If True, treat input_path as a Maildir (default: False)

    Returns:
        List of output file paths that were created

    Raises:
        ValueError: If output_dir doesn't exist or input is invalid
        OSError: If there are issues reading/writing files
    """
    from typing import BinaryIO, cast

    from .mbox import split_maildir, split_mbox

    if is_maildir:
        if input_path is None:
            raise ValueError("input_path is required for Maildir splitting")
        if not isinstance(input_path, (str, bytes, os.PathLike)):
            raise ValueError("Maildir splitting requires a path, not a file object")
        # Convert PathLike to str for split_maildir
        maildir_path: Union[str, bytes] = (
            os.fspath(input_path) if isinstance(input_path, os.PathLike) else input_path
        )
        out_dir: Union[str, bytes] = (
            os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
        )
        return split_maildir(
            maildir_path,
            out_dir,
            start_number=start_number,
            precision=precision,
            keep_cr=keep_cr,
        )
    else:
        if input_path is None:
            # Read from stdin
            input_file: Union[str, bytes, BinaryIO] = sys.stdin.buffer
        else:
            # Convert PathLike to str if needed
            if isinstance(input_path, os.PathLike):
                input_file = os.fspath(input_path)
            else:
                # input_path is either str or IO[bytes] here
                input_file = cast(Union[str, BinaryIO], input_path)
        out_dir = (
            os.fspath(output_dir) if isinstance(output_dir, os.PathLike) else output_dir
        )
        return split_mbox(
            input_file,
            out_dir,
            start_number=start_number,
            precision=precision,
            keep_cr=keep_cr,
            mboxrd=mboxrd,
        )
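

# Illustrative sketch (not part of the original source): splitting an mbox
# into numbered message files ready for patch application. The input path is
# a placeholder; the output directory is created with tempfile.
def _example_mailsplit(mbox_path: str = "/tmp/patches.mbox") -> list[str]:
    """Split an mbox into 0001, 0002, ... files in a scratch directory."""
    import tempfile

    out_dir = tempfile.mkdtemp(prefix="split-")
    return mailsplit(mbox_path, out_dir)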