# client.py -- Implementation of the client side git protocols # Copyright (C) 2008-2013 Jelmer Vernooij # # SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Client side support for the Git protocol. The Dulwich client supports the following capabilities: * thin-pack * multi_ack_detailed * multi_ack * side-band-64k * ofs-delta * quiet * report-status * delete-refs * shallow Known capabilities that are not supported: * no-progress * include-tag """ import copy import functools import logging import os import select import socket import subprocess import sys from collections.abc import Iterable, Iterator from contextlib import closing from io import BufferedReader, BytesIO from typing import ( IO, TYPE_CHECKING, Callable, ClassVar, Optional, Union, ) from urllib.parse import quote as urlquote from urllib.parse import unquote as urlunquote from urllib.parse import urljoin, urlparse, urlunparse, urlunsplit if TYPE_CHECKING: import urllib3 import dulwich from .config import Config, apply_instead_of, get_xdg_config_home_path from .errors import GitProtocolError, NotGitRepository, SendPackError from .object_store import GraphWalker from .pack import ( PACK_SPOOL_FILE_MAX_SIZE, PackChunkGenerator, PackData, write_pack_from_container, ) from .protocol import ( _RBUFSIZE, CAPABILITIES_REF, CAPABILITY_AGENT, CAPABILITY_DELETE_REFS, CAPABILITY_FETCH, CAPABILITY_FILTER, CAPABILITY_INCLUDE_TAG, CAPABILITY_MULTI_ACK, CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_OFS_DELTA, CAPABILITY_QUIET, CAPABILITY_REPORT_STATUS, CAPABILITY_SHALLOW, CAPABILITY_SIDE_BAND_64K, CAPABILITY_SYMREF, CAPABILITY_THIN_PACK, COMMAND_DEEPEN, COMMAND_DONE, COMMAND_HAVE, COMMAND_SHALLOW, COMMAND_UNSHALLOW, COMMAND_WANT, DEFAULT_GIT_PROTOCOL_VERSION_FETCH, DEFAULT_GIT_PROTOCOL_VERSION_SEND, GIT_PROTOCOL_VERSIONS, KNOWN_RECEIVE_CAPABILITIES, KNOWN_UPLOAD_CAPABILITIES, SIDE_BAND_CHANNEL_DATA, SIDE_BAND_CHANNEL_FATAL, SIDE_BAND_CHANNEL_PROGRESS, TCP_GIT_PORT, ZERO_SHA, HangupException, PktLineParser, Protocol, agent_string, capability_agent, extract_capabilities, extract_capability_names, parse_capability, pkt_line, pkt_seq, ) from .refs import ( PEELED_TAG_SUFFIX, SYMREF, Ref, _import_remote_refs, _set_default_branch, _set_head, _set_origin_head, filter_ref_prefix, read_info_refs, split_peeled_refs, ) from .repo import BaseRepo, Repo # Default ref prefix, used if none is specified. # GitHub defaults to just sending HEAD if no ref-prefix is # specified, so explicitly request all refs to match # behaviour with v1 when no ref-prefix is specified. 
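# Callers can narrow this by passing e.g. ref_prefix=[b"refs/heads/"] to
# fetch()/get_refs(); with protocol v2 the prefixes are sent to the server
# as "ref-prefix" arguments of the ls-refs command, otherwise filtering
# happens client-side via filter_ref_prefix().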
DEFAULT_REF_PREFIX = [b"HEAD", b"refs/"] ObjectID = bytes logger = logging.getLogger(__name__) class InvalidWants(Exception): """Invalid wants.""" def __init__(self, wants: set[bytes]) -> None: """Initialize InvalidWants exception. Args: wants: List of invalid wants """ Exception.__init__( self, f"requested wants not in server provided refs: {wants!r}" ) class HTTPUnauthorized(Exception): """Raised when authentication fails.""" def __init__(self, www_authenticate: Optional[str], url: str) -> None: """Initialize HTTPUnauthorized exception. Args: www_authenticate: WWW-Authenticate header value url: URL that requires authentication """ Exception.__init__(self, "No valid credentials provided") self.www_authenticate = www_authenticate self.url = url class HTTPProxyUnauthorized(Exception): """Raised when proxy authentication fails.""" def __init__(self, proxy_authenticate: Optional[str], url: str) -> None: """Initialize HTTPProxyUnauthorized exception. Args: proxy_authenticate: Proxy-Authenticate header value url: URL that requires proxy authentication """ Exception.__init__(self, "No valid proxy credentials provided") self.proxy_authenticate = proxy_authenticate self.url = url def _fileno_can_read(fileno: int) -> bool: """Check if a file descriptor is readable.""" return len(select.select([fileno], [], [], 0)[0]) > 0 def _win32_peek_avail(handle: int) -> int: """Wrapper around PeekNamedPipe to check how many bytes are available.""" from ctypes import ( # type: ignore[attr-defined] byref, windll, # type: ignore[attr-defined] wintypes, ) c_avail = wintypes.DWORD() c_message = wintypes.DWORD() success = windll.kernel32.PeekNamedPipe( # type: ignore[attr-defined] handle, None, 0, None, byref(c_avail), byref(c_message) ) if not success: from ctypes import GetLastError # type: ignore[attr-defined] raise OSError(GetLastError()) return c_avail.value COMMON_CAPABILITIES = [CAPABILITY_OFS_DELTA, CAPABILITY_SIDE_BAND_64K] UPLOAD_CAPABILITIES = [ CAPABILITY_THIN_PACK, CAPABILITY_MULTI_ACK, CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_SHALLOW, *COMMON_CAPABILITIES, ] RECEIVE_CAPABILITIES = [ CAPABILITY_REPORT_STATUS, CAPABILITY_DELETE_REFS, *COMMON_CAPABILITIES, ] class ReportStatusParser: """Handle status as reported by servers with 'report-status' capability.""" def __init__(self) -> None: """Initialize ReportStatusParser.""" self._done = False self._pack_status: Optional[bytes] = None self._ref_statuses: list[bytes] = [] def check(self) -> Iterator[tuple[bytes, Optional[str]]]: """Check if there were any errors and, if so, raise exceptions. Raises: SendPackError: Raised when the server could not unpack Returns: iterator over refs """ if self._pack_status not in (b"unpack ok", None): raise SendPackError(self._pack_status) for status in self._ref_statuses: try: status, rest = status.split(b" ", 1) except ValueError: # malformed response, move on to the next one continue if status == b"ng": ref, error = rest.split(b" ", 1) yield ref, error.decode("utf-8") elif status == b"ok": yield rest, None else: raise GitProtocolError(f"invalid ref status {status!r}") def handle_packet(self, pkt: Optional[bytes]) -> None: """Handle a packet. Raises: GitProtocolError: Raised when packets are received after a flush packet. 
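        Example (illustrative sketch of the packet sequence; the ref names
        are hypothetical):

            parser = ReportStatusParser()
            parser.handle_packet(b"unpack ok")
            parser.handle_packet(b"ok refs/heads/main")
            parser.handle_packet(b"ng refs/heads/topic non-fast-forward")
            parser.handle_packet(None)  # flush-pkt ends the status report
            dict(parser.check())
            # {b'refs/heads/main': None, b'refs/heads/topic': 'non-fast-forward'}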
""" if self._done: raise GitProtocolError("received more data after status report") if pkt is None: self._done = True return if self._pack_status is None: self._pack_status = pkt.strip() else: ref_status = pkt.strip() self._ref_statuses.append(ref_status) def negotiate_protocol_version(proto: Protocol) -> int: """Negotiate protocol version with the server.""" pkt = proto.read_pkt_line() if pkt is not None and pkt.strip() == b"version 2": return 2 proto.unread_pkt_line(pkt) return 0 def read_server_capabilities(pkt_seq: Iterable[bytes]) -> set[bytes]: """Read server capabilities from packet sequence.""" server_capabilities = [] for pkt in pkt_seq: server_capabilities.append(pkt) return set(server_capabilities) def read_pkt_refs_v2( pkt_seq: Iterable[bytes], ) -> tuple[dict[bytes, Optional[bytes]], dict[bytes, bytes], dict[bytes, bytes]]: """Read references using protocol version 2.""" refs: dict[bytes, Optional[bytes]] = {} symrefs = {} peeled = {} # Receive refs from server for pkt in pkt_seq: parts = pkt.rstrip(b"\n").split(b" ") sha: Optional[bytes] = parts[0] if sha == b"unborn": sha = None ref = parts[1] for part in parts[2:]: if part.startswith(b"peeled:"): peeled[ref] = part[7:] elif part.startswith(b"symref-target:"): symrefs[ref] = part[14:] else: logging.warning("unknown part in pkt-ref: %s", part) refs[ref] = sha return refs, symrefs, peeled def read_pkt_refs_v1( pkt_seq: Iterable[bytes], ) -> tuple[dict[bytes, Optional[bytes]], set[bytes]]: """Read references using protocol version 1.""" server_capabilities = None refs: dict[bytes, Optional[bytes]] = {} # Receive refs from server for pkt in pkt_seq: (sha, ref) = pkt.rstrip(b"\n").split(None, 1) if sha == b"ERR": raise GitProtocolError(ref.decode("utf-8", "replace")) if server_capabilities is None: (ref, server_capabilities) = extract_capabilities(ref) refs[ref] = sha if len(refs) == 0: return {}, set() if refs == {CAPABILITIES_REF: ZERO_SHA}: refs = {} assert server_capabilities is not None return refs, set(server_capabilities) class _DeprecatedDictProxy: """Base class for result objects that provide deprecated dict-like interface.""" refs: dict[bytes, Optional[bytes]] # To be overridden by subclasses _FORWARDED_ATTRS: ClassVar[set[str]] = { "clear", "copy", "fromkeys", "get", "items", "keys", "pop", "popitem", "setdefault", "update", "values", "viewitems", "viewkeys", "viewvalues", } def _warn_deprecated(self) -> None: import warnings warnings.warn( f"Use {self.__class__.__name__}.refs instead.", DeprecationWarning, stacklevel=3, ) def __contains__(self, name: bytes) -> bool: self._warn_deprecated() return name in self.refs def __getitem__(self, name: bytes) -> Optional[bytes]: self._warn_deprecated() return self.refs[name] def __len__(self) -> int: self._warn_deprecated() return len(self.refs) def __iter__(self) -> Iterator[bytes]: self._warn_deprecated() return iter(self.refs) def __getattribute__(self, name: str) -> object: # Avoid infinite recursion by checking against class variable directly if name != "_FORWARDED_ATTRS" and name in type(self)._FORWARDED_ATTRS: self._warn_deprecated() # Direct attribute access to avoid recursion refs = object.__getattribute__(self, "refs") return getattr(refs, name) return super().__getattribute__(name) class FetchPackResult(_DeprecatedDictProxy): """Result of a fetch-pack operation. 
Attributes: refs: Dictionary with all remote refs symrefs: Dictionary with remote symrefs agent: User agent string """ symrefs: dict[bytes, bytes] agent: Optional[bytes] def __init__( self, refs: dict[bytes, Optional[bytes]], symrefs: dict[bytes, bytes], agent: Optional[bytes], new_shallow: Optional[set[bytes]] = None, new_unshallow: Optional[set[bytes]] = None, ) -> None: """Initialize FetchPackResult. Args: refs: Dictionary with all remote refs symrefs: Dictionary with remote symrefs agent: User agent string new_shallow: New shallow commits new_unshallow: New unshallow commits """ self.refs = refs self.symrefs = symrefs self.agent = agent self.new_shallow = new_shallow self.new_unshallow = new_unshallow def __eq__(self, other: object) -> bool: """Check equality with another object.""" if isinstance(other, dict): self._warn_deprecated() return self.refs == other if not isinstance(other, FetchPackResult): return False return ( self.refs == other.refs and self.symrefs == other.symrefs and self.agent == other.agent ) def __repr__(self) -> str: """Return string representation of FetchPackResult.""" return f"{self.__class__.__name__}({self.refs!r}, {self.symrefs!r}, {self.agent!r})" class LsRemoteResult(_DeprecatedDictProxy): """Result of a ls-remote operation. Attributes: refs: Dictionary with all remote refs symrefs: Dictionary with remote symrefs """ symrefs: dict[bytes, bytes] def __init__( self, refs: dict[bytes, Optional[bytes]], symrefs: dict[bytes, bytes] ) -> None: """Initialize LsRemoteResult. Args: refs: Dictionary with all remote refs symrefs: Dictionary with remote symrefs """ self.refs = refs self.symrefs = symrefs def _warn_deprecated(self) -> None: import warnings warnings.warn( "Treating LsRemoteResult as a dictionary is deprecated. " "Use result.refs instead.", DeprecationWarning, stacklevel=3, ) def __eq__(self, other: object) -> bool: """Check equality with another object.""" if isinstance(other, dict): self._warn_deprecated() return self.refs == other if not isinstance(other, LsRemoteResult): return False return self.refs == other.refs and self.symrefs == other.symrefs def __repr__(self) -> str: """Return string representation of LsRemoteResult.""" return f"{self.__class__.__name__}({self.refs!r}, {self.symrefs!r})" class SendPackResult(_DeprecatedDictProxy): """Result of a upload-pack operation. Attributes: refs: Dictionary with all remote refs agent: User agent string ref_status: Optional dictionary mapping ref name to error message (if it failed to update), or None if it was updated successfully """ def __init__( self, refs: dict[bytes, Optional[bytes]], agent: Optional[bytes] = None, ref_status: Optional[dict[bytes, Optional[str]]] = None, ) -> None: """Initialize SendPackResult. 
Args: refs: Dictionary with all remote refs agent: User agent string ref_status: Optional dictionary mapping ref name to error message """ self.refs = refs self.agent = agent self.ref_status = ref_status def __eq__(self, other: object) -> bool: """Check equality with another object.""" if isinstance(other, dict): self._warn_deprecated() return self.refs == other if not isinstance(other, SendPackResult): return False return self.refs == other.refs and self.agent == other.agent def __repr__(self) -> str: """Return string representation of SendPackResult.""" return f"{self.__class__.__name__}({self.refs!r}, {self.agent!r})" def _read_shallow_updates(pkt_seq: Iterable[bytes]) -> tuple[set[bytes], set[bytes]]: new_shallow = set() new_unshallow = set() for pkt in pkt_seq: if pkt == b"shallow-info\n": # Git-protocol v2 continue try: cmd, sha = pkt.split(b" ", 1) except ValueError: raise GitProtocolError(f"unknown command {pkt!r}") if cmd == COMMAND_SHALLOW: new_shallow.add(sha.strip()) elif cmd == COMMAND_UNSHALLOW: new_unshallow.add(sha.strip()) else: raise GitProtocolError(f"unknown command {pkt!r}") return (new_shallow, new_unshallow) class _v1ReceivePackHeader: def __init__(self, capabilities: list, old_refs: dict, new_refs: dict) -> None: self.want: set[bytes] = set() self.have: set[bytes] = set() self._it = self._handle_receive_pack_head(capabilities, old_refs, new_refs) self.sent_capabilities = False def __iter__(self) -> Iterator[Optional[bytes]]: return self._it def _handle_receive_pack_head( self, capabilities: list, old_refs: dict, new_refs: dict ) -> Iterator[Optional[bytes]]: """Handle the head of a 'git-receive-pack' request. Args: capabilities: List of negotiated capabilities old_refs: Old refs, as received from the server new_refs: Refs to change Returns: (have, want) tuple """ self.have = {x for x in old_refs.values() if not x == ZERO_SHA} for refname in new_refs: if not isinstance(refname, bytes): raise TypeError(f"refname is not a bytestring: {refname!r}") old_sha1 = old_refs.get(refname, ZERO_SHA) if not isinstance(old_sha1, bytes): raise TypeError( f"old sha1 for {refname!r} is not a bytestring: {old_sha1!r}" ) new_sha1 = new_refs.get(refname, ZERO_SHA) if not isinstance(new_sha1, bytes): raise TypeError( f"old sha1 for {refname!r} is not a bytestring {new_sha1!r}" ) if old_sha1 != new_sha1: logger.debug( "Sending updated ref %r: %r -> %r", refname, old_sha1, new_sha1 ) if self.sent_capabilities: yield old_sha1 + b" " + new_sha1 + b" " + refname else: yield ( old_sha1 + b" " + new_sha1 + b" " + refname + b"\0" + b" ".join(sorted(capabilities)) ) self.sent_capabilities = True if new_sha1 not in self.have and new_sha1 != ZERO_SHA: self.want.add(new_sha1) yield None def _read_side_band64k_data(pkt_seq: Iterable[bytes]) -> Iterator[tuple[int, bytes]]: """Read per-channel data. This requires the side-band-64k capability. 
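    The first byte of every packet is the channel number (data, progress or
    fatal error; see the SIDE_BAND_CHANNEL_* constants) and the remaining
    bytes are that channel's payload.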
Args: pkt_seq: Sequence of packets to read """ for pkt in pkt_seq: channel = ord(pkt[:1]) yield channel, pkt[1:] def find_capability( capabilities: list, key: bytes, value: Optional[bytes] ) -> Optional[bytes]: """Find a capability with a specific key and value.""" for capability in capabilities: k, v = parse_capability(capability) if k != key: continue if value and v and value not in v.split(b" "): continue return capability return None def _handle_upload_pack_head( proto: Protocol, capabilities: list, graph_walker: GraphWalker, wants: list, can_read: Optional[Callable], depth: Optional[int], protocol_version: Optional[int], ) -> tuple[Optional[set[bytes]], Optional[set[bytes]]]: """Handle the head of a 'git-upload-pack' request. Args: proto: Protocol object to read from capabilities: List of negotiated capabilities graph_walker: GraphWalker instance to call .ack() on wants: List of commits to fetch can_read: function that returns a boolean that indicates whether there is extra graph data to read on proto depth: Depth for request protocol_version: Neogiated Git protocol version. """ new_shallow: Optional[set[bytes]] new_unshallow: Optional[set[bytes]] assert isinstance(wants, list) and isinstance(wants[0], bytes) wantcmd = COMMAND_WANT + b" " + wants[0] if protocol_version is None: protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_SEND if protocol_version != 2: wantcmd += b" " + b" ".join(sorted(capabilities)) wantcmd += b"\n" proto.write_pkt_line(wantcmd) for want in wants[1:]: proto.write_pkt_line(COMMAND_WANT + b" " + want + b"\n") if depth not in (0, None) or ( hasattr(graph_walker, "shallow") and graph_walker.shallow ): if protocol_version == 2: if not find_capability(capabilities, CAPABILITY_FETCH, CAPABILITY_SHALLOW): raise GitProtocolError( "server does not support shallow capability required for depth" ) elif CAPABILITY_SHALLOW not in capabilities: raise GitProtocolError( "server does not support shallow capability required for depth" ) if hasattr(graph_walker, "shallow"): for sha in graph_walker.shallow: proto.write_pkt_line(COMMAND_SHALLOW + b" " + sha + b"\n") if depth is not None: proto.write_pkt_line( COMMAND_DEEPEN + b" " + str(depth).encode("ascii") + b"\n" ) if protocol_version != 2: proto.write_pkt_line(None) have = next(graph_walker) while have: proto.write_pkt_line(COMMAND_HAVE + b" " + have + b"\n") if can_read is not None and can_read(): pkt = proto.read_pkt_line() assert pkt is not None parts = pkt.rstrip(b"\n").split(b" ") if parts[0] == b"ACK": graph_walker.ack(parts[1]) if parts[2] in (b"continue", b"common"): pass elif parts[2] == b"ready": break else: raise AssertionError( f"{parts[2]!r} not in ('continue', 'ready', 'common)" ) have = next(graph_walker) proto.write_pkt_line(COMMAND_DONE + b"\n") if protocol_version == 2: proto.write_pkt_line(None) if depth not in (0, None): if can_read is not None: (new_shallow, new_unshallow) = _read_shallow_updates(proto.read_pkt_seq()) else: new_shallow = None new_unshallow = None else: new_shallow = new_unshallow = set() return (new_shallow, new_unshallow) def _handle_upload_pack_tail( proto, capabilities: set[bytes], graph_walker, pack_data: Callable[[bytes], None], progress: Optional[Callable[[bytes], None]] = None, rbufsize=_RBUFSIZE, protocol_version=0, ) -> None: """Handle the tail of a 'git-upload-pack' request. 
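    Reads the remaining ACK/NAK negotiation lines and then streams the
    server's response: pack bytes are passed to pack_data and, when
    side-band-64k (or protocol v2) is in use, progress messages are
    demultiplexed and passed to progress.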
Args: proto: Protocol object to read from capabilities: List of negotiated capabilities graph_walker: GraphWalker instance to call .ack() on pack_data: Function to call with pack data progress: Optional progress reporting function rbufsize: Read buffer size protocol_version: Neogiated Git protocol version. """ pkt = proto.read_pkt_line() while pkt: parts = pkt.rstrip(b"\n").split(b" ") if protocol_version == 2 and parts[0] != "packfile": break else: if parts[0] == b"ACK": graph_walker.ack(parts[1]) if parts[0] == b"NAK": graph_walker.nak() if len(parts) < 3 or parts[2] not in ( b"ready", b"continue", b"common", ): break pkt = proto.read_pkt_line() if CAPABILITY_SIDE_BAND_64K in capabilities or protocol_version == 2: if progress is None: # Just ignore progress data def progress(x: bytes) -> None: pass for chan, data in _read_side_band64k_data(proto.read_pkt_seq()): if chan == SIDE_BAND_CHANNEL_DATA: pack_data(data) elif chan == SIDE_BAND_CHANNEL_PROGRESS: progress(data) else: raise AssertionError(f"Invalid sideband channel {chan}") else: while True: data = proto.read(rbufsize) if data == b"": break pack_data(data) def _extract_symrefs_and_agent(capabilities): """Extract symrefs and agent from capabilities. Args: capabilities: List of capabilities Returns: (symrefs, agent) tuple """ symrefs = {} agent = None for capability in capabilities: k, v = parse_capability(capability) if k == CAPABILITY_SYMREF: assert v is not None (src, dst) = v.split(b":", 1) symrefs[src] = dst if k == CAPABILITY_AGENT: agent = v return (symrefs, agent) # TODO(durin42): this doesn't correctly degrade if the server doesn't # support some capabilities. This should work properly with servers # that don't support multi_ack. class GitClient: """Git smart server client.""" def __init__( self, thin_packs=True, report_activity=None, quiet=False, include_tags=False, **kwargs, ) -> None: """Create a new GitClient instance. Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. quiet: Whether to suppress output include_tags: send annotated tags when sending the objects they point to **kwargs: Additional keyword arguments """ self._report_activity = report_activity self._report_status_parser: Optional[ReportStatusParser] = None self._fetch_capabilities = set(UPLOAD_CAPABILITIES) self._fetch_capabilities.add(capability_agent()) self._send_capabilities = set(RECEIVE_CAPABILITIES) self._send_capabilities.add(capability_agent()) if quiet: self._send_capabilities.add(CAPABILITY_QUIET) if not thin_packs: self._fetch_capabilities.remove(CAPABILITY_THIN_PACK) if include_tags: self._fetch_capabilities.add(CAPABILITY_INCLUDE_TAG) self.protocol_version = 0 # will be overridden later def get_url(self, path) -> str: """Retrieves full url to given path. Args: path: Repository path (as string) Returns: Url to path (as string) """ raise NotImplementedError(self.get_url) @classmethod def from_parsedurl(cls, parsedurl, **kwargs) -> "GitClient": """Create an instance of this client from a urlparse.parsed object. Args: parsedurl: Result of urlparse() **kwargs: Additional keyword arguments passed to the client constructor Returns: A `GitClient` object """ raise NotImplementedError(cls.from_parsedurl) def send_pack( self, path: str, update_refs, generate_pack_data, progress=None, ) -> SendPackResult: """Upload a pack to a remote repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. 
Receive dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of objects and list of pack data to include progress: Optional progress function Returns: SendPackResult object Raises: SendPackError: if server rejects the pack data """ raise NotImplementedError(self.send_pack) def clone( self, path, target_path, mkdir: bool = True, bare: bool = False, origin: Optional[str] = "origin", checkout=None, branch=None, progress=None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec=None, protocol_version: Optional[int] = None, ) -> Repo: """Clone a repository.""" if mkdir: os.mkdir(target_path) try: target = None if not bare: target = Repo.init(target_path) if checkout is None: checkout = True else: if checkout: raise ValueError("checkout and bare are incompatible") target = Repo.init_bare(target_path) # TODO(jelmer): abstract method for get_location? if isinstance(self, (LocalGitClient, SubprocessGitClient)): encoded_path = path.encode("utf-8") else: encoded_path = self.get_url(path).encode("utf-8") assert target is not None if origin is not None: target_config = target.get_config() target_config.set( (b"remote", origin.encode("utf-8")), b"url", encoded_path ) target_config.set( (b"remote", origin.encode("utf-8")), b"fetch", b"+refs/heads/*:refs/remotes/" + origin.encode("utf-8") + b"/*", ) target_config.write_to_path() ref_message = b"clone: from " + encoded_path result = self.fetch( path, target, progress=progress, depth=depth, ref_prefix=ref_prefix, filter_spec=filter_spec, protocol_version=protocol_version, ) if origin is not None: _import_remote_refs( target.refs, origin, result.refs, message=ref_message ) origin_head = result.symrefs.get(b"HEAD") origin_sha = result.refs.get(b"HEAD") if origin is None or (origin_sha and not origin_head): # set detached HEAD if origin_sha is not None: target.refs[b"HEAD"] = origin_sha head = origin_sha else: head = None else: _set_origin_head(target.refs, origin.encode("utf-8"), origin_head) head_ref = _set_default_branch( target.refs, origin.encode("utf-8"), origin_head, branch, ref_message, ) # Update target head if head_ref: head = _set_head(target.refs, head_ref, ref_message) else: head = None if checkout and head is not None: target.get_worktree().reset_index() except BaseException: if target is not None: target.close() if mkdir: import shutil shutil.rmtree(target_path) raise return target def fetch( self, path: str, target: BaseRepo, determine_wants: Optional[ Callable[[dict[bytes, bytes], Optional[int]], list[bytes]] ] = None, progress: Optional[Callable[[bytes], None]] = None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec: Optional[bytes] = None, protocol_version: Optional[int] = None, ) -> FetchPackResult: """Fetch into a target repository. Args: path: Path to fetch from (as bytestring) target: Target repository to fetch into determine_wants: Optional function to determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. Defaults to all shas. progress: Optional progress function depth: Depth to fetch at ref_prefix: List of prefixes of desired references, as a list of bytestrings. Filtering is done by the server if supported, and client side otherwise. filter_spec: A git-rev-list-style object filter spec, as bytestring. Only used if the server supports the Git protocol-v2 'filter' feature, and ignored otherwise. 
protocol_version: Desired Git protocol version. By default the highest mutually supported protocol version will be used. Returns: Dictionary with all remote refs (not just those fetched) """ if determine_wants is None: determine_wants = target.object_store.determine_wants_all if CAPABILITY_THIN_PACK in self._fetch_capabilities: from tempfile import SpooledTemporaryFile f: IO[bytes] = SpooledTemporaryFile( max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-", dir=getattr(target.object_store, "path", None), ) def commit() -> None: if f.tell(): f.seek(0) target.object_store.add_thin_pack(f.read, None, progress=progress) # type: ignore f.close() def abort() -> None: f.close() else: f, commit, abort = target.object_store.add_pack() try: result = self.fetch_pack( path, determine_wants, target.get_graph_walker(), f.write, progress=progress, depth=depth, ref_prefix=ref_prefix, filter_spec=filter_spec, protocol_version=protocol_version, ) except BaseException: abort() raise else: commit() target.update_shallow(result.new_shallow, result.new_unshallow) return result def fetch_pack( self, path: str, determine_wants, graph_walker, pack_data, *, progress: Optional[Callable[[bytes], None]] = None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec=None, protocol_version: Optional[int] = None, ) -> FetchPackResult: """Retrieve a pack from a git smart server. Args: path: Remote path to fetch from determine_wants: Function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Shallow fetch depth ref_prefix: List of prefixes of desired references, as a list of bytestrings. Filtering is done by the server if supported, and client side otherwise. filter_spec: A git-rev-list-style object filter spec, as bytestring. Only used if the server supports the Git protocol-v2 'filter' feature, and ignored otherwise. protocol_version: Desired Git protocol version. By default the highest mutually supported protocol version will be used. Returns: FetchPackResult object """ raise NotImplementedError(self.fetch_pack) def get_refs( self, path, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, ) -> LsRemoteResult: """Retrieve the current refs from a git smart server. Args: path: Path to the repo to fetch from. (as bytestring) protocol_version: Desired Git protocol version. ref_prefix: Prefix filter for refs. Returns: LsRemoteResult object with refs and symrefs """ raise NotImplementedError(self.get_refs) @staticmethod def _should_send_pack(new_refs): # The packfile MUST NOT be sent if the only command used is delete. return any(sha != ZERO_SHA for sha in new_refs.values()) def _negotiate_receive_pack_capabilities(self, server_capabilities): negotiated_capabilities = self._send_capabilities & server_capabilities (agent, _symrefs) = _extract_symrefs_and_agent(server_capabilities) (extract_capability_names(server_capabilities) - KNOWN_RECEIVE_CAPABILITIES) # TODO(jelmer): warn about unknown capabilities return negotiated_capabilities, agent def _handle_receive_pack_tail( self, proto: Protocol, capabilities: set[bytes], progress: Optional[Callable[[bytes], None]] = None, ) -> Optional[dict[bytes, Optional[str]]]: """Handle the tail of a 'git-receive-pack' request. 
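        When report-status was negotiated, the server's per-ref status
        report is parsed (demultiplexed from side-band-64k if in use) and
        returned as a dict; otherwise None is returned.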
Args: proto: Protocol object to read from capabilities: List of negotiated capabilities progress: Optional progress reporting function Returns: dict mapping ref name to: error message if the ref failed to update None if it was updated successfully """ if CAPABILITY_SIDE_BAND_64K in capabilities or self.protocol_version == 2: if progress is None: def progress(x) -> None: pass if CAPABILITY_REPORT_STATUS in capabilities: assert self._report_status_parser is not None pktline_parser = PktLineParser(self._report_status_parser.handle_packet) for chan, data in _read_side_band64k_data(proto.read_pkt_seq()): if chan == SIDE_BAND_CHANNEL_DATA: if CAPABILITY_REPORT_STATUS in capabilities: pktline_parser.parse(data) elif chan == SIDE_BAND_CHANNEL_PROGRESS: progress(data) else: raise AssertionError(f"Invalid sideband channel {chan}") else: if CAPABILITY_REPORT_STATUS in capabilities: assert self._report_status_parser for pkt in proto.read_pkt_seq(): self._report_status_parser.handle_packet(pkt) if self._report_status_parser is not None: return dict(self._report_status_parser.check()) return None def _negotiate_upload_pack_capabilities(self, server_capabilities): (extract_capability_names(server_capabilities) - KNOWN_UPLOAD_CAPABILITIES) # TODO(jelmer): warn about unknown capabilities fetch_capa = None for capability in server_capabilities: k, v = parse_capability(capability) if self.protocol_version == 2 and k == CAPABILITY_FETCH: fetch_capa = CAPABILITY_FETCH fetch_features = [] assert v is not None v_list = v.strip().split(b" ") if b"shallow" in v_list: fetch_features.append(CAPABILITY_SHALLOW) if b"filter" in v_list: fetch_features.append(CAPABILITY_FILTER) for i in range(len(fetch_features)): if i == 0: fetch_capa += b"=" else: fetch_capa += b" " fetch_capa += fetch_features[i] (symrefs, agent) = _extract_symrefs_and_agent(server_capabilities) negotiated_capabilities = self._fetch_capabilities & server_capabilities if fetch_capa: negotiated_capabilities.add(fetch_capa) return (negotiated_capabilities, symrefs, agent) def archive( self, path, committish, write_data, progress=None, write_error=None, format=None, subdirs=None, prefix=None, ) -> None: """Retrieve an archive of the specified tree.""" raise NotImplementedError(self.archive) @staticmethod def _warn_filter_objects() -> None: import warnings warnings.warn( "object filtering not recognized by server, ignoring", UserWarning, ) def check_wants(wants, refs) -> None: """Check that a set of wants is valid. Args: wants: Set of object SHAs to fetch refs: Refs dictionary to check against """ missing = set(wants) - { v for (k, v) in refs.items() if not k.endswith(PEELED_TAG_SUFFIX) } if missing: raise InvalidWants(missing) def _remote_error_from_stderr(stderr): if stderr is None: return HangupException() lines = [line.rstrip(b"\n") for line in stderr.readlines()] for line in lines: if line.startswith(b"ERROR: "): return GitProtocolError(line[len(b"ERROR: ") :].decode("utf-8", "replace")) return HangupException(lines) class TraditionalGitClient(GitClient): """Traditional Git client.""" DEFAULT_ENCODING = "utf-8" def __init__(self, path_encoding=DEFAULT_ENCODING, **kwargs) -> None: """Initialize a TraditionalGitClient. 
Args: path_encoding: Encoding for paths (default: utf-8) **kwargs: Additional arguments passed to parent class """ self._remote_path_encoding = path_encoding super().__init__(**kwargs) def _connect( self, cmd: bytes, path: Union[str, bytes], protocol_version: Optional[int] = None, ) -> tuple[Protocol, Callable[[], bool], Optional[IO[bytes]]]: """Create a connection to the server. This method is abstract - concrete implementations should implement their own variant which connects to the server and returns an initialized Protocol object with the service ready for use and a can_read function which may be used to see if reads would block. Args: cmd: The git service name to which we should connect. path: The path we should pass to the service. (as bytestirng) protocol_version: Desired Git protocol version. By default the highest mutually supported protocol version will be used. """ raise NotImplementedError def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. Receive dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of objects and pack data to upload. progress: Optional callback called with progress updates Returns: SendPackResult Raises: SendPackError: if server rejects the pack data """ self.protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_SEND proto, unused_can_read, stderr = self._connect(b"receive-pack", path) with proto: try: old_refs, server_capabilities = read_pkt_refs_v1(proto.read_pkt_seq()) except HangupException as exc: raise _remote_error_from_stderr(stderr) from exc ( negotiated_capabilities, agent, ) = self._negotiate_receive_pack_capabilities(server_capabilities) if CAPABILITY_REPORT_STATUS in negotiated_capabilities: self._report_status_parser = ReportStatusParser() report_status_parser = self._report_status_parser try: new_refs = orig_new_refs = update_refs(dict(old_refs)) except BaseException: proto.write_pkt_line(None) raise if set(new_refs.items()).issubset(set(old_refs.items())): proto.write_pkt_line(None) return SendPackResult(new_refs, agent=agent, ref_status={}) if CAPABILITY_DELETE_REFS not in server_capabilities: # Server does not support deletions. Fail later. 
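                # Deletion requests are dropped from new_refs below; when
                # report-status was negotiated, each one is recorded as an
                # "ng ... remote does not support deleting refs" error.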
new_refs = dict(orig_new_refs) for ref, sha in orig_new_refs.items(): if sha == ZERO_SHA: if CAPABILITY_REPORT_STATUS in negotiated_capabilities: assert report_status_parser is not None report_status_parser._ref_statuses.append( b"ng " + ref + b" remote does not support deleting refs" ) del new_refs[ref] if new_refs is None: proto.write_pkt_line(None) return SendPackResult(old_refs, agent=agent, ref_status={}) if len(new_refs) == 0 and orig_new_refs: # NOOP - Original new refs filtered out by policy proto.write_pkt_line(None) if report_status_parser is not None: ref_status = dict(report_status_parser.check()) else: ref_status = None return SendPackResult(old_refs, agent=agent, ref_status=ref_status) header_handler = _v1ReceivePackHeader( negotiated_capabilities, old_refs, new_refs ) for pkt in header_handler: proto.write_pkt_line(pkt) pack_data_count, pack_data = generate_pack_data( header_handler.have, header_handler.want, ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities), progress=progress, ) if self._should_send_pack(new_refs): for chunk in PackChunkGenerator( pack_data_count, pack_data, progress=progress ): proto.write(chunk) ref_status = self._handle_receive_pack_tail( proto, negotiated_capabilities, progress ) return SendPackResult(new_refs, agent=agent, ref_status=ref_status) def fetch_pack( self, path, determine_wants, graph_walker, pack_data, progress=None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec=None, protocol_version: Optional[int] = None, ): """Retrieve a pack from a git smart server. Args: path: Remote path to fetch from determine_wants: Function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Shallow fetch depth ref_prefix: List of prefixes of desired references, as a list of bytestrings. Filtering is done by the server if supported, and client side otherwise. filter_spec: A git-rev-list-style object filter spec, as bytestring. Only used if the server supports the Git protocol-v2 'filter' feature, and ignored otherwise. protocol_version: Desired Git protocol version. By default the highest mutually supported protocol version will be used. 
Returns: FetchPackResult object """ if ( protocol_version is not None and protocol_version not in GIT_PROTOCOL_VERSIONS ): raise ValueError(f"unknown Git protocol version {protocol_version}") proto, can_read, stderr = self._connect(b"upload-pack", path, protocol_version) server_protocol_version = negotiate_protocol_version(proto) if server_protocol_version not in GIT_PROTOCOL_VERSIONS: raise ValueError( f"unknown Git protocol version {server_protocol_version} used by server" ) if protocol_version and server_protocol_version > protocol_version: raise ValueError( f"bad Git protocol version {server_protocol_version} used by server" ) self.protocol_version = server_protocol_version with proto: if self.protocol_version == 2: try: server_capabilities = read_server_capabilities(proto.read_pkt_seq()) except HangupException as exc: raise _remote_error_from_stderr(stderr) from exc ( negotiated_capabilities, symrefs, agent, ) = self._negotiate_upload_pack_capabilities(server_capabilities) proto.write_pkt_line(b"command=ls-refs\n") proto.write(b"0001") # delim-pkt proto.write_pkt_line(b"symrefs") proto.write_pkt_line(b"peel") if ref_prefix is None: ref_prefix = DEFAULT_REF_PREFIX for prefix in ref_prefix: proto.write_pkt_line(b"ref-prefix " + prefix) proto.write_pkt_line(None) refs, symrefs, _peeled = read_pkt_refs_v2(proto.read_pkt_seq()) else: try: refs, server_capabilities = read_pkt_refs_v1(proto.read_pkt_seq()) except HangupException as exc: raise _remote_error_from_stderr(stderr) from exc ( negotiated_capabilities, symrefs, agent, ) = self._negotiate_upload_pack_capabilities(server_capabilities) if ref_prefix is not None: refs = filter_ref_prefix(refs, ref_prefix) if refs is None: proto.write_pkt_line(None) return FetchPackResult(refs, symrefs, agent) try: if depth is not None: wants = determine_wants(refs, depth=depth) else: wants = determine_wants(refs) except BaseException: proto.write_pkt_line(None) raise if wants is not None: wants = [cid for cid in wants if cid != ZERO_SHA] if not wants: proto.write_pkt_line(None) return FetchPackResult(refs, symrefs, agent) if self.protocol_version == 2: proto.write_pkt_line(b"command=fetch\n") proto.write(b"0001") # delim-pkt if CAPABILITY_THIN_PACK in self._fetch_capabilities: proto.write(pkt_line(b"thin-pack\n")) if ( find_capability( negotiated_capabilities, CAPABILITY_FETCH, CAPABILITY_FILTER ) and filter_spec ): proto.write(pkt_line(b"filter %s\n" % filter_spec)) elif filter_spec: self._warn_filter_objects() elif filter_spec: self._warn_filter_objects() (new_shallow, new_unshallow) = _handle_upload_pack_head( proto, negotiated_capabilities, graph_walker, wants, can_read, depth=depth, protocol_version=self.protocol_version, ) _handle_upload_pack_tail( proto, negotiated_capabilities, graph_walker, pack_data, progress, protocol_version=self.protocol_version, ) return FetchPackResult(refs, symrefs, agent, new_shallow, new_unshallow) def get_refs( self, path, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, ): """Retrieve the current refs from a git smart server.""" # stock `git ls-remote` uses upload-pack if ( protocol_version is not None and protocol_version not in GIT_PROTOCOL_VERSIONS ): raise ValueError(f"unknown Git protocol version {protocol_version}") proto, _, stderr = self._connect(b"upload-pack", path, protocol_version) server_protocol_version = negotiate_protocol_version(proto) if server_protocol_version not in GIT_PROTOCOL_VERSIONS: raise ValueError( f"unknown Git protocol version {server_protocol_version} used 
by server" ) if protocol_version and server_protocol_version > protocol_version: raise ValueError( f"bad Git protocol version {server_protocol_version} used by server" ) self.protocol_version = server_protocol_version if self.protocol_version == 2: server_capabilities = read_server_capabilities(proto.read_pkt_seq()) proto.write_pkt_line(b"command=ls-refs\n") proto.write(b"0001") # delim-pkt proto.write_pkt_line(b"symrefs") proto.write_pkt_line(b"peel") if ref_prefix is None: ref_prefix = DEFAULT_REF_PREFIX for prefix in ref_prefix: proto.write_pkt_line(b"ref-prefix " + prefix) proto.write_pkt_line(None) with proto: try: refs, symrefs, peeled = read_pkt_refs_v2(proto.read_pkt_seq()) except HangupException as exc: raise _remote_error_from_stderr(stderr) from exc proto.write_pkt_line(None) for refname, refvalue in peeled.items(): refs[refname + PEELED_TAG_SUFFIX] = refvalue return LsRemoteResult(refs, symrefs) else: with proto: try: refs, server_capabilities = read_pkt_refs_v1(proto.read_pkt_seq()) except HangupException as exc: raise _remote_error_from_stderr(stderr) from exc proto.write_pkt_line(None) (symrefs, _agent) = _extract_symrefs_and_agent(server_capabilities) if ref_prefix is not None: refs = filter_ref_prefix(refs, ref_prefix) return LsRemoteResult(refs, symrefs) def archive( self, path, committish, write_data, progress=None, write_error=None, format=None, subdirs=None, prefix=None, ) -> None: """Request an archive of a specific commit. Args: path: Repository path committish: Commit ID or ref to archive write_data: Function to write archive data progress: Optional progress callback write_error: Optional error callback format: Optional archive format subdirs: Optional subdirectories to include prefix: Optional prefix for archived files """ proto, can_read, stderr = self._connect(b"upload-archive", path) with proto: if format is not None: proto.write_pkt_line(b"argument --format=" + format) proto.write_pkt_line(b"argument " + committish) if subdirs is not None: for subdir in subdirs: proto.write_pkt_line(b"argument " + subdir) if prefix is not None: proto.write_pkt_line(b"argument --prefix=" + prefix) proto.write_pkt_line(None) try: pkt = proto.read_pkt_line() except HangupException as exc: raise _remote_error_from_stderr(stderr) from exc if pkt == b"NACK\n" or pkt == b"NACK": return elif pkt == b"ACK\n" or pkt == b"ACK": pass elif pkt and pkt.startswith(b"ERR "): raise GitProtocolError(pkt[4:].rstrip(b"\n").decode("utf-8", "replace")) else: raise AssertionError(f"invalid response {pkt!r}") ret = proto.read_pkt_line() if ret is not None: raise AssertionError("expected pkt tail") for chan, data in _read_side_band64k_data(proto.read_pkt_seq()): if chan == SIDE_BAND_CHANNEL_DATA: write_data(data) elif chan == SIDE_BAND_CHANNEL_PROGRESS: progress(data) elif chan == SIDE_BAND_CHANNEL_FATAL: write_error(data) else: raise AssertionError(f"Invalid sideband channel {chan}") class TCPGitClient(TraditionalGitClient): """A Git Client that works over TCP directly (i.e. git://).""" def __init__(self, host, port=None, **kwargs) -> None: """Initialize a TCPGitClient. Args: host: Hostname or IP address to connect to port: Port number (defaults to TCP_GIT_PORT) **kwargs: Additional arguments for GitClient """ if port is None: port = TCP_GIT_PORT self._host = host self._port = port super().__init__(**kwargs) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an instance of TCPGitClient from a parsed URL. 
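        Example (illustrative sketch; the URL is hypothetical):

            from urllib.parse import urlparse
            client = TCPGitClient.from_parsedurl(urlparse("git://example.com/repo.git"))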
Args: parsedurl: Result of urlparse() **kwargs: Additional arguments for the client Returns: A TCPGitClient instance """ return cls(parsedurl.hostname, port=parsedurl.port, **kwargs) def get_url(self, path): r"""Get the URL for a TCP git connection. Args: path: Repository path Returns: ``git://`` URL for the path """ netloc = self._host if self._port is not None and self._port != TCP_GIT_PORT: netloc += f":{self._port}" return urlunsplit(("git", netloc, path, "", "")) def _connect( self, cmd: bytes, path: Union[str, bytes], protocol_version: Optional[int] = None, ) -> tuple[Protocol, Callable[[], bool], Optional[IO[bytes]]]: if not isinstance(cmd, bytes): raise TypeError(cmd) if not isinstance(path, bytes): path = path.encode(self._remote_path_encoding) sockaddrs = socket.getaddrinfo( self._host, self._port, socket.AF_UNSPEC, socket.SOCK_STREAM ) s = None err = OSError(f"no address found for {self._host}") for family, socktype, protof, canonname, sockaddr in sockaddrs: s = socket.socket(family, socktype, protof) s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) try: s.connect(sockaddr) break except OSError as e: err = e if s is not None: s.close() s = None if s is None: raise err # -1 means system default buffering rfile = s.makefile("rb", -1) # 0 means unbuffered wfile = s.makefile("wb", 0) def close() -> None: rfile.close() wfile.close() s.close() proto = Protocol( rfile.read, wfile.write, close, report_activity=self._report_activity, ) if path.startswith(b"/~"): path = path[1:] if cmd == b"upload-pack": if protocol_version is None: self.protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_FETCH else: self.protocol_version = protocol_version else: self.protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_SEND if cmd == b"upload-pack" and self.protocol_version == 2: # Git protocol version advertisement is hidden behind two NUL bytes # for compatibility with older Git server implementations, which # would crash if something other than a "host=" header was found # after the first NUL byte. version_str = b"\0\0version=%d\0" % self.protocol_version else: version_str = b"" # TODO(jelmer): Alternative to ascii? proto.send_cmd( b"git-" + cmd, path, b"host=" + self._host.encode("ascii") + version_str ) return proto, lambda: _fileno_can_read(s.fileno()), None class SubprocessWrapper: """A socket-like object that talks to a subprocess via pipes.""" def __init__(self, proc) -> None: """Initialize a SubprocessWrapper. Args: proc: Subprocess.Popen instance to wrap """ self.proc = proc self.read = BufferedReader(proc.stdout).read self.write = proc.stdin.write @property def stderr(self): """Return the stderr stream of the subprocess.""" return self.proc.stderr def can_read(self): """Check if there is data available to read. Returns: True if data is available, False otherwise """ if sys.platform == "win32": from msvcrt import get_osfhandle handle = get_osfhandle(self.proc.stdout.fileno()) return _win32_peek_avail(handle) != 0 else: return _fileno_can_read(self.proc.stdout.fileno()) def close(self, timeout: Optional[int] = 60) -> None: """Close the subprocess and wait for it to terminate. 
Args: timeout: Maximum time to wait for subprocess to terminate (seconds) Raises: GitProtocolError: If subprocess doesn't terminate within timeout """ self.proc.stdin.close() self.proc.stdout.close() if self.proc.stderr: self.proc.stderr.close() try: self.proc.wait(timeout=timeout) except subprocess.TimeoutExpired as e: self.proc.kill() self.proc.wait() raise GitProtocolError( f"Git subprocess did not terminate within {timeout} seconds; killed it." ) from e def find_git_command() -> list[str]: """Find command to run for system Git (usually C Git).""" if sys.platform == "win32": # support .exe, .bat and .cmd try: # to avoid overhead import pywintypes import win32api except ImportError: # run through cmd.exe with some overhead return ["cmd", "/c", "git"] else: try: status, git = win32api.FindExecutable("git") return [git] except pywintypes.error: return ["cmd", "/c", "git"] else: return ["git"] class SubprocessGitClient(TraditionalGitClient): """Git client that talks to a server using a subprocess.""" @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an instance of SubprocessGitClient from a parsed URL. Args: parsedurl: Result of urlparse() **kwargs: Additional arguments for the client Returns: A SubprocessGitClient instance """ return cls(**kwargs) git_command: Optional[str] = None def _connect( self, service: bytes, path: Union[bytes, str], protocol_version: Optional[int] = None, ) -> tuple[Protocol, Callable[[], bool], Optional[IO[bytes]]]: if not isinstance(service, bytes): raise TypeError(service) if isinstance(path, bytes): path = path.decode(self._remote_path_encoding) if self.git_command is None: git_command = find_git_command() argv = [*git_command, service.decode("ascii"), path] p = subprocess.Popen( argv, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) pw = SubprocessWrapper(p) return ( Protocol( pw.read, pw.write, pw.close, report_activity=self._report_activity, ), pw.can_read, p.stderr, ) class LocalGitClient(GitClient): """Git Client that just uses a local on-disk repository.""" def __init__( self, thin_packs: bool = True, report_activity=None, config: Optional[Config] = None, ) -> None: """Create a new LocalGitClient instance. Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. config: Optional configuration object """ self._report_activity = report_activity # Ignore the thin_packs argument def get_url(self, path): """Get the URL for a local file path. Args: path: Local file path Returns: file:// URL for the path """ return urlunsplit(("file", "", path, "", "")) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an instance of LocalGitClient from a parsed URL. Args: parsedurl: Result of urlparse() **kwargs: Additional arguments for the client Returns: A LocalGitClient instance """ return cls(**kwargs) @classmethod def _open_repo(cls, path): """Open a local repository. Args: path: Repository path (as bytes or str) Returns: Repo instance wrapped in a closing context manager """ if not isinstance(path, str): path = os.fsdecode(path) return closing(Repo(path)) def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a local on-disk repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. 
Receive dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) with number of items and pack data to upload. generate_pack_data: Function that generates pack data given have and want object sets progress: Optional progress function Returns: SendPackResult Raises: SendPackError: if server rejects the pack data """ if not progress: def progress(x) -> None: pass with self._open_repo(path) as target: old_refs = target.get_refs() new_refs = update_refs(dict(old_refs)) have = [sha1 for sha1 in old_refs.values() if sha1 != ZERO_SHA] want = [] for refname, new_sha1 in new_refs.items(): if ( new_sha1 not in have and new_sha1 not in want and new_sha1 != ZERO_SHA ): want.append(new_sha1) if not want and set(new_refs.items()).issubset(set(old_refs.items())): return SendPackResult(new_refs, ref_status={}) target.object_store.add_pack_data( *generate_pack_data(have, want, ofs_delta=True) ) ref_status: dict[bytes, Optional[str]] = {} for refname, new_sha1 in new_refs.items(): old_sha1 = old_refs.get(refname, ZERO_SHA) if new_sha1 != ZERO_SHA: if not target.refs.set_if_equals(refname, old_sha1, new_sha1): msg = f"unable to set {refname} to {new_sha1}" progress(msg) ref_status[refname] = msg else: if not target.refs.remove_if_equals(refname, old_sha1): progress(f"unable to remove {refname}") ref_status[refname] = "unable to remove" return SendPackResult(new_refs, ref_status=ref_status) def fetch( self, path: str, target: BaseRepo, determine_wants: Optional[ Callable[[dict[bytes, bytes], Optional[int]], list[bytes]] ] = None, progress: Optional[Callable[[bytes], None]] = None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec: Optional[bytes] = None, protocol_version: Optional[int] = None, **kwargs, ): """Fetch into a target repository. Args: path: Path to fetch from (as bytestring) target: Target repository to fetch into determine_wants: Optional function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. Defaults to all shas. progress: Optional progress function depth: Shallow fetch depth ref_prefix: List of prefixes of desired references, as a list of bytestrings. Filtering is done by the server if supported, and client side otherwise. filter_spec: A git-rev-list-style object filter spec, as bytestring. Only used if the server supports the Git protocol-v2 'filter' feature, and ignored otherwise. protocol_version: Optional Git protocol version **kwargs: Additional keyword arguments Returns: FetchPackResult object """ with self._open_repo(path) as r: refs = r.fetch( target, determine_wants=determine_wants, progress=progress, depth=depth, ) return FetchPackResult(refs, r.refs.get_symrefs(), agent_string()) def fetch_pack( self, path, determine_wants, graph_walker, pack_data, progress=None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec: Optional[bytes] = None, protocol_version: Optional[int] = None, ) -> FetchPackResult: """Retrieve a pack from a local on-disk repository. Args: path: Remote path to fetch from determine_wants: Function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Shallow fetch depth ref_prefix: List of prefixes of desired references, as a list of bytestrings. 
Filtering is done by the server if supported, and client side otherwise. filter_spec: A git-rev-list-style object filter spec, as bytestring. Only used if the server supports the Git protocol-v2 'filter' feature, and ignored otherwise. protocol_version: Optional Git protocol version Returns: FetchPackResult object """ with self._open_repo(path) as r: missing_objects = r.find_missing_objects( determine_wants, graph_walker, progress=progress, depth=depth ) other_haves = missing_objects.get_remote_has() object_ids = list(missing_objects) symrefs = r.refs.get_symrefs() agent = agent_string() # Did the process short-circuit (e.g. in a stateless RPC call)? # Note that the client still expects a 0-object pack in most cases. if object_ids is None: return FetchPackResult(None, symrefs, agent) write_pack_from_container( pack_data, r.object_store, object_ids, other_haves=other_haves ) return FetchPackResult(r.get_refs(), symrefs, agent) def get_refs( self, path, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, ): """Retrieve the current refs from a local on-disk repository.""" with self._open_repo(path) as target: refs = target.get_refs() # Extract symrefs from the local repository symrefs = {} for ref in refs: try: # Check if this ref is symbolic by reading it directly ref_value = target.refs.read_ref(ref) if ref_value and ref_value.startswith(SYMREF): # Extract the target from the symref symrefs[ref] = ref_value[len(SYMREF) :] except (KeyError, ValueError): # Not a symbolic ref or error reading it pass return LsRemoteResult(refs, symrefs) class BundleClient(GitClient): """Git Client that reads from a bundle file.""" def __init__( self, thin_packs: bool = True, report_activity=None, config: Optional[Config] = None, ) -> None: """Create a new BundleClient instance. Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. config: Optional configuration object """ self._report_activity = report_activity def get_url(self, path): """Get the URL for a bundle file path. Args: path: Bundle file path Returns: The path unchanged (bundle files use local paths) """ return path @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an instance of BundleClient from a parsed URL. Args: parsedurl: Result of urlparse() **kwargs: Additional arguments for the client Returns: A BundleClient instance """ return cls(**kwargs) @classmethod def _is_bundle_file(cls, path): """Check if a file is a git bundle by reading the first line.""" try: with open(path, "rb") as f: first_line = f.readline() return first_line in (b"# v2 git bundle\n", b"# v3 git bundle\n") except OSError: return False @classmethod def _open_bundle(cls, path): """Open and parse a bundle file. 
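        Only the bundle header is parsed here: the "# v2/v3 git bundle"
        signature, the optional "@key=value" capabilities (v3 only), the
        "-"-prefixed prerequisites and the reference list, up to the blank
        line that precedes the pack data. The pack data itself is read
        later, on demand.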
Args: path: Path to the bundle file (bytes or str) Returns: Bundle object with parsed metadata Raises: AssertionError: If bundle format is unsupported """ if not isinstance(path, str): path = os.fsdecode(path) # Read bundle metadata without PackData to avoid file handle issues with open(path, "rb") as f: from dulwich.bundle import Bundle version = None firstline = f.readline() if firstline == b"# v2 git bundle\n": version = 2 elif firstline == b"# v3 git bundle\n": version = 3 else: raise AssertionError(f"unsupported bundle format header: {firstline!r}") capabilities = {} prerequisites = [] references = {} line = f.readline() if version >= 3: while line.startswith(b"@"): line = line[1:].rstrip(b"\n") try: key, value_bytes = line.split(b"=", 1) value = value_bytes.decode("utf-8") except ValueError: key = line value = None capabilities[key.decode("utf-8")] = value line = f.readline() while line.startswith(b"-"): (obj_id, comment) = line[1:].rstrip(b"\n").split(b" ", 1) prerequisites.append((obj_id, comment)) line = f.readline() while line != b"\n": (obj_id, ref) = line.rstrip(b"\n").split(b" ", 1) references[ref] = obj_id line = f.readline() # Don't read PackData here, we'll do it later bundle = Bundle() bundle.version = version bundle.capabilities = capabilities bundle.prerequisites = prerequisites bundle.references = references bundle.pack_data = None # Will be read on demand return bundle @staticmethod def _skip_to_pack_data(f, version): """Skip to the pack data section in a bundle file. Args: f: File object positioned at the beginning of the bundle version: Bundle format version (2 or 3) Raises: AssertionError: If bundle header is invalid """ # Skip header header = f.readline() if header not in (b"# v2 git bundle\n", b"# v3 git bundle\n"): raise AssertionError(f"Invalid bundle header: {header!r}") line = f.readline() # Skip capabilities (v3 only) if version >= 3: while line.startswith(b"@"): line = f.readline() # Skip prerequisites while line.startswith(b"-"): line = f.readline() # Skip references while line != b"\n": line = f.readline() # Now at pack data def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload is not supported for bundle files.""" raise NotImplementedError("Bundle files are read-only") def fetch( self, path: str, target: BaseRepo, determine_wants: Optional[ Callable[[dict[bytes, bytes], Optional[int]], list[bytes]] ] = None, progress: Optional[Callable[[bytes], None]] = None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec: Optional[bytes] = None, protocol_version: Optional[int] = None, **kwargs, ): """Fetch into a target repository from a bundle file.""" bundle = self._open_bundle(path) # Get references from bundle refs = dict(bundle.references) # Determine what we want to fetch if determine_wants is None: _ = list(refs.values()) else: _ = determine_wants(refs, None) # Add pack data to target repository # Need to reopen the file for pack data access with open(path, "rb") as pack_file: # Skip to pack data section BundleClient._skip_to_pack_data(pack_file, bundle.version) # Read pack data into memory to avoid file positioning issues pack_bytes = pack_file.read() # Create PackData from in-memory bytes from io import BytesIO pack_io = BytesIO(pack_bytes) pack_data = PackData.from_file(pack_io) target.object_store.add_pack_data(len(pack_data), pack_data.iter_unpacked()) # Apply ref filtering if specified if ref_prefix: filtered_refs = {} for ref_name, ref_value in refs.items(): for prefix in ref_prefix: if 
ref_name.startswith(prefix): filtered_refs[ref_name] = ref_value break refs = filtered_refs return FetchPackResult(refs, {}, agent_string()) def fetch_pack( self, path, determine_wants, graph_walker, pack_data, progress=None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec: Optional[bytes] = None, protocol_version: Optional[int] = None, ) -> FetchPackResult: """Retrieve a pack from a bundle file.""" bundle = self._open_bundle(path) # Get references from bundle refs = dict(bundle.references) # Determine what we want to fetch _ = determine_wants(refs) # Write pack data to the callback # Need to reopen the file for pack data access with open(path, "rb") as pack_file: # Skip to pack data section BundleClient._skip_to_pack_data(pack_file, bundle.version) # Read pack data and write it to the callback pack_bytes = pack_file.read() pack_data(pack_bytes) # Apply ref filtering if specified if ref_prefix: filtered_refs = {} for ref_name, ref_value in refs.items(): for prefix in ref_prefix: if ref_name.startswith(prefix): filtered_refs[ref_name] = ref_value break refs = filtered_refs return FetchPackResult(refs, {}, agent_string()) def get_refs( self, path, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, ): """Retrieve the current refs from a bundle file.""" bundle = self._open_bundle(path) refs = dict(bundle.references) # Apply ref filtering if specified if ref_prefix: filtered_refs = {} for ref_name, ref_value in refs.items(): for prefix in ref_prefix: if ref_name.startswith(prefix): filtered_refs[ref_name] = ref_value break refs = filtered_refs return LsRemoteResult(refs, {}) # What Git client to use for local access default_local_git_client_cls = LocalGitClient class SSHVendor: """A client side SSH implementation.""" def run_command( self, host, command, username=None, port=None, password=None, key_filename=None, ssh_command=None, protocol_version: Optional[int] = None, ): """Connect to an SSH server. Run a command remotely and return a file-like object for interaction with the remote command. Args: host: Host name command: Command to run (as argv array) username: Optional name of user to log in as port: Optional SSH port to use password: Optional ssh password for login or private key key_filename: Optional path to private keyfile ssh_command: Optional SSH command protocol_version: Desired Git protocol version. By default the highest mutually supported protocol version will be used. """ raise NotImplementedError(self.run_command) class StrangeHostname(Exception): """Refusing to connect to strange SSH hostname.""" def __init__(self, hostname) -> None: """Initialize StrangeHostname exception. Args: hostname: The strange hostname that was rejected """ super().__init__(hostname) class SubprocessSSHVendor(SSHVendor): """SSH vendor that shells out to the local 'ssh' command.""" def run_command( self, host, command, username=None, port=None, password=None, key_filename=None, ssh_command=None, protocol_version: Optional[int] = None, ): """Run a git command over SSH. Args: host: SSH host to connect to command: Git command to run username: Optional username port: Optional port number password: Optional password (not supported) key_filename: Optional SSH key file ssh_command: Optional custom SSH command protocol_version: Optional Git protocol version Returns: A SubprocessWrapper around the spawned ssh process """ if password is not None: raise NotImplementedError( "Setting password not supported by SubprocessSSHVendor."
) if ssh_command: import shlex args = [*shlex.split(ssh_command, posix=sys.platform != "win32"), "-x"] else: args = ["ssh", "-x"] if port: args.extend(["-p", str(port)]) if key_filename: args.extend(["-i", str(key_filename)]) if protocol_version is None: protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_FETCH if protocol_version > 0: args.extend(["-o", f"SetEnv GIT_PROTOCOL=version={protocol_version}"]) if username: host = f"{username}@{host}" if host.startswith("-"): raise StrangeHostname(hostname=host) args.append(host) proc = subprocess.Popen( [*args, command], bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) return SubprocessWrapper(proc) class PLinkSSHVendor(SSHVendor): """SSH vendor that shells out to the local 'plink' command.""" def run_command( self, host, command, username=None, port=None, password=None, key_filename=None, ssh_command=None, protocol_version: Optional[int] = None, ): """Run a git command over SSH using PLink. Args: host: SSH host to connect to command: Git command to run username: Optional username port: Optional port number password: Optional password key_filename: Optional SSH key file ssh_command: Optional custom SSH command protocol_version: Optional Git protocol version Returns: Tuple of (subprocess.Popen, Protocol, stderr_stream) """ if ssh_command: import shlex args = [*shlex.split(ssh_command, posix=sys.platform != "win32"), "-ssh"] elif sys.platform == "win32": args = ["plink.exe", "-ssh"] else: args = ["plink", "-ssh"] if password is not None: import warnings warnings.warn( "Invoking PLink with a password exposes the password in the " "process list." ) args.extend(["-pw", str(password)]) if port: args.extend(["-P", str(port)]) if key_filename: args.extend(["-i", str(key_filename)]) if username: host = f"{username}@{host}" if host.startswith("-"): raise StrangeHostname(hostname=host) args.append(host) # plink.exe does not provide a way to pass environment variables # via the command line. The best we can do is set an environment # variable and hope that plink will pass it to the server. If this # does not work then the server should behave as if we had requested # protocol version 0. env = copy.deepcopy(os.environ) if protocol_version is None: protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_FETCH if protocol_version > 0: env["GIT_PROTOCOL"] = f"version={protocol_version}" proc = subprocess.Popen( [*args, command], bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, ) return SubprocessWrapper(proc) def ParamikoSSHVendor(**kwargs): """Create a ParamikoSSHVendor (deprecated).""" import warnings warnings.warn( "ParamikoSSHVendor has been moved to dulwich.contrib.paramiko_vendor.", DeprecationWarning, ) from .contrib.paramiko_vendor import ParamikoSSHVendor return ParamikoSSHVendor(**kwargs) # Can be overridden by users get_ssh_vendor: Callable[[], SSHVendor] = SubprocessSSHVendor class SSHGitClient(TraditionalGitClient): """Git client that connects over SSH.""" def __init__( self, host, port=None, username=None, vendor=None, config=None, password=None, key_filename=None, ssh_command=None, **kwargs, ) -> None: """Initialize SSHGitClient. 
Args: host: SSH hostname port: Optional SSH port username: Optional username vendor: Optional SSH vendor config: Optional configuration password: Optional password key_filename: Optional SSH key file ssh_command: Optional custom SSH command **kwargs: Additional keyword arguments """ self.host = host self.port = port self.username = username self.password = password self.key_filename = key_filename # Priority: ssh_command parameter, then env vars, then core.sshCommand config if ssh_command: self.ssh_command = ssh_command else: # Check environment variables first self.ssh_command = os.environ.get( "GIT_SSH_COMMAND", os.environ.get("GIT_SSH") ) # Fall back to config if no environment variable set if not self.ssh_command and config is not None: try: config_ssh_command = config.get((b"core",), b"sshCommand") self.ssh_command = ( config_ssh_command.decode() if config_ssh_command else None ) except KeyError: pass super().__init__(**kwargs) self.alternative_paths: dict[bytes, bytes] = {} if vendor is not None: self.ssh_vendor = vendor else: self.ssh_vendor = get_ssh_vendor() def get_url(self, path): """Get the SSH URL for a path.""" netloc = self.host if self.port is not None: netloc += f":{self.port}" if self.username is not None: netloc = urlquote(self.username, "@/:") + "@" + netloc return urlunsplit(("ssh", netloc, path, "", "")) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an SSHGitClient from a parsed URL.""" return cls( host=parsedurl.hostname, port=parsedurl.port, username=parsedurl.username, **kwargs, ) def _get_cmd_path(self, cmd): cmd = self.alternative_paths.get(cmd, b"git-" + cmd) assert isinstance(cmd, bytes) return cmd def _connect( self, cmd: bytes, path: Union[str, bytes], protocol_version: Optional[int] = None, ) -> tuple[Protocol, Callable[[], bool], Optional[IO[bytes]]]: if not isinstance(cmd, bytes): raise TypeError(cmd) if isinstance(path, bytes): path = path.decode(self._remote_path_encoding) if path.startswith("/~"): path = path[1:] argv = ( self._get_cmd_path(cmd).decode(self._remote_path_encoding) + " '" + path + "'" ) kwargs = {} if self.password is not None: kwargs["password"] = self.password if self.key_filename is not None: kwargs["key_filename"] = self.key_filename # GIT_SSH_COMMAND takes precedence over GIT_SSH if self.ssh_command is not None: kwargs["ssh_command"] = self.ssh_command con = self.ssh_vendor.run_command( self.host, argv, port=self.port, username=self.username, protocol_version=protocol_version, **kwargs, ) return ( Protocol( con.read, con.write, con.close, report_activity=self._report_activity, ), con.can_read, getattr(con, "stderr", None), ) def default_user_agent_string(): """Return the default user agent string for Dulwich.""" # Start user agent with "git/", because GitHub requires this. :-( See # https://github.com/jelmer/dulwich/issues/562 for details. return "git/dulwich/{}".format(".".join([str(x) for x in dulwich.__version__])) def default_urllib3_manager( config, pool_manager_cls=None, proxy_manager_cls=None, base_url=None, timeout=None, **override_kwargs, ) -> Union["urllib3.ProxyManager", "urllib3.PoolManager"]: """Return urllib3 connection pool manager. Honour detected proxy configurations. Args: config: `dulwich.config.ConfigDict` instance with Git configuration. 
pool_manager_cls: Pool manager class to use proxy_manager_cls: Proxy manager class to use base_url: Base URL for proxy bypass checks timeout: Timeout for HTTP requests in seconds override_kwargs: Additional keyword arguments for the pool or proxy manager Returns: Either a proxy_manager_cls (defaults to `urllib3.ProxyManager`) instance for proxy configurations, or a pool_manager_cls (defaults to `urllib3.PoolManager`) instance otherwise """ proxy_server = user_agent = None ca_certs = ssl_verify = None if proxy_server is None: for proxyname in ("https_proxy", "http_proxy", "all_proxy"): proxy_server = os.environ.get(proxyname) if proxy_server: break if proxy_server: if check_for_proxy_bypass(base_url): proxy_server = None if config is not None: if proxy_server is None: try: proxy_server = config.get(b"http", b"proxy") except KeyError: pass try: user_agent = config.get(b"http", b"useragent") except KeyError: pass # TODO(jelmer): Support per-host settings try: ssl_verify = config.get_boolean(b"http", b"sslVerify") except KeyError: ssl_verify = True try: ca_certs = config.get(b"http", b"sslCAInfo") except KeyError: ca_certs = None # Check for timeout configuration if timeout is None: try: timeout = config.get(b"http", b"timeout") if timeout is not None: timeout = int(timeout) except KeyError: pass if user_agent is None: user_agent = default_user_agent_string() headers = {"User-agent": user_agent} # Check for extra headers in config if config is not None: try: # Git allows multiple http.extraHeader entries extra_headers = config.get_multivar(b"http", b"extraHeader") for extra_header in extra_headers: if extra_header and b": " in extra_header: # Parse the header (format: "Header-Name: value") header_name, header_value = extra_header.split(b": ", 1) headers[header_name.decode("utf-8")] = header_value.decode("utf-8") except KeyError: pass kwargs = { "ca_certs": ca_certs, } # Add timeout if specified if timeout is not None: kwargs["timeout"] = timeout if ssl_verify is True: kwargs["cert_reqs"] = "CERT_REQUIRED" elif ssl_verify is False: kwargs["cert_reqs"] = "CERT_NONE" else: # Default to SSL verification kwargs["cert_reqs"] = "CERT_REQUIRED" kwargs.update(override_kwargs) import urllib3 if proxy_server is not None: if proxy_manager_cls is None: proxy_manager_cls = urllib3.ProxyManager if not isinstance(proxy_server, str): proxy_server = proxy_server.decode() proxy_server_url = urlparse(proxy_server) if proxy_server_url.username is not None: proxy_headers = urllib3.make_headers( proxy_basic_auth=f"{proxy_server_url.username}:{proxy_server_url.password or ''}" # type: ignore ) else: proxy_headers = {} manager = proxy_manager_cls( proxy_server, proxy_headers=proxy_headers, headers=headers, **kwargs ) else: if pool_manager_cls is None: pool_manager_cls = urllib3.PoolManager manager = pool_manager_cls(headers=headers, **kwargs) return manager def check_for_proxy_bypass(base_url) -> bool: """Check if proxy should be bypassed for the given URL.""" # Check if a proxy bypass is defined with the no_proxy environment variable if base_url: # only check if base_url is provided no_proxy_str = os.environ.get("no_proxy") if no_proxy_str: # implementation based on curl behavior: https://curl.se/libcurl/c/CURLOPT_NOPROXY.html # get hostname of provided parsed url parsed_url = urlparse(base_url) hostname = parsed_url.hostname if hostname: import ipaddress # check if hostname is an ip address try: hostname_ip = ipaddress.ip_address(hostname) except ValueError: hostname_ip = None no_proxy_values = no_proxy_str.split(",") for
no_proxy_value in no_proxy_values: no_proxy_value = no_proxy_value.strip() if no_proxy_value: no_proxy_value = no_proxy_value.lower() no_proxy_value = no_proxy_value.lstrip( "." ) # ignore leading dots if hostname_ip: # check if no_proxy_value is an IP network try: no_proxy_value_network = ipaddress.ip_network( no_proxy_value, strict=False ) except ValueError: no_proxy_value_network = None if no_proxy_value_network: # if hostname is an IP address and no_proxy_value is an IP network -> check if the IP address is part of the network if hostname_ip in no_proxy_value_network: return True if no_proxy_value == "*": # '*' is special case for always bypass proxy return True if hostname == no_proxy_value: return True no_proxy_value = ( "." + no_proxy_value ) # add a dot to only match complete domains if hostname.endswith(no_proxy_value): return True return False class AbstractHttpGitClient(GitClient): """Abstract base class for HTTP Git Clients. This is agnostic of the actual HTTP implementation. Subclasses should provide an implementation of the _http_request method. """ def __init__(self, base_url, dumb=False, **kwargs) -> None: """Initialize AbstractHttpGitClient.""" self._base_url = base_url.rstrip("/") + "/" self.dumb = dumb GitClient.__init__(self, **kwargs) def _http_request(self, url, headers=None, data=None, raise_for_status=True): """Perform HTTP request. Args: url: Request URL. headers: Optional custom headers to override defaults. data: Request data. raise_for_status: Whether to raise an exception for HTTP errors. Returns: Tuple (response, read), where response is an urllib3 response object with additional content_type and redirect_location properties, and read is a consumable read method for the response data. Raises: GitProtocolError """ raise NotImplementedError(self._http_request) def _discover_references( self, service, base_url, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, ) -> tuple[ dict[Ref, Optional[ObjectID]], set[bytes], str, dict[Ref, Ref], dict[Ref, ObjectID], ]: if ( protocol_version is not None and protocol_version not in GIT_PROTOCOL_VERSIONS ): raise ValueError(f"unknown Git protocol version {protocol_version}") assert base_url[-1] == "/" tail = "info/refs" headers = {"Accept": "*/*"} if self.dumb is not True: tail += "?service={}".format(service.decode("ascii")) # Enable protocol v2 only when fetching, not when pushing. # Git does not yet implement push over protocol v2, and as of # git version 2.37.3 git-http-backend's behaviour is erratic if # we try: It responds with a Git-protocol-v1-style ref listing # which lacks the "001f# service=git-receive-pack" marker.
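        # For fetches, protocol v2 is requested from the server via an extra
        # header on the info/refs request (set just below), e.g.:
        #   Git-Protocol: version=2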
if service == b"git-upload-pack": if protocol_version is None: self.protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_FETCH else: self.protocol_version = protocol_version if self.protocol_version == 2: headers["Git-Protocol"] = "version=2" else: self.protocol_version = DEFAULT_GIT_PROTOCOL_VERSION_SEND url = urljoin(base_url, tail) resp, read = self._http_request(url, headers) if resp.redirect_location: # Something changed (redirect!), so let's update the base URL if not resp.redirect_location.endswith(tail): raise GitProtocolError( f"Redirected from URL {url} to URL {resp.redirect_location} without {tail}" ) base_url = urljoin(url, resp.redirect_location[: -len(tail)]) try: self.dumb = resp.content_type is None or not resp.content_type.startswith( "application/x-git-" ) if not self.dumb: def begin_protocol_v2(proto): nonlocal ref_prefix server_capabilities = read_server_capabilities(proto.read_pkt_seq()) if ref_prefix is None: ref_prefix = DEFAULT_REF_PREFIX pkts = [ b"symrefs", b"peel", ] for prefix in ref_prefix: pkts.append(b"ref-prefix " + prefix) body = b"".join( [pkt_line(b"command=ls-refs\n"), b"0001", pkt_seq(*pkts)] ) resp, read = self._smart_request( service.decode("ascii"), base_url, body ) proto = Protocol(read, lambda data: None) return server_capabilities, resp, read, proto proto = Protocol(read, lambda data: None) server_protocol_version = negotiate_protocol_version(proto) if server_protocol_version not in GIT_PROTOCOL_VERSIONS: raise ValueError( f"unknown Git protocol version {server_protocol_version} used by server" ) if protocol_version and server_protocol_version > protocol_version: raise ValueError( f"bad Git protocol version {server_protocol_version} used by server" ) self.protocol_version = server_protocol_version if self.protocol_version == 2: server_capabilities, resp, read, proto = begin_protocol_v2(proto) (refs, symrefs, peeled) = read_pkt_refs_v2(proto.read_pkt_seq()) return refs, server_capabilities, base_url, symrefs, peeled else: try: [pkt] = list(proto.read_pkt_seq()) except ValueError as exc: raise GitProtocolError( "unexpected number of packets received" ) from exc if pkt.rstrip(b"\n") != (b"# service=" + service): raise GitProtocolError( f"unexpected first line {pkt!r} from smart server" ) # Github sends "version 2" after sending the service name. # Try to negotiate protocol version 2 again. 
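                        # A v2-capable server's advertisement then starts
                        # roughly like this pkt-line sequence (illustrative
                        # sketch, not literal server output):
                        #   001e# service=git-upload-pack\n
                        #   0000
                        #   000eversion 2\n
                        #   ...capability pkt-lines...
                        # negotiate_protocol_version() consumes the optional
                        # "version 2" packet.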
server_protocol_version = negotiate_protocol_version(proto) if server_protocol_version not in GIT_PROTOCOL_VERSIONS: raise ValueError( f"unknown Git protocol version {server_protocol_version} used by server" ) if protocol_version and server_protocol_version > protocol_version: raise ValueError( f"bad Git protocol version {server_protocol_version} used by server" ) self.protocol_version = server_protocol_version if self.protocol_version == 2: server_capabilities, resp, read, proto = begin_protocol_v2( proto ) (refs, symrefs, peeled) = read_pkt_refs_v2(proto.read_pkt_seq()) else: ( refs, server_capabilities, ) = read_pkt_refs_v1(proto.read_pkt_seq()) (refs, peeled) = split_peeled_refs(refs) (symrefs, agent) = _extract_symrefs_and_agent( server_capabilities ) if ref_prefix is not None: refs = filter_ref_prefix(refs, ref_prefix) return refs, server_capabilities, base_url, symrefs, peeled else: self.protocol_version = 0 # dumb servers only support protocol v0 # Read all the response data data = b"" while True: chunk = read(4096) if not chunk: break data += chunk from typing import Optional, cast info_refs = read_info_refs(BytesIO(data)) (refs, peeled) = split_peeled_refs( cast(dict[bytes, Optional[bytes]], info_refs) ) if ref_prefix is not None: refs = filter_ref_prefix(refs, ref_prefix) return refs, set(), base_url, {}, peeled finally: resp.close() def _smart_request(self, service, url, data): """Send a 'smart' HTTP request. This is a simple wrapper around _http_request that sets a couple of extra headers. """ assert url[-1] == "/" url = urljoin(url, service) result_content_type = f"application/x-{service}-result" headers = { "Content-Type": f"application/x-{service}-request", "Accept": result_content_type, } if self.protocol_version == 2: headers["Git-Protocol"] = "version=2" if isinstance(data, bytes): headers["Content-Length"] = str(len(data)) resp, read = self._http_request(url, headers, data) if resp.content_type.split(";")[0] != result_content_type: raise GitProtocolError( f"Invalid content-type from server: {resp.content_type}" ) return resp, read def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. Receives dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of elements and pack data to upload. progress: Optional progress function Returns: SendPackResult Raises: SendPackError: if server rejects the pack data """ url = self._get_url(path) old_refs, server_capabilities, url, symrefs, peeled = self._discover_references( b"git-receive-pack", url ) ( negotiated_capabilities, agent, ) = self._negotiate_receive_pack_capabilities(server_capabilities) negotiated_capabilities.add(capability_agent()) if CAPABILITY_REPORT_STATUS in negotiated_capabilities: self._report_status_parser = ReportStatusParser() new_refs = update_refs(dict(old_refs)) if new_refs is None: # Determine wants function is aborting the push. 
return SendPackResult(old_refs, agent=agent, ref_status={}) if set(new_refs.items()).issubset(set(old_refs.items())): return SendPackResult(new_refs, agent=agent, ref_status={}) if self.dumb: raise NotImplementedError(self.fetch_pack) def body_generator(): header_handler = _v1ReceivePackHeader( negotiated_capabilities, old_refs, new_refs ) for pkt in header_handler: yield pkt_line(pkt) pack_data_count, pack_data = generate_pack_data( header_handler.have, header_handler.want, ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities), ) if self._should_send_pack(new_refs): yield from PackChunkGenerator(pack_data_count, pack_data) resp, read = self._smart_request("git-receive-pack", url, data=body_generator()) try: resp_proto = Protocol(read, lambda data: None) ref_status = self._handle_receive_pack_tail( resp_proto, negotiated_capabilities, progress ) return SendPackResult(new_refs, agent=agent, ref_status=ref_status) finally: resp.close() def fetch_pack( self, path, determine_wants, graph_walker, pack_data, progress=None, depth: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, filter_spec=None, protocol_version: Optional[int] = None, ): """Retrieve a pack from a git smart server. Args: path: Path to fetch from determine_wants: Callback that returns list of commits to fetch graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Depth for request ref_prefix: List of prefixes of desired references, as a list of bytestrings. Filtering is done by the server if supported, and client side otherwise. filter_spec: A git-rev-list-style object filter spec, as bytestring. Only used if the server supports the Git protocol-v2 'filter' feature, and ignored otherwise. protocol_version: Desired Git protocol version. By default the highest mutually supported protocol version will be used. 
Returns: FetchPackResult object """ url = self._get_url(path) refs, server_capabilities, url, symrefs, peeled = self._discover_references( b"git-upload-pack", url, protocol_version=protocol_version, ref_prefix=ref_prefix, ) ( negotiated_capabilities, capa_symrefs, agent, ) = self._negotiate_upload_pack_capabilities(server_capabilities) if not symrefs and capa_symrefs: symrefs = capa_symrefs if depth is not None: wants = determine_wants(refs, depth=depth) else: wants = determine_wants(refs) if wants is not None: wants = [cid for cid in wants if cid != ZERO_SHA] if not wants and not self.dumb: return FetchPackResult(refs, symrefs, agent) elif self.dumb: # Use dumb HTTP protocol from .dumb import DumbRemoteHTTPRepo # Pass http_request function dumb_repo = DumbRemoteHTTPRepo( url, functools.partial(self._http_request, raise_for_status=False) ) # Fetch pack data from dumb remote pack_data_list = list( dumb_repo.fetch_pack_data( graph_walker, lambda refs: wants, progress=progress, depth=depth ) ) symrefs[b"HEAD"] = dumb_repo.get_head() # Write pack data if pack_data: from .pack import write_pack_data # Write pack data directly using the unpacked objects write_pack_data( pack_data, iter(pack_data_list), num_records=len(pack_data_list), progress=progress, ) return FetchPackResult(refs, symrefs, agent) req_data = BytesIO() req_proto = Protocol(None, req_data.write) # type: ignore (new_shallow, new_unshallow) = _handle_upload_pack_head( req_proto, negotiated_capabilities, graph_walker, wants, can_read=None, depth=depth, protocol_version=self.protocol_version, ) if self.protocol_version == 2: data = pkt_line(b"command=fetch\n") + b"0001" if CAPABILITY_THIN_PACK in self._fetch_capabilities: data += pkt_line(b"thin-pack\n") if ( find_capability( negotiated_capabilities, CAPABILITY_FETCH, CAPABILITY_FILTER ) and filter_spec ): data += pkt_line(b"filter %s\n" % filter_spec) elif filter_spec: self._warn_filter_objects() data += req_data.getvalue() else: if filter_spec: self._warn_filter_objects() data = req_data.getvalue() resp, read = self._smart_request("git-upload-pack", url, data) try: resp_proto = Protocol(read, None) # type: ignore if new_shallow is None and new_unshallow is None: (new_shallow, new_unshallow) = _read_shallow_updates( resp_proto.read_pkt_seq() ) _handle_upload_pack_tail( resp_proto, negotiated_capabilities, graph_walker, pack_data, progress, protocol_version=self.protocol_version, ) return FetchPackResult(refs, symrefs, agent, new_shallow, new_unshallow) finally: resp.close() def get_refs( self, path, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, ): """Retrieve the current refs from a git smart server.""" url = self._get_url(path) refs, _, _, symrefs, peeled = self._discover_references( b"git-upload-pack", url, protocol_version=protocol_version, ref_prefix=ref_prefix, ) for refname, refvalue in peeled.items(): refs[refname + PEELED_TAG_SUFFIX] = refvalue return LsRemoteResult(refs, symrefs) def get_url(self, path): """Get the HTTP URL for a path.""" return self._get_url(path).rstrip("/") def _get_url(self, path): return urljoin(self._base_url, path).rstrip("/") + "/" @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an AbstractHttpGitClient from a parsed URL.""" password = parsedurl.password if password is not None: kwargs["password"] = urlunquote(password) username = parsedurl.username if username is not None: kwargs["username"] = urlunquote(username) return cls(urlunparse(parsedurl), **kwargs) def __repr__(self) -> str: """Return 
string representation of this client.""" return f"{type(self).__name__}({self._base_url!r}, dumb={self.dumb!r})" def _wrap_urllib3_exceptions(func): from urllib3.exceptions import ProtocolError def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except ProtocolError as error: raise GitProtocolError(str(error)) from error return wrapper class Urllib3HttpGitClient(AbstractHttpGitClient): """Git client that uses urllib3 for HTTP(S) connections.""" def __init__( self, base_url, dumb=None, pool_manager=None, config=None, username=None, password=None, timeout=None, extra_headers=None, **kwargs, ) -> None: """Initialize Urllib3HttpGitClient.""" self._username = username self._password = password self._timeout = timeout self._extra_headers = extra_headers or {} if pool_manager is None: self.pool_manager = default_urllib3_manager( config, base_url=base_url, timeout=timeout ) else: self.pool_manager = pool_manager if username is not None: # No escaping needed: ":" is not allowed in username: # https://tools.ietf.org/html/rfc2617#section-2 credentials = f"{username}:{password or ''}" import urllib3.util basic_auth = urllib3.util.make_headers(basic_auth=credentials) self.pool_manager.headers.update(basic_auth) # type: ignore self.config = config super().__init__(base_url=base_url, dumb=dumb, **kwargs) def _get_url(self, path): if not isinstance(path, str): # urllib3.util.url._encode_invalid_chars() converts the path back # to bytes using the utf-8 codec. path = path.decode("utf-8") return urljoin(self._base_url, path).rstrip("/") + "/" def _http_request(self, url, headers=None, data=None, raise_for_status=True): import urllib3.exceptions req_headers = dict(self.pool_manager.headers) if headers is not None: req_headers.update(headers) req_headers["Pragma"] = "no-cache" try: request_kwargs = { "headers": req_headers, "preload_content": False, } if self._timeout is not None: request_kwargs["timeout"] = self._timeout if data is None: resp = self.pool_manager.request("GET", url, **request_kwargs) # type: ignore[arg-type] else: request_kwargs["body"] = data resp = self.pool_manager.request("POST", url, **request_kwargs) # type: ignore[arg-type] except urllib3.exceptions.HTTPError as e: raise GitProtocolError(str(e)) from e if raise_for_status: if resp.status == 404: raise NotGitRepository if resp.status == 401: raise HTTPUnauthorized(resp.headers.get("WWW-Authenticate"), url) if resp.status == 407: raise HTTPProxyUnauthorized(resp.headers.get("Proxy-Authenticate"), url) if resp.status != 200: raise GitProtocolError(f"unexpected http resp {resp.status} for {url}") resp.content_type = resp.headers.get("Content-Type") # type: ignore[attr-defined] # Check if geturl() is available (urllib3 version >= 1.23) try: resp_url = resp.geturl() except AttributeError: # get_redirect_location() is available for urllib3 >= 1.1 resp.redirect_location = resp.get_redirect_location() # type: ignore[attr-defined] else: resp.redirect_location = resp_url if resp_url != url else "" # type: ignore[attr-defined] return resp, _wrap_urllib3_exceptions(resp.read) HttpGitClient = Urllib3HttpGitClient def _win32_url_to_path(parsed) -> str: """Convert a file: URL to a path. 
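    Both the file:///C:/some/path and file://C:/some/path forms are accepted and resolve to a path on the local C: drive; file URLs that name a remote host raise NotImplementedError. See: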
https://datatracker.ietf.org/doc/html/rfc8089 """ assert parsed.scheme == "file" _, netloc, path, _, _, _ = parsed if netloc == "localhost" or not netloc: netloc = "" elif ( netloc and len(netloc) >= 2 and netloc[0].isalpha() and netloc[1:2] in (":", ":/") ): # file://C:/foo.bar/baz or file://C://foo.bar//baz netloc = netloc[:2] else: raise NotImplementedError("Non-local file URLs are not supported") from nturl2path import url2pathname return url2pathname(netloc + path) def get_transport_and_path_from_url( url: str, config: Optional[Config] = None, operation: Optional[str] = None, **kwargs ) -> tuple[GitClient, str]: """Obtain a git client from a URL. Args: url: URL to open (a unicode string) config: Optional config object operation: Kind of operation that'll be performed; "pull" or "push" **kwargs: Additional keyword arguments Keyword Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. Returns: Tuple with client instance and relative path. """ if config is not None: url = apply_instead_of(config, url, push=(operation == "push")) return _get_transport_and_path_from_url( url, config=config, operation=operation, **kwargs ) def _get_transport_and_path_from_url(url, config, operation, **kwargs): parsed = urlparse(url) if parsed.scheme == "git": return (TCPGitClient.from_parsedurl(parsed, **kwargs), parsed.path) elif parsed.scheme in ("git+ssh", "ssh"): return SSHGitClient.from_parsedurl(parsed, config=config, **kwargs), parsed.path elif parsed.scheme in ("http", "https"): return ( HttpGitClient.from_parsedurl(parsed, config=config, **kwargs), parsed.path, ) elif parsed.scheme == "file": if sys.platform == "win32" or os.name == "nt": return default_local_git_client_cls(**kwargs), _win32_url_to_path(parsed) return ( default_local_git_client_cls.from_parsedurl(parsed, **kwargs), parsed.path, ) raise ValueError(f"unknown scheme '{parsed.scheme}'") def parse_rsync_url(location: str) -> tuple[Optional[str], str, str]: """Parse a rsync-style URL.""" if ":" in location and "@" not in location: # SSH with no user@, zero or one leading slash. (host, path) = location.split(":", 1) user = None elif ":" in location: # SSH with user@host:foo. user_host, path = location.split(":", 1) if "@" in user_host: user, host = user_host.rsplit("@", 1) else: user = None host = user_host else: raise ValueError("not a valid rsync-style URL") return (user, host, path) def get_transport_and_path( location: str, config: Optional[Config] = None, operation: Optional[str] = None, **kwargs, ) -> tuple[GitClient, str]: """Obtain a git client from a URL. Args: location: URL or path (a string) config: Optional config object operation: Kind of operation that'll be performed; "pull" or "push" **kwargs: Additional keyword arguments Keyword Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. Returns: Tuple with client instance and relative path. 
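    Example (an illustrative sketch; the URL below is a placeholder and network access to the remote is assumed):

        from dulwich.client import get_transport_and_path
        from dulwich.repo import MemoryRepo

        client, path = get_transport_and_path("https://github.com/jelmer/dulwich.git")
        result = client.fetch(path, MemoryRepo())
        print(sorted(result.refs))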
""" if config is not None: location = apply_instead_of(config, location, push=(operation == "push")) # First, try to parse it as a URL try: return _get_transport_and_path_from_url( location, config=config, operation=operation, **kwargs ) except ValueError: pass if sys.platform == "win32" and location[0].isalpha() and location[1:3] == ":\\": # Windows local path - but check if it's a bundle file first if BundleClient._is_bundle_file(location): return BundleClient(**kwargs), location return default_local_git_client_cls(**kwargs), location try: (username, hostname, path) = parse_rsync_url(location) except ValueError: # Check if it's a bundle file before assuming it's a local path if BundleClient._is_bundle_file(location): return BundleClient(**kwargs), location # Otherwise, assume it's a local path. return default_local_git_client_cls(**kwargs), location else: return SSHGitClient(hostname, username=username, config=config, **kwargs), path DEFAULT_GIT_CREDENTIALS_PATHS = [ os.path.expanduser("~/.git-credentials"), get_xdg_config_home_path("git", "credentials"), ] def get_credentials_from_store( scheme, hostname, username=None, fnames=DEFAULT_GIT_CREDENTIALS_PATHS ): """Read credentials from a Git credential store.""" for fname in fnames: try: with open(fname, "rb") as f: for line in f: parsed_line = urlparse(line.strip()) if ( parsed_line.scheme == scheme and parsed_line.hostname == hostname and (username is None or parsed_line.username == username) ): return parsed_line.username, parsed_line.password except FileNotFoundError: # If the file doesn't exist, try the next one. continue