@@ -124,6 +124,7 @@ from typing import (
     Any,
     BinaryIO,
     Generic,
+    Optional,
     Protocol,
     TypeVar,
     cast,
@@ -141,8 +142,7 @@ if TYPE_CHECKING:
 
     from .bitmap import PackBitmap
    from .commit_graph import CommitGraph
-    from .object_store import BaseObjectStore
-    from .refs import Ref
+    from .object_format import ObjectFormat
 
 # For some reason the above try, except fails to set has_mmap = False for plan9
 if sys.platform == "Plan9":
@@ -530,7 +530,9 @@ def iter_sha1(iter: Iterable[bytes]) -> bytes:
     return sha.hexdigest().encode("ascii")
 
 
-def load_pack_index(path: str | os.PathLike[str], object_format: ObjectFormat| None = None) -> "PackIndex":
+def load_pack_index(
+    path: str | os.PathLike[str], object_format: Optional["ObjectFormat"] = None
+) -> "PackIndex":
     """Load an index file by path.
 
     Args:
@@ -574,8 +576,9 @@ def _load_file_contents(
 
 
 def load_pack_index_file(
-    path: str | os.PathLike[str], f: IO[bytes] | _GitFile
-    object_format: ObjectFormat | = None,
+    path: str | os.PathLike[str],
+    f: IO[bytes] | _GitFile,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> "PackIndex":
     """Load an index file from a file-like object.
 
@@ -759,7 +762,7 @@ class MemoryPackIndex(PackIndex):
         entries: list[PackIndexEntry],
         pack_checksum: bytes | None = None,
         *,
-        object_format: ObjectFormat | None = None,
+        object_format: Optional["ObjectFormat"] = None,
     ) -> None:
         """Create a new MemoryPackIndex.
 
@@ -822,7 +825,9 @@ class MemoryPackIndex(PackIndex):
     def for_pack(cls, pack_data: "PackData") -> "MemoryPackIndex":
         """Create a MemoryPackIndex from a PackData object."""
         return MemoryPackIndex(
-            list(pack_data.sorted_entries()), pack_checksum=pack_data.get_stored_checksum(), object_format=pack.object_format,
+            list(pack_data.sorted_entries()),
+            pack_checksum=pack_data.get_stored_checksum(),
+            object_format=pack_data.object_format,
         )
 
     @classmethod
@@ -1046,7 +1051,7 @@ class PackIndex1(FilePackIndex):
         file: IO[bytes] | _GitFile | None = None,
         contents: bytes | None = None,
         size: int | None = None,
-        object_format: ObjectFormat | None = None,
+        object_format: Optional["ObjectFormat"] = None,
     ) -> None:
         """Initialize a version 1 pack index.
 
@@ -1055,7 +1060,7 @@ class PackIndex1(FilePackIndex):
             file: Optional file object
             contents: Optional mmap'd contents
             size: Optional size of the index
-            hash_algorithm: Hash algorithm used by the repository
+            object_format: Object format used by the repository
         """
         super().__init__(filename, file, contents, size)
         self.version = 1
@@ -1096,7 +1101,7 @@ class PackIndex2(FilePackIndex):
         file: IO[bytes] | _GitFile | None = None,
         contents: bytes | None = None,
         size: int | None = None,
-        object_format: ObjectFormat | None = None,
+        object_format: Optional["ObjectFormat"] = None,
     ) -> None:
         """Initialize a version 2 pack index.
 
@@ -1105,7 +1110,7 @@ class PackIndex2(FilePackIndex):
             file: Optional file object
             contents: Optional mmap'd contents
             size: Optional size of the index
-            hash_algorithm: Hash algorithm used by the repository
+            object_format: Object format used by the repository
         """
         super().__init__(filename, file, contents, size)
         if self._contents[:4] != b"\377tOc":
@@ -1153,10 +1158,12 @@ class PackIndex2(FilePackIndex):
     def get_pack_checksum(self) -> bytes:
         """Return the checksum stored for the corresponding packfile.
 
-        Returns: binary digest (always 20 bytes - SHA1)
+        Returns: binary digest (size depends on hash algorithm)
         """
-        # Pack checksums are always SHA1, even in SHA256 repositories
-        return bytes(self._contents[-40:-20])
+        # Index ends with: pack_checksum + index_checksum
+        # Each checksum is hash_size bytes
+        checksum_size = self.hash_size
+        return bytes(self._contents[-2 * checksum_size : -checksum_size])
 
 
 class PackIndex3(FilePackIndex):
@@ -1707,7 +1714,12 @@ class PackData:
         return self._filename
 
     @classmethod
-    def from_file(cls, file: IO[bytes], size: int | None = None, object_format: ObjectFormat | None = None) -> "PackData":
+    def from_file(
+        cls,
+        file: IO[bytes],
+        size: int | None = None,
+        object_format: Optional["ObjectFormat"] = None,
+    ) -> "PackData":
         """Create a PackData object from an open file.
 
         Args:
@@ -1721,7 +1733,11 @@ class PackData:
         return cls(str(file), file=file, size=size, object_format=object_format)
 
     @classmethod
-    def from_path(cls, path: str | os.PathLike[str], object_format: ObjectFormat | None = None) -> "PackData":
+    def from_path(
+        cls,
+        path: str | os.PathLike[str],
+        object_format: Optional["ObjectFormat"] = None,
+    ) -> "PackData":
         """Create a PackData object from a file path.
 
         Args:
@@ -1815,7 +1831,7 @@ class PackData:
 
     def sorted_entries(
         self,
-        progress: ProgressFn | None = None,
+        progress: Callable[[int, int], None] | None = None,
         resolve_ext_ref: ResolveExtRefFn | None = None,
     ) -> list[tuple[RawObjectID, int, int]]:
         """Return entries in this pack, sorted by SHA.
@@ -1896,8 +1912,10 @@ class PackData:
             progress=progress, resolve_ext_ref=resolve_ext_ref
         )
         with GitFile(filename, "wb") as f:
+            if hash_format is None:
+                hash_format = 1  # Default to SHA-1
             return write_pack_index_v3(
-                f, entries, self.calculate_checksum(), hash_format
+                f, entries, self.calculate_checksum(), hash_format=hash_format
             )
 
     def create_index(
@@ -1938,8 +1956,9 @@ class PackData:
 
     def get_stored_checksum(self) -> bytes:
         """Return the expected checksum stored in this pack."""
-        self._file.seek(-20, SEEK_END)
-        return self._file.read(20)
+        checksum_size = self.object_format.oid_length
+        self._file.seek(-checksum_size, SEEK_END)
+        return self._file.read(checksum_size)
 
     def check(self) -> None:
         """Check the consistency of this pack."""
@@ -2575,8 +2594,10 @@ class SHA1Writer(BinaryIO):
 
 
 def pack_object_header(
-    type_num: int, delta_base: bytes | int | None, size: int
-    object_format: "ObjectFormat" | None = None
+    type_num: int,
+    delta_base: bytes | int | None,
+    size: int,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> bytearray:
     """Create a pack object header for the given object info.
 
@@ -2615,6 +2636,7 @@ def pack_object_header(
             delta_base >>= 7
         header.extend(ret)
     elif type_num == REF_DELTA:
+        assert isinstance(delta_base, bytes)
         assert len(delta_base) == object_format.oid_length
         header += delta_base
     return bytearray(header)
@@ -2624,7 +2646,7 @@ def pack_object_chunks(
     type: int,
     object: list[bytes] | tuple[bytes | int, list[bytes]],
     compression_level: int = -1,
-    object_format: "ObjectFormat" | None = None,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> Iterator[bytes]:
     """Generate chunks for a pack object.
 
@@ -2663,7 +2685,11 @@ def pack_object_chunks(
         # Shouldn't reach here with proper typing
        raise TypeError(f"Unexpected object type: {object.__class__.__name__}")
 
-    yield bytes(pack_object_header(type, delta_base, sum(map(len, chunks)), object_format=object_format))
+    yield bytes(
+        pack_object_header(
+            type, delta_base, sum(map(len, chunks)), object_format=object_format
+        )
+    )
     compressor = zlib.compressobj(level=compression_level)
     for data in chunks:
         yield compressor.compress(data)
@@ -2676,7 +2702,7 @@ def write_pack_object(
     object: list[bytes] | tuple[bytes | int, list[bytes]],
     sha: "HashObject | None" = None,
     compression_level: int = -1,
-    object_format: "ObjectFormat" | None = None,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> int:
     """Write pack object to a file.
 
@@ -2716,7 +2742,7 @@ def write_pack(
     deltify: bool | None = None,
     delta_window_size: int | None = None,
     compression_level: int = -1,
-    object_format: "ObjectFormat" | None = None
+    object_format: Optional["ObjectFormat"] = None,
 ) -> tuple[bytes, bytes]:
     """Write a new pack data file.
 
@@ -3046,7 +3072,7 @@ def write_pack_from_container(
     reuse_deltas: bool = True,
     compression_level: int = -1,
     other_haves: set[bytes] | None = None,
-    object_format: "ObjectFormat" | None = None,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -3089,7 +3115,7 @@ def write_pack_objects(
     delta_window_size: int | None = None,
     deltify: bool | None = None,
     compression_level: int = -1,
-    object_format: "ObjectFormat" | None = None
+    object_format: Optional["ObjectFormat"] = None,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -3124,7 +3150,7 @@ class PackChunkGenerator:
         progress: Callable[..., None] | None = None,
         compression_level: int = -1,
         reuse_compressed: bool = True,
-        object_format: "ObjectFormat" | None = None,
+        object_format: Optional["ObjectFormat"] = None,
     ) -> None:
         """Initialize PackChunkGenerator.
 
@@ -3251,7 +3277,7 @@ def write_pack_data(
     num_records: int | None = None,
     progress: Callable[..., None] | None = None,
     compression_level: int = -1,
-    object_format: "ObjectFormat" | None = None,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -3655,7 +3681,7 @@ class Pack:
         depth: int | None = None,
         threads: int | None = None,
         big_file_threshold: int | None = None,
-        object_format: ObjectFormat | None = None,
+        object_format: Optional["ObjectFormat"] = None,
     ) -> None:
         """Initialize a Pack object.
 
@@ -3683,6 +3709,10 @@ class Pack:
         self.depth = depth
         self.threads = threads
         self.big_file_threshold = big_file_threshold
+        # Always set object_format, defaulting to default
+        from .object_format import DEFAULT_OBJECT_FORMAT
+
+        self.object_format = object_format if object_format else DEFAULT_OBJECT_FORMAT
         self._data_load = lambda: PackData(
             self._data_path,
             delta_window_size=delta_window_size,
@@ -3697,15 +3727,13 @@ class Pack:
             self._idx_path, object_format=object_format
         )
         self.resolve_ext_ref = resolve_ext_ref
-        # Always set object_format, defaulting to default
-        from .object_format import DEFAULT_OBJECT_FORMAT
-
-        self.object_format = object_format if object_format else DEFAULT_OBJECT_FORMAT
 
     @classmethod
     def from_lazy_objects(
-        cls, data_fn: Callable[[], PackData], idx_fn: Callable[[], PackIndex],
-        object_format: ObjectFormat | None = None
+        cls,
+        data_fn: Callable[[], PackData],
+        idx_fn: Callable[[], PackIndex],
+        object_format: Optional["ObjectFormat"] = None,
     ) -> "Pack":
         """Create a new pack object from callables to load pack data and index objects."""
         ret = cls("", object_format=object_format)
@@ -3714,7 +3742,12 @@ class Pack:
         return ret
 
     @classmethod
-    def from_objects(cls, data: PackData, idx: PackIndex, object_format | ObjectFormat | None = None) -> "Pack":
+    def from_objects(
+        cls,
+        data: PackData,
+        idx: PackIndex,
+        object_format: Optional["ObjectFormat"] = None,
+    ) -> "Pack":
         """Create a new pack object from pack data and index objects."""
         ret = cls("", object_format=object_format)
         ret._data = data
@@ -4086,7 +4119,7 @@ class Pack:
         return base_type, chunks
 
     def entries(
-        self, progress: ProgressFn | None = None
+        self, progress: Callable[[int, int], None] | None = None
     ) -> Iterator[PackIndexEntry]:
         """Yield entries summarizing the contents of this pack.
 
@@ -4100,7 +4133,7 @@ class Pack:
         )
 
     def sorted_entries(
-        self, progress: ProgressFn | None = None
+        self, progress: Callable[[int, int], None] | None = None
     ) -> Iterator[PackIndexEntry]:
         """Return entries in this pack, sorted by SHA.
 
@@ -4145,7 +4178,7 @@ def extend_pack(
     *,
     compression_level: int = -1,
     progress: Callable[[bytes], None] | None = None,
-    object_format: ObjectFormat | None = None,
+    object_format: Optional["ObjectFormat"] = None,
 ) -> tuple[bytes, list[tuple[bytes, int, int]]]:
     """Extend a pack file with more objects.
 