
Fix all deprecation warnings by adding object_format parameter propagation

Jelmer Vernooij 2 months ago
parent
commit
3003e49801
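
Note: every shim touched by this commit has the same shape: warn, then fall back to DEFAULT_OBJECT_FORMAT. A minimal sketch of the calling convention being migrated, assuming "pack-1234.pack" is an existing pack file (hypothetical name):

import warnings

from dulwich.object_format import DEFAULT_OBJECT_FORMAT
from dulwich.pack import PackData

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    pd = PackData("pack-1234.pack")  # legacy call: the shim warns, then falls back
    pd.close()
assert any(issubclass(w.category, DeprecationWarning) for w in caught)

pd = PackData("pack-1234.pack", object_format=DEFAULT_OBJECT_FORMAT)  # no warning
pd.close()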

+ 3 - 3
crates/objects/src/lib.rs

@@ -54,7 +54,7 @@ fn parse_tree_with_length(
     mut text: &[u8],
     strict: bool,
     hash_len: usize,
-) -> PyResult<Vec<(PyObject, u32, PyObject)>> {
+) -> PyResult<Vec<(Py<PyAny>, u32, Py<PyAny>)>> {
     let mut entries = Vec::new();
     while !text.is_empty() {
         let mode_end = memchr(b' ', text)
@@ -97,8 +97,8 @@ fn parse_tree(
     py: Python,
     text: &[u8],
     strict: Option<bool>,
-    object_format: Option<PyObject>,
-) -> PyResult<Vec<(PyObject, u32, PyObject)>> {
+    object_format: Option<Py<PyAny>>,
+) -> PyResult<Vec<(Py<PyAny>, u32, Py<PyAny>)>> {
     let strict = strict.unwrap_or(false);
 
     // Determine hash length from object_format if provided

+ 9 - 3
dulwich/bundle.py

@@ -160,8 +160,10 @@ def _read_bundle(f: BinaryIO, version: int) -> Bundle:
 
     from io import BytesIO
 
+    from .object_format import DEFAULT_OBJECT_FORMAT
+
     pack_file = BytesIO(pack_bytes)
-    pack_data = PackData.from_file(pack_file)
+    pack_data = PackData.from_file(pack_file, object_format=DEFAULT_OBJECT_FORMAT)
     ret = Bundle()
     ret.references = references
     ret.capabilities = capabilities
@@ -225,6 +227,7 @@ def write_bundle(f: BinaryIO, bundle: Bundle) -> None:
         cast(Callable[[bytes], None], f.write),
         num_records=len(bundle.pack_data),
         records=bundle.pack_data.iter_unpacked(),
+        object_format=bundle.pack_data.object_format,
     )
 
 
@@ -307,9 +310,12 @@ def create_bundle_from_repo(
     # Store the pack objects directly, we'll write them when saving the bundle
     # For now, create a simple wrapper to hold the data
     class _BundlePackData:
-        def __init__(self, count: int, objects: Iterator[UnpackedObject]) -> None:
+        def __init__(
+            self, count: int, objects: Iterator[UnpackedObject], object_format
+        ) -> None:
             self._count = count
             self._objects = list(objects)  # Materialize the iterator
+            self.object_format = object_format
 
         def __len__(self) -> int:
             return self._count
@@ -317,7 +323,7 @@ def create_bundle_from_repo(
         def iter_unpacked(self) -> Iterator[UnpackedObject]:
             return iter(self._objects)
 
-    pack_data = _BundlePackData(pack_count, pack_objects)
+    pack_data = _BundlePackData(pack_count, pack_objects, repo.object_format)
 
     # Create bundle object
     bundle = Bundle()
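
Note: with PackData now carrying object_format, write_bundle can read it back off bundle.pack_data (see the write_bundle hunk above). A round-trip sketch in the style of the updated tests, assuming an empty pack is an acceptable bundle payload:

from io import BytesIO

from dulwich.bundle import Bundle, write_bundle
from dulwich.object_format import DEFAULT_OBJECT_FORMAT
from dulwich.pack import PackData, write_pack_objects

b = BytesIO()
write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
b.seek(0)

bundle = Bundle()
bundle.version = 3
bundle.capabilities = {}
bundle.prerequisites = []
bundle.references = {b"refs/heads/master": b"ab" * 20}
bundle.pack_data = PackData.from_file(b, object_format=DEFAULT_OBJECT_FORMAT)

f = BytesIO()
write_bundle(f, bundle)  # pack section written with the pack's own format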

+ 10 - 3
dulwich/client.py

@@ -154,6 +154,7 @@ from .bundle import Bundle
 from .config import Config, apply_instead_of, get_xdg_config_home_path
 from .credentials import match_partial_url, match_urls
 from .errors import GitProtocolError, HangupException, NotGitRepository, SendPackError
+from .object_format import DEFAULT_OBJECT_FORMAT
 from .object_store import GraphWalker
 from .objects import ObjectID
 from .pack import (
@@ -1586,7 +1587,9 @@ class TraditionalGitClient(GitClient):
             )
 
             if self._should_send_pack(new_refs):
-                for chunk in PackChunkGenerator(pack_data_count, pack_data):
+                for chunk in PackChunkGenerator(
+                    pack_data_count, pack_data, object_format=DEFAULT_OBJECT_FORMAT
+                ):
                     proto.write(chunk)
 
             ref_status = self._handle_receive_pack_tail(
@@ -2457,6 +2460,7 @@ class LocalGitClient(GitClient):
                 r.object_store,
                 object_ids,
                 other_haves=other_haves,
+                object_format=r.object_format,
             )
             # Convert refs to Optional type for FetchPackResult
             return FetchPackResult(_to_optional_dict(r.get_refs()), symrefs, agent)
@@ -2716,7 +2720,7 @@ class BundleClient(GitClient):
         from io import BytesIO
 
         pack_io = BytesIO(pack_bytes)
-        pack_data = PackData.from_file(pack_io)
+        pack_data = PackData.from_file(pack_io, object_format=DEFAULT_OBJECT_FORMAT)
         target.object_store.add_pack_data(len(pack_data), pack_data.iter_unpacked())
 
         # Apply ref filtering if specified
@@ -3809,7 +3813,9 @@ class AbstractHttpGitClient(GitClient):
                 progress=progress,
             )
             if self._should_send_pack(new_refs):
-                yield from PackChunkGenerator(pack_data_count, pack_data)
+                yield from PackChunkGenerator(
+                    pack_data_count, pack_data, object_format=DEFAULT_OBJECT_FORMAT
+                )
 
         resp, read = self._smart_request("git-receive-pack", url, data=body_generator())
         try:
@@ -3921,6 +3927,7 @@ class AbstractHttpGitClient(GitClient):
                     iter(pack_data_list),
                     num_records=len(pack_data_list),
                     progress=progress,
+                    object_format=DEFAULT_OBJECT_FORMAT,
                 )
 
             return FetchPackResult(refs, symrefs, agent)

+ 22 - 13
dulwich/commit_graph.py

@@ -163,6 +163,7 @@ class CommitGraph:
           object_format: Object format to use (defaults to SHA1)
         """
         import warnings
+
         from .object_format import DEFAULT_OBJECT_FORMAT, SHA256
 
         if object_format is None:
@@ -173,7 +174,9 @@ class CommitGraph:
             )
             object_format = DEFAULT_OBJECT_FORMAT
         self.object_format = object_format
-        self.hash_version = HASH_VERSION_SHA256 if object_format == SHA256 else HASH_VERSION_SHA1
+        self.hash_version = (
+            HASH_VERSION_SHA256 if object_format == SHA256 else HASH_VERSION_SHA1
+        )
         self.chunks: dict[bytes, CommitGraphChunk] = {}
         self.entries: list[CommitGraphEntry] = []
         self._oid_to_index: dict[ObjectID, int] = {}
@@ -181,11 +184,10 @@ class CommitGraph:
     @classmethod
     def from_file(cls, f: BinaryIO) -> "CommitGraph":
         """Read commit graph from file."""
-        graph = cls()
-        graph._read_from_file(f)
-        return graph
+        return cls._read_from_file(f)
 
-    def _read_from_file(self, f: BinaryIO) -> None:
+    @classmethod
+    def _read_from_file(cls, f: BinaryIO) -> "CommitGraph":
         """Read commit graph data from file."""
         # Read header
         signature = f.read(4)
@@ -196,16 +198,21 @@ class CommitGraph:
         if version != COMMIT_GRAPH_VERSION:
             raise ValueError(f"Unsupported commit graph version: {version}")
 
-        self.hash_version = struct.unpack(">B", f.read(1))[0]
+        hash_version = struct.unpack(">B", f.read(1))[0]
 
         # Set object_format based on hash_version from file
         from .object_format import SHA1, SHA256
-        if self.hash_version == HASH_VERSION_SHA1:
-            self.object_format = SHA1
-        elif self.hash_version == HASH_VERSION_SHA256:
-            self.object_format = SHA256
+
+        if hash_version == HASH_VERSION_SHA1:
+            object_format = SHA1
+        elif hash_version == HASH_VERSION_SHA256:
+            object_format = SHA256
         else:
-            raise ValueError(f"Unsupported hash version: {self.hash_version}")
+            raise ValueError(f"Unsupported hash version: {hash_version}")
+
+        # Create instance with correct object_format
+        graph = cls(object_format=object_format)
+        graph.hash_version = hash_version
 
         num_chunks = struct.unpack(">B", f.read(1))[0]
         struct.unpack(">B", f.read(1))[0]
@@ -226,10 +233,12 @@ class CommitGraph:
 
             f.seek(offset)
             chunk_data = f.read(chunk_size)
-            self.chunks[chunk_id] = CommitGraphChunk(chunk_id, chunk_data)
+            graph.chunks[chunk_id] = CommitGraphChunk(chunk_id, chunk_data)
 
         # Parse chunks
-        self._parse_chunks()
+        graph._parse_chunks()
+
+        return graph
 
     def _parse_chunks(self) -> None:
         """Parse chunk data into entries."""

+ 1 - 1
dulwich/contrib/swift.py

@@ -939,7 +939,7 @@ class SwiftObjectStore(PackBasedObjectStore):
 
         # Complete the pack.
         for ext_sha in indexer.ext_refs():  # type: ignore
-            assert len(ext_sha) == 20
+            assert len(ext_sha) in (20, 32)  # SHA-1 or SHA-256
             type_num, data = self.get_raw(ext_sha)
             offset = f.tell()
             crc32 = write_pack_object(f, type_num, data, sha=new_sha)  # type: ignore
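
Note: the relaxed assertion matches the two binary digest sizes Git can use, which the standard hashlib objects confirm:

import hashlib

assert hashlib.sha1(b"x").digest_size == 20    # SHA-1
assert hashlib.sha256(b"x").digest_size == 32  # SHA-256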

+ 4 - 1
dulwich/dumb.py

@@ -62,6 +62,7 @@ class DumbHTTPObjectStore(BaseObjectStore):
         http_request_func: Callable[
             [str, dict[str, str]], tuple[Any, Callable[..., bytes]]
         ],
+        object_format=None,
     ) -> None:
         """Initialize a DumbHTTPObjectStore.
 
@@ -69,7 +70,9 @@ class DumbHTTPObjectStore(BaseObjectStore):
           base_url: Base URL of the remote repository (e.g. "https://example.com/repo.git/")
           http_request_func: Function to make HTTP requests, should accept (url, headers)
                            and return (response, read_func).
+          object_format: Object format to use (defaults to DEFAULT_OBJECT_FORMAT)
         """
+        super().__init__(object_format=object_format)
         self.base_url = base_url.rstrip("/") + "/"
         self._http_request = http_request_func
         self._packs: list[tuple[str, PackIndex | None]] | None = None
@@ -252,7 +255,7 @@ class DumbHTTPObjectStore(BaseObjectStore):
                     f.write(data)
 
             # Open the pack and get the object
-            pack_data = PackData(pack_path)
+            pack_data = PackData(pack_path, object_format=self.object_format)
             pack = Pack.from_objects(pack_data, pack_idx)
             try:
                 return pack.get_raw(binsha)

+ 2 - 2
dulwich/errors.py

@@ -70,13 +70,13 @@ class ChecksumMismatch(Exception):
             got: The actual checksum value (bytes or hex string).
             extra: Optional additional error information.
         """
-        if isinstance(expected, bytes) and len(expected) == 20:
+        if isinstance(expected, bytes) and len(expected) in (20, 32):
             expected_str = binascii.hexlify(expected).decode("ascii")
         else:
             expected_str = (
                 expected if isinstance(expected, str) else expected.decode("ascii")
             )
-        if isinstance(got, bytes) and len(got) == 20:
+        if isinstance(got, bytes) and len(got) in (20, 32):
             got_str = binascii.hexlify(got).decode("ascii")
         else:
             got_str = got if isinstance(got, str) else got.decode("ascii")
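
Note: the same 20-or-32 check as in swift.py, applied to error formatting. A quick sketch showing that a 32-byte digest is now rendered as hex rather than raw bytes:

import hashlib

from dulwich.errors import ChecksumMismatch

expected = hashlib.sha256(b"expected").digest()
got = hashlib.sha256(b"got").digest()
print(ChecksumMismatch(expected, got))  # both sides shown as hex strings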

+ 3 - 2
dulwich/notes.py

@@ -174,11 +174,12 @@ class NotesTree:
 
         # If we have files at the root level, check if they're full SHA names
         if has_files and not has_dirs:
-            # Check if any file names are full 40-char hex strings
+            # Check if any file names are full hex strings (40 for SHA-1, 64 for SHA-256)
+            hex_length = self._object_store.object_format.hex_length
             for name, mode, sha in self._tree.items():
                 assert name is not None
                 assert mode is not None
-                if stat.S_ISREG(mode) and len(name) == 40:
+                if stat.S_ISREG(mode) and len(name) == hex_length:
                     try:
                         int(name, 16)  # Verify it's a valid hex string
                         return 0  # No fanout
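
Note: the fanout check now keys off the store's hex digest length instead of a hard-coded 40, so SHA-256 note trees (64-character names) are detected as well:

from dulwich.object_format import SHA1, SHA256

assert SHA1.hex_length == 40    # SHA-1 hex object IDs
assert SHA256.hex_length == 64  # SHA-256 hex object IDs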

+ 8 - 3
dulwich/object_store.py

@@ -915,6 +915,7 @@ class PackBasedObjectStore(PackCapableObjectStore, PackedObjectContainer):
                 num_records=count,
                 progress=progress,
                 compression_level=self.pack_compression_level,
+                object_format=self.object_format,
             )
         except BaseException:
             abort()
@@ -1855,6 +1856,7 @@ class DiskObjectStore(PackBasedObjectStore):
             get_raw=self.get_raw,
             compression_level=self.pack_compression_level,
             progress=progress,
+            object_format=self.object_format,
         )
         f.flush()
         if self.fsync_object_files:
@@ -1997,7 +1999,7 @@ class DiskObjectStore(PackBasedObjectStore):
             if f.tell() > 0:
                 f.seek(0)
 
-                with PackData(path, f) as pd:
+                with PackData(path, f, object_format=self.object_format) as pd:
                     indexer = PackIndexer.for_pack_data(
                         pd,
                         resolve_ext_ref=self.get_raw,  # type: ignore[arg-type]
@@ -2336,7 +2338,9 @@ class DiskObjectStore(PackBasedObjectStore):
                 if isinstance(ref, bytes) and len(ref) == self.object_format.hex_length:
                     # Already hex ObjectID
                     commit_ids.append(ref)
-                elif isinstance(ref, bytes) and len(ref) == self.object_format.oid_length:
+                elif (
+                    isinstance(ref, bytes) and len(ref) == self.object_format.oid_length
+                ):
                     # Binary SHA, convert to hex ObjectID
                     from .objects import sha_to_hex
 
@@ -2547,7 +2551,7 @@ class MemoryObjectStore(PackCapableObjectStore):
             if size > 0:
                 f.seek(0)
 
-                p = PackData.from_file(f, size)
+                p = PackData.from_file(f, size, object_format=self.object_format)
                 for obj in PackInflater.for_pack_data(p, self.get_raw):  # type: ignore[arg-type]
                     self.add_object(obj)
                 p.close()
@@ -2586,6 +2590,7 @@ class MemoryObjectStore(PackCapableObjectStore):
                 unpacked_objects,
                 num_records=count,
                 progress=progress,
+                object_format=self.object_format,
             )
         except BaseException:
             abort()

+ 92 - 10
dulwich/pack.py

@@ -822,7 +822,9 @@ class MemoryPackIndex(PackIndex):
     def for_pack(cls, pack_data: "PackData") -> "MemoryPackIndex":
         """Create a MemoryPackIndex from a PackData object."""
         return MemoryPackIndex(
-            list(pack_data.sorted_entries()), pack_data.get_stored_checksum()
+            list(pack_data.sorted_entries()),
+            pack_checksum=pack_data.get_stored_checksum(),
+            object_format=pack_data.object_format,
         )
 
     @classmethod
@@ -1645,6 +1645,7 @@ class PackData:
         depth: int | None = None,
         threads: int | None = None,
         big_file_threshold: int | None = None,
+        object_format=None,
     ) -> None:
         """Create a PackData object representing the pack in the given filename.
 
@@ -1654,6 +1655,16 @@ class PackData:
         Currently there is a restriction on the size of the pack as the python
         mmap implementation is flawed.
         """
+        from .object_format import DEFAULT_OBJECT_FORMAT
+
+        if object_format is None:
+            warnings.warn(
+                "PackData() should be called with object_format parameter",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            object_format = DEFAULT_OBJECT_FORMAT
+        self.object_format = object_format
         self._filename = filename
         self._size = size
         self._header_size = 12
@@ -1696,29 +1707,31 @@ class PackData:
         return self._filename
 
     @classmethod
-    def from_file(cls, file: IO[bytes], size: int | None = None) -> "PackData":
+    def from_file(cls, file: IO[bytes], size: int | None = None, object_format: "ObjectFormat" | None = None) -> "PackData":
         """Create a PackData object from an open file.
 
         Args:
           file: Open file object
           size: Optional file size
+          object_format: Object format used by the repository
 
         Returns:
           PackData instance
         """
-        return cls(str(file), file=file, size=size)
+        return cls(str(file), file=file, size=size, object_format=object_format)
 
     @classmethod
-    def from_path(cls, path: str | os.PathLike[str]) -> "PackData":
+    def from_path(cls, path: str | os.PathLike[str], object_format: "ObjectFormat" | None = None) -> "PackData":
         """Create a PackData object from a file path.
 
         Args:
           path: Path to the pack file
+          object_format: Object format used by the repository
 
         Returns:
           PackData instance
         """
-        return cls(filename=path)
+        return cls(filename=path, object_format=object_format)
 
     def close(self) -> None:
         """Close the underlying pack file."""
@@ -2575,6 +2588,7 @@ def pack_object_header(
     Returns: A header for a packed object.
     """
     from .object_format import DEFAULT_OBJECT_FORMAT
+
     if object_format is None:
         warnings.warn(
             "pack_object_header() should be called with object_format parameter",
@@ -2610,6 +2624,7 @@ def pack_object_chunks(
     type: int,
     object: list[bytes] | tuple[bytes | int, list[bytes]],
     compression_level: int = -1,
+    object_format: "ObjectFormat" | None = None,
 ) -> Iterator[bytes]:
     """Generate chunks for a pack object.
 
@@ -2617,8 +2632,18 @@ def pack_object_chunks(
       type: Numeric type of the object
       object: Object to write
       compression_level: the zlib compression level
+      object_format: Object format (hash algorithm) to use
     Returns: Chunks
     """
+    from .object_format import DEFAULT_OBJECT_FORMAT
+
+    if object_format is None:
+        warnings.warn(
+            "pack_object_chunks() should be called with object_format parameter",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        object_format = DEFAULT_OBJECT_FORMAT
     if type in DELTA_TYPES:
         if isinstance(object, tuple):
             delta_base, object = object
@@ -2638,7 +2663,7 @@ def pack_object_chunks(
         # Shouldn't reach here with proper typing
         raise TypeError(f"Unexpected object type: {object.__class__.__name__}")
 
-    yield bytes(pack_object_header(type, delta_base, sum(map(len, chunks))))
+    yield bytes(pack_object_header(type, delta_base, sum(map(len, chunks)), object_format=object_format))
     compressor = zlib.compressobj(level=compression_level)
     for data in chunks:
         yield compressor.compress(data)
@@ -2651,6 +2676,7 @@ def write_pack_object(
     object: list[bytes] | tuple[bytes | int, list[bytes]],
     sha: "HashObject | None" = None,
     compression_level: int = -1,
+    object_format: "ObjectFormat" | None = None,
 ) -> int:
     """Write pack object to a file.
 
@@ -2660,10 +2686,22 @@ def write_pack_object(
       object: Object to write
       sha: Optional SHA-1 hasher to update
       compression_level: the zlib compression level
+      object_format: Object format (hash algorithm) to use
     Returns: CRC32 checksum of the written object
     """
+    from .object_format import DEFAULT_OBJECT_FORMAT
+
+    if object_format is None:
+        warnings.warn(
+            "write_pack_object() should be called with object_format parameter",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        object_format = DEFAULT_OBJECT_FORMAT
     crc32 = 0
-    for chunk in pack_object_chunks(type, object, compression_level=compression_level):
+    for chunk in pack_object_chunks(
+        type, object, compression_level=compression_level, object_format=object_format
+    ):
         write(chunk)
         if sha is not None:
             sha.update(chunk)
@@ -2678,6 +2716,7 @@ def write_pack(
     deltify: bool | None = None,
     delta_window_size: int | None = None,
     compression_level: int = -1,
+    object_format: "ObjectFormat" | None = None
 ) -> tuple[bytes, bytes]:
     """Write a new pack data file.
 
@@ -2687,8 +2726,18 @@ def write_pack(
       delta_window_size: Delta window size
       deltify: Whether to deltify pack objects
       compression_level: the zlib compression level
+      object_format: Object format
     Returns: Tuple with checksum of pack file and index file
     """
+    from .object_format import DEFAULT_OBJECT_FORMAT
+
+    if object_format is None:
+        warnings.warn(
+            "write_pack() should be called with object_format parameter",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        object_format = DEFAULT_OBJECT_FORMAT
     with GitFile(filename + ".pack", "wb") as f:
         entries, data_sum = write_pack_objects(
             f,
@@ -2696,6 +2745,7 @@ def write_pack(
             delta_window_size=delta_window_size,
             deltify=deltify,
             compression_level=compression_level,
+            object_format=object_format,
         )
     entries_list = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
     with GitFile(filename + ".idx", "wb") as f:
@@ -2995,7 +3045,8 @@ def write_pack_from_container(
     deltify: bool | None = None,
     reuse_deltas: bool = True,
     compression_level: int = -1,
-    other_haves: set[ObjectID] | None = None,
+    other_haves: set[bytes] | None = None,
+    object_format: "ObjectFormat" | None = None,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -3009,6 +3060,7 @@ def write_pack_from_container(
       reuse_deltas: Whether to reuse existing deltas
       compression_level: the zlib compression level to use
       other_haves: Set of additional object IDs the receiver has
+      object_format: Object format (hash algorithm) to use
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     pack_contents_count = len(object_ids)
@@ -3026,6 +3078,7 @@ def write_pack_from_container(
         pack_contents,
         num_records=pack_contents_count,
         compression_level=compression_level,
+        object_format=object_format,
     )
 
 
@@ -3036,6 +3089,7 @@ def write_pack_objects(
     delta_window_size: int | None = None,
     deltify: bool | None = None,
     compression_level: int = -1,
+    object_format: "ObjectFormat" | None = None
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -3046,6 +3100,7 @@ def write_pack_objects(
                          Set to None for default window size.
       deltify: Whether to deltify objects
       compression_level: the zlib compression level to use
+      object_format: Object format (hash algorithm) to use
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     pack_contents_count, pack_contents = pack_objects_to_data(objects, deltify=deltify)
@@ -3055,6 +3110,7 @@ def write_pack_objects(
         pack_contents,
         num_records=pack_contents_count,
         compression_level=compression_level,
+        object_format=object_format,
     )
 
 
@@ -3068,6 +3124,7 @@ class PackChunkGenerator:
         progress: Callable[..., None] | None = None,
         compression_level: int = -1,
         reuse_compressed: bool = True,
+        object_format: "ObjectFormat" | None = None,
     ) -> None:
         """Initialize PackChunkGenerator.
 
@@ -3077,7 +3134,18 @@ class PackChunkGenerator:
             progress: Optional progress callback
             compression_level: Compression level (-1 for default)
             reuse_compressed: Whether to reuse compressed chunks
+            object_format: Object format (hash algorithm) to use
         """
+        from .object_format import DEFAULT_OBJECT_FORMAT
+
+        if object_format is None:
+            warnings.warn(
+                "PackChunkGenerator() should be called with object_format parameter",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            object_format = DEFAULT_OBJECT_FORMAT
+        self.object_format = object_format
         self.cs = sha1(b"")
         self.entries: dict[bytes, tuple[int, int]] = {}
         if records is None:
@@ -3151,7 +3219,10 @@ class PackChunkGenerator:
                 chunks = unpacked.comp_chunks
             else:
                 chunks = pack_object_chunks(
-                    type_num, raw, compression_level=compression_level
+                    type_num,
+                    raw,
+                    compression_level=compression_level,
+                    object_format=self.object_format,
                 )
             crc32 = 0
             object_size = 0
@@ -3180,6 +3251,7 @@ def write_pack_data(
     num_records: int | None = None,
     progress: Callable[..., None] | None = None,
     compression_level: int = -1,
+    object_format: "ObjectFormat" | None = None,
 ) -> tuple[dict[bytes, tuple[int, int]], bytes]:
     """Write a new pack data file.
 
@@ -3189,6 +3261,7 @@ def write_pack_data(
       records: Iterator over type_num, object_id, delta_base, raw
       progress: Function to report progress to
       compression_level: the zlib compression level
+      object_format: Object format (hash algorithm) to use
     Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum
     """
     chunk_generator = PackChunkGenerator(
@@ -3196,6 +3269,7 @@ def write_pack_data(
         records=records,
         progress=progress,
         compression_level=compression_level,
+        object_format=object_format,
     )
     for chunk in chunk_generator:
         if callable(write):
@@ -3617,6 +3691,7 @@ class Pack:
             depth=depth,
             threads=threads,
             big_file_threshold=big_file_threshold,
+            object_format=self.object_format,
         )
         self._idx_load = lambda: load_pack_index(
             self._idx_path, object_format=object_format
@@ -3976,7 +4051,10 @@ class Pack:
                 assert isinstance(base_type, int)
             elif base_type == REF_DELTA:
                 (basename, delta) = base_obj
-                assert isinstance(basename, bytes) and len(basename) == self.object_format.oid_length
+                assert (
+                    isinstance(basename, bytes)
+                    and len(basename) == self.object_format.oid_length
+                )
                 base_offset, base_type, base_obj = get_ref(basename)
                 assert isinstance(base_type, int)
                 if base_offset == prev_offset:  # object is based on itself
@@ -4075,6 +4153,7 @@ def extend_pack(
     that are already in the pack
     """
     from .object_format import DEFAULT_OBJECT_FORMAT
+
     if object_format is None:
         warnings.warn(
             "extend_pack() should be called with object_format parameter",
@@ -4116,6 +4195,7 @@ def extend_pack(
             [data],  # Convert bytes to list[bytes]
             sha=new_sha,
             compression_level=compression_level,
+            object_format=object_format,
         )
         extra_entries.append((object_id, offset, crc32))
     pack_sha = new_sha.digest()
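
Note: this file is where the parameter actually bottoms out: write_pack_objects forwards object_format through write_pack_data and PackChunkGenerator down to pack_object_chunks and pack_object_header, so a caller that supplies it never hits a shim. A minimal end-to-end sketch:

from io import BytesIO

from dulwich.object_format import DEFAULT_OBJECT_FORMAT
from dulwich.objects import Blob
from dulwich.pack import write_pack_objects

blob = Blob.from_string(b"hello world")
f = BytesIO()
entries, checksum = write_pack_objects(
    f.write, [(blob, None)], object_format=DEFAULT_OBJECT_FORMAT
)
print(len(entries), "object,", checksum.hex())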

+ 1 - 0
dulwich/porcelain/__init__.py

@@ -4542,6 +4542,7 @@ def pack_objects(
             deltify=deltify,
             delta_window_size=delta_window_size,
             reuse_deltas=reuse_deltas,
+            object_format=r.object_format,
         )
     if idxf is not None:
         index_entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])

+ 1 - 0
dulwich/server.py

@@ -585,6 +585,7 @@ class UploadPackHandler(PackHandler):
             self.write_pack_data,
             self.repo.object_store,
             object_ids,
+            object_format=self.repo.object_format,
         )
         # we are done
         self.proto.write_pkt_line(None)

+ 4 - 1
dulwich/tests/utils.py

@@ -35,6 +35,7 @@ from typing import Any, BinaryIO, TypeVar
 from unittest import SkipTest
 
 from dulwich.index import commit_tree
+from dulwich.object_format import DEFAULT_OBJECT_FORMAT
 from dulwich.object_store import BaseObjectStore
 from dulwich.objects import Commit, FixedSha, ShaFile, Tag, object_class
 from dulwich.pack import (
@@ -276,7 +277,9 @@ def build_pack(
                 base = obj_sha(base_type_num, base_data)
             obj = (base, list(create_delta(base_data, data)))
 
-        crc32 = write_pack_object(sf.write, type_num, obj)
+        crc32 = write_pack_object(
+            sf.write, type_num, obj, object_format=DEFAULT_OBJECT_FORMAT
+        )
         offsets[i] = offset
         crc32s[i] = crc32
 

+ 22 - 7
tests/compat/test_pack.py

@@ -29,6 +29,7 @@ import tempfile
 from typing import NoReturn
 
 from dulwich.file import GitFile
+from dulwich.object_format import DEFAULT_OBJECT_FORMAT
 from dulwich.objects import Blob
 from dulwich.pack import (
     PackData,
@@ -70,7 +71,9 @@ class TestPack(PackTests):
         with self.get_pack(pack1_sha) as origpack:
             self.assertSucceeds(origpack.index.check)
             pack_path = os.path.join(self._tempdir, "Elch")
-            write_pack(pack_path, origpack.pack_tuples())
+            write_pack(
+                pack_path, origpack.pack_tuples(), object_format=DEFAULT_OBJECT_FORMAT
+            )
             output = run_git_or_fail(["verify-pack", "-v", pack_path])
             orig_shas = {o.id for o in origpack.iterobjects()}
             self.assertEqual(orig_shas, _git_verify_pack_object_list(output))
@@ -84,7 +87,9 @@ class TestPack(PackTests):
                 (new_blob, None)
             ]
         pack_path = os.path.join(self._tempdir, "pack_with_deltas")
-        write_pack(pack_path, all_to_pack, deltify=True)
+        write_pack(
+            pack_path, all_to_pack, deltify=True, object_format=DEFAULT_OBJECT_FORMAT
+        )
         output = run_git_or_fail(["verify-pack", "-v", pack_path])
         self.assertEqual(
             {x[0].id for x in all_to_pack},
@@ -115,7 +120,12 @@ class TestPack(PackTests):
                 (new_blob_2, None),
             ]
             pack_path = os.path.join(self._tempdir, "pack_with_deltas")
-            write_pack(pack_path, all_to_pack, deltify=True)
+            write_pack(
+                pack_path,
+                all_to_pack,
+                deltify=True,
+                object_format=DEFAULT_OBJECT_FORMAT,
+            )
         output = run_git_or_fail(["verify-pack", "-v", pack_path])
         self.assertEqual(
             {x[0].id for x in all_to_pack},
@@ -155,7 +165,12 @@ class TestPack(PackTests):
                 (new_blob_2, None),
             ]
             pack_path = os.path.join(self._tempdir, "pack_with_deltas")
-            write_pack(pack_path, all_to_pack, deltify=True)
+            write_pack(
+                pack_path,
+                all_to_pack,
+                deltify=True,
+                object_format=DEFAULT_OBJECT_FORMAT,
+            )
         output = run_git_or_fail(["verify-pack", "-v", pack_path])
         self.assertEqual(
             {x[0].id for x in all_to_pack},
@@ -189,10 +204,10 @@ class TestPackIndexCompat(PackTests):
 
         pack_path = os.path.join(self._tempdir, "test_pack")
         entries = [(blob, None)]
-        write_pack(pack_path, entries)
+        write_pack(pack_path, entries, object_format=DEFAULT_OBJECT_FORMAT)
 
         # Load the pack and create v2 index (most compatible)
-        pack_data = PackData(pack_path + ".pack")
+        pack_data = PackData(pack_path + ".pack", object_format=DEFAULT_OBJECT_FORMAT)
         try:
             pack_data.create_index(pack_path + ".idx", version=2)
         finally:
@@ -210,7 +225,7 @@ class TestPackIndexCompat(PackTests):
 
         pack_path = os.path.join(self._tempdir, "git_pack")
         entries = [(blob, None)]
-        write_pack(pack_path, entries)
+        write_pack(pack_path, entries, object_format=DEFAULT_OBJECT_FORMAT)
 
         # Create index with git
         run_git_or_fail(["index-pack", pack_path + ".pack"])

+ 31 - 28
tests/test_bundle.py

@@ -26,6 +26,7 @@ import tempfile
 from io import BytesIO
 
 from dulwich.bundle import Bundle, create_bundle_from_repo, read_bundle, write_bundle
+from dulwich.object_format import DEFAULT_OBJECT_FORMAT
 from dulwich.objects import Blob, Commit, Tree
 from dulwich.pack import PackData, write_pack_objects
 from dulwich.repo import MemoryRepo
@@ -49,9 +50,9 @@ class BundleTests(TestCase):
 
         # Create a simple pack data
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b.seek(0)
-        bundle.pack_data = PackData.from_file(b)
+        bundle.pack_data = PackData.from_file(b, object_format=DEFAULT_OBJECT_FORMAT)
 
         # Check the repr output
         rep = repr(bundle)
@@ -70,9 +71,9 @@ class BundleTests(TestCase):
         bundle1.references = {b"refs/heads/master": b"ab" * 20}
 
         b1 = BytesIO()
-        write_pack_objects(b1.write, [])
+        write_pack_objects(b1.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b1.seek(0)
-        bundle1.pack_data = PackData.from_file(b1)
+        bundle1.pack_data = PackData.from_file(b1, object_format=DEFAULT_OBJECT_FORMAT)
 
         bundle2 = Bundle()
         bundle2.version = 3
@@ -81,9 +82,9 @@ class BundleTests(TestCase):
         bundle2.references = {b"refs/heads/master": b"ab" * 20}
 
         b2 = BytesIO()
-        write_pack_objects(b2.write, [])
+        write_pack_objects(b2.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b2.seek(0)
-        bundle2.pack_data = PackData.from_file(b2)
+        bundle2.pack_data = PackData.from_file(b2, object_format=DEFAULT_OBJECT_FORMAT)
 
         # Test equality
         self.assertEqual(bundle1, bundle2)
@@ -95,9 +96,9 @@ class BundleTests(TestCase):
         bundle3.prerequisites = [(b"cc" * 20, "comment")]
         bundle3.references = {b"refs/heads/master": b"ab" * 20}
         b3 = BytesIO()
-        write_pack_objects(b3.write, [])
+        write_pack_objects(b3.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b3.seek(0)
-        bundle3.pack_data = PackData.from_file(b3)
+        bundle3.pack_data = PackData.from_file(b3, object_format=DEFAULT_OBJECT_FORMAT)
         self.assertNotEqual(bundle1, bundle3)
 
         bundle4 = Bundle()
@@ -106,9 +107,9 @@ class BundleTests(TestCase):
         bundle4.prerequisites = [(b"cc" * 20, "comment")]
         bundle4.references = {b"refs/heads/master": b"ab" * 20}
         b4 = BytesIO()
-        write_pack_objects(b4.write, [])
+        write_pack_objects(b4.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b4.seek(0)
-        bundle4.pack_data = PackData.from_file(b4)
+        bundle4.pack_data = PackData.from_file(b4, object_format=DEFAULT_OBJECT_FORMAT)
         self.assertNotEqual(bundle1, bundle4)
 
         bundle5 = Bundle()
@@ -117,9 +118,9 @@ class BundleTests(TestCase):
         bundle5.prerequisites = [(b"dd" * 20, "different")]  # Different prerequisites
         bundle5.references = {b"refs/heads/master": b"ab" * 20}
         b5 = BytesIO()
-        write_pack_objects(b5.write, [])
+        write_pack_objects(b5.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b5.seek(0)
-        bundle5.pack_data = PackData.from_file(b5)
+        bundle5.pack_data = PackData.from_file(b5, object_format=DEFAULT_OBJECT_FORMAT)
         self.assertNotEqual(bundle1, bundle5)
 
         bundle6 = Bundle()
@@ -130,9 +131,9 @@ class BundleTests(TestCase):
             b"refs/heads/different": b"ab" * 20
         }  # Different references
         b6 = BytesIO()
-        write_pack_objects(b6.write, [])
+        write_pack_objects(b6.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b6.seek(0)
-        bundle6.pack_data = PackData.from_file(b6)
+        bundle6.pack_data = PackData.from_file(b6, object_format=DEFAULT_OBJECT_FORMAT)
         self.assertNotEqual(bundle1, bundle6)
 
         # Test inequality with different type
@@ -147,7 +148,7 @@ class BundleTests(TestCase):
         f.write(b"\n")
         # Add pack data
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         f.write(b.getvalue())
         f.seek(0)
 
@@ -168,7 +169,7 @@ class BundleTests(TestCase):
         f.write(b"\n")
         # Add pack data
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         f.write(b.getvalue())
         f.seek(0)
 
@@ -199,9 +200,9 @@ class BundleTests(TestCase):
 
         # Create a simple pack data
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b.seek(0)
-        bundle.pack_data = PackData.from_file(b)
+        bundle.pack_data = PackData.from_file(b, object_format=DEFAULT_OBJECT_FORMAT)
 
         # Write the bundle
         f = BytesIO()
@@ -225,9 +226,9 @@ class BundleTests(TestCase):
 
         # Create a simple pack data
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b.seek(0)
-        bundle.pack_data = PackData.from_file(b)
+        bundle.pack_data = PackData.from_file(b, object_format=DEFAULT_OBJECT_FORMAT)
 
         # Write the bundle
         f = BytesIO()
@@ -253,9 +254,9 @@ class BundleTests(TestCase):
         bundle1.references = {b"refs/heads/master": b"ab" * 20}
 
         b1 = BytesIO()
-        write_pack_objects(b1.write, [])
+        write_pack_objects(b1.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b1.seek(0)
-        bundle1.pack_data = PackData.from_file(b1)
+        bundle1.pack_data = PackData.from_file(b1, object_format=DEFAULT_OBJECT_FORMAT)
 
         f1 = BytesIO()
         write_bundle(f1, bundle1)
@@ -271,9 +272,9 @@ class BundleTests(TestCase):
         bundle2.references = {b"refs/heads/master": b"ab" * 20}
 
         b2 = BytesIO()
-        write_pack_objects(b2.write, [])
+        write_pack_objects(b2.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b2.seek(0)
-        bundle2.pack_data = PackData.from_file(b2)
+        bundle2.pack_data = PackData.from_file(b2, object_format=DEFAULT_OBJECT_FORMAT)
 
         f2 = BytesIO()
         write_bundle(f2, bundle2)
@@ -290,9 +291,9 @@ class BundleTests(TestCase):
         bundle.references = {}
 
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b.seek(0)
-        bundle.pack_data = PackData.from_file(b)
+        bundle.pack_data = PackData.from_file(b, object_format=DEFAULT_OBJECT_FORMAT)
 
         f = BytesIO()
         with self.assertRaises(AssertionError):
@@ -305,9 +306,11 @@ class BundleTests(TestCase):
         origbundle.references = {b"refs/heads/master": b"ab" * 20}
         origbundle.prerequisites = [(b"cc" * 20, b"comment")]
         b = BytesIO()
-        write_pack_objects(b.write, [])
+        write_pack_objects(b.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         b.seek(0)
-        origbundle.pack_data = PackData.from_file(b)
+        origbundle.pack_data = PackData.from_file(
+            b, object_format=DEFAULT_OBJECT_FORMAT
+        )
         with tempfile.TemporaryDirectory() as td:
             with open(os.path.join(td, "foo"), "wb") as f:
                 write_bundle(f, origbundle)

+ 5 - 2
tests/test_client.py

@@ -62,6 +62,7 @@ from dulwich.client import (
     parse_rsync_url,
 )
 from dulwich.config import ConfigDict
+from dulwich.object_format import DEFAULT_OBJECT_FORMAT
 from dulwich.objects import Blob, Commit, Tree
 from dulwich.pack import pack_objects_to_data, write_pack_data, write_pack_objects
 from dulwich.protocol import DEFAULT_GIT_PROTOCOL_VERSION_FETCH, TCP_GIT_PORT, Protocol
@@ -461,7 +462,7 @@ class GitClientTests(TestCase):
             return 0, []
 
         f = BytesIO()
-        write_pack_objects(f.write, [])
+        write_pack_objects(f.write, [], object_format=DEFAULT_OBJECT_FORMAT)
         self.client.send_pack("/", update_refs, generate_pack_data)
         self.assertEqual(
             self.rout.getvalue(),
@@ -507,7 +508,9 @@
 
         f = BytesIO()
         count, records = generate_pack_data(None, None)
-        write_pack_data(f.write, records, num_records=count)
+        write_pack_data(
+            f.write, records, num_records=count, object_format=DEFAULT_OBJECT_FORMAT
+        )
         self.client.send_pack(b"/", update_refs, generate_pack_data)
         self.assertEqual(
             self.rout.getvalue(),

+ 9 - 8
tests/test_commit_graph.py

@@ -30,6 +30,7 @@ from dulwich.commit_graph import (
     get_reachable_commits,
     read_commit_graph,
 )
+from dulwich.object_format import SHA1
 
 
 class CommitGraphEntryTests(unittest.TestCase):
@@ -76,13 +77,13 @@ class CommitGraphTests(unittest.TestCase):
     """Tests for CommitGraph."""
 
     def test_init(self) -> None:
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         self.assertEqual(graph.hash_version, HASH_VERSION_SHA1)
         self.assertEqual(len(graph.entries), 0)
         self.assertEqual(len(graph.chunks), 0)
 
     def test_len(self) -> None:
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         self.assertEqual(len(graph), 0)
 
         # Add a dummy entry
@@ -91,7 +92,7 @@ class CommitGraphTests(unittest.TestCase):
         self.assertEqual(len(graph), 1)
 
     def test_iter(self) -> None:
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         entry1 = CommitGraphEntry(b"a" * 40, b"b" * 40, [], 1, 1000)
         entry2 = CommitGraphEntry(b"c" * 40, b"d" * 40, [], 2, 2000)
         graph.entries.extend([entry1, entry2])
@@ -102,17 +103,17 @@ class CommitGraphTests(unittest.TestCase):
         self.assertEqual(entries[1], entry2)
 
     def test_get_entry_by_oid_missing(self) -> None:
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         result = graph.get_entry_by_oid(b"f" * 40)
         self.assertIsNone(result)
 
     def test_get_generation_number_missing(self) -> None:
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         result = graph.get_generation_number(b"f" * 40)
         self.assertIsNone(result)
 
     def test_get_parents_missing(self) -> None:
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         result = graph.get_parents(b"f" * 40)
         self.assertIsNone(result)
 
@@ -262,7 +263,7 @@ class CommitGraphTests(unittest.TestCase):
 
     def test_write_empty_graph_raises(self) -> None:
         """Test that writing empty graph raises ValueError."""
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         f = io.BytesIO()
 
         with self.assertRaises(ValueError):
@@ -271,7 +272,7 @@ class CommitGraphTests(unittest.TestCase):
     def test_write_and_read_round_trip(self) -> None:
         """Test writing and reading a commit graph."""
         # Create a simple commit graph
-        graph = CommitGraph()
+        graph = CommitGraph(object_format=SHA1)
         entry = CommitGraphEntry(
             commit_id=b"aa" + b"00" * 19,
             tree_id=b"bb" + b"00" * 19,

+ 10 - 3
tests/test_object_store.py

@@ -31,6 +31,7 @@ from io import BytesIO
 
 from dulwich.errors import NotTreeError
 from dulwich.index import commit_tree
+from dulwich.object_format import DEFAULT_OBJECT_FORMAT
 from dulwich.object_store import (
     DiskObjectStore,
     MemoryObjectStore,
@@ -75,7 +76,9 @@ class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
         f, commit, abort = o.add_pack()
         try:
             b = make_object(Blob, data=b"more yummy data")
-            write_pack_objects(f.write, [(b, None)])
+            write_pack_objects(
+                f.write, [(b, None)], object_format=DEFAULT_OBJECT_FORMAT
+            )
         except BaseException:
             abort()
             raise
@@ -299,7 +302,9 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         f, commit, abort = o.add_pack()
         try:
             b = make_object(Blob, data=b"more yummy data")
-            write_pack_objects(f.write, [(b, None)])
+            write_pack_objects(
+                f.write, [(b, None)], object_format=DEFAULT_OBJECT_FORMAT
+            )
         except BaseException:
             abort()
             raise
@@ -839,7 +844,9 @@ class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
         with patch("os.fsync") as mock_fsync:
             f, commit, abort = store.add_pack()
             try:
-                write_pack_objects(f.write, [(blob, None)])
+                write_pack_objects(
+                    f.write, [(blob, None)], object_format=DEFAULT_OBJECT_FORMAT
+                )
             except BaseException:
                 abort()
                 raise

+ 37 - 14
tests/test_pack.py

@@ -33,6 +33,7 @@ from typing import NoReturn
 
 from dulwich.errors import ApplyDeltaError, ChecksumMismatch
 from dulwich.file import GitFile
+from dulwich.object_format import DEFAULT_OBJECT_FORMAT
 from dulwich.object_store import MemoryObjectStore
 from dulwich.objects import Blob, Commit, Tree, hex_to_sha, sha_to_hex
 from dulwich.pack import (
@@ -106,7 +107,8 @@ class PackTests(TestCase):
     def get_pack_data(self, sha):
         """Returns a PackData object from the datadir with the given sha."""
         return PackData(
-            os.path.join(self.datadir, "pack-{}.pack".format(sha.decode("ascii")))
+            os.path.join(self.datadir, "pack-{}.pack".format(sha.decode("ascii"))),
+            object_format=DEFAULT_OBJECT_FORMAT,
         )
 
     def get_pack(self, sha):
@@ -472,7 +474,9 @@ class TestPackData(PackTests):
             self.datadir, "pack-{}.pack".format(pack1_sha.decode("ascii"))
         )
         with open(path, "rb") as f:
-            PackData.from_file(f, os.path.getsize(path))
+            PackData.from_file(
+                f, os.path.getsize(path), object_format=DEFAULT_OBJECT_FORMAT
+            )
 
     def test_pack_len(self) -> None:
         with self.get_pack_data(pack1_sha) as p:
@@ -684,7 +688,9 @@ class TestPack(PackTests):
         with self.get_pack(pack1_sha) as origpack:
             self.assertSucceeds(origpack.index.check)
             basename = os.path.join(self.tempdir, "Elch")
-            write_pack(basename, origpack.pack_tuples())
+            write_pack(
+                basename, origpack.pack_tuples(), object_format=DEFAULT_OBJECT_FORMAT
+            )
 
             with Pack(basename) as newpack:
                 self.assertEqual(origpack, newpack)
@@ -711,7 +717,9 @@ class TestPack(PackTests):
 
     def _copy_pack(self, origpack):
         basename = os.path.join(self.tempdir, "somepack")
-        write_pack(basename, origpack.pack_tuples())
+        write_pack(
+            basename, origpack.pack_tuples(), object_format=DEFAULT_OBJECT_FORMAT
+        )
         return Pack(basename)
 
     def test_keep_no_message(self) -> None:
@@ -758,7 +766,7 @@ class TestPack(PackTests):
             write_pack_header(bad_file.write, 9999)
             bad_file.write(data._file.read())
             bad_file = BytesIO(bad_file.getvalue())
-            bad_data = PackData("", file=bad_file)
+            bad_data = PackData("", file=bad_file, object_format=DEFAULT_OBJECT_FORMAT)
             bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
             self.assertRaises(AssertionError, lambda: bad_pack.data)
             self.assertRaises(AssertionError, bad_pack.check_length_and_checksum)
@@ -770,7 +778,7 @@ class TestPack(PackTests):
 
             data._file.seek(0)
             bad_file = BytesIO(data._file.read()[:-20] + (b"\xff" * 20))
-            bad_data = PackData("", file=bad_file)
+            bad_data = PackData("", file=bad_file, object_format=DEFAULT_OBJECT_FORMAT)
             bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index)
             self.assertRaises(ChecksumMismatch, lambda: bad_pack.data)
             self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum)
@@ -846,7 +854,7 @@ class TestThinPack(PackTests):
 
         # Index the new pack.
         with self.make_pack(True) as pack:
-            with PackData(pack._data_path) as data:
+            with PackData(pack._data_path, object_format=DEFAULT_OBJECT_FORMAT) as data:
                 data.create_index(
                     self.pack_prefix + ".idx", resolve_ext_ref=pack.resolve_ext_ref
                 )
@@ -919,7 +927,9 @@ class WritePackTests(TestCase):
         try:
             f.write(b"header")
             offset = f.tell()
-            crc32 = write_pack_object(f.write, Blob.type_num, b"blob")
+            crc32 = write_pack_object(
+                f.write, Blob.type_num, b"blob", object_format=DEFAULT_OBJECT_FORMAT
+            )
             self.assertEqual(crc32, zlib.crc32(f.getvalue()[6:]) & 0xFFFFFFFF)
 
             f.write(b"x")  # unpack_object needs extra trailing data.
@@ -939,7 +949,13 @@ class WritePackTests(TestCase):
         offset = f.tell()
         sha_a = sha1(b"foo")
         sha_b = sha_a.copy()
-        write_pack_object(f.write, Blob.type_num, b"blob", sha=sha_a)
+        write_pack_object(
+            f.write,
+            Blob.type_num,
+            b"blob",
+            sha=sha_a,
+            object_format=DEFAULT_OBJECT_FORMAT,
+        )
         self.assertNotEqual(sha_a.digest(), sha_b.digest())
         sha_b.update(f.getvalue()[offset:])
         self.assertEqual(sha_a.digest(), sha_b.digest())
@@ -951,7 +967,12 @@ class WritePackTests(TestCase):
         sha_a = sha1(b"foo")
         sha_b = sha_a.copy()
         write_pack_object(
-            f.write, Blob.type_num, b"blob", sha=sha_a, compression_level=6
+            f.write,
+            Blob.type_num,
+            b"blob",
+            sha=sha_a,
+            compression_level=6,
+            object_format=DEFAULT_OBJECT_FORMAT,
         )
         self.assertNotEqual(sha_a.digest(), sha_b.digest())
         sha_b.update(f.getvalue()[offset:])
@@ -1050,7 +1071,9 @@ class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting):
         self._supports_large = True
 
     def index(self, filename, entries, pack_checksum):
-        return MemoryPackIndex(entries, pack_checksum)
+        return MemoryPackIndex(
+            entries, pack_checksum, object_format=DEFAULT_OBJECT_FORMAT
+        )
 
     def tearDown(self) -> None:
         TestCase.tearDown(self)
@@ -1490,14 +1515,14 @@ class DeltaChainIteratorTests(TestCase):
         if thin is None:
             thin = bool(list(self.store))
         resolve_ext_ref = (thin and self.get_raw_no_repeat) or None
-        data = PackData("test.pack", file=f)
+        data = PackData("test.pack", file=f, object_format=DEFAULT_OBJECT_FORMAT)
         return TestPackIterator.for_pack_data(data, resolve_ext_ref=resolve_ext_ref)
 
     def make_pack_iter_subset(self, f, subset, thin=None):
         if thin is None:
             thin = bool(list(self.store))
         resolve_ext_ref = (thin and self.get_raw_no_repeat) or None
-        data = PackData("test.pack", file=f)
+        data = PackData("test.pack", file=f, object_format=DEFAULT_OBJECT_FORMAT)
         assert data
         index = MemoryPackIndex.for_pack(data)
         pack = Pack.from_objects(data, index)
@@ -1764,7 +1789,7 @@ class DeltaChainIteratorTests(TestCase):
         )
         fsize = f.tell()
         f.seek(0)
-        packdata = PackData.from_file(f, fsize)
+        packdata = PackData.from_file(f, fsize, object_format=DEFAULT_OBJECT_FORMAT)
         td = tempfile.mkdtemp()
         idx_path = os.path.join(td, "test.idx")
         self.addCleanup(shutil.rmtree, td)