Jelmer Vernooij 1 month ago
parent
commit
2d2b125a8b

+ 11 - 9
dulwich/cli.py

@@ -311,7 +311,7 @@ class cmd_dump_pack(Command):
         basename, _ = os.path.splitext(args.filename)
         x = Pack(basename)
         print(f"Object names checksum: {x.name()}")
-        print(f"Checksum: {sha_to_hex(x.get_stored_checksum())}")
+        print(f"Checksum: {sha_to_hex(x.get_stored_checksum())!r}")
         x.check()
         print(f"Length: {len(x)}")
         for name in x:
@@ -872,7 +872,7 @@ class cmd_check_mailmap(Command):
 
 
 class cmd_branch(Command):
-    def run(self, args) -> None:
+    def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument(
             "branch",
@@ -888,7 +888,7 @@ class cmd_branch(Command):
         args = parser.parse_args(args)
         if not args.branch:
             print("Usage: dulwich branch [-d] BRANCH_NAME")
-            sys.exit(1)
+            return 1
 
         if args.delete:
             porcelain.branch_delete(".", name=args.branch)
@@ -897,11 +897,12 @@ class cmd_branch(Command):
                 porcelain.branch_create(".", name=args.branch)
             except porcelain.Error as e:
                 sys.stderr.write(f"{e}")
-                sys.exit(1)
+                return 1
+        return 0
 
 
 class cmd_checkout(Command):
-    def run(self, args) -> None:
+    def run(self, args) -> Optional[int]:
         parser = argparse.ArgumentParser()
         parser.add_argument(
             "target",
@@ -923,7 +924,7 @@ class cmd_checkout(Command):
         args = parser.parse_args(args)
         if not args.target:
             print("Usage: dulwich checkout TARGET [--force] [-b NEW_BRANCH]")
-            sys.exit(1)
+            return 1
 
         try:
             porcelain.checkout(
@@ -931,7 +932,8 @@ class cmd_checkout(Command):
             )
         except porcelain.CheckoutError as e:
             sys.stderr.write(f"{e}\n")
-            sys.exit(1)
+            return 1
+        return 0
 
 
 class cmd_stash_list(Command):
@@ -1019,7 +1021,7 @@ class cmd_merge(Command):
                 print(
                     f"Merge successful. Created merge commit {merge_commit_id.decode()}"
                 )
-            return None
+            return 0
         except porcelain.Error as e:
             print(f"Error: {e}")
             return 1
@@ -1503,7 +1505,7 @@ commands = {
 }
 
 
-def main(argv=None):
+def main(argv=None) -> Optional[int]:
     if argv is None:
         argv = sys.argv[1:]
 

+ 4 - 4
dulwich/client.py

@@ -1533,7 +1533,7 @@ class TraditionalGitClient(GitClient):
                 return
             elif pkt == b"ACK\n" or pkt == b"ACK":
                 pass
-            elif pkt.startswith(b"ERR "):
+            elif pkt and pkt.startswith(b"ERR "):
                 raise GitProtocolError(pkt[4:].rstrip(b"\n").decode("utf-8", "replace"))
             else:
                 raise AssertionError(f"invalid response {pkt!r}")
@@ -2489,7 +2489,7 @@ class AbstractHttpGitClient(GitClient):
                     proto = Protocol(read, None)
                     return server_capabilities, resp, read, proto
 
-                proto = Protocol(read, None)
+                proto = Protocol(read, None)  # type: ignore
                 server_protocol_version = negotiate_protocol_version(proto)
                 if server_protocol_version not in GIT_PROTOCOL_VERSIONS:
                     raise ValueError(
@@ -2702,7 +2702,7 @@ class AbstractHttpGitClient(GitClient):
         if self.dumb:
             raise NotImplementedError(self.fetch_pack)
         req_data = BytesIO()
-        req_proto = Protocol(None, req_data.write)
+        req_proto = Protocol(None, req_data.write)  # type: ignore
         (new_shallow, new_unshallow) = _handle_upload_pack_head(
             req_proto,
             negotiated_capabilities,
@@ -2732,7 +2732,7 @@ class AbstractHttpGitClient(GitClient):
             data = req_data.getvalue()
         resp, read = self._smart_request("git-upload-pack", url, data)
         try:
-            resp_proto = Protocol(read, None)
+            resp_proto = Protocol(read, None)  # type: ignore
             if new_shallow is None and new_unshallow is None:
                 (new_shallow, new_unshallow) = _read_shallow_updates(
                     resp_proto.read_pkt_seq()

+ 12 - 10
dulwich/contrib/swift.py

@@ -39,7 +39,7 @@ import zlib
 from collections.abc import Iterator
 from configparser import ConfigParser
 from io import BytesIO
-from typing import BinaryIO, Callable, Optional, cast
+from typing import BinaryIO, Callable, Optional, Union, cast
 
 from geventhttpclient import HTTPClient
 
@@ -97,7 +97,7 @@ cache_length = 20
 
 
 class PackInfoMissingObjectFinder(GreenThreadsMissingObjectFinder):
-    def next(self) -> Optional[tuple[bytes, int, bytes | None]]:
+    def next(self) -> Optional[tuple[bytes, int, Union[bytes, None]]]:
         while True:
             if not self.objects_to_send:
                 return None
@@ -440,7 +440,7 @@ class SwiftConnector:
 
     def get_object(
         self, name: str, range: Optional[str] = None
-    ) -> Optional[bytes | BytesIO]:
+    ) -> Optional[Union[bytes, BytesIO]]:
         """Retrieve an object.
 
         Args:
@@ -590,7 +590,7 @@ class SwiftPackData(PackData):
     using the Range header feature of Swift.
     """
 
-    def __init__(self, scon: SwiftConnector, filename: str) -> None:
+    def __init__(self, scon: SwiftConnector, filename: Union[str, os.PathLike]) -> None:
         """Initialize a SwiftPackReader.
 
         Args:
@@ -600,11 +600,11 @@ class SwiftPackData(PackData):
         self.scon = scon
         self._filename = filename
         self._header_size = 12
-        headers = self.scon.get_object_stat(self._filename)
+        headers = self.scon.get_object_stat(str(self._filename))
         if headers is None:
             raise Exception(f"Could not get stats for {self._filename}")
         self.pack_length = int(headers["content-length"])
-        pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length)
+        pack_reader = SwiftPackReader(self.scon, str(self._filename), self.pack_length)
         (version, self._num_objects) = read_pack_header(pack_reader.read)
         self._offset_cache = LRUSizeCache(
             1024 * 1024 * self.scon.cache_length,
@@ -614,18 +614,18 @@ class SwiftPackData(PackData):
 
     def get_object_at(
         self, offset: int
-    ) -> tuple[int, tuple[bytes | int, list[bytes]] | list[bytes]]:
+    ) -> tuple[int, Union[tuple[Union[bytes, int], list[bytes]], list[bytes]]]:
         if offset in self._offset_cache:
             return self._offset_cache[offset]
         assert offset >= self._header_size
-        pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length)
+        pack_reader = SwiftPackReader(self.scon, str(self._filename), self.pack_length)
         pack_reader.seek(offset)
         unpacked, _ = unpack_object(pack_reader.read)
         obj_data = unpacked._obj()
         return (unpacked.pack_type_num, obj_data)
 
     def get_stored_checksum(self) -> bytes:
-        pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length)
+        pack_reader = SwiftPackReader(self.scon, str(self._filename), self.pack_length)
         return pack_reader.read_checksum()
 
     def close(self) -> None:
@@ -881,7 +881,9 @@ class SwiftInfoRefsContainer(InfoRefsContainer):
             f = BytesIO(f)
         super().__init__(f)
 
-    def _load_check_ref(self, name: bytes, old_ref: Optional[bytes]) -> dict | bool:
+    def _load_check_ref(
+        self, name: bytes, old_ref: Optional[bytes]
+    ) -> Union[dict, bool]:
         self._check_refname(name)
         obj = self.scon.get_object(self.filename)
         if not obj:

+ 1 - 1
dulwich/fastexport.py

@@ -275,7 +275,7 @@ class GitImportProcessor(processor.ImportProcessor):
         tag = Tag()
         tag.tagger = cmd.tagger
         tag.message = cmd.message
-        tag.name = cmd.tag
+        tag.name = cmd.from_
         self.repo.object_store.add_object(tag)
         self.repo.refs["refs/tags/" + tag.name] = tag.id
 

+ 1 - 1
dulwich/ignore.py

@@ -437,7 +437,7 @@ class IgnoreFilter:
         cls, path: Union[str, os.PathLike], ignorecase: bool = False
     ) -> "IgnoreFilter":
         with open(path, "rb") as f:
-            return cls(read_ignore_patterns(f), ignorecase, path=path)
+            return cls(read_ignore_patterns(f), ignorecase, path=str(path))
 
     def __repr__(self) -> str:
         path = getattr(self, "_path", None)

+ 8 - 4
dulwich/index.py

@@ -1167,7 +1167,9 @@ if sys.platform == "win32":
                 src, dst, target_is_directory=target_is_directory, dir_fd=dir_fd
             )
         except PermissionError as e:
-            raise WindowsSymlinkPermissionError(e.errno, e.strerror, e.filename) from e
+            raise WindowsSymlinkPermissionError(
+                e.errno or 0, e.strerror or "", e.filename
+            ) from e
 else:
     symlink = os.symlink
 
@@ -1202,9 +1204,11 @@ def build_file_from_blob(
             os.unlink(target_path)
         if sys.platform == "win32":
             # os.readlink on Python3 on Windows requires a unicode string.
-            contents = contents.decode(tree_encoding)
-            target_path = target_path.decode(tree_encoding)
-        (symlink_fn or symlink)(contents, target_path)
+            contents_str = contents.decode(tree_encoding)
+            target_path_str = target_path.decode(tree_encoding)
+            (symlink_fn or symlink)(contents_str, target_path_str)
+        else:
+            (symlink_fn or symlink)(contents, target_path)
     else:
         if oldstat is not None and oldstat.st_size == len(contents):
             with open(target_path, "rb") as f:

+ 6 - 0
dulwich/merge.py

@@ -371,6 +371,9 @@ class Merger:
                                 f"Expected blob for {path!r}, got {theirs_obj.type_name.decode()}"
                             )
 
+                    assert isinstance(base_blob, Blob)
+                    assert isinstance(ours_blob, Blob)
+                    assert isinstance(theirs_blob, Blob)
                     merged_content, had_conflict = self.merge_blobs(
                         base_blob, ours_blob, theirs_blob
                     )
@@ -431,4 +434,7 @@ def three_way_merge(
     else:
         raise TypeError(f"Expected tree, got {theirs_obj.type_name.decode()}")
 
+    assert isinstance(base_tree, Tree)
+    assert isinstance(ours_tree, Tree)
+    assert isinstance(theirs_tree, Tree)
     return merger.merge_trees(base_tree, ours_tree, theirs_tree)

+ 5 - 1
dulwich/objects.py

@@ -35,10 +35,14 @@ from typing import (
     IO,
     TYPE_CHECKING,
     Optional,
-    TypeGuard,
     Union,
 )
 
+try:
+    from typing import TypeGuard  # type: ignore
+except ImportError:
+    from typing_extensions import TypeGuard
+
 from . import replace_me
 from .errors import (
     ChecksumMismatch,

+ 1 - 1
dulwich/pack.py

@@ -2004,7 +2004,7 @@ def find_reusable_deltas(
         if progress is not None and i % 1000 == 0:
             progress(f"checking for reusable deltas: {i}/{len(object_ids)}\r".encode())
         if unpacked.pack_type_num == REF_DELTA:
-            hexsha = sha_to_hex(unpacked.delta_base)
+            hexsha = sha_to_hex(unpacked.delta_base)  # type: ignore
             if hexsha in object_ids or hexsha in other_haves:
                 yield unpacked
                 reused += 1

+ 8 - 6
dulwich/porcelain.py

@@ -684,17 +684,19 @@ def add(repo: Union[str, os.PathLike, BaseRepo] = ".", paths=None):
                 # Also add unstaged (modified) files within this directory
                 for unstaged_path in all_unstaged_paths:
                     if isinstance(unstaged_path, bytes):
-                        unstaged_path = unstaged_path.decode("utf-8")
+                        unstaged_path_str = unstaged_path.decode("utf-8")
+                    else:
+                        unstaged_path_str = unstaged_path
 
                     # Check if this unstaged file is within the directory we're processing
-                    unstaged_full_path = repo_path / unstaged_path
+                    unstaged_full_path = repo_path / unstaged_path_str
                     try:
                         unstaged_full_path.relative_to(resolved_path)
                         # File is within this directory, add it
-                        if not ignore_manager.is_ignored(unstaged_path):
-                            relpaths.append(unstaged_path)
+                        if not ignore_manager.is_ignored(unstaged_path_str):
+                            relpaths.append(unstaged_path_str)
                         else:
-                            ignored.add(unstaged_path)
+                            ignored.add(unstaged_path_str)
                     except ValueError:
                         # File is not within this directory, skip it
                         continue
@@ -1197,7 +1199,7 @@ def tag_create(
             if tag_timezone is None:
                 tag_timezone = get_user_timezones()[1]
             elif isinstance(tag_timezone, str):
-                tag_timezone = parse_timezone(tag_timezone)
+                tag_timezone = parse_timezone(tag_timezone.encode())
             tag_obj.tag_timezone = tag_timezone
             if sign:
                 tag_obj.sign(sign if isinstance(sign, str) else None)

+ 4 - 2
dulwich/rebase.py

@@ -262,7 +262,7 @@ class Rebaser:
 
         # Initialize state
         self._original_head: Optional[bytes] = None
-        self._onto = None
+        self._onto: Optional[bytes] = None
         self._todo: list[Commit] = []
         self._done: list[Commit] = []
         self._rebasing_branch: Optional[bytes] = None
@@ -328,7 +328,7 @@ class Rebaser:
         """
         # Get the parent of the commit being cherry-picked
         if not commit.parents:
-            raise RebaseError(f"Cannot cherry-pick root commit {commit.id}")
+            raise RebaseError(f"Cannot cherry-pick root commit {commit.id!r}")
 
         parent = self.repo[commit.parents[0]]
         onto_commit = self.repo[onto]
@@ -431,6 +431,8 @@ class Rebaser:
         if self._done:
             onto = self._done[-1].id
         else:
+            if self._onto is None:
+                raise RebaseError("No onto commit set")
             onto = self._onto
 
         # Cherry-pick the commit

+ 25 - 20
dulwich/server.py

@@ -52,9 +52,12 @@ import time
 import zlib
 from collections.abc import Iterable, Iterator
 from functools import partial
-from typing import Optional, cast
+from typing import TYPE_CHECKING, Optional, cast
 from typing import Protocol as TypingProtocol
 
+if TYPE_CHECKING:
+    from .object_store import BaseObjectStore
+
 from dulwich import log_utils
 
 from .archive import tar_stream
@@ -68,7 +71,7 @@ from .errors import (
     UnexpectedCommandError,
 )
 from .object_store import find_shallow
-from .objects import Commit, ObjectID, valid_hexsha
+from .objects import Commit, ObjectID, Tree, valid_hexsha
 from .pack import ObjectContainer, PackedObjectContainer, write_pack_from_container
 from .protocol import (
     CAPABILITIES_REF,
@@ -113,7 +116,7 @@ from .protocol import (
     format_unshallow_line,
     symref_capabilities,
 )
-from .refs import PEELED_TAG_SUFFIX, RefsContainer, write_info_refs
+from .refs import PEELED_TAG_SUFFIX, Ref, RefsContainer, write_info_refs
 from .repo import Repo
 
 logger = log_utils.getLogger(__name__)
@@ -925,8 +928,8 @@ class ReceivePackHandler(PackHandler):
         ]
 
     def _apply_pack(
-        self, refs: list[tuple[bytes, bytes, bytes]]
-    ) -> list[tuple[bytes, bytes]]:
+        self, refs: list[tuple[ObjectID, ObjectID, Ref]]
+    ) -> Iterator[tuple[bytes, bytes]]:
         all_exceptions = (
             IOError,
             OSError,
@@ -937,7 +940,6 @@ class ReceivePackHandler(PackHandler):
             zlib.error,
             ObjectFormatException,
         )
-        status = []
         will_send_pack = False
 
         for command in refs:
@@ -950,15 +952,15 @@ class ReceivePackHandler(PackHandler):
             try:
                 recv = getattr(self.proto, "recv", None)
                 self.repo.object_store.add_thin_pack(self.proto.read, recv)
-                status.append((b"unpack", b"ok"))
+                yield (b"unpack", b"ok")
             except all_exceptions as e:
-                status.append((b"unpack", str(e).replace("\n", "").encode("utf-8")))
+                yield (b"unpack", str(e).replace("\n", "").encode("utf-8"))
                 # The pack may still have been moved in, but it may contain
                 # broken objects. We trust a later GC to clean it up.
         else:
             # The git protocol wants to find a status entry related to unpack
             # process even if no pack data has been sent.
-            status.append((b"unpack", b"ok"))
+            yield (b"unpack", b"ok")
 
         for oldsha, sha, ref in refs:
             ref_status = b"ok"
@@ -979,9 +981,7 @@ class ReceivePackHandler(PackHandler):
                         ref_status = b"failed to write"
             except KeyError:
                 ref_status = b"bad ref"
-            status.append((ref, ref_status))
-
-        return status
+            yield (ref, ref_status)
 
     def _report_status(self, status: list[tuple[bytes, bytes]]) -> None:
         if self.has_capability(CAPABILITY_SIDE_BAND_64K):
@@ -1007,7 +1007,7 @@ class ReceivePackHandler(PackHandler):
                 write(b"ok " + name + b"\n")
             else:
                 write(b"ng " + name + b" " + msg + b"\n")
-        write(None)
+        write(None)  # type: ignore
         flush()
 
     def _on_post_receive(self, client_refs) -> None:
@@ -1033,7 +1033,7 @@ class ReceivePackHandler(PackHandler):
                 format_ref_line(
                     refs[0][0],
                     refs[0][1],
-                    self.capabilities() + symref_capabilities(symrefs),
+                    list(self.capabilities()) + symref_capabilities(symrefs),
                 )
             )
             for i in range(1, len(refs)):
@@ -1056,11 +1056,12 @@ class ReceivePackHandler(PackHandler):
 
         # client will now send us a list of (oldsha, newsha, ref)
         while ref:
-            client_refs.append(ref.split())
+            (oldsha, newsha, ref) = ref.split()
+            client_refs.append((oldsha, newsha, ref))
             ref = self.proto.read_pkt_line()
 
         # backend can now deal with this refs and read a pack using self.read
-        status = self._apply_pack(client_refs)
+        status = list(self._apply_pack(client_refs))
 
         self._on_post_receive(client_refs)
 
@@ -1088,7 +1089,7 @@ class UploadArchiveHandler(Handler):
         prefix = b""
         format = "tar"
         i = 0
-        store: ObjectContainer = self.repo.object_store
+        store: BaseObjectStore = self.repo.object_store
         while i < len(arguments):
             argument = arguments[i]
             if argument == b"--prefix":
@@ -1099,12 +1100,16 @@ class UploadArchiveHandler(Handler):
                 format = arguments[i].decode("ascii")
             else:
                 commit_sha = self.repo.refs[argument]
-                tree = store[cast(Commit, store[commit_sha]).tree]
+                tree = cast(Tree, store[cast(Commit, store[commit_sha]).tree])
             i += 1
         self.proto.write_pkt_line(b"ACK")
         self.proto.write_pkt_line(None)
         for chunk in tar_stream(
-            store, tree, mtime=time.time(), prefix=prefix, format=format
+            store,
+            tree,
+            mtime=int(time.time()),
+            prefix=prefix,
+            format=format,  # type: ignore
         ):
             write(chunk)
         self.proto.write_pkt_line(None)
@@ -1130,7 +1135,7 @@ class TCPGitRequestHandler(socketserver.StreamRequestHandler):
 
         cls = self.handlers.get(command, None)
         if not callable(cls):
-            raise GitProtocolError(f"Invalid service {command}")
+            raise GitProtocolError(f"Invalid service {command!r}")
         h = cls(self.server.backend, args, proto)  # type: ignore
         h.handle()
 

+ 5 - 5
dulwich/sparse_patterns.py

@@ -183,11 +183,8 @@ def apply_included_paths(
     def local_modifications_exist(full_path: str, index_entry: IndexEntry) -> bool:
         if not os.path.exists(full_path):
             return False
-        try:
-            with open(full_path, "rb") as f:
-                disk_data = f.read()
-        except OSError:
-            return True
+        with open(full_path, "rb") as f:
+            disk_data = f.read()
         try:
             blob_obj = repo_obj.object_store[index_entry.sha]
         except KeyError:
@@ -234,6 +231,9 @@ def apply_included_paths(
                     pass
                 except FileNotFoundError:
                     pass
+                except PermissionError:
+                    if not force:
+                        raise
         else:
             # Included => materialize if missing
             if not os.path.exists(full_path):

+ 1 - 0
pyproject.toml

@@ -25,6 +25,7 @@ classifiers = [
 requires-python = ">=3.9"
 dependencies = [
     "urllib3>=1.25",
+    'typing_extensions >=4.0 ; python_version < "3.10"',
 ]
 dynamic = ["version"]
 license-files = ["COPYING"]

+ 10 - 9
tests/test_cli.py

@@ -54,27 +54,28 @@ class DulwichCliTestCase(TestCase):
 
     def _run_cli(self, *args, stdout_stream=None):
         """Run CLI command and capture output."""
+
         class MockStream:
             def __init__(self):
                 self._buffer = io.BytesIO()
                 self.buffer = self._buffer
-                
+
             def write(self, data):
                 if isinstance(data, bytes):
                     self._buffer.write(data)
                 else:
-                    self._buffer.write(data.encode('utf-8'))
-                    
+                    self._buffer.write(data.encode("utf-8"))
+
             def getvalue(self):
                 value = self._buffer.getvalue()
                 try:
-                    return value.decode('utf-8')
+                    return value.decode("utf-8")
                 except UnicodeDecodeError:
                     return value
-                    
+
             def __getattr__(self, name):
                 return getattr(self._buffer, name)
-        
+
         old_stdout = sys.stdout
         old_stderr = sys.stderr
         old_cwd = os.getcwd()
@@ -82,13 +83,13 @@ class DulwichCliTestCase(TestCase):
             # Use custom stdout_stream if provided, otherwise use MockStream
             if stdout_stream:
                 sys.stdout = stdout_stream
-                if not hasattr(sys.stdout, 'buffer'):
+                if not hasattr(sys.stdout, "buffer"):
                     sys.stdout.buffer = sys.stdout
             else:
                 sys.stdout = MockStream()
-                
+
             sys.stderr = MockStream()
-            
+
             os.chdir(self.repo_path)
             result = cli.main(list(args))
             return result, sys.stdout.getvalue(), sys.stderr.getvalue()

+ 2 - 2
tests/test_cli_merge.py

@@ -109,8 +109,8 @@ class CLIMergeTests(TestCase):
             try:
                 os.chdir(tmpdir)
                 with patch("sys.stdout", new_callable=io.StringIO) as mock_stdout:
-                    exit_code = main(["merge", "feature"])
-                    self.assertEqual(1, exit_code)
+                    retcode = main(["merge", "feature"])
+                    self.assertEqual(retcode, 1)
                     output = mock_stdout.getvalue()
 
                 self.assertIn("Merge conflicts", output)

+ 1 - 1
tests/test_server.py

@@ -353,7 +353,7 @@ class ReceivePackHandlerTestCase(TestCase):
             [ONE, ZERO_SHA, b"refs/heads/fake-branch"],
         ]
         self._handler.set_client_capabilities([b"delete-refs"])
-        status = self._handler._apply_pack(update_refs)
+        status = list(self._handler._apply_pack(update_refs))
         self.assertEqual(status[0][0], b"unpack")
         self.assertEqual(status[0][1], b"ok")
         self.assertEqual(status[1][0], b"refs/heads/fake-branch")

+ 11 - 4
tests/test_sparse_patterns.py

@@ -500,11 +500,18 @@ class ApplyIncludedPathsTests(TestCase):
         self.assertTrue(idx[b"test_file.txt"].skip_worktree)
 
     def test_local_modifications_ioerror(self):
-        """Test handling of IOError when checking for local modifications."""
+        """Test handling of PermissionError/OSError when checking for local modifications."""
+        import sys
+
         self._commit_blob("special_file.txt", b"content")
         file_path = os.path.join(self.temp_dir, "special_file.txt")
 
-        # Make the file unreadable
+        # On Windows, chmod with 0 doesn't make files unreadable the same way
+        # Skip this test on Windows as the permission model is different
+        if sys.platform == "win32":
+            self.skipTest("File permissions work differently on Windows")
+
+        # Make the file unreadable on Unix-like systems
         os.chmod(file_path, 0)
 
         # Add a cleanup that checks if file exists first
@@ -517,8 +524,8 @@ class ApplyIncludedPathsTests(TestCase):
 
         self.addCleanup(safe_chmod_cleanup)
 
-        # Should raise conflict error with unreadable file and force=False
-        with self.assertRaises(SparseCheckoutConflictError):
+        # Should raise PermissionError with unreadable file and force=False
+        with self.assertRaises((PermissionError, OSError)):
             apply_included_paths(self.repo, included_paths=set(), force=False)
 
         # With force=True, should remove the file anyway