
Remove deprecated functions

Jelmer Vernooij 6 days ago
parent commit 918d8c1df2

+ 0 - 56
dulwich/line_ending.py

@@ -146,10 +146,6 @@ __all__ = [
     "check_safecrlf",
     "convert_crlf_to_lf",
     "convert_lf_to_crlf",
-    "get_checkin_filter",
-    "get_checkin_filter_autocrlf",
-    "get_checkout_filter",
-    "get_checkout_filter_autocrlf",
     "get_clean_filter",
     "get_clean_filter_autocrlf",
     "get_smudge_filter",
@@ -165,7 +161,6 @@ if TYPE_CHECKING:
     from .config import StackedConfig
     from .object_store import BaseObjectStore
 
-from . import replace_me
 from .attrs import GitAttributes, Pattern
 from .filters import FilterBlobNormalizer, FilterContext, FilterDriver, FilterRegistry
 from .object_store import iter_tree_contents
@@ -442,57 +437,6 @@ def get_clean_filter_autocrlf(
     return None
 
 
-# Backwards compatibility wrappers
-@replace_me(since="0.23.1", remove_in="0.25.0")
-def get_checkout_filter(
-    core_eol: str, core_autocrlf: bool | str, git_attributes: Mapping[str, Any]
-) -> Callable[[bytes], bytes] | None:
-    """Deprecated: Use get_smudge_filter instead."""
-    # Convert core_autocrlf to bytes for compatibility
-    if isinstance(core_autocrlf, bool):
-        autocrlf_bytes = b"true" if core_autocrlf else b"false"
-    else:
-        autocrlf_bytes = (
-            core_autocrlf.encode("utf-8")
-            if isinstance(core_autocrlf, str)
-            else core_autocrlf
-        )
-    return get_smudge_filter(core_eol, autocrlf_bytes)
-
-
-@replace_me(since="0.23.1", remove_in="0.25.0")
-def get_checkin_filter(
-    core_eol: str, core_autocrlf: bool | str, git_attributes: Mapping[str, Any]
-) -> Callable[[bytes], bytes] | None:
-    """Deprecated: Use get_clean_filter instead."""
-    # Convert core_autocrlf to bytes for compatibility
-    if isinstance(core_autocrlf, bool):
-        autocrlf_bytes = b"true" if core_autocrlf else b"false"
-    else:
-        autocrlf_bytes = (
-            core_autocrlf.encode("utf-8")
-            if isinstance(core_autocrlf, str)
-            else core_autocrlf
-        )
-    return get_clean_filter(core_eol, autocrlf_bytes)
-
-
-@replace_me(since="0.23.1", remove_in="0.25.0")
-def get_checkout_filter_autocrlf(
-    core_autocrlf: bytes,
-) -> Callable[[bytes], bytes] | None:
-    """Deprecated: Use get_smudge_filter_autocrlf instead."""
-    return get_smudge_filter_autocrlf(core_autocrlf)
-
-
-@replace_me(since="0.23.1", remove_in="0.25.0")
-def get_checkin_filter_autocrlf(
-    core_autocrlf: bytes,
-) -> Callable[[bytes], bytes] | None:
-    """Deprecated: Use get_clean_filter_autocrlf instead."""
-    return get_clean_filter_autocrlf(core_autocrlf)
-
-
 class BlobNormalizer(FilterBlobNormalizer):
     """An object to store computation result of which filter to apply based on configuration, gitattributes, path and operation (checkin or checkout).
 

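Because the removed get_checkin_filter*/get_checkout_filter* wrappers only converted core.autocrlf to bytes (and ignored their git_attributes argument) before delegating, callers can switch straight to the remaining clean/smudge API. A minimal sketch, with illustrative config values:

    from dulwich.line_ending import get_clean_filter, get_smudge_filter

    core_eol = "native"        # illustrative core.eol value
    core_autocrlf = b"true"    # the replacement functions take bytes, not bool/str

    smudge = get_smudge_filter(core_eol, core_autocrlf)  # was get_checkout_filter(...)
    clean = get_clean_filter(core_eol, core_autocrlf)    # was get_checkin_filter(...)

    if clean is not None:
        checked_in = clean(b"one\r\ntwo\r\n")  # CRLF normalized to LF on check-in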
+ 0 - 39
dulwich/objects.py

@@ -60,7 +60,6 @@ __all__ = [
     "key_entry_name_order",
     "object_class",
     "object_header",
-    "parse_commit",
     "parse_commit_broken",
     "parse_tree",
     "pretty_format_tree_entry",
@@ -95,7 +94,6 @@ else:
 
 from typing import NewType, TypeGuard
 
-from . import replace_me
 from .errors import (
     ChecksumMismatch,
     FileFormatException,
@@ -2217,19 +2215,6 @@ class Commit(ShaFile):
         doc="Parents of this commit, by their SHA1.",
     )
 
-    @replace_me(since="0.21.0", remove_in="0.24.0")
-    def _get_extra(self) -> list[tuple[bytes, bytes]]:
-        """Return extra settings of this commit."""
-        return self._extra
-
-    extra = property(
-        _get_extra,
-        doc="Extra header fields not understood (presumably added in a "
-        "newer version of git). Kept verbatim so the object can "
-        "be correctly reserialized. For private commit metadata, use "
-        "pseudo-headers in Commit.message, rather than this field.",
-    )
-
     author = serializable_property("author", "The name of the author of the commit")
 
     committer = serializable_property(
@@ -2281,30 +2266,6 @@ for cls in OBJECT_CLASSES:
 # Public API functions
 
 
-@replace_me(since="0.21.0", remove_in="0.24.0")
-def parse_commit(
-    chunks: Iterable[bytes],
-) -> tuple[
-    bytes | None,
-    list[bytes],
-    tuple[bytes | None, int | None, tuple[int | None, bool | None]],
-    tuple[bytes | None, int | None, tuple[int | None, bool | None]],
-    bytes | None,
-    list[Tag],
-    bytes | None,
-    bytes | None,
-    list[tuple[bytes, bytes]],
-]:
-    """Parse a commit object from chunks.
-
-    Args:
-      chunks: Chunks to parse
-    Returns: Tuple of (tree, parents, author_info, commit_info,
-        encoding, mergetag, gpgsig, message, extra)
-    """
-    return _parse_commit(chunks)
-
-
 def parse_commit_broken(data: bytes) -> Commit:
     """Parse a commit with broken author/committer lines.
 

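With the deprecated parse_commit() helper and the Commit.extra property gone, commit fields are read from parsed Commit objects; for private metadata the old docstring already pointed at pseudo-headers in Commit.message. A rough sketch, assuming an existing repository at a hypothetical path:

    from dulwich.repo import Repo

    r = Repo("path/to/repo")       # hypothetical repository path
    commit = r[r.head()]           # Commit object for HEAD
    print(commit.tree, commit.parents)
    print(commit.author, commit.committer, commit.message)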
+ 3 - 14
dulwich/pack.py

@@ -147,7 +147,6 @@ if TYPE_CHECKING:
 if sys.platform == "Plan9":
     has_mmap = False
 
-from . import replace_me
 from .errors import ApplyDeltaError, ChecksumMismatch
 from .file import GitFile, _GitFile
 from .lru_cache import LRUSizeCache
@@ -688,18 +687,6 @@ class PackIndex:
         """
         raise NotImplementedError(self.get_pack_checksum)
 
-    @replace_me(since="0.21.0", remove_in="0.23.0")
-    def object_index(self, sha: ObjectID | RawObjectID) -> int:
-        """Return the index for the given SHA.
-
-        Args:
-            sha: SHA-1 hash
-
-        Returns:
-            Index position
-        """
-        return self.object_offset(sha)
-
     def object_offset(self, sha: ObjectID | RawObjectID) -> int:
         """Return the offset in to the corresponding packfile for the object.
 
@@ -2200,7 +2187,9 @@ class DeltaChainIterator(Generic[T]):
             elif unpacked.pack_type_num == REF_DELTA:
                 with suppress(KeyError):
                     assert isinstance(unpacked.delta_base, bytes)
-                    base_ofs = pack.index.object_index(RawObjectID(unpacked.delta_base))
+                    base_ofs = pack.index.object_offset(
+                        RawObjectID(unpacked.delta_base)
+                    )
             if base_ofs is not None and base_ofs not in done:
                 todo.add(base_ofs)
         return walker

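PackIndex.object_index() was a thin alias, as the updated DeltaChainIterator call shows, so external callers just rename the method. A sketch, assuming a hypothetical pack basename on disk:

    from dulwich.pack import Pack

    pack = Pack("objects/pack/pack-1234abcd")   # hypothetical basename, no .pack/.idx suffix
    for sha in pack.index:                      # iterating the index yields object SHAs
        offset = pack.index.object_offset(sha)  # was pack.index.object_index(sha)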
+ 0 - 22
dulwich/porcelain/__init__.py

@@ -113,7 +113,6 @@ __all__ = [
     "check_ignore",
     "check_mailmap",
     "checkout",
-    "checkout_branch",
     "cherry",
     "cherry_pick",
     "clean",
@@ -306,7 +305,6 @@ if TYPE_CHECKING:
     from ..gc import GCStats
     from ..maintenance import MaintenanceResult
 
-from .. import replace_me
 from ..archive import tar_stream
 from ..bisect import BisectState
 from ..client import (
@@ -5429,26 +5427,6 @@ def reset_file(
     build_file_from_blob(blob, mode, full_path, symlink_fn=symlink_fn)
 
 
-@replace_me(since="0.22.9", remove_in="0.24.0")
-def checkout_branch(
-    repo: str | os.PathLike[str] | Repo,
-    target: bytes | str,
-    force: bool = False,
-) -> None:
-    """Switch branches or restore working tree files.
-
-    This is now a wrapper around the general checkout() function.
-    Preserved for backward compatibility.
-
-    Args:
-      repo: dulwich Repo object
-      target: branch name or commit sha to checkout
-      force: true or not to force checkout
-    """
-    # Simply delegate to the new checkout function
-    return checkout(repo, target, force=force)
-
-
 def sparse_checkout(
     repo: str | os.PathLike[str] | Repo,
     patterns: list[str] | None = None,

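porcelain.checkout_branch() was only a delegating wrapper, so its call sites map one-to-one onto porcelain.checkout(). A short sketch with a hypothetical repository path:

    from dulwich import porcelain

    # was: porcelain.checkout_branch("path/to/repo", b"feature-x", force=True)
    porcelain.checkout("path/to/repo", b"feature-x", force=True)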
+ 1 - 144
dulwich/repo.py

@@ -90,7 +90,7 @@ if TYPE_CHECKING:
     from .walk import Walker
     from .worktree import WorkTree
 
-from . import reflog, replace_me
+from . import reflog
 from .errors import (
     NoIndexPresent,
     NotBlobError,
@@ -1320,70 +1320,6 @@ class BaseRepo:
             "Working tree operations not supported by this repository type"
         )
 
-    @replace_me(remove_in="0.26.0")
-    def do_commit(
-        self,
-        message: bytes | None = None,
-        committer: bytes | None = None,
-        author: bytes | None = None,
-        commit_timestamp: float | None = None,
-        commit_timezone: int | None = None,
-        author_timestamp: float | None = None,
-        author_timezone: int | None = None,
-        tree: ObjectID | None = None,
-        encoding: bytes | None = None,
-        ref: Ref | None = HEADREF,
-        merge_heads: list[ObjectID] | None = None,
-        no_verify: bool = False,
-        sign: bool = False,
-    ) -> bytes:
-        """Create a new commit.
-
-        If not specified, committer and author default to
-        get_user_identity(..., 'COMMITTER')
-        and get_user_identity(..., 'AUTHOR') respectively.
-
-        Args:
-          message: Commit message (bytes or callable that takes (repo, commit)
-            and returns bytes)
-          committer: Committer fullname
-          author: Author fullname
-          commit_timestamp: Commit timestamp (defaults to now)
-          commit_timezone: Commit timestamp timezone (defaults to GMT)
-          author_timestamp: Author timestamp (defaults to commit
-            timestamp)
-          author_timezone: Author timestamp timezone
-            (defaults to commit timestamp timezone)
-          tree: SHA1 of the tree root to use (if not specified the
-            current index will be committed).
-          encoding: Encoding
-          ref: Optional ref to commit to (defaults to current branch).
-            If None, creates a dangling commit without updating any ref.
-          merge_heads: Merge heads (defaults to .git/MERGE_HEAD)
-          no_verify: Skip pre-commit and commit-msg hooks
-          sign: GPG Sign the commit (bool, defaults to False,
-            pass True to use default GPG key,
-            pass a str containing Key ID to use a specific GPG key)
-
-        Returns:
-          New commit SHA1
-        """
-        return self.get_worktree().commit(
-            message=message,
-            committer=committer,
-            author=author,
-            commit_timestamp=commit_timestamp,
-            commit_timezone=commit_timezone,
-            author_timestamp=author_timestamp,
-            author_timezone=author_timezone,
-            tree=tree,
-            encoding=encoding,
-            ref=ref,
-            merge_heads=merge_heads,
-            no_verify=no_verify,
-            sign=sign,
-        )
-
 
 def read_gitfile(f: BinaryIO) -> str:
     """Read a ``.git`` file.
@@ -1876,31 +1812,6 @@ class Repo(BaseRepo):
         # missing index file, which is treated as empty.
         return not self.bare
 
-    @replace_me(remove_in="0.26.0")
-    def stage(
-        self,
-        fs_paths: str
-        | bytes
-        | os.PathLike[str]
-        | Iterable[str | bytes | os.PathLike[str]],
-    ) -> None:
-        """Stage a set of paths.
-
-        Args:
-          fs_paths: List of paths, relative to the repository path
-        """
-        return self.get_worktree().stage(fs_paths)
-
-    @replace_me(remove_in="0.26.0")
-    def unstage(self, fs_paths: Sequence[str]) -> None:
-        """Unstage specific file in the index.
-
-        Args:
-          fs_paths: a list of files to unstage,
-            relative to the repository path.
-        """
-        return self.get_worktree().unstage(fs_paths)
-
     def clone(
         self,
         target_path: str | bytes | os.PathLike[str],
@@ -1998,15 +1909,6 @@ class Repo(BaseRepo):
             raise
         return target
 
-    @replace_me(remove_in="0.26.0")
-    def reset_index(self, tree: ObjectID | None = None) -> None:
-        """Reset the index back to a specific tree.
-
-        Args:
-          tree: Tree SHA to reset to, None for current HEAD tree.
-        """
-        return self.get_worktree().reset_index(tree)
-
     def _get_config_condition_matchers(self) -> dict[str, "ConditionMatcher"]:
         """Get condition matchers for includeIf conditions.
 
@@ -2531,51 +2433,6 @@ class Repo(BaseRepo):
 
         return GitAttributes(patterns)
 
-    @replace_me(remove_in="0.26.0")
-    def _sparse_checkout_file_path(self) -> str:
-        """Return the path of the sparse-checkout file in this repo's control dir."""
-        return self.get_worktree()._sparse_checkout_file_path()
-
-    @replace_me(remove_in="0.26.0")
-    def configure_for_cone_mode(self) -> None:
-        """Ensure the repository is configured for cone-mode sparse-checkout."""
-        return self.get_worktree().configure_for_cone_mode()
-
-    @replace_me(remove_in="0.26.0")
-    def infer_cone_mode(self) -> bool:
-        """Return True if 'core.sparseCheckoutCone' is set to 'true' in config, else False."""
-        return self.get_worktree().infer_cone_mode()
-
-    @replace_me(remove_in="0.26.0")
-    def get_sparse_checkout_patterns(self) -> list[str]:
-        """Return a list of sparse-checkout patterns from info/sparse-checkout.
-
-        Returns:
-            A list of patterns. Returns an empty list if the file is missing.
-        """
-        return self.get_worktree().get_sparse_checkout_patterns()
-
-    @replace_me(remove_in="0.26.0")
-    def set_sparse_checkout_patterns(self, patterns: Sequence[str]) -> None:
-        """Write the given sparse-checkout patterns into info/sparse-checkout.
-
-        Creates the info/ directory if it does not exist.
-
-        Args:
-            patterns: A list of gitignore-style patterns to store.
-        """
-        return self.get_worktree().set_sparse_checkout_patterns(patterns)
-
-    @replace_me(remove_in="0.26.0")
-    def set_cone_mode_patterns(self, dirs: Sequence[str] | None = None) -> None:
-        """Write the given cone-mode directory patterns into info/sparse-checkout.
-
-        For each directory to include, add an inclusion line that "undoes" the prior
-        ``!/*/`` 'exclude' that re-includes that directory and everything under it.
-        Never add the same line twice.
-        """
-        return self.get_worktree().set_cone_mode_patterns(dirs)
-
 
 class MemoryRepo(BaseRepo):
     """Repo that stores refs, objects, and named files in memory.

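Every removed Repo wrapper forwarded to the worktree object, so the replacement is to fetch it once with get_worktree() and call the same methods there. A sketch against a hypothetical existing repository:

    from dulwich.repo import Repo

    r = Repo("path/to/repo")                   # hypothetical repository path
    wt = r.get_worktree()

    wt.stage(["a.txt"])                        # was r.stage([...])
    sha = wt.commit(                           # was r.do_commit(...)
        message=b"Commit message",
        committer=b"A U Thor <author@example.com>",
    )
    wt.reset_index()                           # was r.reset_index()
    wt.set_sparse_checkout_patterns(["*.py"])  # was r.set_sparse_checkout_patterns([...])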
+ 3 - 4
tests/test_repository.py

@@ -1682,10 +1682,9 @@ class BuildRepoRootTests(TestCase):
     def test_stage_absolute(self) -> None:
         r = self._repo
         os.remove(os.path.join(r.path, "a"))
-        # Suppress deprecation warning since we're intentionally testing the deprecated method
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            self.assertRaises(ValueError, r.stage, [os.path.join(r.path, "a")])
+        self.assertRaises(
+            ValueError, r.get_worktree().stage, [os.path.join(r.path, "a")]
+        )
 
     def test_stage_deleted(self) -> None:
         r = self._repo

+ 0 - 50
tests/test_worktree.py

@@ -390,56 +390,6 @@ class WorkTreeSparseCheckoutTests(WorkTreeTestCase):
 class WorkTreeBackwardCompatibilityTests(WorkTreeTestCase):
     """Tests for backward compatibility of deprecated Repo methods."""
 
-    def test_deprecated_stage_delegates_to_worktree(self):
-        """Test that deprecated Repo.stage delegates to WorkTree."""
-        with open(os.path.join(self.repo.path, "new_file"), "w") as f:
-            f.write("test content")
-
-        # This should show a deprecation warning but still work
-        import warnings
-
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always")
-            self.repo.stage(
-                ["new_file"]
-            )  # Call deprecated method on Repo, not WorkTree
-            self.assertTrue(len(w) > 0)
-            self.assertTrue(issubclass(w[0].category, DeprecationWarning))
-
-    def test_deprecated_unstage_delegates_to_worktree(self):
-        """Test that deprecated Repo.unstage delegates to WorkTree."""
-        # This should show a deprecation warning but still work
-        import warnings
-
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always")
-            self.repo.unstage(["a"])  # Call deprecated method on Repo, not WorkTree
-            self.assertTrue(len(w) > 0)
-            self.assertTrue(issubclass(w[0].category, DeprecationWarning))
-
-    def test_deprecated_sparse_checkout_methods(self):
-        """Test that deprecated sparse checkout methods delegate to WorkTree."""
-        import warnings
-
-        # Test get_sparse_checkout_patterns
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always")
-            patterns = (
-                self.repo.get_sparse_checkout_patterns()
-            )  # Call deprecated method on Repo
-            self.assertEqual([], patterns)
-            self.assertTrue(len(w) > 0)
-            self.assertTrue(issubclass(w[0].category, DeprecationWarning))
-
-        # Test set_sparse_checkout_patterns
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always")
-            self.repo.set_sparse_checkout_patterns(
-                ["*.py"]
-            )  # Call deprecated method on Repo
-            self.assertTrue(len(w) > 0)
-            self.assertTrue(issubclass(w[0].category, DeprecationWarning))
-
     def test_pre_commit_hook_fail(self):
         """Test that failing pre-commit hook raises CommitError."""
         if os.name != "posix":