elasticsearch7.py

from copy import deepcopy

from elasticsearch import NotFoundError
from elasticsearch.helpers import bulk

from wagtail.search.backends.elasticsearch5 import (
    ElasticsearchAutocompleteQueryCompilerImpl,
)
from wagtail.search.backends.elasticsearch6 import (
    Elasticsearch6Index,
    Elasticsearch6Mapping,
    Elasticsearch6SearchBackend,
    Elasticsearch6SearchQueryCompiler,
    Elasticsearch6SearchResults,
)
from wagtail.search.index import class_is_indexed


class Elasticsearch7Mapping(Elasticsearch6Mapping):
    def get_mapping(self):
        mapping = super().get_mapping()
        return mapping[self.get_document_type()]


class Elasticsearch7Index(Elasticsearch6Index):
    def add_model(self, model):
        # Get mapping
        mapping = self.mapping_class(model)

        # Put mapping
        self.es.indices.put_mapping(index=self.name, body=mapping.get_mapping())

    def add_item(self, item):
        # Make sure the object can be indexed
        if not class_is_indexed(item.__class__):
            return

        # Get mapping
        mapping = self.mapping_class(item.__class__)

        # Add document to index
        self.es.index(
            self.name, mapping.get_document(item), id=mapping.get_document_id(item)
        )

    def add_items(self, model, items):
        if not class_is_indexed(model):
            return

        # Get mapping
        mapping = self.mapping_class(model)

        # Create list of actions
        actions = []
        for item in items:
            # Create the action
            action = {"_id": mapping.get_document_id(item)}
            action.update(mapping.get_document(item))
            actions.append(action)

        # Run the actions
        bulk(self.es, actions, index=self.name)

    def delete_item(self, item):
        # Make sure the object can be indexed
        if not class_is_indexed(item.__class__):
            return

        # Get mapping
        mapping = self.mapping_class(item.__class__)

        # Delete document
        try:
            self.es.delete(self.name, mapping.get_document_id(item))
        except NotFoundError:
            pass  # Document doesn't exist, ignore this exception
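

# Usage sketch (an editorial illustration, not part of the upstream module):
# these index methods are normally driven by Wagtail's indexing machinery,
# such as the update_index management command, rather than called directly.
# Assuming a hypothetical BlogPage model registered for search, direct use
# would look roughly like this:
#
#   from wagtail.search.backends import get_search_backend
#
#   backend = get_search_backend("default")
#   index = backend.get_index_for_model(BlogPage)
#   index.add_model(BlogPage)                          # put the mapping
#   index.add_items(BlogPage, BlogPage.objects.all())  # bulk-index the rows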


class Elasticsearch7SearchQueryCompiler(Elasticsearch6SearchQueryCompiler):
    mapping_class = Elasticsearch7Mapping


class Elasticsearch7SearchResults(Elasticsearch6SearchResults):
    pass


class Elasticsearch7AutocompleteQueryCompiler(
    ElasticsearchAutocompleteQueryCompilerImpl, Elasticsearch6SearchQueryCompiler
):
    pass


class Elasticsearch7SearchBackend(Elasticsearch6SearchBackend):
    mapping_class = Elasticsearch7Mapping
    index_class = Elasticsearch7Index
    query_compiler_class = Elasticsearch7SearchQueryCompiler
    autocomplete_query_compiler_class = Elasticsearch7AutocompleteQueryCompiler
    results_class = Elasticsearch7SearchResults
    settings = deepcopy(Elasticsearch6SearchBackend.settings)
    settings["settings"]["index"] = {"max_ngram_diff": 12}

    # Fix ElasticsearchDeprecationWarnings for tokenizer names and token filter names:
    # - [edgeNGram] -> [edge_ngram]
    # - [nGram] -> [ngram]
    settings["settings"]["analysis"]["filter"]["edgengram"]["type"] = "edge_ngram"
    settings["settings"]["analysis"]["filter"]["ngram"]["type"] = "ngram"
    settings["settings"]["analysis"]["tokenizer"]["edgengram_tokenizer"][
        "type"
    ] = "edge_ngram"
    settings["settings"]["analysis"]["tokenizer"]["ngram_tokenizer"]["type"] = "ngram"


SearchBackend = Elasticsearch7SearchBackend
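
# Configuration sketch (an editorial illustration, not part of the upstream
# module): Wagtail selects this backend through the WAGTAILSEARCH_BACKENDS
# setting in the Django settings module. A minimal sketch, assuming a single
# Elasticsearch 7 node at http://localhost:9200 and the index name "wagtail"
# (both placeholders):
#
#   WAGTAILSEARCH_BACKENDS = {
#       "default": {
#           "BACKEND": "wagtail.search.backends.elasticsearch7",
#           "URLS": ["http://localhost:9200"],
#           "INDEX": "wagtail",
#       },
#   }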