Merge branch 'main' into simple_querystring
instification committed Mar 19, 2024
2 parents b05900d + 686e6a7 commit e25fbc1
Showing 9 changed files with 84 additions and 21 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -8,12 +8,19 @@

- Tests: Wait for elasticsearch service @maethu

- Fix restricted object lookup @maethu

- Add support for the highlight feature of Elasticsearch @instification

- Use _old_searchResults when patching safeSearchResults @instification

- Handle negative term filters (fixes #101) @instification

- Check addon is installed before processing queue (fixes #108) @instification

- Add support for an optional Elasticsearch host in the worker via the PLONE_ELASTICSEARCH_HOST env variable @maethu


## 5.0.0 (2022-10-11)

- Rename `master` branch to `main` @ericof
2 changes: 2 additions & 0 deletions setup.py
@@ -80,5 +80,7 @@
entry_points="""
[z3c.autoinclude.plugin]
target = plone
[plone.autoinclude.plugin]
target = plone
""",
)
44 changes: 42 additions & 2 deletions src/collective/elasticsearch/queueprocessor.py
@@ -7,20 +7,54 @@
from collective.elasticsearch.manager import ElasticSearchManager
from collective.elasticsearch.utils import getESOnlyIndexes
from collective.elasticsearch.utils import use_redis
from pkg_resources import parse_version
from plone import api
from plone.app.uuid.utils import uuidToCatalogBrain
from plone.dexterity.utils import iterSchemata
from plone.indexer.interfaces import IIndexableObject
from plone.indexer.interfaces import IIndexer
from plone.namedfile.interfaces import INamedBlobFileField
from zope.component import getAdapters
from zope.component import queryMultiAdapter
from zope.component.hooks import getSite
from zope.globalrequest import getRequest
from zope.interface import implementer
from zope.schema import getFields

import transaction


if parse_version(api.env.plone_version()) < parse_version("6"):

    def uuidToObject(uuid, unrestricted=False):
        """Variation of plone.app.uuid.utils.uuidToObject which supports
        the 'unrestricted' parameter, like the one shipped with Plone 6.
        """

        brain = uuidToCatalogBrain(uuid)
        if brain is None:
            return None

        path = brain.getPath()

        if not path:
            return
        site = getSite()
        if site is None:
            return
        # Go to the parent of the item without restrictions.
        parent_path, final_path = path.rpartition("/")[::2]
        parent = site.unrestrictedTraverse(parent_path)
        # Do check restrictions for the final object, unless an
        # unrestricted lookup was explicitly requested.
        if unrestricted:
            return parent.unrestrictedTraverse(final_path)
        return parent.restrictedTraverse(final_path)

else:
from plone.app.uuid.utils import uuidToObject


@implementer(IElasticSearchIndexQueueProcessor)
class IndexProcessor:
"""A queue processor for elasticsearch"""
@@ -89,6 +123,8 @@ def _uuid_path(self, obj):

def index(self, obj, attributes=None):
"""Index the specified attributes for an obj."""
if not self.manager.active:
return
actions = self.actions
uuid, path = self._uuid_path(obj)
actions.uuid_path[uuid] = path
@@ -122,10 +158,14 @@ def index(self, obj, attributes=None):

def reindex(self, obj, attributes=None, update_metadata=False):
"""Reindex the specified attributes for an obj."""
if not self.manager.active:
return
self.index(obj, attributes)

def unindex(self, obj):
"""Unindex the obj."""
if not self.manager.active:
return
actions = self.actions
uuid, path = self._uuid_path(obj)
actions.uuid_path[uuid] = path
@@ -210,7 +250,7 @@ def get_data_for_redis(self, uuid, attributes=None):

def get_data_for_es(self, uuid, attributes=None):
"""Data to be sent to elasticsearch."""
obj = api.portal.get() if uuid == "/" else api.content.get(UID=uuid)
obj = api.portal.get() if uuid == "/" else uuidToObject(uuid, unrestricted=True)
wrapped_object = self.wrap_object(obj)
index_data = {}
attributes = attributes if attributes else self.all_attributes
@@ -228,7 +268,7 @@ def get_data_for_es(self, uuid, attributes=None):
if value in (None, "None"):
# yes, we'll index null data...
value = None
elif index_name in self._es_attributes:
elif index_name in self.es_attributes:
indexer = queryMultiAdapter(
(wrapped_object, catalog), IIndexer, name=index_name
)
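The queueprocessor.py change above makes get_data_for_es() resolve the queued UID with uuidToObject(uuid, unrestricted=True) instead of api.content.get(UID=uuid), so indexing still finds the object when the effective user cannot traverse to it. A minimal sketch of the difference, assuming a running Plone site and a purely hypothetical UID (not part of this commit):

from plone import api

from collective.elasticsearch.queueprocessor import uuidToObject

uid = "0123456789abcdef0123456789abcdef"  # hypothetical UID of a queued object

# Restricted lookup: may return None or raise Unauthorized when the current
# user lacks "Access contents information" somewhere along the object's path.
obj_restricted = api.content.get(UID=uid)

# Lookup now used by the queue processor: traverses to the parent without
# security checks, so the object is found no matter which user flushes the
# indexing queue.
obj_unrestricted = uuidToObject(uid, unrestricted=True)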
10 changes: 4 additions & 6 deletions src/collective/elasticsearch/redis/fetch.py
@@ -1,5 +1,3 @@
from collective.elasticsearch import utils

import io
import os
import requests
@@ -20,7 +18,8 @@


def fetch_data(uuid, attributes):
url = utils.PLONE_BACKEND + "/@elasticsearch_extractdata"
backend = os.environ.get("PLONE_BACKEND", None)
url = backend + "/@elasticsearch_extractdata"
payload = {"uuid": uuid, "attributes:list": attributes}
response = session.get(url, params=payload, verify=False, timeout=60)
if response.status_code == 200:
@@ -32,8 +31,7 @@ def fetch_data(uuid, attributes):


def fetch_blob_data(fieldname, data):
download_url = "/".join(
[utils.PLONE_BACKEND, data[fieldname]["path"], "@@download", fieldname]
)
backend = os.environ.get("PLONE_BACKEND", None)
download_url = "/".join([backend, data[fieldname]["path"], "@@download", fieldname])
file_ = session_data.get(download_url)
return io.BytesIO(file_.content)
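The fetch.py hunks replace the module-level utils.PLONE_BACKEND constant with an os.environ lookup at call time, so a PLONE_BACKEND value exported after import (for example by the test layer in testing.py) is actually picked up by the Redis worker. A standalone sketch of that pattern, with hypothetical function names rather than code from this commit:

import os

# Read once at import time: frozen to whatever was set when the module loaded.
BACKEND_AT_IMPORT = os.environ.get("PLONE_BACKEND", None)


def extractdata_url_import_time():
    # Goes stale if PLONE_BACKEND is exported later (e.g. in test setup).
    return f"{BACKEND_AT_IMPORT}/@elasticsearch_extractdata"


def extractdata_url_call_time():
    # Read at call time, as fetch_data() now does: always reflects the
    # current environment.
    backend = os.environ.get("PLONE_BACKEND", None)
    return f"{backend}/@elasticsearch_extractdata"


os.environ["PLONE_BACKEND"] = "http://localhost:8080/plone"  # e.g. set by the test layer
print(extractdata_url_import_time())  # still uses the stale import-time value
print(extractdata_url_call_time())    # http://localhost:8080/plone/@elasticsearch_extractdata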
2 changes: 2 additions & 0 deletions src/collective/elasticsearch/redis/tasks.py
@@ -54,6 +54,7 @@ def bulk_update(hosts, params, index_name, body):
"""
Collects all the data and updates elasticsearch
"""
hosts = os.environ.get("PLONE_ELASTICSEARCH_HOST", hosts)
connection = es_connection(hosts, **params)

for item in body:
@@ -81,6 +82,7 @@ def update_file_data(hosts, params, index_name, body):
"""
Get blob data from plone and index it via elasticsearch attachment pipeline
"""
hosts = os.environ.get("PLONE_ELASTICSEARCH_HOST", hosts)
connection = es_connection(hosts, **params)
uuid, data = body

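Both worker tasks above now let the PLONE_ELASTICSEARCH_HOST environment variable override the hosts value serialized into the queued job, matching the CHANGELOG entry. A minimal sketch of that precedence, with a hypothetical connect() helper standing in for the project's es_connection() and using the standard elasticsearch client:

import os

from elasticsearch import Elasticsearch


def connect(hosts):
    # Same precedence as bulk_update() and update_file_data():
    # an exported PLONE_ELASTICSEARCH_HOST wins over the hosts baked into
    # the job; otherwise the job's hosts are used unchanged.
    hosts = os.environ.get("PLONE_ELASTICSEARCH_HOST", hosts)
    return Elasticsearch(hosts)


# Without the variable set, the job's hosts are used:
#   connect("http://elasticsearch:9200")
# With PLONE_ELASTICSEARCH_HOST=http://es.internal:9200 exported for the
# worker, that value takes precedence over whatever the job carried.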
10 changes: 4 additions & 6 deletions src/collective/elasticsearch/testing.py
@@ -55,12 +55,10 @@ def setUpPloneSite(self, portal):
super().setUpPloneSite(portal)

# Setup environ for redis testing
os.environ["PLONE_BACKEND"] = utils.PLONE_BACKEND = portal.absolute_url()
os.environ["PLONE_USERNAME"] = utils.PLONE_USERNAME = SITE_OWNER_NAME
os.environ["PLONE_PASSWORD"] = utils.PLONE_PASSWORD = SITE_OWNER_PASSWORD
os.environ[
"PLONE_REDIS_DSN"
] = utils.PLONE_REDIS_DSN = "redis://localhost:6379/0"
os.environ["PLONE_BACKEND"] = portal.absolute_url()
os.environ["PLONE_USERNAME"] = SITE_OWNER_NAME
os.environ["PLONE_PASSWORD"] = SITE_OWNER_PASSWORD
os.environ["PLONE_REDIS_DSN"] = "redis://localhost:6379/0"

# Make sure tasks are not handled async in tests
# from collective.elasticsearch.redis.tasks import queue
2 changes: 1 addition & 1 deletion src/collective/elasticsearch/tests/__init__.py
@@ -32,7 +32,7 @@ def setUp(self):
self.request.environ["testing"] = True
self.app = self.layer["app"]

os.environ["PLONE_BACKEND"] = utils.PLONE_BACKEND = self.portal.absolute_url()
os.environ["PLONE_BACKEND"] = self.portal.absolute_url()

settings = utils.get_settings()
# disable sniffing hosts in tests because docker...
22 changes: 22 additions & 0 deletions src/collective/elasticsearch/tests/test_processor.py
@@ -7,6 +7,8 @@
from plone import api
from plone.app.contentrules.actions.move import MoveAction
from plone.app.contentrules.tests.dummy import DummyEvent
from plone.app.testing import login
from plone.app.testing import TEST_USER_PASSWORD
from plone.contentrules.rule.interfaces import IExecutable
from Products.CMFCore.indexing import processQueue
from zope.component import getMultiAdapter
@@ -72,6 +74,26 @@ def test_moved_content(self):
self.assertEqual(True, ex())
self.assertIn(obj_uid, processor.actions.index)

def test_index_even_if_access_to_obj_might_be_restricted(self):
processor = self.get_processor()
user = api.user.create(
username="worker",
email="ordinary_person@example.com",
password=TEST_USER_PASSWORD,
roles=("Member",),
)

folder = api.content.create(self.portal, "Folder", "folder1", title="A folder")
folder.manage_permission(
"Access contents information", roles=["Manager"], acquire=False
)
obj = api.content.create(folder, "Event", "event1", title="Some Event")

login(self.portal, user.getId())
obj.reindexObject()
processQueue()
self.assertIn(obj.UID(), processor.actions.index)


@parameterized_class(
[
6 changes: 0 additions & 6 deletions src/collective/elasticsearch/utils.py
@@ -20,12 +20,6 @@
HAS_REDIS_MODULE = False


PLONE_REDIS_DSN = os.environ.get("PLONE_REDIS_DSN", None)
PLONE_USERNAME = os.environ.get("PLONE_USERNAME", None)
PLONE_PASSWORD = os.environ.get("PLONE_PASSWORD", None)
PLONE_BACKEND = os.environ.get("PLONE_BACKEND", None)


def getUID(obj):
value = IUUID(obj, None)
if not value and hasattr(obj, "UID"):
