
opensearch: refactor unindexing
drfho committed May 27, 2024
1 parent 936b091 commit de70f0f
Showing 3 changed files with 39 additions and 15 deletions.
37 changes: 27 additions & 10 deletions Products/zms/ZMSZCatalogAdapter.py
@@ -228,19 +228,36 @@ def reindex_node(self, node, forced=False):
   # --------------------------------------------------------------------------
   # ZMSZCatalogAdapter.unindex_node
   # --------------------------------------------------------------------------
-  def unindex_node(self, node, forced=False):
-    standard.writeBlock(node, "[unindex_node]")
+  def unindex_nodes(self, nodes=[], forced=False):
+    # Is triggered by zmscontainerobject.moveObjsToTrashcan().
+    standard.writeBlock(self, "[unindex_nodes]")
+
+    # Get closest catalog-connectors.
+    path_nodes = self.breadcrumbs_obj_path()
+    path_nodes.reverse()
+    for path_node in path_nodes:
+      if path_node.getCatalogAdapter():
+        connectors = path_node.getCatalogAdapter().get_connectors()
+        break
+    if not connectors:
+      root = self.getRootElement()
+      connectors = root.getCatalogAdapter().get_connectors()
+
     try:
       if self.getConfProperty('ZMS.CatalogAwareness.active', 1) or forced:
-        nodes = node.breadcrumbs_obj_path()
-        nodes.reverse()
-        for node in nodes:
-          if self.matches_ids_filter(node):
-            for connector in self.get_connectors():
-              connector.manage_objects_remove([node])
-            break
+        # [1] Reindex page-container nodes of deleted page-elements.
+        pageelement_nodes = [node for node in nodes if not node.isPage()]
+        if pageelement_nodes:
+          for pageelement_node in pageelement_nodes:
+            # Todo: Avoid redundant reindexing of page-container.
+            self.reindex_node(node=pageelement_node)
+        # [2] Unindex deleted page-nodes if filter-match.
+        nodes = [node for node in nodes if self.matches_ids_filter(node)]
+        for connector in connectors:
+          connector.manage_objects_remove(nodes)
       return True
     except:
-      standard.writeError( self, "can't unindex_node")
+      standard.writeError( self, "unindex_nodes not successful")
       return False
+
   # --------------------------------------------------------------------------
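In effect, unindexing moves from a per-node breadcrumb walk to a single batch entry point: connectors are resolved once from the closest catalog adapter on the breadcrumb path (root adapter as fallback), deleted page-elements trigger a reindex of their page container, and all filter-matching nodes are removed in one call per connector. A minimal runnable sketch of that flow; Node, Adapter and Connector are hypothetical stand-ins that mimic only the calls made above, not the classes touched by this commit:

# Sketch only: hypothetical stand-ins for the ZMS node/adapter API.
class Connector:
  def __init__(self):
    self.removed = []
  def manage_objects_remove(self, nodes):
    self.removed.extend(nodes)

class Adapter:
  def __init__(self, connectors=None):
    self.connectors = connectors or []
    self.reindexed = []
  def get_connectors(self):
    return self.connectors
  def reindex_node(self, node):
    self.reindexed.append(node)
  def matches_ids_filter(self, node):
    return node.is_page  # stand-in for the real ids-filter

class Node:
  def __init__(self, id, is_page=True, adapter=None, parent=None):
    self.id, self.is_page, self.adapter, self.parent = id, is_page, adapter, parent
  def isPage(self):
    return self.is_page
  def getCatalogAdapter(self):
    return self.adapter
  def breadcrumbs_obj_path(self):
    # Root-first path to this node, as in ZMS.
    path, n = [], self
    while n:
      path.insert(0, n)
      n = n.parent
    return path
  def getRootElement(self):
    return self.breadcrumbs_obj_path()[0]

def find_connectors(node):
  # Closest catalog adapter wins: walk the breadcrumb path leaf -> root,
  # falling back to the root element's adapter.
  connectors = None  # defined up front so the fallback check is always bound
  for path_node in reversed(node.breadcrumbs_obj_path()):
    if path_node.getCatalogAdapter():
      connectors = path_node.getCatalogAdapter().get_connectors()
      break
  return connectors or node.getRootElement().getCatalogAdapter().get_connectors()

def unindex_batch(adapter, connectors, nodes):
  # [1] Deleted page-elements change their page, so the page-container is
  #     reindexed rather than removed from the index.
  for node in nodes:
    if not node.isPage():
      adapter.reindex_node(node=node)
  # [2] All filter-matching nodes are removed in one batch per connector.
  batch = [n for n in nodes if adapter.matches_ids_filter(n)]
  for connector in connectors:
    connector.manage_objects_remove(batch)

adapter = Adapter([Connector()])
root = Node("root", adapter=adapter)
folder = Node("folder", parent=root)
deleted = [Node("page", parent=folder), Node("element", is_page=False, parent=folder)]
connectors = find_connectors(deleted[0])
unindex_batch(adapter, connectors, deleted)
assert [n.id for n in connectors[0].removed] == ["page"]
assert [n.id for n in adapter.reindexed] == ["element"]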
15 changes: 11 additions & 4 deletions
@@ -5,6 +5,7 @@
 from opensearchpy import OpenSearch
 from opensearchpy.helpers import bulk

+langs = ['ger','eng']

 def get_opensearch_client(self):
   # ${opensearch.url:https://localhost:9200, https://localhost:9201}
@@ -49,18 +50,24 @@ def bulk_opensearch_delete(self, sources):
   # Name adaption to opensearch schema
   for x in sources:
     # Create language specific opensearch id
-    _id = "%s:%s"%(x['uid'],x.get('lang',self.getPrimaryLanguage()))
-    d = {"_op_type":"delete", "_index":index_name, "_id":_id}
-    actions.append(d)
+    for lang in globals()['langs']:
+      _id = "%s:%s"%(x['uid'],lang)
+      d = {"_op_type":"delete", "_index":index_name, "_id":_id}
+      actions.append(d)
   if client:
     return bulk(client, actions)
   return 0, len(actions)

 def manage_opensearch_objects_remove( self, nodes):
   sources = [{'uid':x.get_uid()} for x in nodes]
+  for node in nodes:
+    # Set node's language list as global variable.
+    global langs
+    langs = node.getLangIds()
+    break
   try:
     success, failed = bulk_opensearch_delete(self, sources)
   except Exception as e:
-    print(e)
+    standard.writeBlock( self, str(e))
     return 0, len(sources)
   return success, failed or 0
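The delete path now fans out per language: documents are indexed under ids of the form "uid:lang", so each removed uid yields one delete action per entry in langs, where previously only the node's own (or primary) language id was covered. A small sketch of the action list handed to opensearchpy.helpers.bulk, with a made-up index name and uids:

# Sketch only: builds delete actions in the same shape as
# bulk_opensearch_delete above; index name and uids are made up.
index_name = 'zms-index'  # hypothetical
langs = ['ger','eng']
sources = [{'uid':'uid-1234'}, {'uid':'uid-5678'}]

actions = [
  # One delete action per uid and language; the document id is "uid:lang".
  {"_op_type":"delete", "_index":index_name, "_id":"%s:%s"%(x['uid'],lang)}
  for x in sources
  for lang in langs
]
# With a live client this would be: success, failed = bulk(client, actions)
for action in actions:
  print(action)

Note that bulk_opensearch_delete reads langs from module globals, which manage_opensearch_objects_remove sets from the first node's getLangIds() before delegating.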
2 changes: 1 addition & 1 deletion Products/zms/zmscontainerobject.py
@@ -267,7 +267,7 @@ def moveObjsToTrashcan(self, ids, REQUEST):
     trashcan.normalizeSortIds()
     trashcan.run_garbage_collection(forced=1)
     # Synchronize search.
-    self.getCatalogAdapter().unindex_node(self)
+    self.getCatalogAdapter().unindex_nodes(nodes=children)
     # Sort-IDs.
     self.normalizeSortIds()
     [standard.triggerEvent(child,'afterDeleteObjsEvt') for child in children]
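At the call site, the container no longer unindexes itself; the batch of children just moved to the trashcan is handed over in one call. A reduced sketch of that shape, with CatalogAdapterStub as a hypothetical stand-in rather than the real ZMSZCatalogAdapter:

# Sketch only: the call-site change in moveObjsToTrashcan, reduced to its shape.
class CatalogAdapterStub:
  def unindex_node(self, node):  # old API: one container at a time
    print("unindex", node)
  def unindex_nodes(self, nodes=[], forced=False):  # new API: whole batch
    print("unindex batch", nodes)

adapter = CatalogAdapterStub()
children = ["e1", "e2"]  # stands in for the objects moved to the trashcan
# Before this commit: adapter.unindex_node(self) -- container-based
# After this commit:
adapter.unindex_nodes(nodes=children)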
