Commit
feat: adding save records endpoint
awaisdar001 committed Dec 24, 2023
1 parent 7f23fc2 commit d9f211e
Showing 3 changed files with 64 additions and 2 deletions.
5 changes: 3 additions & 2 deletions algoliasearch_django/__init__.py
@@ -27,10 +27,11 @@
get_adapter_from_instance = algolia_engine.get_adapter_from_instance

save_record = algolia_engine.save_record
+save_records = algolia_engine.save_records
delete_record = algolia_engine.delete_record
update_records = algolia_engine.update_records
raw_search = algolia_engine.raw_search
-clear_index = algolia_engine.clear_index # TODO: deprecate
+clear_index = algolia_engine.clear_index  # TODO: deprecate
clear_objects = algolia_engine.clear_objects
reindex_all = algolia_engine.reindex_all

@@ -44,7 +45,7 @@ def emit(self, record):


def autodiscover():
-    autodiscover_modules('index')
+    autodiscover_modules("index")


logging.getLogger(__name__.split('.')[0]).addHandler(NullHandler())
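With the alias in place, a queryset can be pushed to Algolia in one call instead of saving instances one by one. A minimal usage sketch, assuming a hypothetical `MyModel` with `title`/`body` fields (the registration boilerplate follows the library's usual pattern and is not part of this commit):

```python
# index.py in a Django app -- MyModel and its fields are hypothetical
from algoliasearch_django import AlgoliaIndex, save_records
from algoliasearch_django.decorators import register

from .models import MyModel


@register(MyModel)
class MyModelIndex(AlgoliaIndex):
    fields = ("title", "body")


# Later, e.g. in a management command: index an entire queryset in batches.
qs = MyModel.objects.all()
save_records(MyModel, qs, batch_size=500)  # extra kwargs are forwarded to the adapter
```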
52 changes: 52 additions & 0 deletions algoliasearch_django/models.py
@@ -316,6 +316,58 @@ def save_record(self, instance, update_fields=None, **kwargs):
logger.warning('%s FROM %s NOT SAVED: %s', obj['objectID'],
self.model, e)

def save_records(self, qs, batch_size=1000, **kwargs):
"""Saves multiple records to the index in batches.
Parameters:
- qs (QuerySet): A set of records to be saved.
- batch_size (int): The size of each batch for saving records. Defaults to 1000.
- **kwargs: Additional keyword arguments.
"""
self.__tmp_index.clear_objects()
logger.debug('CLEAR INDEX %s_tmp', self.index_name)

to_update_batch = []
to_delete_batch = []
for instance in qs:
if not self._should_index(instance):
# Should not index, but since we don't know the state of the
# instance, we need to send a DELETE request.
to_delete_batch.append(instance)
continue

to_update_batch.append(self.get_raw_record(instance))
if len(to_update_batch) >= batch_size:
self.__tmp_index.save_objects(to_update_batch)
logger.info(
'SAVE %d OBJECTS TO %s_tmp', len(to_update_batch), self.index_name
)
to_update_batch = []

if len(to_update_batch) > 0:
self.__tmp_index.save_objects(to_update_batch)
logger.info(
'SAVE %d OBJECTS TO %s_tmp', len(to_update_batch), self.index_name
)

self.__client.move_index(self.tmp_index_name, self.index_name)
logger.info('MOVE INDEX %s_tmp TO %s', self.index_name, self.index_name)

if len(to_delete_batch) > 0:
self.delete_records(to_delete_batch)

def delete_records(self, objects):
"""Delete multiple records."""
objectIDs = [self.objectID(instance) for instance in objects]
try:
self.__index.delete_objects(object_ids=objectIDs)
logger.info('DELETE %s FROM %s', objectIDs, self.model)
except AlgoliaException as e:
if DEBUG:
raise e
else:
logger.warning('%s FROM %s NOT DELETED: %s', objectIDs, self.model, e)

def delete_record(self, instance):
"""Deletes the record."""
objectID = self.objectID(instance)
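The method above buffers raw records and flushes them to the temporary index every time the buffer reaches `batch_size`, with one final flush for the remainder, before moving the temporary index over the live one. A standalone sketch of that flush-when-full pattern, with a hypothetical `send` callable standing in for `save_objects`:

```python
from typing import Callable, Iterable, List


def flush_in_batches(records: Iterable[dict],
                     send: Callable[[List[dict]], None],
                     batch_size: int = 1000) -> int:
    """Buffer records and call `send` whenever `batch_size` is reached.

    Mirrors the buffering used by save_records above; returns how many
    records were sent in total.
    """
    buffer: List[dict] = []
    sent = 0
    for record in records:
        buffer.append(record)
        if len(buffer) >= batch_size:
            send(buffer)
            sent += len(buffer)
            buffer = []
    if buffer:  # final partial batch
        send(buffer)
        sent += len(buffer)
    return sent


# 2500 records with the default batch_size of 1000 -> send() is called three
# times, with 1000, 1000 and 500 records.
total = flush_in_batches(({"objectID": i} for i in range(2500)), send=lambda batch: None)
assert total == 2500
```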
9 changes: 9 additions & 0 deletions algoliasearch_django/registration.py
@@ -124,6 +124,15 @@ def save_record(self, instance, **kwargs):
adapter = self.get_adapter_from_instance(instance)
adapter.save_record(instance, **kwargs)

def save_records(self, model, qs, **kwargs):
"""
>>> from algoliasearch_django import save_records
>>> qs = MyModel.objects.filter()
>>> save_records(MyModel, qs)
"""
adapter = self.get_adapter(model)
adapter.save_records(qs, **kwargs)

def delete_record(self, instance):
"""Deletes the record."""
adapter = self.get_adapter_from_instance(instance)
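At the engine level, `save_records` only resolves the registered adapter for the model and delegates to it, which is what the module-level alias exported from `__init__.py` ends up calling. A short sketch of that call path, assuming a hypothetical `MyModel` with a `published` field:

```python
from algoliasearch_django import algolia_engine

from myapp.models import MyModel  # hypothetical app and model

# Equivalent to algoliasearch_django.save_records(MyModel, qs): the engine looks
# up the AlgoliaIndex adapter registered for MyModel and calls
# adapter.save_records(qs, **kwargs) on it.
qs = MyModel.objects.filter(published=True)
algolia_engine.save_records(MyModel, qs, batch_size=1000)
```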
