Merge pull request #4695 from FinnStutzenstein/logging
Logging the cache buildup process
commit 4495985d4c
@@ -1,5 +1,6 @@
 import asyncio
 import json
+import logging
 from collections import defaultdict
 from datetime import datetime
 from time import sleep
@@ -19,6 +20,9 @@ from .redis import use_redis
 from .utils import get_element_id, split_element_id
 
 
+logger = logging.getLogger(__name__)
+
+
 class ElementCache:
     """
     Cache for the elements.
@@ -99,6 +103,7 @@ class ElementCache:
         lock_name = "ensure_cache"
         # Set a lock so only one process builds the cache
         if async_to_sync(self.cache_provider.set_lock)(lock_name):
+            logger.info("Building up the cache data...")
             try:
                 mapping = {}
                 for collection_string, cachable in self.cachables.items():
@@ -110,12 +115,17 @@ class ElementCache:
                                 ): json.dumps(element)
                             }
                         )
+                logger.info("Done building the cache data.")
+                logger.info("Saving cache data into the cache...")
                 async_to_sync(self.cache_provider.reset_full_cache)(mapping)
+                logger.info("Done saving the cache data.")
             finally:
                 async_to_sync(self.cache_provider.del_lock)(lock_name)
         else:
+            logger.info("Wait for another process to build up the cache...")
             while async_to_sync(self.cache_provider.get_lock)(lock_name):
                 sleep(0.01)
+            logger.info("Cache is ready (built by another process).")
 
         self.ensured = True
@@ -1,3 +1,5 @@
+import logging
+import time
 from typing import Any, Dict, List, Optional
 
 from django.core.exceptions import ImproperlyConfigured
@@ -9,6 +11,9 @@ from .rest_api import model_serializer_classes
 from .utils import convert_camel_case_to_pseudo_snake_case
 
 
+logger = logging.getLogger(__name__)
+
+
 class MinMaxIntegerField(models.IntegerField):
     """
     IntegerField with options to set a min- and a max-value.
@@ -120,6 +125,7 @@ class RESTModelMixin:
         """
        Returns all elements as full_data.
         """
+        logger.info(f"Loading {cls.get_collection_string()}")
         # Get the query to receive all data from the database.
         try:
             query = cls.objects.get_full_queryset()  # type: ignore
@@ -129,7 +135,21 @@ class RESTModelMixin:
             query = cls.objects  # type: ignore
 
         # Build a dict from the instance id to the full_data
-        return [instance.get_full_data() for instance in query.all()]
+        instances = query.all()
+        full_data = []
+
+        # For logging the progress
+        last_time = time.time()
+        instances_length = len(instances)
+        for i, instance in enumerate(instances):
+            # Append full data from this instance
+            full_data.append(instance.get_full_data())
+            # log progress every 5 seconds
+            current_time = time.time()
+            if current_time > last_time + 5:
+                last_time = current_time
+                logger.info(f"\t{i+1}/{instances_length}...")
+        return full_data
 
     @classmethod
     async def restrict_elements(
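get_elements now reports progress while it serializes a collection, but throttles the output so that at most one line is written every five seconds regardless of how many instances there are. The same throttling idea in isolation (a sketch; items, process_item and interval are placeholder names, not part of OpenSlides):

import logging
import time

logger = logging.getLogger(__name__)

def collect_with_progress(items, process_item, interval=5.0):
    # Process every item, logging "i/total" at most once per `interval` seconds.
    results = []
    last_time = time.time()
    total = len(items)
    for i, item in enumerate(items):
        results.append(process_item(item))
        current_time = time.time()
        if current_time > last_time + interval:
            last_time = current_time
            logger.info(f"\t{i + 1}/{total}...")
    return results

All of the new messages are emitted at INFO level, so they only show up if the logging configuration lets INFO records from these modules through (for example logging.basicConfig(level=logging.INFO) in a standalone script, or a suitable LOGGING setting in Django).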