Merge pull request #4695 from FinnStutzenstein/logging

Logging the cache buildup process
Emanuel Schütze 2019-05-10 16:39:29 +02:00 committed by GitHub
commit 4495985d4c
2 changed files with 31 additions and 1 deletion
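
The messages added in this commit go through logging.getLogger(__name__) at INFO level, so they only show up if the project's logging configuration lets INFO records through. A minimal sketch of such a LOGGING setting (Django dictConfig format), assuming the changed modules live under the openslides package; the handler and formatter names are illustrative and not part of this commit:

    # settings.py -- minimal sketch; only the "openslides" logger name
    # follows from the module paths, everything else is an assumption.
    LOGGING = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {"format": "%(asctime)s %(name)s %(levelname)s %(message)s"}
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "console"}
        },
        "loggers": {
            # logging.getLogger(__name__) in the changed files resolves to
            # children of this logger, so one entry covers both modules.
            "openslides": {"handlers": ["console"], "level": "INFO"}
        },
    }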

openslides/utils/cache.py

@@ -1,5 +1,6 @@
 import asyncio
 import json
+import logging
 from collections import defaultdict
 from datetime import datetime
 from time import sleep
@@ -19,6 +20,9 @@ from .redis import use_redis
 from .utils import get_element_id, split_element_id


+logger = logging.getLogger(__name__)
+
+
 class ElementCache:
     """
     Cache for the elements.
@@ -99,6 +103,7 @@ class ElementCache:
         lock_name = "ensure_cache"
         # Set a lock so only one process builds the cache
         if async_to_sync(self.cache_provider.set_lock)(lock_name):
+            logger.info("Building up the cache data...")
             try:
                 mapping = {}
                 for collection_string, cachable in self.cachables.items():
@@ -110,12 +115,17 @@
                                 ): json.dumps(element)
                             }
                         )
+                logger.info("Done building the cache data.")
+                logger.info("Saving cache data into the cache...")
                 async_to_sync(self.cache_provider.reset_full_cache)(mapping)
+                logger.info("Done saving the cache data.")
             finally:
                 async_to_sync(self.cache_provider.del_lock)(lock_name)
         else:
+            logger.info("Wait for another process to build up the cache...")
             while async_to_sync(self.cache_provider.get_lock)(lock_name):
                 sleep(0.01)
+            logger.info("Cache is ready (built by another process).")
         self.ensured = True
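
The hunk above implements a build-once protocol: the process that wins the lock builds and saves the cache, and every other process polls get_lock until the winner releases it in the finally block. A self-contained sketch of the same flow, with a hypothetical in-process stand-in for the provider's lock API (the commit itself calls set_lock, get_lock and del_lock on a Redis or memory cache provider through async_to_sync):

    import logging
    import time

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Hypothetical in-process stand-in for the provider's lock API.
    _locks: set = set()

    def set_lock(name: str) -> bool:
        """Acquire the named lock; True only for the first caller."""
        if name in _locks:
            return False
        _locks.add(name)
        return True

    def get_lock(name: str) -> bool:
        return name in _locks

    def del_lock(name: str) -> None:
        _locks.discard(name)

    def ensure_cache(build_and_save) -> None:
        lock_name = "ensure_cache"
        if set_lock(lock_name):
            logger.info("Building up the cache data...")
            try:
                build_and_save()
                logger.info("Done building the cache data.")
            finally:
                # Release even on failure so waiting processes are not
                # stuck polling a stale lock forever.
                del_lock(lock_name)
        else:
            logger.info("Wait for another process to build up the cache...")
            while get_lock(lock_name):
                time.sleep(0.01)
            logger.info("Cache is ready (built by another process).")

    ensure_cache(lambda: None)

The dict-based stand-in only works inside one process; a real provider has to make the acquire step atomic (a set-if-not-exists operation such as Redis SETNX), otherwise two processes can both believe they won the lock.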

openslides/utils/models.py

@@ -1,3 +1,5 @@
+import logging
+import time
 from typing import Any, Dict, List, Optional

 from django.core.exceptions import ImproperlyConfigured
@@ -9,6 +11,9 @@ from .rest_api import model_serializer_classes
 from .utils import convert_camel_case_to_pseudo_snake_case


+logger = logging.getLogger(__name__)
+
+
 class MinMaxIntegerField(models.IntegerField):
     """
     IntegerField with options to set a min- and a max-value.
@@ -120,6 +125,7 @@ class RESTModelMixin:
         """
         Returns all elements as full_data.
         """
+        logger.info(f"Loading {cls.get_collection_string()}")
         # Get the query to receive all data from the database.
         try:
             query = cls.objects.get_full_queryset()  # type: ignore
@@ -129,7 +135,21 @@
             query = cls.objects  # type: ignore

         # Build a dict from the instance id to the full_data
-        return [instance.get_full_data() for instance in query.all()]
+        instances = query.all()
+        full_data = []
+
+        # For logging the progress
+        last_time = time.time()
+        instances_length = len(instances)
+        for i, instance in enumerate(instances):
+            # Append full data from this instance
+            full_data.append(instance.get_full_data())
+            # log progress every 5 seconds
+            current_time = time.time()
+            if current_time > last_time + 5:
+                last_time = current_time
+                logger.info(f"\t{i+1}/{instances_length}...")
+        return full_data

     @classmethod
     async def restrict_elements(
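
The rewritten method trades the one-line list comprehension for an explicit loop so it can report progress, rate-limited to one log line every five seconds: small collections stay silent and large ones do not flood the log. The same pattern extracted into a reusable generator, a sketch that is not part of the commit (log_progress and its call site are hypothetical):

    import logging
    import time
    from typing import Iterator, Sequence, TypeVar

    logger = logging.getLogger(__name__)
    T = TypeVar("T")

    def log_progress(items: Sequence[T], interval: float = 5.0) -> Iterator[T]:
        """Yield the items, logging "<done>/<total>..." at most once
        per `interval` seconds."""
        last_time = time.time()
        total = len(items)
        for i, item in enumerate(items):
            yield item
            # Same rate limiting as the loop above: check the clock
            # after each item, but only log when the interval is over.
            current_time = time.time()
            if current_time > last_time + interval:
                last_time = current_time
                logger.info(f"\t{i + 1}/{total}...")

    # Hypothetical call site mirroring the rewritten method:
    # full_data = [instance.get_full_data() for instance in log_progress(query.all())]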