Merge pull request #4548 from ostcar/fix_mass_decoding

decode only the needed data when calculating the required users
Emanuel Schütze 2019-04-02 11:03:39 +02:00 committed by GitHub
commit 7acfb7f080
6 changed files with 61 additions and 8 deletions
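
At a glance, the change replaces whole-cache decoding with per-collection lookups. A minimal before/after sketch run inside a coroutine (collection name and data shape illustrative; only get_collection_full_data is taken from this diff):

    # Before: every element of every collection is fetched and JSON-decoded.
    all_full_data = await element_cache.get_all_full_data()
    users = all_full_data.get("users/user", [])

    # After: only the requested collection is fetched and decoded,
    # returned as a dict keyed by element id.
    users_by_id = await element_cache.get_collection_full_data("users/user")
    users = users_by_id.values()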


@@ -68,8 +68,10 @@ class ConfigHandler:
         This uses the element_cache. It expects that the config values are in the database
         before this is called.
         """
-        all_data = await element_cache.get_all_full_data()
-        elements = all_data[self.get_collection_string()]
+        config_full_data = await element_cache.get_collection_full_data(
+            self.get_collection_string()
+        )
+        elements = config_full_data.values()
         self.key_to_id = {}
         for element in elements:
             self.key_to_id[element["key"]] = element["id"]
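
The effect of this loop, with a toy config element (key and value invented for illustration):

    # As get_collection_full_data would return it:
    config_full_data = {1: {"id": 1, "key": "general_event_name", "value": "OpenSlides"}}

    key_to_id = {}
    for element in config_full_data.values():
        key_to_id[element["key"]] = element["id"]

    # key_to_id == {"general_event_name": 1}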


@@ -492,9 +492,9 @@ class WhoAmIDataView(APIView):
         # collect all permissions
         permissions: Set[str] = set()
-        group_all_data = async_to_sync(element_cache.get_all_full_data_ordered)()[
-            "users/group"
-        ]
+        group_all_data = async_to_sync(element_cache.get_collection_full_data)(
+            "users/group"
+        )
         for group_id in group_ids:
             permissions.update(group_all_data[group_id]["permissions"])
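
The return shape changes with this hunk; a toy comparison (group data and permission string invented):

    # Old call, keyed by collection string, then id:
    ordered = {"users/group": {2: {"id": 2, "permissions": ["core.can_see"]}}}
    # New call, keyed by id directly:
    per_collection = {2: {"id": 2, "permissions": ["core.can_see"]}}

    # The indexing used in the hunk works the same on the new shape:
    assert per_collection[2]["permissions"] == ordered["users/group"][2]["permissions"]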


@@ -85,16 +85,17 @@ class RequiredUsers:
         """
         user_ids: Set[int] = set()
-        all_full_data = await element_cache.get_all_full_data()
         for collection_string in collection_strings:
+            collection_full_data = await element_cache.get_collection_full_data(
+                collection_string
+            )
             # Get the callable for the collection_string
             get_user_ids = self.callables.get(collection_string)
-            elements = all_full_data.get(collection_string, {})
-            if not (get_user_ids and elements):
+            if not (get_user_ids and collection_full_data):
                 # if the collection_string is unknown or it has no data, do nothing
                 continue
-            for element in elements:
+            for element in collection_full_data.values():
                 user_ids.update(get_user_ids(element))
         return user_ids
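
For illustration, a hypothetical callable of the kind self.callables maps a collection string to (the field name "submitters_id" is assumed, not taken from this diff):

    from typing import Any, Dict, Set

    def motion_user_ids(element: Dict[str, Any]) -> Set[int]:
        # Hypothetical: gather every user id a motion-like element references.
        return set(element.get("submitters_id", []))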


@@ -224,6 +224,17 @@ class ElementCache:
             deleted_elements,
         )
 
+    async def get_collection_full_data(
+        self, collection_string: str
+    ) -> Dict[int, Dict[str, Any]]:
+        full_data = await self.cache_provider.get_collection_data(collection_string)
+        out = {}
+        for element_id, data in full_data.items():
+            returned_collection_string, id = split_element_id(element_id)
+            if returned_collection_string == collection_string:
+                out[id] = json.loads(data.decode())
+        return out
+
     async def get_element_full_data(
         self, collection_string: str, id: int
     ) -> Optional[Dict[str, Any]]:
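
How this method maps raw cache entries to a per-id dict, with toy bytes; the "collection:id" key format is implied by the f"{collection}:*" match pattern in the provider below, and rpartition stands in for split_element_id:

    import json

    # Raw provider output for one collection (data invented for illustration):
    raw = {b"users/user:1": b'{"id": 1, "username": "admin"}'}

    out = {}
    for element_id, data in raw.items():
        # Stand-in for split_element_id: split "collection:id" on the last colon.
        collection, _, id_str = element_id.decode().rpartition(":")
        if collection == "users/user":
            out[int(id_str)] = json.loads(data.decode())

    # out == {1: {"id": 1, "username": "admin"}}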


@@ -44,6 +44,11 @@ class ElementCacheProvider(Protocol):
     async def get_all_data(self, user_id: Optional[int] = None) -> Dict[bytes, bytes]:
         ...
 
+    async def get_collection_data(
+        self, collection: str, user_id: Optional[int] = None
+    ) -> Dict[bytes, bytes]:
+        ...
+
     async def get_data_since(
         self, change_id: int, user_id: Optional[int] = None, max_change_id: int = -1
     ) -> Tuple[Dict[str, List[bytes]], List[str]]:
@@ -204,6 +209,23 @@ class RedisCacheProvider:
         async with get_connection() as redis:
             return await redis.hgetall(cache_key)
 
+    async def get_collection_data(
+        self, collection: str, user_id: Optional[int] = None
+    ) -> Dict[bytes, bytes]:
+        """
+        Returns all elements for a collection from the cache.
+        """
+        if user_id is None:
+            cache_key = self.get_full_data_cache_key()
+        else:
+            cache_key = self.get_restricted_data_cache_key(user_id)
+        async with get_connection() as redis:
+            out = {}
+            async for k, v in redis.ihscan(cache_key, match=f"{collection}:*"):
+                out[k] = v
+            return out
+
     async def get_element(
         self, element_id: str, user_id: Optional[int] = None
     ) -> Optional[bytes]:
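
ihscan wraps Redis HSCAN with a MATCH pattern, so field names are filtered server-side in chunks rather than transferring the whole hash as the hgetall-based get_all_data above does. A standalone sketch under the same aioredis 1.x assumption:

    from typing import Dict

    import aioredis  # assumption: aioredis 1.x, whose client exposes ihscan

    async def collection_entries(
        redis: aioredis.Redis, cache_key: str, collection: str
    ) -> Dict[bytes, bytes]:
        # Only entries whose field name matches "<collection>:*" cross the wire;
        # collecting into a dict also deduplicates any repeats HSCAN may yield.
        out: Dict[bytes, bytes] = {}
        async for field, value in redis.ihscan(cache_key, match=f"{collection}:*"):
            out[field] = value
        return out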
@@ -435,6 +457,20 @@ class MemmoryCacheProvider:
         return str_dict_to_bytes(cache_dict)
 
+    async def get_collection_data(
+        self, collection: str, user_id: Optional[int] = None
+    ) -> Dict[bytes, bytes]:
+        if user_id is None:
+            cache_dict = self.full_data
+        else:
+            cache_dict = self.restricted_data.get(user_id, {})
+
+        out = {}
+        for key, value in cache_dict.items():
+            if key.startswith(f"{collection}:"):
+                out[key] = value
+        return str_dict_to_bytes(out)
+
     async def get_element(
         self, element_id: str, user_id: Optional[int] = None
     ) -> Optional[bytes]:
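
The in-memory provider stores plain strings and converts on the way out. str_dict_to_bytes is not shown in this diff; given the Dict[bytes, bytes] return type it plausibly reduces to something like the following (an assumption, not the project's code):

    from typing import Dict

    def str_dict_to_bytes(str_dict: Dict[str, str]) -> Dict[bytes, bytes]:
        # Assumed behavior: encode keys and values so the in-memory provider
        # returns the same Dict[bytes, bytes] shape as the Redis provider.
        return {key.encode(): value.encode() for key, value in str_dict.items()}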


@@ -244,6 +244,9 @@ class ListModelMixin(_ListModelMixin):
             # The corresponding queryset does not support caching.
             response = super().list(request, *args, **kwargs)
         else:
+            # This loads all data from the cache, not only the requested data.
+            # If we were to use the rest api for this, we should add a method
+            # element_cache.get_collection_restricted_data.
             all_restricted_data = async_to_sync(element_cache.get_all_restricted_data)(
                 request.user.pk or 0
             )
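
The suggested element_cache.get_collection_restricted_data does not exist in this PR; a hypothetical sketch of how it could slot into ElementCache, mirroring get_collection_full_data from the fourth file and reusing the provider's user_id parameter (populating the restricted-data cache first is left out here):

    # Hypothetical method, not part of this commit:
    async def get_collection_restricted_data(
        self, user_id: int, collection_string: str
    ) -> Dict[int, Dict[str, Any]]:
        restricted = await self.cache_provider.get_collection_data(
            collection_string, user_id=user_id
        )
        out = {}
        for element_id, data in restricted.items():
            returned_collection_string, id = split_element_id(element_id)
            if returned_collection_string == collection_string:
                out[id] = json.loads(data.decode())
        return out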