Merge pull request #4548 from ostcar/fix_mass_decoding
decode only the needed data when calculating the required users
commit 7acfb7f080
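In short: callers that previously fetched and JSON-decoded the entire cache via element_cache.get_all_full_data() now request a single collection with the new get_collection_full_data(). A minimal usage sketch (the surrounding coroutine and the import path are assumptions; "users/group" is just an example collection string):

    # Sketch only: assumes the element_cache singleton from openslides.utils.cache.
    from openslides.utils.cache import element_cache

    async def group_names() -> list:
        # Fetches and decodes only the "users/group" collection instead of
        # every collection in the cache.
        groups = await element_cache.get_collection_full_data("users/group")
        return [group["name"] for group in groups.values()]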
@@ -68,8 +68,10 @@ class ConfigHandler:
         This uses the element_cache. It expects, that the config values are in the database
         before this is called.
         """
-        all_data = await element_cache.get_all_full_data()
-        elements = all_data[self.get_collection_string()]
+        config_full_data = await element_cache.get_collection_full_data(
+            self.get_collection_string()
+        )
+        elements = config_full_data.values()
         self.key_to_id = {}
         for element in elements:
             self.key_to_id[element["key"]] = element["id"]
@@ -492,9 +492,9 @@ class WhoAmIDataView(APIView):

         # collect all permissions
         permissions: Set[str] = set()
-        group_all_data = async_to_sync(element_cache.get_all_full_data_ordered)()[
+        group_all_data = async_to_sync(element_cache.get_collection_full_data)(
             "users/group"
-        ]
+        )
         for group_id in group_ids:
             permissions.update(group_all_data[group_id]["permissions"])

@@ -85,16 +85,17 @@ class RequiredUsers:
        """
        user_ids: Set[int] = set()

-        all_full_data = await element_cache.get_all_full_data()
        for collection_string in collection_strings:
+            collection_full_data = await element_cache.get_collection_full_data(
+                collection_string
+            )
            # Get the callable for the collection_string
            get_user_ids = self.callables.get(collection_string)
-            elements = all_full_data.get(collection_string, {})
-            if not (get_user_ids and elements):
+            if not (get_user_ids and collection_full_data):
                # if the collection_string is unknown or it has no data, do nothing
                continue

-            for element in elements:
+            for element in collection_full_data.values():
                user_ids.update(get_user_ids(element))

        return user_ids
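For context, self.callables maps a collection string to a callable that extracts user ids from a single element (that contract is visible from the user_ids.update(get_user_ids(element)) call above). A hypothetical callable of that shape, with an invented field name:

    from typing import Any, Dict, Set

    def motion_user_ids(element: Dict[str, Any]) -> Set[int]:
        # Hypothetical example: "submitters_id" is an illustrative field name,
        # not taken from this diff. The only contract the loop above relies on
        # is: element dict in, iterable of user ids out.
        return set(element.get("submitters_id", []))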
@@ -224,6 +224,17 @@ class ElementCache:
             deleted_elements,
         )

+    async def get_collection_full_data(
+        self, collection_string: str
+    ) -> Dict[int, Dict[str, Any]]:
+        full_data = await self.cache_provider.get_collection_data(collection_string)
+        out = {}
+        for element_id, data in full_data.items():
+            returned_collection_string, id = split_element_id(element_id)
+            if returned_collection_string == collection_string:
+                out[id] = json.loads(data.decode())
+        return out
+
     async def get_element_full_data(
         self, collection_string: str, id: int
     ) -> Optional[Dict[str, Any]]:
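The provider returns raw bytes keyed by full element ids of the form "<collection>:<id>"; this method filters them by collection and JSON-decodes them into a dict keyed by the integer id. A standalone sketch of that transformation (the sample data is invented, and the inline split is only a stand-in for the project's split_element_id helper):

    import json
    from typing import Any, Dict

    # Raw provider output as returned by get_collection_data: element id -> JSON bytes.
    raw: Dict[bytes, bytes] = {
        b"users/group:2": b'{"id": 2, "permissions": ["users.can_see_name"]}',
        b"core/config:1": b'{"id": 1, "key": "general_event_name"}',
    }

    def collection_full_data(data: Dict[bytes, bytes], collection: str) -> Dict[int, Dict[str, Any]]:
        out = {}
        for element_id, value in data.items():
            # Stand-in for split_element_id: split "<collection>:<id>" on the last colon.
            found_collection, _, id_str = element_id.decode().rpartition(":")
            if found_collection == collection:
                out[int(id_str)] = json.loads(value.decode())
        return out

    assert collection_full_data(raw, "users/group") == {
        2: {"id": 2, "permissions": ["users.can_see_name"]}
    }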
@@ -44,6 +44,11 @@ class ElementCacheProvider(Protocol):
     async def get_all_data(self, user_id: Optional[int] = None) -> Dict[bytes, bytes]:
         ...

+    async def get_collection_data(
+        self, collection: str, user_id: Optional[int] = None
+    ) -> Dict[bytes, bytes]:
+        ...
+
     async def get_data_since(
         self, change_id: int, user_id: Optional[int] = None, max_change_id: int = -1
     ) -> Tuple[Dict[str, List[bytes]], List[str]]:
@@ -204,6 +209,23 @@ class RedisCacheProvider:
         async with get_connection() as redis:
             return await redis.hgetall(cache_key)

+    async def get_collection_data(
+        self, collection: str, user_id: Optional[int] = None
+    ) -> Dict[bytes, bytes]:
+        """
+        Returns all elements for a collection from the cache.
+        """
+        if user_id is None:
+            cache_key = self.get_full_data_cache_key()
+        else:
+            cache_key = self.get_restricted_data_cache_key(user_id)
+
+        async with get_connection() as redis:
+            out = {}
+            async for k, v in redis.ihscan(cache_key, match=f"{collection}:*"):
+                out[k] = v
+            return out
+
     async def get_element(
         self, element_id: str, user_id: Optional[int] = None
     ) -> Optional[bytes]:
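The Redis provider leans on HSCAN with a MATCH pattern, so only hash fields whose names start with "<collection>:" are iterated and transferred; ihscan is aioredis' async iterator around that command. A rough standalone sketch, assuming aioredis v1 (which provides ihscan) and a local Redis; the hash name and sample fields are simplified stand-ins for the provider's real cache keys:

    import asyncio

    import aioredis  # assumption: aioredis v1.x

    async def main() -> None:
        redis = await aioredis.create_redis_pool("redis://localhost")
        try:
            # Simplified stand-in for the provider's full-data hash.
            await redis.hmset_dict(
                "full_data",
                {"users/group:2": '{"id": 2}', "core/config:1": '{"id": 1}'},
            )
            out = {}
            # HSCAN full_data ... MATCH "users/group:*" filters fields server-side.
            async for field, value in redis.ihscan("full_data", match="users/group:*"):
                out[field] = value
            print(out)  # only the users/group element comes back
        finally:
            redis.close()
            await redis.wait_closed()

    asyncio.run(main())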
@@ -435,6 +457,20 @@ class MemmoryCacheProvider:

         return str_dict_to_bytes(cache_dict)

+    async def get_collection_data(
+        self, collection: str, user_id: Optional[int] = None
+    ) -> Dict[bytes, bytes]:
+        if user_id is None:
+            cache_dict = self.full_data
+        else:
+            cache_dict = self.restricted_data.get(user_id, {})
+
+        out = {}
+        for key, value in cache_dict.items():
+            if key.startswith(f"{collection}:"):
+                out[key] = value
+        return str_dict_to_bytes(out)
+
     async def get_element(
         self, element_id: str, user_id: Optional[int] = None
     ) -> Optional[bytes]:
@@ -244,6 +244,9 @@ class ListModelMixin(_ListModelMixin):
             # The corresponding queryset does not support caching.
             response = super().list(request, *args, **kwargs)
         else:
+            # This loads all data from the cache, not only the requested data.
+            # If we would use the rest api, we should add a method
+            # element_cache.get_collection_restricted_data
             all_restricted_data = async_to_sync(element_cache.get_all_restricted_data)(
                 request.user.pk or 0
             )
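The added comment points at a possible follow-up. Purely as an illustration (not part of this PR), such a method on ElementCache could mirror get_collection_full_data above and pass the user_id that the provider protocol already accepts; real code would presumably also have to make sure the user's restricted cache is filled first:

    # Hypothetical sketch of the method the comment proposes; the name and the
    # restricted-cache handling are assumptions, not code from this commit.
    async def get_collection_restricted_data(
        self, user_id: int, collection_string: str
    ) -> Dict[int, Dict[str, Any]]:
        restricted_data = await self.cache_provider.get_collection_data(
            collection_string, user_id
        )
        out = {}
        for element_id, data in restricted_data.items():
            returned_collection_string, id = split_element_id(element_id)
            if returned_collection_string == collection_string:
                out[id] = json.loads(data.decode())
        return out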