import asyncio
import json
from collections import defaultdict
from datetime import datetime
from time import sleep
from typing import Any, Callable, Dict, List, Optional, Tuple, Type

from asgiref.sync import async_to_sync
from django.conf import settings

from .cache_providers import (
    Cachable,
    ElementCacheProvider,
    MemmoryCacheProvider,
    RedisCacheProvider,
    get_all_cachables,
)
from .redis import use_redis
from .utils import get_element_id, split_element_id


class ElementCache:
    """
    Cache for the elements.

    Saves the full_data and, if enabled, the restricted data.

    There is one redis Hash (similar to a python dict) for the full_data and
    one Hash for every user.

    The key of the Hashes is COLLECTIONSTRING:ID where COLLECTIONSTRING is the
    collection_string of a collection and ID is the id of an element.

    All elements have to be in the cache. If one element is missing, the cache
    is invalid, but this can not be detected. When a plugin with a new
    collection is added to OpenSlides, the cache has to be rebuilt manually.

    There is a sorted set in redis with the change_id as score. The values are
    COLLECTIONSTRING:ID for the elements that have been changed with that
    change_id. With this set it is possible to get all elements as full_data
    or as restricted_data that are newer than a specific change_id.

    All methods of this class are async. You either have to call them with
    await in an async environment or use asgiref.sync.async_to_sync().
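
    A minimal usage sketch (element_cache is the module-level instance created
    at the bottom of this file):

        # In an async context:
        change_id = await element_cache.get_current_change_id()

        # In sync code:
        all_data = async_to_sync(element_cache.get_all_full_data)()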
    """

    def __init__(
        self,
        use_restricted_data_cache: bool = False,
        cache_provider_class: Type[ElementCacheProvider] = RedisCacheProvider,
        cachable_provider: Callable[[], List[Cachable]] = get_all_cachables,
        start_time: Optional[int] = None,
    ) -> None:
"""
|
2018-07-09 23:22:26 +02:00
|
|
|
Initializes the cache.
|
2016-12-17 09:30:20 +01:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
When restricted_data_cache is false, no restricted data is saved.
|
2016-12-17 09:30:20 +01:00
|
|
|
"""
|
2018-07-09 23:22:26 +02:00
|
|
|
self.use_restricted_data_cache = use_restricted_data_cache
|
2018-11-01 17:30:18 +01:00
|
|
|
self.cache_provider = cache_provider_class()
|
2018-07-09 23:22:26 +02:00
|
|
|
self.cachable_provider = cachable_provider
|
2018-08-22 22:00:08 +02:00
|
|
|
self._cachables: Optional[Dict[str, Cachable]] = None
|
2016-12-17 09:30:20 +01:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
# Start time is used as first change_id if there is non in redis
|
|
|
|
if start_time is None:
|
2018-09-23 22:02:09 +02:00
|
|
|
# Use the miliseconds (rounted) since the 2016-02-29.
|
2019-01-06 16:22:33 +01:00
|
|
|
start_time = (
|
|
|
|
int((datetime.utcnow() - datetime(2016, 2, 29)).total_seconds()) * 1000
|
|
|
|
)
|
2018-07-09 23:22:26 +02:00
|
|
|
self.start_time = start_time
|
2016-12-17 09:30:20 +01:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
# Contains Futures to controll, that only one client updates the restricted_data.
|
2018-08-22 22:00:08 +02:00
|
|
|
self.restricted_data_cache_updater: Dict[int, asyncio.Future] = {}
|
2016-12-17 09:30:20 +01:00
|
|
|
|
2018-09-01 08:00:00 +02:00
|
|
|
# Tells if self.ensure_cache was called.
|
|
|
|
self.ensured = False
|
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
    @property
    def cachables(self) -> Dict[str, Cachable]:
        """
        Returns all Cachables as a dict where the key is the collection_string of the cachable.
        """
        # This method is necessary to lazy-load the cachables.
        if self._cachables is None:
            self._cachables = {
                cachable.get_collection_string(): cachable
                for cachable in self.cachable_provider()
            }
        return self._cachables

    def ensure_cache(self, reset: bool = False) -> None:
        """
        Makes sure that the cache exists.

        Builds the cache if not. If reset is True, it will be reset in any case.

        This method is sync, so it can be run when OpenSlides starts.
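
        A sketch of the intended startup usage (element_cache is the
        module-level instance created at the bottom of this file):

            element_cache.ensure_cache()            # build the cache if missing
            element_cache.ensure_cache(reset=True)  # force a rebuild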
        """
        cache_exists = async_to_sync(self.cache_provider.data_exists)()

        if reset or not cache_exists:
            lock_name = "ensure_cache"
            # Set a lock so only one process builds the cache.
            if async_to_sync(self.cache_provider.set_lock)(lock_name):
                try:
                    mapping = {}
                    for collection_string, cachable in self.cachables.items():
                        for element in cachable.get_elements():
                            mapping.update(
                                {
                                    get_element_id(
                                        collection_string, element["id"]
                                    ): json.dumps(element)
                                }
                            )
                    async_to_sync(self.cache_provider.reset_full_cache)(mapping)
                finally:
                    async_to_sync(self.cache_provider.del_lock)(lock_name)
            else:
                while async_to_sync(self.cache_provider.get_lock)(lock_name):
                    sleep(0.01)

        self.ensured = True

    async def change_elements(
        self, elements: Dict[str, Optional[Dict[str, Any]]]
    ) -> int:
        """
        Changes elements in the cache.

        elements is a dict that maps element_ids to the changed data. When the
        value is None, the element is interpreted as deleted.

        Returns the newly generated change_id.
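
        A sketch of the expected argument shape (the collection string and ids
        are only illustrative):

            change_id = await element_cache.change_elements(
                {
                    "motions/motion:1": {"id": 1, "title": "Changed title"},
                    "motions/motion:2": None,  # interpreted as deleted
                }
            )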
        """
        deleted_elements = []
        changed_elements = []
        for element_id, data in elements.items():
            if data:
                # The arguments for redis.hset are key/value pairs.
                changed_elements.append(element_id)
                changed_elements.append(json.dumps(data))
            else:
                deleted_elements.append(element_id)

        if changed_elements:
            await self.cache_provider.add_elements(changed_elements)
        if deleted_elements:
            await self.cache_provider.del_elements(deleted_elements)

        return await self.cache_provider.add_changed_elements(
            self.start_time + 1, elements.keys()
        )

    async def get_all_full_data(self) -> Dict[str, List[Dict[str, Any]]]:
        """
        Returns all full_data.

        The returned value is a dict where the key is the collection_string and
        the value is a list of data.
        """
        all_data = await self.get_all_full_data_ordered()
        out: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
        for collection_string, collection_data in all_data.items():
            for data in collection_data.values():
                out[collection_string].append(data)
        return dict(out)

    async def get_all_full_data_ordered(self) -> Dict[str, Dict[int, Dict[str, Any]]]:
        """
        Like get_all_full_data but the elements of each collection are mapped
        by their id.
        """
        out: Dict[str, Dict[int, Dict[str, Any]]] = defaultdict(dict)
        full_data = await self.cache_provider.get_all_data()
        for element_id, data in full_data.items():
            collection_string, id = split_element_id(element_id)
            out[collection_string][id] = json.loads(data.decode())
        return dict(out)

    async def get_full_data(
        self, change_id: int = 0, max_change_id: int = -1
    ) -> Tuple[Dict[str, List[Dict[str, Any]]], List[str]]:
        """
        Returns all full_data since change_id up to max_change_id (inclusive).
        A max_change_id of -1 means the highest change_id.

        Returns two values inside a tuple. The first value is a dict where the
        key is the collection_string and the value is a list of data. The
        second is a list of element_ids of deleted elements.

        Only returns elements with the change_id or newer. When change_id is 0,
        all elements are returned.

        Raises a RuntimeError when the lowest change_id in redis is higher than
        the requested change_id. In this case the method has to be rerun with
        change_id=0. This is important because there could be deleted elements
        that the cache does not know about.
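
        A sketch of the intended caller pattern:

            try:
                changed, deleted = await element_cache.get_full_data(change_id)
            except RuntimeError:
                # The cache does not reach back to change_id; request everything.
                changed, deleted = await element_cache.get_full_data(0)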
        """
        if change_id == 0:
            return (await self.get_all_full_data(), [])

        # This raises a RuntimeError if there is no change_id.
        lowest_change_id = await self.get_lowest_change_id()

        if change_id < lowest_change_id:
            # When change_id is lower than the lowest change_id in redis, we
            # can not inform the user about deleted elements.
            raise RuntimeError(
                f"change_id {change_id} is lower than the lowest change_id in redis {lowest_change_id}. "
                "Catch this exception and rerun the method with change_id=0."
            )

        raw_changed_elements, deleted_elements = await self.cache_provider.get_data_since(
            change_id, max_change_id=max_change_id
        )
        return (
            {
                collection_string: [json.loads(value.decode()) for value in value_list]
                for collection_string, value_list in raw_changed_elements.items()
            },
            deleted_elements,
        )

    async def get_element_full_data(
        self, collection_string: str, id: int
    ) -> Optional[Dict[str, Any]]:
        """
        Returns one element as full data.

        Returns None if the element does not exist.
        """
        element = await self.cache_provider.get_element(
            get_element_id(collection_string, id)
        )

        if element is None:
            return None
        return json.loads(element.decode())

    async def exists_restricted_data(self, user_id: int) -> bool:
        """
        Returns True if restricted_data exists for the user.
        """
        if not self.use_restricted_data_cache:
            return False

        return await self.cache_provider.data_exists(user_id)

    async def del_user(self, user_id: int) -> None:
        """
        Removes one user from the restricted_data_cache.
        """
        await self.cache_provider.del_restricted_data(user_id)

    async def update_restricted_data(self, user_id: int) -> None:
        """
        Updates the restricted data for a user from the full_data_cache.
        """
        # TODO: If elements are changed while this method runs, the cache could
        # become invalid. This could be fixed if get_full_data was used with a
        # max change_id.
        if not self.use_restricted_data_cache:
            # If the restricted_data_cache is not used, there is nothing to do.
            return

        # Try to write a special key.
        # If this succeeds, no one else is currently updating the cache.
        # TODO: Add a timeout. Otherwise this could block forever.
        lock_name = f"restricted_data_{user_id}"
        if await self.cache_provider.set_lock(lock_name):
            future: asyncio.Future = asyncio.Future()
            self.restricted_data_cache_updater[user_id] = future
            # Get the change_id for this user.
            value = await self.cache_provider.get_change_id_user(user_id)
            # If the change_id is not in the cache yet, use -1 to get all data since 0.
            user_change_id = int(value) if value else -1
            change_id = await self.get_current_change_id()
            if change_id > user_change_id:
                try:
                    full_data_elements, deleted_elements = await self.get_full_data(
                        user_change_id + 1
                    )
                except RuntimeError:
                    # The user_change_id is lower than the lowest change_id in the cache.
                    # The whole restricted_data for that user has to be recreated.
                    full_data_elements = await self.get_all_full_data()
                    deleted_elements = []
                    await self.cache_provider.del_restricted_data(user_id)

                mapping = {}
                for collection_string, full_data in full_data_elements.items():
                    restricter = self.cachables[collection_string].restrict_elements
                    restricted_elements = await restricter(user_id, full_data)

                    # Find all elements the user can not see at all.
                    full_data_ids = set(element["id"] for element in full_data)
                    restricted_data_ids = set(
                        element["id"] for element in restricted_elements
                    )
                    for item_id in full_data_ids - restricted_data_ids:
                        deleted_elements.append(
                            get_element_id(collection_string, item_id)
                        )

                    for element in restricted_elements:
                        # The user can see the element.
                        mapping.update(
                            {
                                get_element_id(
                                    collection_string, element["id"]
                                ): json.dumps(element)
                            }
                        )
                mapping["_config:change_id"] = str(change_id)
                await self.cache_provider.update_restricted_data(user_id, mapping)
                # Remove deleted elements.
                if deleted_elements:
                    await self.cache_provider.del_elements(deleted_elements, user_id)
            # Unset the lock.
            await self.cache_provider.del_lock(lock_name)
            future.set_result(1)
        else:
            # Wait until the update is finished.
            if user_id in self.restricted_data_cache_updater:
                # The active worker is on the same asgi server, so we can use the future.
                await self.restricted_data_cache_updater[user_id]
            else:
                while await self.cache_provider.get_lock(lock_name):
                    await asyncio.sleep(0.01)

    async def get_all_restricted_data(
        self, user_id: int
    ) -> Dict[str, List[Dict[str, Any]]]:
        """
        Like get_all_full_data but with restricted_data for a user.
        """
        if not self.use_restricted_data_cache:
            all_restricted_data = {}
            for collection_string, full_data in (
                await self.get_all_full_data()
            ).items():
                restricter = self.cachables[collection_string].restrict_elements
                elements = await restricter(user_id, full_data)
                all_restricted_data[collection_string] = elements
            return all_restricted_data

        await self.update_restricted_data(user_id)

        out: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
        restricted_data = await self.cache_provider.get_all_data(user_id)
        for element_id, data in restricted_data.items():
            if element_id.decode().startswith("_config"):
                continue
            collection_string, __ = split_element_id(element_id)
            out[collection_string].append(json.loads(data.decode()))
        return dict(out)

    async def get_restricted_data(
        self, user_id: int, change_id: int = 0, max_change_id: int = -1
    ) -> Tuple[Dict[str, List[Dict[str, Any]]], List[str]]:
        """
        Like get_full_data but with restricted_data for a user.
        """
        if change_id == 0:
            # Return all data.
            return (await self.get_all_restricted_data(user_id), [])

        if not self.use_restricted_data_cache:
            changed_elements, deleted_elements = await self.get_full_data(
                change_id, max_change_id
            )
            restricted_data = {}
            for collection_string, full_data in changed_elements.items():
                restricter = self.cachables[collection_string].restrict_elements
                elements = await restricter(user_id, full_data)
                restricted_data[collection_string] = elements
            return restricted_data, deleted_elements

        lowest_change_id = await self.get_lowest_change_id()
        if change_id < lowest_change_id:
            # When change_id is lower than the lowest change_id in redis, we
            # can not inform the user about deleted elements.
            raise RuntimeError(
                f"change_id {change_id} is lower than the lowest change_id in redis {lowest_change_id}. "
                "Catch this exception and rerun the method with change_id=0."
            )

        # If another coroutine or another daphne server also updates the restricted
        # data, this waits until it is done.
        await self.update_restricted_data(user_id)

        raw_changed_elements, deleted_elements = await self.cache_provider.get_data_since(
            change_id, user_id, max_change_id
        )
        return (
            {
                collection_string: [json.loads(value.decode()) for value in value_list]
                for collection_string, value_list in raw_changed_elements.items()
            },
            deleted_elements,
        )

    async def get_element_restricted_data(
        self, user_id: int, collection_string: str, id: int
    ) -> Optional[Dict[str, Any]]:
        """
        Returns the restricted_data of one element.

        Returns None if the element does not exist or the user has no permission to see it.
        """
        if not self.use_restricted_data_cache:
            full_data = await self.get_element_full_data(collection_string, id)
            if full_data is None:
                return None
            restricter = self.cachables[collection_string].restrict_elements
            restricted_data = await restricter(user_id, [full_data])
            return restricted_data[0] if restricted_data else None

        await self.update_restricted_data(user_id)

        out = await self.cache_provider.get_element(
            get_element_id(collection_string, id), user_id
        )
        return json.loads(out.decode()) if out else None

    async def get_current_change_id(self) -> int:
        """
        Returns the current change id.

        Returns start_time if there is no change id yet.
        """
        value = await self.cache_provider.get_current_change_id()
        if not value:
            return self.start_time
        # Return the score (second element) of the first (and only) element.
        return value[0][1]

    async def get_lowest_change_id(self) -> int:
        """
        Returns the lowest change id.

        Raises a RuntimeError if there is no change_id.
        """
        value = await self.cache_provider.get_lowest_change_id()
        if not value:
            raise RuntimeError("There is no known change_id.")
        # The cache provider returns the lowest score directly.
        return value


def load_element_cache(restricted_data: bool = True) -> ElementCache:
    """
    Generates an element cache instance.
    """
    if use_redis:
        cache_provider_class: Type[ElementCacheProvider] = RedisCacheProvider
    else:
        cache_provider_class = MemmoryCacheProvider

    return ElementCache(
        cache_provider_class=cache_provider_class,
        use_restricted_data_cache=restricted_data,
    )


# Set the element_cache
use_restricted_data = getattr(settings, "RESTRICTED_DATA_CACHE", True)
element_cache = load_element_cache(restricted_data=use_restricted_data)