Merge pull request #3843 from ostcar/ensuce_cache

ensure cache on startup

commit b6968fdfd5
@@ -21,6 +21,7 @@ class CoreAppConfig(AppConfig):
         # Import all required stuff.
         from .config import config
         from ..utils.rest_api import router
+        from ..utils.cache import element_cache
         from .projector import get_projector_elements
         from .signals import (
             delete_django_app_permissions,
@@ -74,6 +75,13 @@ class CoreAppConfig(AppConfig):
         router.register(self.get_model('ProjectorMessage').get_collection_string(), ProjectorMessageViewSet)
         router.register(self.get_model('Countdown').get_collection_string(), CountdownViewSet)
 
+        # Sets the cache
+        try:
+            element_cache.ensure_cache()
+        except (ImproperlyConfigured, OperationalError):
+            # This happens in the tests or in migrations. Do nothing
+            pass
+
     def get_config_variables(self):
         from .config_variables import get_config_variables
         return get_config_variables()
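For readers without the file open: these hunks live in `CoreAppConfig.ready()`, Django's once-per-startup hook, which is why the cache is built here and why errors from a not-yet-usable database must be swallowed. A minimal, self-contained sketch of the same pattern (the app name and cache module below are hypothetical, not from this commit):

    from django.apps import AppConfig
    from django.core.exceptions import ImproperlyConfigured
    from django.db.utils import OperationalError


    class ExampleAppConfig(AppConfig):
        name = 'example'

        def ready(self) -> None:
            # Imported late so the app registry is fully loaded (hypothetical module).
            from example.cache import element_cache
            try:
                element_cache.ensure_cache()
            except (ImproperlyConfigured, OperationalError):
                # Raised while migrations or test setup run before the
                # database is usable; the cache gets built later instead.
                pass

Catching `OperationalError` keeps `manage.py migrate` and test collection working, since both load the app registry before the tables exist.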
@@ -236,7 +236,6 @@ OnChangeType = Callable[[], None]
 ConfigVariableDict = TypedDict('ConfigVariableDict', {
     'key': str,
     'default_value': Any,
-    'value': Any,
     'input_type': str,
     'label': str,
     'help_text': str,
@@ -303,7 +302,6 @@ class ConfigVariable:
         return ConfigVariableDict(
             key=self.name,
             default_value=self.default_value,
-            value=config[self.name],
             input_type=self.input_type,
             label=self.label,
             help_text=self.help_text,
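The two hunks above drop the `value` entry from `ConfigVariableDict`, so a variable's dict form no longer embeds its current value. For reference, a sketch of the `mypy_extensions` TypedDict pattern in use here, trimmed to the fields visible in the hunk (the real dict has more entries, and the instance below is purely illustrative):

    from typing import Any

    from mypy_extensions import TypedDict

    ConfigVariableDict = TypedDict('ConfigVariableDict', {
        'key': str,
        'default_value': Any,
        'input_type': str,
        'label': str,
        'help_text': str,
    })

    example: ConfigVariableDict = {
        'key': 'example_var',  # illustrative key, not a real config name
        'default_value': 'default',
        'input_type': 'string',
        'label': 'Example variable',
        'help_text': '',
    }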
@@ -4,7 +4,6 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
 
 from asgiref.sync import async_to_sync
 from channels.layers import get_channel_layer
-from django.conf import settings
 from django.db.models import Model
 
 from .cache import element_cache, get_element_id
@@ -146,12 +145,7 @@ async def send_autoupdate(collection_elements: Iterable[CollectionElement]) -> None:
         else:
             cache_elements[element_id] = element.get_full_data()
 
-    if not getattr(settings, 'SKIP_CACHE', False):
-        # Hack for django 2.0 and channels 2.1 to stay in the same thread.
-        # This is needed for the tests.
-        change_id = await element_cache.change_elements(cache_elements)
-    else:
-        change_id = 1
+    change_id = await element_cache.change_elements(cache_elements)
 
     channel_layer = get_channel_layer()
     # TODO: don't await. They can be send in parallel
@@ -2,6 +2,7 @@ import asyncio
 import json
 from collections import defaultdict
 from datetime import datetime
+from time import sleep
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -13,8 +14,7 @@ from typing import (
     Type,
 )
 
-from asgiref.sync import sync_to_async
-from channels.db import database_sync_to_async
+from asgiref.sync import async_to_sync, sync_to_async
 from django.conf import settings
 
 from .cache_providers import (
@@ -83,6 +83,9 @@ class ElementCache:
         # Contains Futures to controll, that only one client updates the restricted_data.
         self.restricted_data_cache_updater: Dict[int, asyncio.Future] = {}
 
+        # Tells if self.ensure_cache was called.
+        self.ensured = False
+
     @property
     def cachables(self) -> Dict[str, Cachable]:
         """
@@ -93,33 +96,35 @@ class ElementCache:
             self._cachables = {cachable.get_collection_string(): cachable for cachable in self.cachable_provider()}
         return self._cachables
 
-    async def save_full_data(self, db_data: Dict[str, List[Dict[str, Any]]]) -> None:
-        """
-        Saves the full data.
-        """
-        mapping = {}
-        for collection_string, elements in db_data.items():
-            for element in elements:
-                mapping.update(
-                    {get_element_id(collection_string, element['id']):
-                        json.dumps(element)})
-        await self.cache_provider.reset_full_cache(mapping)
-
-    async def build_full_data(self) -> Dict[str, List[Dict[str, Any]]]:
-        """
-        Build or rebuild the full_data cache.
-        """
-        db_data = {}
-        for collection_string, cachable in self.cachables.items():
-            db_data[collection_string] = await database_sync_to_async(cachable.get_elements)()
-        await self.save_full_data(db_data)
-        return db_data
-
-    async def exists_full_data(self) -> bool:
-        """
-        Returns True, if the full_data_cache exists.
-        """
-        return await self.cache_provider.data_exists()
+    def ensure_cache(self, reset: bool = False) -> None:
+        """
+        Makes sure that the cache exist.
+
+        Builds the cache if not. If reset is True, it will be reset in any case.
+
+        This method is sync, so it can be run when OpenSlides starts.
+        """
+        cache_exists = async_to_sync(self.cache_provider.data_exists)()
+
+        if reset or not cache_exists:
+            lock_name = 'ensure_cache'
+            # Set a lock so only one process builds the cache
+            if async_to_sync(self.cache_provider.set_lock)(lock_name):
+                try:
+                    mapping = {}
+                    for collection_string, cachable in self.cachables.items():
+                        for element in cachable.get_elements():
+                            mapping.update(
+                                {get_element_id(collection_string, element['id']):
+                                    json.dumps(element)})
+                    async_to_sync(self.cache_provider.reset_full_cache)(mapping)
+                finally:
+                    async_to_sync(self.cache_provider.del_lock)(lock_name)
+            else:
+                while async_to_sync(self.cache_provider.get_lock)(lock_name):
+                    sleep(0.01)
+
+        self.ensured = True
 
     async def change_elements(
             self, elements: Dict[str, Optional[Dict[str, Any]]]) -> int:
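The new `ensure_cache()` replaces three async helpers (`save_full_data`, `build_full_data`, `exists_full_data`) with one synchronous entry point guarded by a provider-level lock: the first process to grab the lock builds the cache, everyone else polls until the lock is gone. A self-contained sketch of that build-or-wait pattern against a stand-in provider (none of this is OpenSlides code; the class below only mimics the lock API):

    import json
    from time import sleep

    from asgiref.sync import async_to_sync


    class FakeProvider:
        """Stand-in that mimics only the provider lock API used above."""

        def __init__(self) -> None:
            self.data: dict = {}
            self.locks: set = set()

        async def data_exists(self) -> bool:
            return bool(self.data)

        async def set_lock(self, name: str) -> bool:
            if name in self.locks:
                return False
            self.locks.add(name)
            return True

        async def get_lock(self, name: str) -> bool:
            return name in self.locks

        async def del_lock(self, name: str) -> None:
            self.locks.discard(name)

        async def reset_full_cache(self, mapping: dict) -> None:
            self.data = mapping


    def ensure_cache(provider: FakeProvider, elements: dict, reset: bool = False) -> None:
        if reset or not async_to_sync(provider.data_exists)():
            if async_to_sync(provider.set_lock)('ensure_cache'):
                try:
                    mapping = {key: json.dumps(value) for key, value in elements.items()}
                    async_to_sync(provider.reset_full_cache)(mapping)
                finally:
                    async_to_sync(provider.del_lock)('ensure_cache')
            else:
                # Another process holds the lock and is building the cache.
                while async_to_sync(provider.get_lock)('ensure_cache'):
                    sleep(0.01)


    provider = FakeProvider()
    ensure_cache(provider, {'topics/topic:1': {'id': 1, 'title': 'demo'}})
    assert async_to_sync(provider.data_exists)()

The `finally` releases the lock even when building fails, so callers spinning in the else branch cannot wait forever on a failed build in the same process.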
@@ -131,9 +136,6 @@ class ElementCache:
 
         Returns the new generated change_id.
         """
-        if not await self.exists_full_data():
-            await self.build_full_data()
-
         deleted_elements = []
         changed_elements = []
         for element_id, data in elements.items():
@@ -164,14 +166,11 @@ class ElementCache:
         The returned value is a dict where the key is the collection_string and
         the value is a list of data.
         """
-        if not await self.exists_full_data():
-            out = await self.build_full_data()
-        else:
-            out = defaultdict(list)
-            full_data = await self.cache_provider.get_all_data()
-            for element_id, data in full_data.items():
-                collection_string, __ = split_element_id(element_id)
-                out[collection_string].append(json.loads(data.decode()))
+        out: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
+        full_data = await self.cache_provider.get_all_data()
+        for element_id, data in full_data.items():
+            collection_string, __ = split_element_id(element_id)
+            out[collection_string].append(json.loads(data.decode()))
         return dict(out)
 
     async def get_full_data(
@@ -203,10 +202,6 @@ class ElementCache:
                 "Catch this exception and rerun the method with change_id=0."
                 .format(change_id, lowest_change_id))
 
-        if not await self.exists_full_data():
-            # If the cache does not exist, create it.
-            await self.build_full_data()
-
         raw_changed_elements, deleted_elements = await self.cache_provider.get_data_since(change_id)
         return (
             {collection_string: [json.loads(value.decode()) for value in value_list]
@@ -221,9 +216,6 @@ class ElementCache:
 
         Returns None if the element does not exist.
        """
-        if not await self.exists_full_data():
-            await self.build_full_data()
-
         element = await self.cache_provider.get_element(get_element_id(collection_string, id))
 
         if element is None:
@@ -261,7 +253,8 @@ class ElementCache:
         # Try to write a special key.
         # If this succeeds, there is noone else currently updating the cache.
         # TODO: Make a timeout. Else this could block forever
-        if await self.cache_provider.set_lock_restricted_data(get_user_id(user)):
+        lock_name = "restricted_data_{}".format(get_user_id(user))
+        if await self.cache_provider.set_lock(lock_name):
             future: asyncio.Future = asyncio.Future()
             self.restricted_data_cache_updater[get_user_id(user)] = future
             # Get change_id for this user
@@ -293,7 +286,7 @@ class ElementCache:
             mapping['_config:change_id'] = str(change_id)
             await self.cache_provider.update_restricted_data(get_user_id(user), mapping)
             # Unset the lock
-            await self.cache_provider.del_lock_restricted_data(get_user_id(user))
+            await self.cache_provider.del_lock(lock_name)
             future.set_result(1)
         else:
             # Wait until the update if finshed
@@ -301,7 +294,7 @@ class ElementCache:
                 # The active worker is on the same asgi server, we can use the future
                 await self.restricted_data_cache_updater[get_user_id(user)]
             else:
-                while await self.cache_provider.get_lock_restricted_data(get_user_id(user)):
+                while await self.cache_provider.get_lock(lock_name):
                     await asyncio.sleep(0.01)
 
     async def get_all_restricted_data(self, user: Optional['CollectionElement']) -> Dict[str, List[Dict[str, Any]]]:
@@ -412,6 +405,7 @@ def load_element_cache(redis_addr: str = '', restricted_data: bool = True) -> ElementCache:
     return ElementCache(redis=redis_addr, use_restricted_data_cache=restricted_data)
 
 
+# Set the element_cache
 redis_address = getattr(settings, 'REDIS_ADDRESS', '')
 use_restricted_data = getattr(settings, 'RESTRICTED_DATA_CACHE', True)
 element_cache = load_element_cache(redis_addr=redis_address, restricted_data=use_restricted_data)
@@ -52,7 +52,7 @@ class BaseCacheProvider:
     def get_change_id_cache_key(self) -> str:
         return self.change_id_cache_key
 
-    def clear_cache(self) -> None:
+    async def clear_cache(self) -> None:
         raise NotImplementedError("CacheProvider has to implement the method clear_cache().")
 
     async def reset_full_cache(self, data: Dict[str, str]) -> None:
@@ -82,14 +82,14 @@ class BaseCacheProvider:
     async def del_restricted_data(self, user_id: int) -> None:
         raise NotImplementedError("CacheProvider has to implement the method del_restricted_data().")
 
-    async def set_lock_restricted_data(self, user_id: int) -> bool:
-        raise NotImplementedError("CacheProvider has to implement the method set_lock_restricted_data().")
+    async def set_lock(self, lock_name: str) -> bool:
+        raise NotImplementedError("CacheProvider has to implement the method set_lock().")
 
-    async def get_lock_restricted_data(self, user_id: int) -> bool:
-        raise NotImplementedError("CacheProvider has to implement the method get_lock_restricted_data().")
+    async def get_lock(self, lock_name: str) -> bool:
+        raise NotImplementedError("CacheProvider has to implement the method get_lock().")
 
-    async def del_lock_restricted_data(self, user_id: int) -> None:
-        raise NotImplementedError("CacheProvider has to implement the method del_lock_restricted_data().")
+    async def del_lock(self, lock_name: str) -> None:
+        raise NotImplementedError("CacheProvider has to implement the method del_lock().")
 
     async def get_change_id_user(self, user_id: int) -> Optional[int]:
         raise NotImplementedError("CacheProvider has to implement the method get_change_id_user().")
@@ -104,6 +104,23 @@ class BaseCacheProvider:
         raise NotImplementedError("CacheProvider has to implement the method get_lowest_change_id().")
 
 
+class RedisConnectionContextManager:
+    """
+    Async context manager for connections
+    """
+    # TODO: contextlib.asynccontextmanager can be used in python 3.7
+
+    def __init__(self, redis_address: str) -> None:
+        self.redis_address = redis_address
+
+    async def __aenter__(self) -> 'aioredis.RedisConnection':
+        self.conn = await aioredis.create_redis(self.redis_address)
+        return self.conn
+
+    async def __aexit__(self, exc_type: Any, exc: Any, tb: Any) -> None:
+        self.conn.close()
+
+
 class RedisCacheProvider(BaseCacheProvider):
     """
     Cache provider that loads and saves the data to redis.
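The `# TODO` in the new class points at `contextlib.asynccontextmanager`, available from Python 3.7. A hedged sketch of that alternative, with the same open-on-enter, close-on-exit behaviour (not part of this commit; the address is an assumption and needs a reachable Redis):

    from contextlib import asynccontextmanager  # Python 3.7+

    import aioredis


    @asynccontextmanager
    async def redis_connection(address: str):
        conn = await aioredis.create_redis(address)
        try:
            yield conn
        finally:
            conn.close()


    async def demo() -> None:
        async with redis_connection('redis://localhost') as redis:
            await redis.set('demo_key', 'demo_value')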
@@ -113,22 +130,28 @@ class RedisCacheProvider(BaseCacheProvider):
     def __init__(self, redis: str) -> None:
         self.redis_address = redis
 
-    async def get_connection(self) -> 'aioredis.RedisConnection':
+    def get_connection(self) -> RedisConnectionContextManager:
         """
-        Returns a redis connection.
+        Returns contextmanager for a redis connection.
         """
-        if self.redis_pool is None:
-            self.redis_pool = await aioredis.create_redis_pool(self.redis_address)
-        return self.redis_pool
+        return RedisConnectionContextManager(self.redis_address)
+
+    async def clear_cache(self) -> None:
+        """
+        Deleted all cache entries created with this element cache.
+        """
+        async with self.get_connection() as redis:
+            # TODO: Fix me. Do only delete keys, that are created with this cache.
+            await redis.flushall()
 
     async def reset_full_cache(self, data: Dict[str, str]) -> None:
         """
         Deletes the cache and write new data in it.
         """
         # TODO: lua or transaction
-        redis = await self.get_connection()
-        await redis.delete(self.get_full_data_cache_key())
-        await redis.hmset_dict(self.get_full_data_cache_key(), data)
+        async with self.get_connection() as redis:
+            await redis.delete(self.get_full_data_cache_key())
+            await redis.hmset_dict(self.get_full_data_cache_key(), data)
 
     async def data_exists(self, user_id: Optional[int] = None) -> bool:
         """
@@ -137,12 +160,12 @@ class RedisCacheProvider(BaseCacheProvider):
         If user_id is None, the method tests for full_data. If user_id is an int, it tests
         for the restricted_data_cache for the user with the user_id. 0 is for anonymous.
         """
-        redis = await self.get_connection()
-        if user_id is None:
-            cache_key = self.get_full_data_cache_key()
-        else:
-            cache_key = self.get_restricted_data_cache_key(user_id)
-        return await redis.exists(cache_key)
+        async with self.get_connection() as redis:
+            if user_id is None:
+                cache_key = self.get_full_data_cache_key()
+            else:
+                cache_key = self.get_restricted_data_cache_key(user_id)
+            return await redis.exists(cache_key)
 
     async def add_elements(self, elements: List[str]) -> None:
         """
@@ -151,10 +174,10 @@ class RedisCacheProvider(BaseCacheProvider):
         elements is a list with an even len. the odd values are the element_ids and the even
         values are the elements. The elements have to be encoded, for example with json.
         """
-        redis = await self.get_connection()
-        await redis.hmset(
-            self.get_full_data_cache_key(),
-            *elements)
+        async with self.get_connection() as redis:
+            await redis.hmset(
+                self.get_full_data_cache_key(),
+                *elements)
 
     async def del_elements(self, elements: List[str], user_id: Optional[int] = None) -> None:
         """
@@ -165,14 +188,14 @@ class RedisCacheProvider(BaseCacheProvider):
         If user_id is None, the elements are deleted from the full_data cache. If user_id is an
         int, the elements are deleted one restricted_data_cache. 0 is for anonymous.
         """
-        redis = await self.get_connection()
-        if user_id is None:
-            cache_key = self.get_full_data_cache_key()
-        else:
-            cache_key = self.get_restricted_data_cache_key(user_id)
-        await redis.hdel(
-            cache_key,
-            *elements)
+        async with self.get_connection() as redis:
+            if user_id is None:
+                cache_key = self.get_full_data_cache_key()
+            else:
+                cache_key = self.get_restricted_data_cache_key(user_id)
+            await redis.hdel(
+                cache_key,
+                *elements)
 
     async def add_changed_elements(self, change_id: int, element_ids: Iterable[str]) -> None:
         """
@@ -189,10 +212,10 @@ class RedisCacheProvider(BaseCacheProvider):
             yield change_id
             yield element_id
 
-        redis = await self.get_connection()
-        await redis.zadd(self.get_change_id_cache_key(), *zadd_args(change_id))
-        # Saves the lowest_change_id if it does not exist
-        await redis.zadd(self.get_change_id_cache_key(), change_id, '_config:lowest_change_id', exist='ZSET_IF_NOT_EXIST')
+        async with self.get_connection() as redis:
+            await redis.zadd(self.get_change_id_cache_key(), *zadd_args(change_id))
+            # Saves the lowest_change_id if it does not exist
+            await redis.zadd(self.get_change_id_cache_key(), change_id, '_config:lowest_change_id', exist='ZSET_IF_NOT_EXIST')
 
     async def get_all_data(self, user_id: Optional[int] = None) -> Dict[bytes, bytes]:
         """
@@ -205,8 +228,8 @@ class RedisCacheProvider(BaseCacheProvider):
             cache_key = self.get_full_data_cache_key()
         else:
             cache_key = self.get_restricted_data_cache_key(user_id)
-        redis = await self.get_connection()
-        return await redis.hgetall(cache_key)
+        async with self.get_connection() as redis:
+            return await redis.hgetall(cache_key)
 
     async def get_element(self, element_id: str) -> Optional[bytes]:
         """
@@ -214,10 +237,10 @@ class RedisCacheProvider(BaseCacheProvider):
 
         Returns None, when the element does not exist.
         """
-        redis = await self.get_connection()
-        return await redis.hget(
-            self.get_full_data_cache_key(),
-            element_id)
+        async with self.get_connection() as redis:
+            return await redis.hget(
+                self.get_full_data_cache_key(),
+                element_id)
 
     async def get_data_since(self, change_id: int, user_id: Optional[int] = None) -> Tuple[Dict[str, List[bytes]], List[str]]:
         """
@@ -231,53 +254,53 @@ class RedisCacheProvider(BaseCacheProvider):
         for an user is used. 0 is for the anonymous user.
         """
         # TODO: rewrite with lua to get all elements with one request
-        redis = await self.get_connection()
-        changed_elements: Dict[str, List[bytes]] = defaultdict(list)
-        deleted_elements: List[str] = []
-        for element_id in await redis.zrangebyscore(self.get_change_id_cache_key(), min=change_id):
-            if element_id.startswith(b'_config'):
-                continue
-            element_json = await redis.hget(self.get_full_data_cache_key(), element_id)  # Optional[bytes]
-            if element_json is None:
-                # The element is not in the cache. It has to be deleted.
-                deleted_elements.append(element_id)
-            else:
-                collection_string, id = split_element_id(element_id)
-                changed_elements[collection_string].append(element_json)
-        return changed_elements, deleted_elements
+        async with self.get_connection() as redis:
+            changed_elements: Dict[str, List[bytes]] = defaultdict(list)
+            deleted_elements: List[str] = []
+            for element_id in await redis.zrangebyscore(self.get_change_id_cache_key(), min=change_id):
+                if element_id.startswith(b'_config'):
+                    continue
+                element_json = await redis.hget(self.get_full_data_cache_key(), element_id)  # Optional[bytes]
+                if element_json is None:
+                    # The element is not in the cache. It has to be deleted.
+                    deleted_elements.append(element_id)
+                else:
+                    collection_string, id = split_element_id(element_id)
+                    changed_elements[collection_string].append(element_json)
+            return changed_elements, deleted_elements
 
     async def del_restricted_data(self, user_id: int) -> None:
         """
         Deletes all restricted_data for an user. 0 is for the anonymous user.
         """
-        redis = await self.get_connection()
-        await redis.delete(self.get_restricted_data_cache_key(user_id))
+        async with self.get_connection() as redis:
+            await redis.delete(self.get_restricted_data_cache_key(user_id))
 
-    async def set_lock_restricted_data(self, user_id: int) -> bool:
+    async def set_lock(self, lock_name: str) -> bool:
         """
-        Tries to sets a lock for the restricted_data of an user.
+        Tries to sets a lock.
 
         Returns True when the lock could be set.
 
         Returns False when the lock was already set.
         """
-        redis = await self.get_connection()
-        return await redis.hsetnx(self.get_restricted_data_cache_key(user_id), self.lock_key, 1)
+        async with self.get_connection() as redis:
+            return await redis.hsetnx("lock_{}".format(lock_name), self.lock_key, 1)
 
-    async def get_lock_restricted_data(self, user_id: int) -> bool:
+    async def get_lock(self, lock_name: str) -> bool:
         """
         Returns True, when the lock for the restricted_data of an user is set. Else False.
         """
-        redis = await self.get_connection()
-        return await redis.hget(self.get_restricted_data_cache_key(user_id), self.lock_key)
+        async with self.get_connection() as redis:
+            return await redis.hget("lock_{}".format(lock_name), self.lock_key)
 
-    async def del_lock_restricted_data(self, user_id: int) -> None:
+    async def del_lock(self, lock_name: str) -> None:
         """
         Deletes the lock for the restricted_data of an user. Does nothing when the
         lock is not set.
         """
-        redis = await self.get_connection()
-        await redis.hdel(self.get_restricted_data_cache_key(user_id), self.lock_key)
+        async with self.get_connection() as redis:
+            await redis.hdel("lock_{}".format(lock_name), self.lock_key)
 
     async def get_change_id_user(self, user_id: int) -> Optional[int]:
         """
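The lock now lives in its own `lock_<name>` hash instead of inside each user's restricted-data hash, which is what lets `ensure_cache` and the restricted-data updater share one API. The mutual exclusion comes from HSETNX being atomic: exactly one concurrent caller gets a 1 back. A sketch of those semantics (key and field names here are illustrative, not the provider's real ones):

    import aioredis


    async def acquire_lock(redis: 'aioredis.Redis', name: str) -> bool:
        # HSETNX returns 1 only for the caller that created the field,
        # so at most one concurrent caller sees True.
        return bool(await redis.hsetnx('lock_{}'.format(name), 'lock', 1))


    async def release_lock(redis: 'aioredis.Redis', name: str) -> None:
        await redis.hdel('lock_{}'.format(name), 'lock')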
@@ -285,8 +308,8 @@ class RedisCacheProvider(BaseCacheProvider):
 
         This is the change_id where the restricted_data was last calculated.
         """
-        redis = await self.get_connection()
-        return await redis.hget(self.get_restricted_data_cache_key(user_id), '_config:change_id')
+        async with self.get_connection() as redis:
+            return await redis.hget(self.get_restricted_data_cache_key(user_id), '_config:change_id')
 
     async def update_restricted_data(self, user_id: int, data: Dict[str, str]) -> None:
         """
@@ -295,19 +318,19 @@ class RedisCacheProvider(BaseCacheProvider):
         data has to be a dict where the key is an element_id and the value the (json-) encoded
         element.
         """
-        redis = await self.get_connection()
-        await redis.hmset_dict(self.get_restricted_data_cache_key(user_id), data)
+        async with self.get_connection() as redis:
+            await redis.hmset_dict(self.get_restricted_data_cache_key(user_id), data)
 
     async def get_current_change_id(self) -> List[Tuple[str, int]]:
         """
         Get the highest change_id from redis.
         """
-        redis = await self.get_connection()
-        return await redis.zrevrangebyscore(
-            self.get_change_id_cache_key(),
-            withscores=True,
-            count=1,
-            offset=0)
+        async with self.get_connection() as redis:
+            return await redis.zrevrangebyscore(
+                self.get_change_id_cache_key(),
+                withscores=True,
+                count=1,
+                offset=0)
 
     async def get_lowest_change_id(self) -> Optional[int]:
         """
@@ -315,10 +338,10 @@ class RedisCacheProvider(BaseCacheProvider):
 
         Returns None if lowest score does not exist.
         """
-        redis = await self.get_connection()
-        return await redis.zscore(
-            self.get_change_id_cache_key(),
-            '_config:lowest_change_id')
+        async with self.get_connection() as redis:
+            return await redis.zscore(
+                self.get_change_id_cache_key(),
+                '_config:lowest_change_id')
 
 
 class MemmoryCacheProvider(BaseCacheProvider):
@@ -332,12 +355,16 @@ class MemmoryCacheProvider(BaseCacheProvider):
     """
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
-        self.clear_cache()
+        self.set_data_dicts()
 
-    def clear_cache(self) -> None:
+    def set_data_dicts(self) -> None:
         self.full_data: Dict[str, str] = {}
         self.restricted_data: Dict[int, Dict[str, str]] = {}
         self.change_id_data: Dict[int, Set[str]] = {}
+        self.locks: Dict[str, str] = {}
+
+    async def clear_cache(self) -> None:
+        self.set_data_dicts()
 
     async def reset_full_cache(self, data: Dict[str, str]) -> None:
         self.full_data = data
@@ -417,21 +444,18 @@ class MemmoryCacheProvider(BaseCacheProvider):
         except KeyError:
             pass
 
-    async def set_lock_restricted_data(self, user_id: int) -> bool:
-        data = self.restricted_data.setdefault(user_id, {})
-        if self.lock_key in data:
+    async def set_lock(self, lock_name: str) -> bool:
+        if lock_name in self.locks:
             return False
-        data[self.lock_key] = "1"
+        self.locks[lock_name] = "1"
         return True
 
-    async def get_lock_restricted_data(self, user_id: int) -> bool:
-        data = self.restricted_data.get(user_id, {})
-        return self.lock_key in data
+    async def get_lock(self, lock_name: str) -> bool:
+        return lock_name in self.locks
 
-    async def del_lock_restricted_data(self, user_id: int) -> None:
-        data = self.restricted_data.get(user_id, {})
+    async def del_lock(self, lock_name: str) -> None:
         try:
-            del data[self.lock_key]
+            del self.locks[lock_name]
         except KeyError:
             pass
 
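The in-memory provider (`MemmoryCacheProvider`, sic) can use a plain dict for its locks because `set_lock` never awaits between the membership check and the assignment; on a single event loop that makes the check-and-set effectively atomic. A stand-alone, runnable demonstration of that property (not provider code):

    import asyncio

    locks: dict = {}


    async def set_lock(lock_name: str) -> bool:
        # No await between the check and the write, so the event loop
        # cannot interleave a second caller here.
        if lock_name in locks:
            return False
        locks[lock_name] = "1"
        return True


    async def main() -> None:
        first, second = await asyncio.gather(set_lock('demo'), set_lock('demo'))
        assert (first, second) == (True, False)


    asyncio.get_event_loop().run_until_complete(main())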
@@ -12,7 +12,6 @@ from typing import (
 
 from asgiref.sync import async_to_sync
 from django.apps import apps
-from django.conf import settings
 from django.db.models import Model
 from mypy_extensions import TypedDict
 
@@ -201,12 +200,7 @@ class CollectionElement:
         if self.instance is None:
             # The type of data has to be set for mypy
             data: Optional[Dict[str, Any]] = None
-            if getattr(settings, 'SKIP_CACHE', False):
-                # Hack for django 2.0 and channels 2.1 to stay in the same thread.
-                # This is needed for the tests.
-                data = self.get_element_from_db()
-            else:
-                data = async_to_sync(element_cache.get_element_full_data)(self.collection_string, self.id)
+            data = async_to_sync(element_cache.get_element_full_data)(self.collection_string, self.id)
             if data is None:
                 raise self.get_model().DoesNotExist(
                     "Collection {} with id {} does not exist".format(self.collection_string, self.id))
@@ -278,12 +272,7 @@ class Collection(Cachable):
         if self.full_data is None:
             # The type of all_full_data has to be set for mypy
             all_full_data: Dict[str, List[Dict[str, Any]]] = {}
-            if getattr(settings, 'SKIP_CACHE', False):
-                # Hack for django 2.0 and channels 2.1 to stay in the same thread.
-                # This is needed for the tests.
-                all_full_data = self.get_elements_from_db()
-            else:
-                all_full_data = async_to_sync(element_cache.get_all_full_data)()
+            all_full_data = async_to_sync(element_cache.get_all_full_data)()
             self.full_data = all_full_data.get(self.collection_string, [])
         return self.full_data  # type: ignore
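Both hunks above delete the `SKIP_CACHE` escape hatch: with a cache that is now guaranteed to exist, synchronous code can always hop into the async cache via `asgiref`'s `async_to_sync`. A minimal illustration of that bridge (the coroutine is a placeholder, not the real cache lookup):

    from asgiref.sync import async_to_sync


    async def get_element_full_data(collection_string: str, id: int) -> dict:
        # Placeholder for the cache lookup; assumes nothing about the real cache.
        return {'collection': collection_string, 'id': id}


    def get_full_data_sync(collection_string: str, id: int) -> dict:
        # Runs the coroutine to completion when called from sync code.
        return async_to_sync(get_element_full_data)(collection_string, id)


    print(get_full_data_sync('topics/topic', 1))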
@@ -3,6 +3,8 @@ from django.test import TestCase, TransactionTestCase
 from pytest_django.django_compat import is_django_unittest
 from pytest_django.plugin import validate_django_db
 
+from openslides.utils.cache import element_cache
+
 
 def pytest_collection_modifyitems(items):
     """
@@ -57,3 +59,13 @@ def constants(request):
     else:
         # Else: Use fake constants
         set_constants({'constant1': 'value1', 'constant2': 'value2'})
+
+
+@pytest.fixture(autouse=True)
+def reset_cache(request):
+    """
+    Resetts the cache for every test
+    """
+    if 'django_db' in request.node.keywords or is_django_unittest(request):
+        # When the db is created, use the original cachables
+        element_cache.ensure_cache(reset=True)
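`autouse=True` means no test has to request `reset_cache` explicitly; pytest injects it into every collected test, and the `request` object is inspected to limit the reset to database-backed tests. A hypothetical test for illustration (names assumed, not from the repository):

    import pytest


    @pytest.mark.django_db
    def test_with_fresh_cache():
        # By the time the test body runs, reset_cache has already executed
        # element_cache.ensure_cache(reset=True) for this test.
        assert True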
@@ -12,6 +12,8 @@ from openslides.core.config import config
 from openslides.core.models import Countdown
 from openslides.motions.models import Motion
 from openslides.topics.models import Topic
+from openslides.users.models import Group
+from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.collection import CollectionElement
 from openslides.utils.test import TestCase
 
@@ -43,20 +45,22 @@ class RetrieveItem(TestCase):
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
     def test_hidden_by_anonymous_with_manage_perms(self):
-        group = get_user_model().groups.field.related_model.objects.get(pk=1)  # Group with pk 1 is for anonymous users.
+        group = Group.objects.get(pk=1)  # Group with pk 1 is for anonymous users.
         permission_string = 'agenda.can_manage'
         app_label, codename = permission_string.split('.')
         permission = Permission.objects.get(content_type__app_label=app_label, codename=codename)
         group.permissions.add(permission)
+        inform_changed_data(group)
         response = self.client.get(reverse('item-detail', args=[self.item.pk]))
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
     def test_internal_by_anonymous_without_perm_to_see_internal_items(self):
-        group = get_user_model().groups.field.related_model.objects.get(pk=1)  # Group with pk 1 is for anonymous users.
+        group = Group.objects.get(pk=1)  # Group with pk 1 is for anonymous users.
         permission_string = 'agenda.can_see_internal_items'
         app_label, codename = permission_string.split('.')
         permission = group.permissions.get(content_type__app_label=app_label, codename=codename)
         group.permissions.remove(permission)
+        inform_changed_data(group)
         self.item.type = Item.INTERNAL_ITEM
         self.item.save()
         response = self.client.get(reverse('item-detail', args=[self.item.pk]))
@@ -194,6 +198,7 @@ class ManageSpeaker(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
+        inform_changed_data(admin)
         CollectionElement.from_instance(admin)
 
         response = self.client.post(
@@ -231,7 +236,7 @@ class ManageSpeaker(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
-        CollectionElement.from_instance(admin)
+        inform_changed_data(admin)
         speaker = Speaker.objects.add(self.user, self.item)
 
         response = self.client.delete(
@@ -259,7 +264,7 @@ class ManageSpeaker(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
-        CollectionElement.from_instance(admin)
+        inform_changed_data(admin)
         Speaker.objects.add(self.user, self.item)
 
         response = self.client.patch(
@@ -5,6 +5,7 @@ from rest_framework import status
 from rest_framework.test import APIClient
 
 from openslides.assignments.models import Assignment
+from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.test import TestCase
 
 from ..helpers import count_queries
@@ -77,6 +78,7 @@ class CanidatureSelf(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
+        inform_changed_data(admin)
 
         response = self.client.post(reverse('assignment-candidature-self', args=[self.assignment.pk]))
 
@@ -123,6 +125,7 @@ class CanidatureSelf(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
+        inform_changed_data(admin)
 
         response = self.client.delete(reverse('assignment-candidature-self', args=[self.assignment.pk]))
 
@@ -203,6 +206,7 @@ class CandidatureOther(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
+        inform_changed_data(admin)
 
         response = self.client.post(
             reverse('assignment-candidature-other', args=[self.assignment.pk]),
@@ -258,6 +262,7 @@ class CandidatureOther(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
+        inform_changed_data(admin)
 
         response = self.client.delete(
             reverse('assignment-candidature-other', args=[self.assignment.pk]),
@@ -50,9 +50,6 @@ async def set_config(key, value):
     """
     Set a config variable in the element_cache without hitting the database.
     """
-    if not await element_cache.exists_full_data():
-        # Encure that the cache exists and the default values of the config are in it.
-        await element_cache.build_full_data()
     collection_string = config.get_collection_string()
     config_id = config.key_to_id[key]  # type: ignore
     full_data = {'id': config_id, 'key': key, 'value': value}
@@ -20,7 +20,7 @@ from openslides.motions.models import (
     Workflow,
 )
 from openslides.utils.auth import get_group_model
-from openslides.utils.collection import CollectionElement
+from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.test import TestCase
 
 from ..helpers import count_queries
@@ -207,6 +207,7 @@ class CreateMotion(TestCase):
         self.admin = get_user_model().objects.get(username='admin')
         self.admin.groups.add(2)
         self.admin.groups.remove(4)
+        inform_changed_data(self.admin)
 
         response = self.client.post(
             reverse('motion-list'),
@@ -258,6 +259,7 @@ class CreateMotion(TestCase):
         self.admin = get_user_model().objects.get(username='admin')
         self.admin.groups.add(2)
         self.admin.groups.remove(4)
+        inform_changed_data(self.admin)
 
         response = self.client.post(
             reverse('motion-list'),
@@ -306,6 +308,7 @@ class RetrieveMotion(TestCase):
         state.save()
         # The cache has to be cleared, see:
         # https://github.com/OpenSlides/OpenSlides/issues/3396
+        inform_changed_data(self.motion)
 
         response = guest_client.get(reverse('motion-detail', args=[self.motion.pk]))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@@ -340,6 +343,7 @@ class RetrieveMotion(TestCase):
         group.permissions.remove(permission)
         config['general_system_enable_anonymous'] = True
         guest_client = APIClient()
+        inform_changed_data(group)
 
         response_1 = guest_client.get(reverse('motion-detail', args=[self.motion.pk]))
         self.assertEqual(response_1.status_code, status.HTTP_200_OK)
@@ -431,6 +435,7 @@ class UpdateMotion(TestCase):
         self.motion.supporters.add(supporter)
         config['motions_remove_supporters'] = True
         self.assertEqual(self.motion.supporters.count(), 1)
+        inform_changed_data((admin, self.motion))
 
         response = self.client.patch(
             reverse('motion-detail', args=[self.motion.pk]),
@@ -467,7 +472,7 @@ class DeleteMotion(TestCase):
         group_delegates = get_group_model().objects.get(name='Delegates')
         self.admin.groups.remove(group_admin)
         self.admin.groups.add(group_delegates)
-        CollectionElement.from_instance(self.admin)
+        inform_changed_data(self.admin)
 
     def put_motion_in_complex_workflow(self):
         workflow = Workflow.objects.get(name='Complex Workflow')
@@ -551,7 +556,7 @@ class ManageSubmitters(TestCase):
         group_delegates = type(group_admin).objects.get(name='Delegates')
         admin.groups.add(group_delegates)
         admin.groups.remove(group_admin)
-        CollectionElement.from_instance(admin)
+        inform_changed_data(admin)
 
         response = self.client.post(
             reverse('motion-manage-submitters', args=[self.motion.pk]),
@@ -884,6 +889,7 @@ class TestMotionCommentSection(TestCase):
         any of the read_groups.
         """
         self.admin.groups.remove(self.group_in)
+        inform_changed_data(self.admin)
 
         section = MotionCommentSection(name='test_name_f3jOF3m8fp.<qiqmf32=')
         section.save()
@@ -912,6 +918,7 @@ class TestMotionCommentSection(TestCase):
         Try to create a section without can_manage permissions.
         """
         self.admin.groups.remove(self.group_in)
+        inform_changed_data(self.admin)
 
         response = self.client.post(
             reverse('motioncommentsection-list'),
@@ -1097,6 +1104,7 @@ class TestMotionCommentSection(TestCase):
         Try to delete a section without can_manage permissions
         """
         self.admin.groups.remove(self.group_in)
+        inform_changed_data(self.admin)
 
         section = MotionCommentSection(name='test_name_wl2oxmmhe/2kd92lwPSi')
         section.save()
@@ -1190,6 +1198,7 @@ class SupportMotion(TestCase):
     def setUp(self):
         self.admin = get_user_model().objects.get(username='admin')
         self.admin.groups.add(2)
+        inform_changed_data(self.admin)
         self.client.login(username='admin', password='admin')
         self.motion = Motion(
             title='test_title_chee7ahCha6bingaew4e',
@@ -7,6 +7,7 @@ from rest_framework.test import APIClient
 from openslides.core.config import config
 from openslides.users.models import Group, PersonalNote, User
 from openslides.users.serializers import UserFullSerializer
+from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.test import TestCase
 
 from ..helpers import count_queries
@@ -62,6 +63,7 @@ class UserGetTest(TestCase):
         app_label, codename = permission_string.split('.')
         permission = group.permissions.get(content_type__app_label=app_label, codename=codename)
         group.permissions.remove(permission)
+        inform_changed_data(group)
         config['general_system_enable_anonymous'] = True
         guest_client = APIClient()
 
@@ -1,25 +1,9 @@
-from unittest import skip
-
 from openslides.topics.models import Topic
 from openslides.utils import collection
 from openslides.utils.test import TestCase
 
 
 class TestCollectionElementCache(TestCase):
-    @skip("Does not work as long as caching does not work in the tests")
-    def test_clean_cache(self):
-        """
-        Tests that the data is retrieved from the database.
-        """
-        topic = Topic.objects.create(title='test topic')
-
-        with self.assertNumQueries(3):
-            collection_element = collection.CollectionElement.from_values('topics/topic', 1)
-            instance = collection_element.get_full_data()
-
-        self.assertEqual(topic.title, instance['title'])
-
-    @skip("Does not work as long as caching does not work in the tests")
     def test_with_cache(self):
         """
         Tests that no db query is used when the valie is in the cache.
@@ -44,21 +28,7 @@ class TestCollectionElementCache(TestCase):
         collection.CollectionElement.from_values('topics/topic', 999)
 
 
-@skip("Does not work as long as caching does not work in the tests")
 class TestCollectionCache(TestCase):
-    def test_clean_cache(self):
-        """
-        Tests that the instances are retrieved from the database.
-        """
-        Topic.objects.create(title='test topic1')
-        Topic.objects.create(title='test topic2')
-        Topic.objects.create(title='test topic3')
-        topic_collection = collection.Collection('topics/topic')
-
-        with self.assertNumQueries(3):
-            instance_list = list(topic_collection.get_full_data())
-        self.assertEqual(len(instance_list), 3)
-
     def test_with_cache(self):
         """
         Tests that no db query is used when the list is received twice.
@ -24,22 +24,22 @@ from ..helpers import TConfig, TUser, set_config
 
 
 @pytest.fixture(autouse=True)
-def prepare_element_cache(settings):
+async def prepare_element_cache(settings):
     """
     Resets the element cache.
 
     Uses a cacheable_provider for tests with example data.
     """
-    settings.SKIP_CACHE = False
-    element_cache.cache_provider.clear_cache()
+    await element_cache.cache_provider.clear_cache()
     orig_cachable_provider = element_cache.cachable_provider
     element_cache.cachable_provider = get_cachable_provider([Collection1(), Collection2(), TConfig(), TUser()])
     element_cache._cachables = None
+    await sync_to_async(element_cache.ensure_cache)()
     yield
     # Reset the cachable_provider
     element_cache.cachable_provider = orig_cachable_provider
     element_cache._cachables = None
-    element_cache.cache_provider.clear_cache()
+    await element_cache.cache_provider.clear_cache()
 
 
 @pytest.fixture
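Note: the fixture above becomes a coroutine, so the async cache provider can be awaited directly, while the blocking ensure_cache() call is pushed off the event loop with asgiref's sync_to_async. A minimal, self-contained sketch of that wrapping pattern (load_defaults is a hypothetical stand-in for the blocking call, not OpenSlides code):

    import asyncio

    from asgiref.sync import sync_to_async


    def load_defaults() -> dict:
        # Stand-in for blocking work (e.g. ORM queries) that must not
        # run directly on the event loop.
        return {"app/collection1:1": '{"id": 1, "value": "value1"}'}


    async def main() -> None:
        # sync_to_async wraps the blocking callable so it can be awaited
        # from an async fixture or coroutine; it runs in a worker thread.
        data = await sync_to_async(load_defaults)()
        print(data)


    asyncio.run(main())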
@ -1,7 +1,7 @@
 
 
 from openslides.core.config import ConfigVariable, config
-from openslides.core.exceptions import ConfigError, ConfigNotFound
+from openslides.core.exceptions import ConfigError
 from openslides.utils.test import TestCase
 
 
@ -32,17 +32,6 @@ class HandleConfigTest(TestCase):
     def set_config_var(self, key, value):
         config[key] = value
 
-    def test_get_config_default_value(self):
-        self.assertEqual(config['string_var'], 'default_string_rien4ooCZieng6ah')
-        self.assertTrue(config['bool_var'])
-        self.assertEqual(config['integer_var'], 3)
-        self.assertEqual(config['choices_var'], '1')
-        self.assertEqual(config['none_config_var'], None)
-        with self.assertRaisesMessage(
-                ConfigNotFound,
-                'The config variable unknown_config_var was not found.'):
-            self.get_config_var('unknown_config_var')
-
     def test_get_multiple_config_var_error(self):
         with self.assertRaisesMessage(
                 ConfigError,
@ -54,14 +43,6 @@ class HandleConfigTest(TestCase):
         self.assertRaises(TypeError, ConfigVariable, name='foo')
         self.assertRaises(TypeError, ConfigVariable, default_value='foo')
 
-    def test_change_config_value(self):
-        self.assertEqual(config['string_var'], 'default_string_rien4ooCZieng6ah')
-        config['string_var'] = 'other_special_unique_string dauTex9eAiy7jeen'
-        self.assertEqual(config['string_var'], 'other_special_unique_string dauTex9eAiy7jeen')
-
-    def test_missing_cache_(self):
-        self.assertEqual(config['string_var'], 'default_string_rien4ooCZieng6ah')
-
     def test_config_exists(self):
         self.assertTrue(config.exists('string_var'))
         self.assertFalse(config.exists('unknown_config_var'))
@ -42,7 +42,7 @@ DATABASES = {
         'ENGINE': 'django.db.backends.sqlite3',
     }
 }
-
+REDIS_ADDRESS = "redis://127.0.0.1"
 SESSION_ENGINE = "django.contrib.sessions.backends.cache"
 
 # Internationalization
@ -75,7 +75,3 @@ MOTION_IDENTIFIER_MIN_DIGITS = 1
 PASSWORD_HASHERS = [
     'django.contrib.auth.hashers.MD5PasswordHasher',
 ]
-
-# At least in Django 2.1 and Channels 2.1 the django transactions can not be shared between
-# threads. So we have to skip the asyncio-cache.
-SKIP_CACHE = True
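Note: with the SKIP_CACHE escape hatch gone, the test settings point the cache at a real Redis instance via REDIS_ADDRESS. A rough sketch of what consuming that setting could look like (the aioredis 1.x calls are shown purely for illustration; the actual client wiring lives inside the cache provider):

    import asyncio

    import aioredis  # aioredis 1.x API; illustrative assumption only

    REDIS_ADDRESS = "redis://127.0.0.1"


    async def main() -> None:
        # Connect to the Redis instance the tests now rely on.
        redis = await aioredis.create_redis(REDIS_ADDRESS)
        await redis.set("ping", "pong")
        print(await redis.get("ping"))
        redis.close()
        await redis.wait_closed()


    asyncio.run(main())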
@ -72,11 +72,11 @@ class TTestCacheProvider(MemmoryCacheProvider):
     testing.
     """
 
-    async def del_lock_restricted_data_after_wait(self, user_id: int, future: asyncio.Future = None) -> None:
+    async def del_lock_after_wait(self, lock_name: str, future: asyncio.Future = None) -> None:
         if future is None:
-            asyncio.ensure_future(self.del_lock_restricted_data(user_id))
+            asyncio.ensure_future(self.del_lock(lock_name))
         else:
             async def set_future() -> None:
-                await self.del_lock_restricted_data(user_id)
+                await self.del_lock(lock_name)
                 future.set_result(1)  # type: ignore
             asyncio.ensure_future(set_future())
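Note: the test provider override tracks the rename from restricted-data-specific lock methods to generic named locks (set_lock/get_lock/del_lock); user 0's lock becomes the name "restricted_data_0". A minimal asyncio sketch of such a named-lock store (hypothetical, in-memory, no expiry — not the real cache provider):

    import asyncio

    # Hypothetical in-memory named-lock store mirroring the generic API.
    locks: set = set()


    async def set_lock(name: str) -> bool:
        # Returns False if the lock is already held by someone else.
        if name in locks:
            return False
        locks.add(name)
        return True


    async def get_lock(name: str) -> bool:
        return name in locks


    async def del_lock(name: str) -> None:
        locks.discard(name)


    async def main() -> None:
        assert await set_lock("restricted_data_0")
        assert await get_lock("restricted_data_0")
        await del_lock("restricted_data_0")
        assert not await get_lock("restricted_data_0")


    asyncio.run(main())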
@ -29,53 +29,13 @@ def sort_dict(encoded_dict: Dict[str, List[Dict[str, Any]]]) -> Dict[str, List[D
 
 @pytest.fixture
 def element_cache():
-    return ElementCache(
+    element_cache = ElementCache(
         'test_redis',
         cache_provider_class=TTestCacheProvider,
         cachable_provider=get_cachable_provider(),
         start_time=0)
-
-
-@pytest.mark.asyncio
-async def test_save_full_data(element_cache):
-    input_data = {
-        'app/collection1': [
-            {'id': 1, 'value': 'value1'},
-            {'id': 2, 'value': 'value2'}],
-        'app/collection2': [
-            {'id': 1, 'key': 'value1'},
-            {'id': 2, 'key': 'value2'}]}
-    calculated_data = {
-        'app/collection1:1': '{"id": 1, "value": "value1"}',
-        'app/collection1:2': '{"id": 2, "value": "value2"}',
-        'app/collection2:1': '{"id": 1, "key": "value1"}',
-        'app/collection2:2': '{"id": 2, "key": "value2"}'}
-
-    await element_cache.save_full_data(input_data)
-
-    assert decode_dict(element_cache.cache_provider.full_data) == decode_dict(calculated_data)
-
-
-@pytest.mark.asyncio
-async def test_build_full_data(element_cache):
-    result = await element_cache.build_full_data()
-
-    assert result == example_data()
-    assert decode_dict(element_cache.cache_provider.full_data) == decode_dict({
-        'app/collection1:1': '{"id": 1, "value": "value1"}',
-        'app/collection1:2': '{"id": 2, "value": "value2"}',
-        'app/collection2:1': '{"id": 1, "key": "value1"}',
-        'app/collection2:2': '{"id": 2, "key": "value2"}'})
-
-
-@pytest.mark.asyncio
-async def test_exists_full_data(element_cache):
-    """
-    Test that the return value of exists_full_data is the the same as from the
-    cache_provider.
-    """
-    element_cache.cache_provider.full_data = 'test_value'
-    assert await element_cache.exists_full_data()
+    element_cache.ensure_cache()
+    return element_cache
 
 
 @pytest.mark.asyncio
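Note: the fixture now calls ensure_cache() eagerly, so every test starts from a fully built cache, which is why the save/build/exists tests above became redundant and are dropped. A hedged sketch of the eager-initialization pattern with a dummy cache class (names are illustrative stand-ins, not the real ElementCache API):

    import pytest


    class DummyCache:
        # Illustrative stand-in for ElementCache.
        def __init__(self) -> None:
            self.full_data: dict = {}

        def ensure_cache(self) -> None:
            # Build the cache eagerly instead of lazily on first access.
            self.full_data = {"app/collection1:1": '{"id": 1, "value": "value1"}'}


    @pytest.fixture
    def element_cache() -> DummyCache:
        cache = DummyCache()
        cache.ensure_cache()  # every test sees a pre-filled cache
        return cache


    def test_cache_is_prefilled(element_cache: DummyCache) -> None:
        assert element_cache.full_data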
@ -245,7 +205,7 @@ async def test_get_element_full_data_full_redis(element_cache):
 
 
 @pytest.mark.asyncio
-async def test_exist_restricted_data(element_cache):
+async def test_exists_restricted_data(element_cache):
     element_cache.use_restricted_data_cache = True
     element_cache.cache_provider.restricted_data = {0: {
         'app/collection1:1': '{"id": 1, "value": "value1"}',
@ -259,7 +219,7 @@ async def test_exist_restricted_data(element_cache):
 
 
 @pytest.mark.asyncio
-async def test_exist_restricted_data_do_not_use_restricted_data(element_cache):
+async def test_exists_restricted_data_do_not_use_restricted_data(element_cache):
     element_cache.use_restricted_data_cache = False
     element_cache.cache_provider.restricted_data = {0: {
         'app/collection1:1': '{"id": 1, "value": "value1"}',
@ -308,7 +268,7 @@ async def test_update_restricted_data(element_cache):
         'app/collection2:2': '{"id": 2, "key": "restricted_value2"}',
         '_config:change_id': '0'})
     # Make sure the lock is deleted
-    assert not await element_cache.cache_provider.get_lock_restricted_data(0)
+    assert not await element_cache.cache_provider.get_lock("restricted_data_0")
     # And the future is done
     assert element_cache.restricted_data_cache_updater[0].done()
 
@ -379,8 +339,8 @@ async def test_update_restricted_data_second_worker_on_different_server(element_
     """
     element_cache.use_restricted_data_cache = True
     element_cache.cache_provider.restricted_data = {0: {}}
-    await element_cache.cache_provider.set_lock_restricted_data(0)
-    await element_cache.cache_provider.del_lock_restricted_data_after_wait(0)
+    await element_cache.cache_provider.set_lock("restricted_data_0")
+    await element_cache.cache_provider.del_lock_after_wait("restricted_data_0")
 
     await element_cache.update_restricted_data(None)
 
@ -399,8 +359,8 @@ async def test_update_restricted_data_second_worker_on_same_server(element_cache
     element_cache.cache_provider.restricted_data = {0: {}}
     future: asyncio.Future = asyncio.Future()
     element_cache.restricted_data_cache_updater[0] = future
-    await element_cache.cache_provider.set_lock_restricted_data(0)
-    await element_cache.cache_provider.del_lock_restricted_data_after_wait(0, future)
+    await element_cache.cache_provider.set_lock("restricted_data_0")
+    await element_cache.cache_provider.del_lock_after_wait("restricted_data_0", future)
 
     await element_cache.update_restricted_data(None)
 