Merge pull request #3973 from ostcar/test_with_redis

add possibility to run tests with redis
This commit is contained in:
Oskar Hahn 2018-11-03 20:54:55 +01:00 committed by GitHub
commit 93dfd9ef67
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 17 additions and 3 deletions

View File

@ -100,9 +100,17 @@ class RedisCacheProvider:
async def reset_full_cache(self, data: Dict[str, str]) -> None: async def reset_full_cache(self, data: Dict[str, str]) -> None:
""" """
Deletes the full_data_cache and write new data in it. Deletes the full_data_cache and write new data in it.
Also deletes the restricted_data_cache and the change_id_cache.
""" """
async with get_connection() as redis: async with get_connection() as redis:
tr = redis.multi_exec() tr = redis.multi_exec()
# like clear_cache but does not delete a lock
tr.eval(
"return redis.call('del', 'fake_key', unpack(redis.call('keys', ARGV[1])))",
keys=[],
args=["{}{}*".format(self.prefix, self.restricted_user_cache_key)])
tr.delete(self.get_change_id_cache_key())
tr.delete(self.get_full_data_cache_key()) tr.delete(self.get_full_data_cache_key())
tr.hmset_dict(self.get_full_data_cache_key(), data) tr.hmset_dict(self.get_full_data_cache_key(), data)
await tr.execute() await tr.execute()

View File

@ -71,3 +71,6 @@ def reset_cache(request):
# When the db is created, use the original cachables # When the db is created, use the original cachables
async_to_sync(element_cache.cache_provider.clear_cache)() async_to_sync(element_cache.cache_provider.clear_cache)()
element_cache.ensure_cache(reset=True) element_cache.ensure_cache(reset=True)
# Set constant start_time
element_cache.start_time = 1

View File

@ -127,7 +127,7 @@ async def test_connection_with_invalid_change_id(get_communicator):
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_connection_with_to_big_change_id(get_communicator): async def test_connection_with_to_big_change_id(get_communicator):
await set_config('general_system_enable_anonymous', True) await set_config('general_system_enable_anonymous', True)
communicator = get_communicator('change_id=1000000000000') communicator = get_communicator('change_id=100')
connected, __ = await communicator.connect() connected, __ = await communicator.connect()
@ -295,7 +295,7 @@ async def test_send_get_elements_to_big_change_id(communicator):
await set_config('general_system_enable_anonymous', True) await set_config('general_system_enable_anonymous', True)
await communicator.connect() await communicator.connect()
await communicator.send_json_to({'type': 'getElements', 'content': {'change_id': 1_000_000_000_000}, 'id': 'test_id'}) await communicator.send_json_to({'type': 'getElements', 'content': {'change_id': 100}, 'id': 'test_id'})
response = await communicator.receive_json_from() response = await communicator.receive_json_from()
type = response.get('type') type = response.get('type')
@ -346,7 +346,7 @@ async def test_send_connect_twice_with_clear_change_id_cache_same_change_id_then
A client should not do this but request for change_id+1 A client should not do this but request for change_id+1
""" """
await set_config('general_system_enable_anonymous', True) await set_config('general_system_enable_anonymous', True)
element_cache.cache_provider.change_id_data = {} # type: ignore await element_cache.cache_provider.clear_cache()
await communicator.connect() await communicator.connect()
await communicator.send_json_to({'type': 'getElements', 'content': {'change_id': 0}, 'id': 'test_id'}) await communicator.send_json_to({'type': 'getElements', 'content': {'change_id': 0}, 'id': 'test_id'})
response1 = await communicator.receive_json_from() response1 = await communicator.receive_json_from()

View File

@ -31,6 +31,9 @@ SECRET_KEY = 'secret'
DEBUG = False DEBUG = False
# Uncomment to test with the redis cache
# REDIS_ADDRESS = "redis://127.0.0.1"
# Database # Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases # https://docs.djangoproject.com/en/1.10/ref/settings/#databases