import functools
import hashlib
import logging
from collections import defaultdict
from textwrap import dedent
from typing import Any, Callable, Coroutine, Dict, List, Optional, Set, Tuple

from django.core.exceptions import ImproperlyConfigured
from typing_extensions import Protocol

from .redis import use_redis
from .schema_version import SchemaVersion
from .utils import split_element_id, str_dict_to_bytes


logger = logging.getLogger(__name__)

if use_redis:
    from .redis import get_connection, aioredis


class CacheReset(Exception):
    pass


class ElementCacheProvider(Protocol):
    """
    Base class for cache providers.

    See RedisCacheProvider for a reference implementation.
    """

    def __init__(self, ensure_cache: Callable[[], Coroutine[Any, Any, None]]) -> None:
        ...

    async def ensure_cache(self) -> None:
        ...

    async def clear_cache(self) -> None:
        ...

    async def reset_full_cache(self, data: Dict[str, str]) -> None:
        ...

    async def data_exists(self) -> bool:
        ...

    async def get_all_data(self) -> Dict[bytes, bytes]:
        ...

    async def get_collection_data(self, collection: str) -> Dict[int, bytes]:
        ...

    async def get_element_data(self, element_id: str) -> Optional[bytes]:
        ...

    async def add_changed_elements(
        self,
        changed_elements: List[str],
        deleted_element_ids: List[str],
        default_change_id: int,
    ) -> int:
        ...

    async def get_data_since(
        self, change_id: int, max_change_id: int = -1
    ) -> Tuple[Dict[str, List[bytes]], List[str]]:
        ...

    async def set_lock(self, lock_name: str) -> bool:
        ...

    async def get_lock(self, lock_name: str) -> bool:
        ...

    async def del_lock(self, lock_name: str) -> None:
        ...

    async def get_current_change_id(self) -> Optional[int]:
        ...

    async def get_lowest_change_id(self) -> Optional[int]:
        ...

    async def get_schema_version(self) -> Optional[SchemaVersion]:
        ...

    async def set_schema_version(self, schema_version: SchemaVersion) -> None:
        ...


def ensure_cache_wrapper() -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """
    Wraps a cache function to ensure that the cache is filled.

    When the wrapped function raises a CacheReset error, the cache is rebuilt (via a
    call to `ensure_cache`) and the method is called again. This repeats until the
    operation succeeds.
    """

    def wrapper(func: Callable[..., Any]) -> Callable[..., Any]:
        @functools.wraps(func)
        async def wrapped(
            cache_provider: ElementCacheProvider, *args: Any, **kwargs: Any
        ) -> Any:
            success = False
            while not success:
                try:
                    result = await func(cache_provider, *args, **kwargs)
                    success = True
                except CacheReset:
                    logger.warning(
                        f"Redis was flushed before method '{func.__name__}'. Ensuring the cache now."
                    )
                    await cache_provider.ensure_cache()
            return result

        return wrapped

    return wrapper
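# A minimal usage sketch (illustrative only): a provider method decorated with
# ensure_cache_wrapper is retried transparently after a cache reset. The class and
# method below are hypothetical:
#
#     class SomeCacheProvider:
#         @ensure_cache_wrapper()
#         async def get_something(self) -> bytes:
#             ...  # may raise CacheReset if redis was flushed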


class RedisCacheProvider:
    """
    Cache provider that loads and saves the data to redis.
    """

    full_data_cache_key: str = "full_data"
    change_id_cache_key: str = "change_id"
    schema_cache_key: str = "schema"
    prefix: str = "element_cache_"

    # All lua scripts used by this provider. Every entry is a tuple (script, ensure_cache)
    # holding the script source and an ensure_cache indicator. If the indicator is True, a
    # short ensure_cache script is prepended that raises a CacheReset if the full data cache
    # is empty. This requires the full_data_cache_key to be the first key given in `keys`!
    # All scripts are dedented and hashed for faster execution. Convention: the keys of this
    # member are the methods that need these scripts.
    scripts = {
        "clear_cache": (
            "return redis.call('del', 'fake_key', unpack(redis.call('keys', ARGV[1])))",
            False,
        ),
        "get_all_data": ("return redis.call('hgetall', KEYS[1])", True),
        "get_collection_data": (
            """
            local cursor = 0
            local collection = {}
            repeat
                local result = redis.call('HSCAN', KEYS[1], cursor, 'MATCH', ARGV[1])
                cursor = tonumber(result[1])
                for _, v in pairs(result[2]) do
                    table.insert(collection, v)
                end
            until cursor == 0
            return collection
            """,
            True,
        ),
        "get_element_data": ("return redis.call('hget', KEYS[1], ARGV[1])", True),
        "add_changed_elements": (
            """
            -- Generate a new change_id
            local tmp = redis.call('zrevrangebyscore', KEYS[2], '+inf', '-inf', 'WITHSCORES', 'LIMIT', 0, 1)
            local change_id
            if next(tmp) == nil then
                -- The key does not exist
                change_id = ARGV[1]
            else
                change_id = tmp[2] + 1
            end

            local nc = tonumber(ARGV[2])
            local nd = tonumber(ARGV[3])

            local i, max
            -- Add changed_elements to the cache and sorted set (the first of the pairs)
            if (nc > 0) then
                max = 2 + nc
                redis.call('hmset', KEYS[1], unpack(ARGV, 4, max + 1))
                for i = 4, max, 2 do
                    redis.call('zadd', KEYS[2], change_id, ARGV[i])
                end
            end

            -- Delete deleted_element_ids and add them to the sorted set
            if (nd > 0) then
                max = 3 + nc + nd
                redis.call('hdel', KEYS[1], unpack(ARGV, 4 + nc, max))
                for i = 4 + nc, max, 1 do
                    redis.call('zadd', KEYS[2], change_id, ARGV[i])
                end
            end

            -- Set lowest_change_id if it does not exist
            redis.call('zadd', KEYS[2], 'NX', change_id, '_config:lowest_change_id')

            return change_id
            """,
            True,
        ),
        "get_data_since": (
            """
            -- Get the element_ids of all elements changed within the given range
            local element_ids = redis.call('zrangebyscore', KEYS[2], ARGV[1], ARGV[2])

            -- Save elements in an array, alternating element_id and element_json
            local elements = {}
            for _, element_id in pairs(element_ids) do
                table.insert(elements, element_id)
                table.insert(elements, redis.call('hget', KEYS[1], element_id))
            end
            return elements
            """,
            True,
        ),
    }
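    # A worked example of the argument layout for "add_changed_elements" (element_ids
    # and payloads are hypothetical). changed_elements is a flat list alternating
    # element_id and element_json, followed by the deleted element_ids, so with
    #     changed_elements = ["motions/motion:1", "{...}", "topics/topic:2", "{...}"]
    #     deleted_element_ids = ["users/user:3"]
    # the script receives
    #     ARGV = [default_change_id, 4, 1,
    #             "motions/motion:1", "{...}", "topics/topic:2", "{...}", "users/user:3"]
    # where 4 and 1 are the lengths of the two lists.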

    def __init__(self, ensure_cache: Callable[[], Coroutine[Any, Any, None]]) -> None:
        self._ensure_cache = ensure_cache

        # hash all scripts and remove indentation.
        for key in self.scripts.keys():
            script, add_ensure_cache = self.scripts[key]
            script = dedent(script)
            if add_ensure_cache:
                script = (
                    dedent(
                        """
                        local exist = redis.call('exists', KEYS[1])
                        if (exist == 0) then
                            redis.log(redis.LOG_WARNING, "empty: "..KEYS[1])
                            return redis.error_reply("cache_reset")
                        end
                        """
                    )
                    + script
                )
            self.scripts[key] = (script, add_ensure_cache)
        self._script_hashes = {
            key: hashlib.sha1(script.encode()).hexdigest()
            for key, (script, _) in self.scripts.items()
        }

    async def ensure_cache(self) -> None:
        await self._ensure_cache()

    def get_full_data_cache_key(self) -> str:
        return "".join((self.prefix, self.full_data_cache_key))

    def get_change_id_cache_key(self) -> str:
        return "".join((self.prefix, self.change_id_cache_key))

    def get_schema_cache_key(self) -> str:
        return "".join((self.prefix, self.schema_cache_key))

    async def clear_cache(self) -> None:
        """
        Deletes all cache entries created by this element cache.
        """
        await self.eval("clear_cache", keys=[], args=[f"{self.prefix}*"])

    async def reset_full_cache(self, data: Dict[str, str]) -> None:
        """
        Deletes the full_data_cache and writes the new data into it. Clears the
        change_id key. Does not clear locks.
        """
        async with get_connection() as redis:
            tr = redis.multi_exec()
            tr.delete(self.get_change_id_cache_key())
            tr.delete(self.get_full_data_cache_key())
            tr.hmset_dict(self.get_full_data_cache_key(), data)
            await tr.execute()

    async def data_exists(self) -> bool:
        """
        Returns True when there is data in the cache.
        """
        async with get_connection() as redis:
            return await redis.exists(self.get_full_data_cache_key())

    @ensure_cache_wrapper()
    async def get_all_data(self) -> Dict[bytes, bytes]:
        """
        Returns all data from the full_data_cache in a mapping from element_id to the element.
        """
        return await aioredis.util.wait_make_dict(
            self.eval("get_all_data", [self.get_full_data_cache_key()])
        )
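    # The returned mapping is keyed by the raw element_id bytes, e.g. (hypothetical
    # values): {b"motions/motion:1": b"{...}", b"topics/topic:2": b"{...}"}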

    @ensure_cache_wrapper()
    async def get_collection_data(self, collection: str) -> Dict[int, bytes]:
        """
        Returns all elements for a collection from the cache. The data is mapped
        from element_id to the element.
        """
        response = await self.eval(
            "get_collection_data", [self.get_full_data_cache_key()], [f"{collection}:*"]
        )

        collection_data = {}
        for i in range(0, len(response), 2):
            _, id = split_element_id(response[i])
            collection_data[id] = response[i + 1]

        return collection_data

    @ensure_cache_wrapper()
    async def get_element_data(self, element_id: str) -> Optional[bytes]:
        """
        Returns one element from the cache. Returns None if the element does not exist.
        """
        return await self.eval(
            "get_element_data", [self.get_full_data_cache_key()], [element_id]
        )

    @ensure_cache_wrapper()
    async def add_changed_elements(
        self,
        changed_elements: List[str],
        deleted_element_ids: List[str],
        default_change_id: int,
    ) -> int:
        """
        Modifies the full_data_cache to insert the changed_elements and remove the
        deleted_element_ids (in this order). Generates a new change_id and inserts all
        element_ids (changed and deleted) with the change_id into the change_id_cache.
        The newly generated change_id is returned.
        """
        return int(
            await self.eval(
                "add_changed_elements",
                keys=[self.get_full_data_cache_key(), self.get_change_id_cache_key()],
                args=[
                    default_change_id,
                    len(changed_elements),
                    len(deleted_element_ids),
                    *(changed_elements + deleted_element_ids),
                ],
            )
        )

    @ensure_cache_wrapper()
    async def get_data_since(
        self, change_id: int, max_change_id: int = -1
    ) -> Tuple[Dict[str, List[bytes]], List[str]]:
        """
        Returns all elements since a change_id (included) and until the max_change_id (included).

        The returned value is a two-element tuple. The first value is a dict of the
        elements, where the key is the collection_string and the value is a list of
        (json-)encoded elements. The second element is a list of element_ids that
        have been deleted since the change_id.
        """
        changed_elements: Dict[str, List[bytes]] = defaultdict(list)
        deleted_elements: List[str] = []

        # Convert max_change_id to a string. If it is negative, use the string '+inf'.
        redis_max_change_id = "+inf" if max_change_id < 0 else str(max_change_id)
        # The lua script gets all element_ids from the change_id_cache_key and looks
        # each of them up in the full_data cache. It returns a list where the odd
        # entries are the element_ids and the even entries the elements as json. The
        # function wait_make_dict creates a python dict from the returned list.
        elements: Dict[bytes, Optional[bytes]] = await aioredis.util.wait_make_dict(
            self.eval(
                "get_data_since",
                keys=[self.get_full_data_cache_key(), self.get_change_id_cache_key()],
                args=[change_id, redis_max_change_id],
            )
        )

        for element_id, element_json in elements.items():
            if element_id.startswith(b"_config"):
                # Ignore config values from the change_id cache key
                continue
            if element_json is None:
                # The element is not in the cache. It has to be deleted.
                deleted_elements.append(element_id.decode())
            else:
                collection_string, id = split_element_id(element_id)
                changed_elements[collection_string].append(element_json)
        return changed_elements, deleted_elements
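    # A sketch of the return shape (element_ids and payloads are hypothetical):
    #     changed, deleted = await provider.get_data_since(10)
    #     # changed == {"motions/motion": [b"{...}"], "topics/topic": [b"{...}"]}
    #     # deleted == ["users/user:3"]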

    async def set_lock(self, lock_name: str) -> bool:
        """
        Tries to set a lock.

        Returns True when the lock could be set and False if it was already set.
        """
        # TODO: Improve lock. See: https://redis.io/topics/distlock
        async with get_connection() as redis:
            return await redis.setnx(f"{self.prefix}lock_{lock_name}", 1)

    async def get_lock(self, lock_name: str) -> bool:
        """
        Returns True when the lock is set, else False.
        """
        async with get_connection() as redis:
            return await redis.get(f"{self.prefix}lock_{lock_name}")

    async def del_lock(self, lock_name: str) -> None:
        """
        Deletes the lock. Does nothing when the lock is not set.
        """
        async with get_connection() as redis:
            await redis.delete(f"{self.prefix}lock_{lock_name}")
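    # A minimal usage sketch for the lock primitives (the lock name is hypothetical);
    # callers are expected to release the lock themselves:
    #     if await provider.set_lock("restart"):
    #         try:
    #             ...  # critical section
    #         finally:
    #             await provider.del_lock("restart")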

    async def get_current_change_id(self) -> Optional[int]:
        """
        Get the highest change_id from redis.
        """
        async with get_connection() as redis:
            value = await redis.zrevrangebyscore(
                self.get_change_id_cache_key(), withscores=True, count=1, offset=0
            )
            # Return the score (second element) of the first (and only) element, if it exists.
            return value[0][1] if value else None

    async def get_lowest_change_id(self) -> Optional[int]:
        """
        Get the lowest change_id from redis.

        Returns None if the lowest score does not exist.
        """
        async with get_connection() as redis:
            return await redis.zscore(
                self.get_change_id_cache_key(), "_config:lowest_change_id"
            )

    async def get_schema_version(self) -> Optional[SchemaVersion]:
        """ Retrieves the schema version of the cache, or None if it does not exist. """
        async with get_connection() as redis:
            schema_version = await redis.hgetall(self.get_schema_cache_key())
            if not schema_version:
                return None

            return {
                "migration": int(schema_version[b"migration"].decode()),
                "config": int(schema_version[b"config"].decode()),
                "db": schema_version[b"db"].decode(),
            }

    async def set_schema_version(self, schema_version: SchemaVersion) -> None:
        """ Sets the schema version for this cache. """
        async with get_connection() as redis:
            await redis.hmset_dict(self.get_schema_cache_key(), schema_version)
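    # The expected shape of a SchemaVersion value, as read back by
    # get_schema_version above (the concrete values are hypothetical):
    #     await provider.set_schema_version(
    #         {"migration": 42, "config": 7, "db": "some-db-identifier"}
    #     )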

    async def eval(
        self, script_name: str, keys: List[str] = [], args: List[Any] = []
    ) -> Any:
        """
        Runs a lua script in redis. This wrapper around redis.eval tries to make
        use of the redis script cache. First the hash is sent to the server, and if
        the script is not present there (NOSCRIPT error), the actual script is sent.

        If the script uses the ensure_cache prefix, the first key must be the full_data
        cache key. This is checked here.

        Also, this method includes the custom "CacheReset" error, which is raised in
        python if the lua script returns a "cache_reset" string as an error response.
        """
        hash = self._script_hashes[script_name]
        if (
            self.scripts[script_name][1]
            and not keys[0] == self.get_full_data_cache_key()
        ):
            raise ImproperlyConfigured(
                "A script with an ensure_cache prefix must have the full_data cache key as its first key"
            )

        async with get_connection() as redis:
            try:
                return await redis.evalsha(hash, keys, args)
            except aioredis.errors.ReplyError as e:
                if str(e).startswith("NOSCRIPT"):
                    return await self._eval(redis, script_name, keys=keys, args=args)
                elif str(e) == "cache_reset":
                    raise CacheReset()
                else:
                    raise e

    async def _eval(
        self, redis: Any, script_name: str, keys: List[str] = [], args: List[Any] = []
    ) -> Any:
        """ Do a real eval of the script (no hash used here). Catches "cache_reset". """
        try:
            return await redis.eval(self.scripts[script_name][0], keys, args)
        except aioredis.errors.ReplyError as e:
            if str(e) == "cache_reset":
                raise CacheReset()
            else:
                raise e


class MemmoryCacheProvider:
    """
    CacheProvider for the ElementCache that uses only the memory.

    See the RedisCacheProvider for a description of the methods.

    This provider supports only one process. It saves the data in memory.
    When you use different processes, they will use different data.

    For this reason, ensure_cache is not used, and get_schema_version always
    returns an invalid schema so that the cache is always rebuilt.
    """

    def __init__(self, ensure_cache: Callable[[], Coroutine[Any, Any, None]]) -> None:
        self.set_data_dicts()

    def set_data_dicts(self) -> None:
        self.full_data: Dict[str, str] = {}
        self.change_id_data: Dict[int, Set[str]] = {}
        self.locks: Dict[str, str] = {}

    async def ensure_cache(self) -> None:
        pass

    async def clear_cache(self) -> None:
        self.set_data_dicts()

    async def reset_full_cache(self, data: Dict[str, str]) -> None:
        self.change_id_data = {}
        self.full_data = data

    async def data_exists(self) -> bool:
        return bool(self.full_data)

    async def get_all_data(self) -> Dict[bytes, bytes]:
        return str_dict_to_bytes(self.full_data)

    async def get_collection_data(self, collection: str) -> Dict[int, bytes]:
        out = {}
        query = f"{collection}:"
        for element_id, value in self.full_data.items():
            if element_id.startswith(query):
                _, id = split_element_id(element_id)
                out[id] = value.encode()
        return out

    async def get_element_data(self, element_id: str) -> Optional[bytes]:
        value = self.full_data.get(element_id, None)
        return value.encode() if value is not None else None

    async def add_changed_elements(
        self,
        changed_elements: List[str],
        deleted_element_ids: List[str],
        default_change_id: int,
    ) -> int:
        current_change_id = await self.get_current_change_id()
        if current_change_id is None:
            change_id = default_change_id
        else:
            change_id = current_change_id + 1

        # changed_elements is a flat list of alternating element_ids and elements.
        for i in range(0, len(changed_elements), 2):
            element_id = changed_elements[i]
            self.full_data[element_id] = changed_elements[i + 1]

            if change_id in self.change_id_data:
                self.change_id_data[change_id].add(element_id)
            else:
                self.change_id_data[change_id] = {element_id}

        for element_id in deleted_element_ids:
            try:
                del self.full_data[element_id]
            except KeyError:
                pass
            if change_id in self.change_id_data:
                self.change_id_data[change_id].add(element_id)
            else:
                self.change_id_data[change_id] = {element_id}

        return change_id

    async def get_data_since(
        self, change_id: int, max_change_id: int = -1
    ) -> Tuple[Dict[str, List[bytes]], List[str]]:
        changed_elements: Dict[str, List[bytes]] = defaultdict(list)
        deleted_elements: List[str] = []

        all_element_ids: Set[str] = set()
        for data_change_id, element_ids in self.change_id_data.items():
            if data_change_id >= change_id and (
                max_change_id == -1 or data_change_id <= max_change_id
            ):
                all_element_ids.update(element_ids)

        for element_id in all_element_ids:
            element_json = self.full_data.get(element_id, None)
            if element_json is None:
                deleted_elements.append(element_id)
            else:
                collection_string, id = split_element_id(element_id)
                changed_elements[collection_string].append(element_json.encode())
        return changed_elements, deleted_elements

    async def set_lock(self, lock_name: str) -> bool:
        if lock_name in self.locks:
            return False
        self.locks[lock_name] = "1"
        return True

    async def get_lock(self, lock_name: str) -> bool:
        return lock_name in self.locks

    async def del_lock(self, lock_name: str) -> None:
        try:
            del self.locks[lock_name]
        except KeyError:
            pass

    async def get_current_change_id(self) -> Optional[int]:
        change_data = self.change_id_data
        if change_data:
            return max(change_data.keys())
        return None

    async def get_lowest_change_id(self) -> Optional[int]:
        change_data = self.change_id_data
        if change_data:
            return min(change_data.keys())
        return None

    async def get_schema_version(self) -> Optional[SchemaVersion]:
        return None

    async def set_schema_version(self, schema_version: SchemaVersion) -> None:
        pass


class Cachable(Protocol):
    """
    A Cachable is an object that returns elements that can be cached.

    It needs at least the methods defined here.
    """

    def get_collection_string(self) -> str:
        """
        Returns the string representing the name of the cachable.
        """

    def get_elements(self) -> List[Dict[str, Any]]:
        """
        Returns all elements of the cachable.
        """

    async def restrict_elements(
        self, user_id: int, elements: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """
        Converts full_data to restricted_data.

        elements can be an empty list, a list with some elements of the cachable or with all
        elements of the cachable.
        """