2020-05-15 18:24:21 +02:00
|
|
|
import json
|
2018-01-20 13:57:25 +01:00
|
|
|
import threading
|
2019-11-04 14:56:01 +01:00
|
|
|
from collections import defaultdict
|
2018-07-09 23:22:26 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
|
2015-01-18 15:53:03 +01:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
from asgiref.sync import async_to_sync
|
|
|
|
from channels.layers import get_channel_layer
|
2017-08-24 12:26:55 +02:00
|
|
|
from django.db.models import Model
|
2018-11-03 23:40:20 +01:00
|
|
|
from mypy_extensions import TypedDict
|
2016-01-10 00:17:00 +01:00
|
|
|
|
2020-10-05 12:07:04 +02:00
|
|
|
from .auth import UserDoesNotExist
|
2020-05-15 18:24:21 +02:00
|
|
|
from .cache import ChangeIdTooLowError, element_cache, get_element_id
|
2019-01-10 15:06:10 +01:00
|
|
|
from .projector import get_projector_data
|
2020-05-15 18:24:21 +02:00
|
|
|
from .timing import Timing
|
|
|
|
from .utils import get_model_from_collection_string, is_iterable, split_element_id
|
|
|
|
|
|
|
|
|
|
|
|
# Wire format of one autoupdate payload sent to clients:
# "changed" maps collection strings to lists of full_data dicts,
# "deleted" maps collection strings to lists of deleted element ids.
# "all_data" is True when the payload contains the complete data set
# (see _get_autoupdate_data's ChangeIdTooLowError fallback).
AutoupdateFormat = TypedDict(
    "AutoupdateFormat",
    {
        "changed": Dict[str, List[Dict[str, Any]]],
        "deleted": Dict[str, List[int]],
        "from_change_id": int,
        "to_change_id": int,
        "all_data": bool,
    },
)
|
2018-11-03 23:40:20 +01:00
|
|
|
|
|
|
|
|
2019-11-04 14:56:01 +01:00
|
|
|
class AutoupdateElementBase(TypedDict):
    # Required keys identifying one root rest element: the numeric id and
    # the collection string of the model. All optional keys live in
    # AutoupdateElement (total=False).
    id: int
    collection_string: str
|
|
|
|
|
|
|
|
|
2019-11-04 14:56:01 +01:00
|
|
|
class AutoupdateElement(AutoupdateElementBase, total=False):
    """
    Data container to handle one root rest element for the autoupdate, history
    and caching process.

    The fields `id` and `collection_string` are required to identify the element. All
    other fields are optional:

    full_data: If a value is given (dict or None), it won't be loaded from the DB.
    If otherwise no value is given, the AutoupdateBundle will try to resolve the object
    from the DB and serialize it into the full_data. A value of None marks the
    element as deleted.

    information and user_id: These fields are for the history, indicating what and who
    made changes.

    disable_history: If this is True, the element (and the containing full_data) won't
    be saved into the history. Information and user_id are then irrelevant.

    no_delete_on_restriction is a flag, which is saved into the models in the cache
    as the _no_delete_on_restriction key. If this is true, there should neither be an
    entry for one specific model in the changed *nor the deleted* part of the
    autoupdate, if the model was restricted.
    """

    information: List[str]
    user_id: Optional[int]
    disable_history: bool
    no_delete_on_restriction: bool
    full_data: Optional[Dict[str, Any]]
|
|
|
|
|
|
|
|
|
|
|
|
class AutoupdateBundle:
    """
    Collects changed elements via inform*_data. After the collecting-step is finished,
    the bundle releases all changes to the history and element cache via `.done()`.
    """

    def __init__(self) -> None:
        # Two-level index: collection_string -> {element id -> AutoupdateElement}.
        # The inner dict makes `add` idempotent per (collection, id): a later
        # element for the same model overwrites the earlier one.
        self.autoupdate_elements: Dict[str, Dict[int, AutoupdateElement]] = defaultdict(
            dict
        )

    def add(self, elements: Iterable[AutoupdateElement]) -> None:
        """ Adds the elements to the bundle """
        for element in elements:
            self.autoupdate_elements[element["collection_string"]][
                element["id"]
            ] = element

    def done(self) -> Optional[int]:
        """
        Finishes the bundle by resolving all missing data and passing it to
        the history and element cache.

        Returns the change id, if there are autoupdate elements. Otherwise none.
        """
        if not self.autoupdate_elements:
            return None

        for collection, elements in self.autoupdate_elements.items():
            # Get all ids, that do not have a full_data key
            # (element["full_data"]=None will not be resolved again!)
            ids = [
                element["id"]
                for element in elements.values()
                if "full_data" not in element
            ]
            if ids:
                # Get all missing models. If e.g. an id could not be found it
                # means, it was deleted. Since there is no full_data entry
                # for the element, the data will be interpreted as None, which
                # is correct for deleted elements.
                model_class = get_model_from_collection_string(collection)
                for full_data in model_class.get_elements(ids):
                    elements[full_data["id"]]["full_data"] = full_data

        # Save history here using sync code.
        save_history(self.element_iterator)

        # Update cache and send autoupdate using async code.
        change_id = async_to_sync(self.dispatch_autoupdate)()

        return change_id

    @property
    def element_iterator(self) -> Iterable[AutoupdateElement]:
        """ Iterator for all elements in this bundle """
        # Note: as a property this returns a *fresh* generator on every
        # access, so both done() and update_cache() can iterate independently.
        for elements in self.autoupdate_elements.values():
            yield from elements.values()

    async def update_cache(self) -> int:
        """
        Async helper function to update the cache.

        Returns the change_id
        """
        cache_elements: Dict[str, Optional[Dict[str, Any]]] = {}
        for element in self.element_iterator:
            element_id = get_element_id(element["collection_string"], element["id"])
            full_data = element.get("full_data")
            if full_data:
                # Persist the restriction flag alongside the model data; the
                # cache consumers read it back as _no_delete_on_restriction.
                full_data["_no_delete_on_restriction"] = element.get(
                    "no_delete_on_restriction", False
                )
            # full_data may be None here (deleted element) -- stored as-is.
            cache_elements[element_id] = full_data
        return await element_cache.change_elements(cache_elements)

    async def dispatch_autoupdate(self) -> int:
        """
        Async helper function to update cache and send autoupdate.

        Return the change_id
        """
        # Update cache
        change_id = await self.update_cache()

        # Send autoupdate
        channel_layer = get_channel_layer()
        await channel_layer.group_send(
            "autoupdate", {"type": "msg_new_change_id", "change_id": change_id}
        )

        # Send projector
        projector_data = await get_projector_data()
        channel_layer = get_channel_layer()
        await channel_layer.group_send(
            "projector",
            {
                "type": "msg_projector_data",
                "data": projector_data,
                "change_id": change_id,
            },
        )

        return change_id
|
|
|
|
|
2016-05-29 08:29:14 +02:00
|
|
|
|
2018-11-04 14:02:30 +01:00
|
|
|
def inform_changed_data(
    instances: Union[Iterable[Model], Model],
    information: Optional[List[str]] = None,
    user_id: Optional[int] = None,
    disable_history: bool = False,
    no_delete_on_restriction: bool = False,
    final_data: bool = False,
) -> None:
    """
    Informs the autoupdate system and the caching system about the creation or
    update of an element.

    The argument instances can be one instance or an iterable over instances.

    information and user_id are forwarded to the history.
    disable_history: skip saving these elements into the history.
    no_delete_on_restriction: forwarded into each AutoupdateElement.
    final_data: if True, the full_data is taken from the instance directly
        instead of being resolved from the DB by the AutoupdateBundle.

    History creation is enabled.
    """
    if information is None:
        information = []
    if not is_iterable(instances):
        instances = (instances,)

    # Deduplicate: several instances may map to the same root rest element.
    root_instances = {instance.get_root_rest_element() for instance in instances}

    elements = []
    for root_instance in root_instances:
        element = AutoupdateElement(
            id=root_instance.get_rest_pk(),
            collection_string=root_instance.get_collection_string(),
            disable_history=disable_history,
            information=information,
            user_id=user_id,
            no_delete_on_restriction=no_delete_on_restriction,
        )
        if final_data:
            element["full_data"] = root_instance.get_full_data()
        elements.append(element)
    inform_elements(elements)
|
2015-01-17 14:01:44 +01:00
|
|
|
|
2016-09-30 20:42:58 +02:00
|
|
|
|
2018-11-04 14:02:30 +01:00
|
|
|
def inform_deleted_data(
    deleted_elements: Iterable[Tuple[str, int]],
    information: Optional[List[str]] = None,
    user_id: Optional[int] = None,
) -> None:
    """
    Informs the autoupdate system and the caching system about the deletion of
    elements.

    deleted_elements: iterable of (collection_string, id) pairs.
    information and user_id are forwarded to the history.

    History creation is enabled.
    """
    if information is None:
        information = []

    # full_data=None marks the element as deleted for the cache/autoupdate.
    elements = [
        AutoupdateElement(
            id=element_id,
            collection_string=collection_string,
            full_data=None,
            information=information,
            user_id=user_id,
        )
        for collection_string, element_id in deleted_elements
    ]
    inform_elements(elements)
|
2016-02-11 11:29:19 +01:00
|
|
|
|
|
|
|
|
2019-11-04 14:56:01 +01:00
|
|
|
def inform_elements(elements: Iterable[AutoupdateElement]) -> None:
    """
    Informs the autoupdate system about some elements. This is used just to send
    some data to all users.

    If you want to save history information, user id or disable history you
    have to put information or flag inside the elements.
    """
    thread_bundle = autoupdate_bundle.get(threading.get_ident())
    if thread_bundle is not None:
        # A middleware-managed bundle exists for this thread: just collect
        # the elements; the middleware will release them later.
        thread_bundle.add(elements)
        return

    # No bundle for this thread: flush immediately via a one-off bundle.
    one_off_bundle = AutoupdateBundle()
    one_off_bundle.add(elements)
    one_off_bundle.done()
|
2018-01-20 13:57:25 +01:00
|
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
Global container for autoupdate bundles
|
|
|
|
"""
|
2019-11-04 14:56:01 +01:00
|
|
|
autoupdate_bundle: Dict[int, AutoupdateBundle] = {}
|
2018-01-20 13:57:25 +01:00
|
|
|
|
|
|
|
|
|
|
|
class AutoupdateBundleMiddleware:
    """
    Middleware to handle autoupdate bundling.

    Opens one AutoupdateBundle per request (keyed by the handling thread's id)
    so that all inform_elements calls during the request are collected and
    released together after the view has run.
    """

    def __init__(self, get_response: Any) -> None:
        self.get_response = get_response
        # One-time configuration and initialization.

    def __call__(self, request: Any) -> Any:
        # Register a fresh bundle for this thread before the view runs.
        thread_id = threading.get_ident()
        autoupdate_bundle[thread_id] = AutoupdateBundle()

        timing = Timing("request")

        response = self.get_response(request)

        timing()

        status_ok = response.status_code >= 200 and response.status_code < 300
        status_redirect = response.status_code >= 300 and response.status_code < 400

        # rewrite the response by adding the autoupdate on any success-case (2xx status)
        # The bundle is always popped, even on error responses, so the thread
        # never keeps a stale bundle.
        bundle: AutoupdateBundle = autoupdate_bundle.pop(thread_id)
        if status_ok or status_redirect:
            change_id = bundle.done()

            # inject the autoupdate, if there is an autoupdate and the status is
            # ok (and not redirect; redirects do not have a useful content)
            if change_id is not None and status_ok:
                user_id = request.user.pk or 0
                # Inject the autoupdate in the response.
                # The complete response body will be overwritten!
                _, autoupdate = async_to_sync(get_autoupdate_data)(change_id, user_id)
                content = {"autoupdate": autoupdate, "data": response.data}
                # Note: autoupdate may be none on skipped ones (which should not happen
                # since the user has made the request....)
                response.content = json.dumps(content)

        timing(True)
        return response
|
2017-03-06 16:34:20 +01:00
|
|
|
|
|
|
|
|
2020-05-15 18:24:21 +02:00
|
|
|
async def get_autoupdate_data(
    from_change_id: int, user_id: int
) -> Tuple[int, Optional[AutoupdateFormat]]:
    """
    Returns the max change id and the autoupdate data for the given user from
    from_change_id up to the max change id.

    If the user does not exist (anymore), (0, None) is returned instead of
    letting UserDoesNotExist propagate.
    """
    try:
        return await _get_autoupdate_data(from_change_id, user_id)
    except UserDoesNotExist:
        return 0, None
|
|
|
|
|
|
|
|
|
|
|
|
async def _get_autoupdate_data(
    from_change_id: int, user_id: int
) -> Tuple[int, Optional[AutoupdateFormat]]:
    """
    Returns the max_change_id and the autoupdate from from_change_id to max_change_id
    """
    try:
        (
            max_change_id,
            changed_elements,
            deleted_element_ids,
        ) = await element_cache.get_data_since(user_id, from_change_id)
    except ChangeIdTooLowError:
        # The change_id is lower than the lowest change_id in redis. Return all data
        (
            max_change_id,
            changed_elements,
        ) = await element_cache.get_all_data_list_with_max_change_id(user_id)
        deleted_elements: Dict[str, List[int]] = {}
        all_data = True
    else:
        all_data = False
        # Group deleted element ids by their collection string.
        deleted_elements = defaultdict(list)
        for element_id in deleted_element_ids:
            collection_string, id = split_element_id(element_id)
            deleted_elements[collection_string].append(id)

    # Check, if the autoupdate has any data.
    if not changed_elements and not deleted_element_ids:
        # Skip empty updates
        return max_change_id, None
    else:
        # Normal autoupdate with data
        return (
            max_change_id,
            AutoupdateFormat(
                changed=changed_elements,
                deleted=deleted_elements,
                from_change_id=from_change_id,
                to_change_id=max_change_id,
                all_data=all_data,
            ),
        )
|
|
|
|
|
|
|
|
|
2020-05-15 11:47:43 +02:00
|
|
|
def save_history(element_iterator: Iterable[AutoupdateElement]) -> Iterable:
    """
    Thin wrapper around the call of history saving manager method.

    This is separated to patch it during tests.
    """
    # Imported at call time, presumably to avoid an import cycle with
    # ..core.models -- TODO confirm before moving to module level.
    from ..core.models import History

    return History.objects.add_elements(element_iterator)
|