2018-11-04 14:02:30 +01:00
|
|
|
import itertools
|
2018-01-20 13:57:25 +01:00
|
|
|
import threading
|
2018-07-09 23:22:26 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
|
2015-01-18 15:53:03 +01:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
from asgiref.sync import async_to_sync
|
|
|
|
from channels.layers import get_channel_layer
|
2017-08-24 12:26:55 +02:00
|
|
|
from django.db.models import Model
|
2018-11-03 23:40:20 +01:00
|
|
|
from mypy_extensions import TypedDict
|
2016-01-10 00:17:00 +01:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
from .cache import element_cache, get_element_id
|
2018-12-23 11:05:38 +01:00
|
|
|
from .projector import get_projectot_data
|
2019-01-19 14:02:13 +01:00
|
|
|
from .utils import get_model_from_collection_string
|
2018-11-03 23:40:20 +01:00
|
|
|
|
|
|
|
|
2019-01-19 14:02:13 +01:00
|
|
|
class ElementBase(TypedDict):
    """
    Required fields of an autoupdate element.
    """

    # Rest-API primary key of the element (see get_rest_pk() in the callers).
    id: int
    # Collection the element belongs to.
    collection_string: str
    # Serialized data of the element; None marks the element as deleted
    # (see inform_deleted_data).
    full_data: Optional[Dict[str, Any]]
|
|
|
|
|
|
|
|
|
|
|
|
class Element(ElementBase, total=False):
    """
    Autoupdate element with optional keys (total=False makes every key
    declared here optional).
    """

    # History information text passed through from the inform_* functions.
    information: str
    # Id of the user that caused the change; may be None.
    user_id: Optional[int]
    # When set, the element is excluded from history creation (set for
    # history elements themselves in handle_changed_elements).
    disable_history: bool
    # When set, handle_changed_elements re-fetches full_data from the database.
    reload: bool
|
|
|
|
|
2018-11-03 23:40:20 +01:00
|
|
|
|
|
|
|
class AutoupdateFormat(TypedDict):
    """
    Format of one autoupdate message.

    Maps changed and deleted elements per collection and carries the change id
    range the message covers.
    """

    changed: Dict[str, List[Dict[str, Any]]]
    deleted: Dict[str, List[int]]
    from_change_id: int
    to_change_id: int
    all_data: bool
|
2016-09-18 16:00:31 +02:00
|
|
|
|
2016-05-29 08:29:14 +02:00
|
|
|
|
2018-11-04 14:02:30 +01:00
|
|
|
def inform_changed_data(
    instances: Union[Iterable[Model], Model],
    information: str = "",
    user_id: Optional[int] = None,
) -> None:
    """
    Informs the autoupdate system and the caching system about the creation or
    update of an element.

    The argument instances can be one instance or an iterable over instances.

    History creation is enabled.
    """
    # Accept a single instance as well as an iterable of instances.
    if not isinstance(instances, Iterable):
        instances = (instances,)

    # Resolve every instance to its root rest element. Objects without the
    # get_root_rest_element hook are silently skipped.
    root_instances = set()
    for candidate in instances:
        try:
            root_instances.add(candidate.get_root_rest_element())
        except AttributeError:
            continue

    elements: Dict[str, Element] = {}
    for root in root_instances:
        collection_string = root.get_collection_string()
        pk = root.get_rest_pk()
        elements[collection_string + str(pk)] = Element(
            id=pk,
            collection_string=collection_string,
            full_data=root.get_full_data(),
            information=information,
            user_id=user_id,
        )

    bundle = autoupdate_bundle.get(threading.get_ident())
    if bundle is None:
        # No open bundle for this thread: send the autoupdate right away.
        handle_changed_elements(elements.values())
    else:
        # Collect the elements in the thread's autoupdate bundle instead.
        bundle.update(elements)
|
2015-01-17 14:01:44 +01:00
|
|
|
|
2016-09-30 20:42:58 +02:00
|
|
|
|
2018-11-04 14:02:30 +01:00
|
|
|
def inform_deleted_data(
    deleted_elements: Iterable[Tuple[str, int]],
    information: str = "",
    user_id: Optional[int] = None,
) -> None:
    """
    Informs the autoupdate system and the caching system about the deletion of
    elements.

    History creation is enabled.
    """
    elements: Dict[str, Element] = {}
    for collection_string, element_id in deleted_elements:
        # full_data=None marks the element as deleted.
        elements[collection_string + str(element_id)] = Element(
            id=element_id,
            collection_string=collection_string,
            full_data=None,
            information=information,
            user_id=user_id,
        )

    bundle = autoupdate_bundle.get(threading.get_ident())
    if bundle is None:
        # No open bundle for this thread: send the autoupdate right away.
        handle_changed_elements(elements.values())
    else:
        # Collect the elements in the thread's autoupdate bundle instead.
        bundle.update(elements)
|
2016-02-11 11:29:19 +01:00
|
|
|
|
|
|
|
|
2018-11-03 23:40:20 +01:00
|
|
|
def inform_changed_elements(changed_elements: Iterable[Element]) -> None:
    """
    Informs the autoupdate system about some elements. This is used just to send
    some data to all users.

    If you want to save history information, user id or disable history you
    have to put information or flag inside the elements.
    """
    # Key each element by collection string plus id, as the other inform_*
    # functions do.
    elements = {
        element["collection_string"] + str(element["id"]): element
        for element in changed_elements
    }

    bundle = autoupdate_bundle.get(threading.get_ident())
    if bundle is None:
        # No open bundle for this thread: send the autoupdate right away.
        handle_changed_elements(elements.values())
    else:
        # Collect the elements in the thread's autoupdate bundle instead.
        bundle.update(elements)
|
2018-01-20 13:57:25 +01:00
|
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
Global container for autoupdate bundles
|
|
|
|
"""
|
2018-11-03 23:40:20 +01:00
|
|
|
autoupdate_bundle: Dict[int, Dict[str, Element]] = {}
|
2018-01-20 13:57:25 +01:00
|
|
|
|
|
|
|
|
|
|
|
class AutoupdateBundleMiddleware:
    """
    Middleware to handle autoupdate bundling.

    Opens an empty bundle for the current thread before the view runs, so
    that inform_* calls collect their elements there, and flushes the bundle
    as one autoupdate after the response was created.
    """

    def __init__(self, get_response: Any) -> None:
        # One-time configuration and initialization.
        self.get_response = get_response

    def __call__(self, request: Any) -> Any:
        ident = threading.get_ident()
        # Open an empty bundle for this thread.
        autoupdate_bundle[ident] = {}

        response = self.get_response(request)

        # Close the bundle and send everything collected in it at once.
        bundle: Dict[str, Element] = autoupdate_bundle.pop(ident)
        handle_changed_elements(bundle.values())
        return response
|
2017-03-06 16:34:20 +01:00
|
|
|
|
|
|
|
|
2018-11-03 23:40:20 +01:00
|
|
|
def handle_changed_elements(elements: Iterable[Element]) -> None:
    """
    Helper function, that sends elements through a channel to the
    autoupdate system and updates the cache.

    Does nothing if elements is empty.
    """

    async def update_cache(elements: Iterable[Element]) -> int:
        """
        Async helper function to update the cache.

        Returns the change_id.
        """
        cache_elements: Dict[str, Optional[Dict[str, Any]]] = {}
        for element in elements:
            element_id = get_element_id(element["collection_string"], element["id"])
            # full_data is None for deleted elements.
            cache_elements[element_id] = element["full_data"]
        return await element_cache.change_elements(cache_elements)

    async def async_handle_collection_elements(elements: Iterable[Element]) -> None:
        """
        Async helper function to update cache and send autoupdate.
        """
        # Update cache
        change_id = await update_cache(elements)

        # Send autoupdate
        channel_layer = get_channel_layer()
        await channel_layer.group_send(
            "autoupdate", {"type": "send_data", "change_id": change_id}
        )

        # Send projector data.
        # NOTE(review): get_projectot_data is the (misspelled) name exported
        # by .projector — confirm before renaming it here.
        projector_data = await get_projectot_data()
        await channel_layer.group_send(
            "projector", {"type": "projector_changed", "data": projector_data}
        )

    # Materialize the iterable once: it is iterated several times below and
    # the emptiness check would always be true for a lazy generator.
    elements = list(elements)
    if elements:
        for element in elements:
            if element.get("reload"):
                # Re-fetch the element's full data from the database.
                model = get_model_from_collection_string(element["collection_string"])
                instance = model.objects.get(pk=element["id"])
                element["full_data"] = instance.get_full_data()

        # Save history here using sync code.
        history_instances = save_history(elements)

        # Convert history instances to Elements.
        history_elements: List[Element] = [
            Element(
                id=history_instance.get_rest_pk(),
                collection_string=history_instance.get_collection_string(),
                full_data=history_instance.get_full_data(),
                disable_history=True,  # This does not matter because history elements can never be part of the history itself.
            )
            for history_instance in history_instances
        ]

        # Update cache and send autoupdate using async code; the history
        # elements are chained behind the changed elements.
        async_to_sync(async_handle_collection_elements)(
            itertools.chain(elements, history_elements)
        )
|
|
|
|
|
|
|
|
|
2019-01-19 14:02:13 +01:00
|
|
|
def save_history(elements: Iterable[Element]) -> Iterable:
    # TODO: Try to write Iterable[History] here
    """
    Thin wrapper around the call of history saving manager method.

    This is separated to patch it during tests.
    """
    # Local import — presumably to avoid a circular import between this
    # module and openslides.core.models. NOTE(review): confirm.
    from ..core.models import History

    return History.objects.add_elements(elements)
|