2019-05-06 13:00:40 +02:00
|
|
|
import time
|
2018-10-14 08:26:51 +02:00
|
|
|
from collections import defaultdict
|
2019-07-29 15:19:59 +02:00
|
|
|
from typing import Any, Dict, List, Optional
|
2018-10-19 16:32:48 +02:00
|
|
|
from urllib.parse import parse_qs
|
2018-07-09 23:22:26 +02:00
|
|
|
|
2019-07-29 15:19:59 +02:00
|
|
|
from ..utils.websocket import WEBSOCKET_CHANGE_ID_TOO_HIGH
|
2019-08-29 14:25:02 +02:00
|
|
|
from . import logging
|
2018-11-03 23:40:20 +01:00
|
|
|
from .auth import async_anonymous_is_enabled
|
|
|
|
from .autoupdate import AutoupdateFormat
|
2019-09-02 08:50:22 +02:00
|
|
|
from .cache import ChangeIdTooLowError, element_cache, split_element_id
|
2019-05-06 13:00:40 +02:00
|
|
|
from .utils import get_worker_id
|
2019-07-29 15:19:59 +02:00
|
|
|
from .websocket import ProtocollAsyncJsonWebsocketConsumer
|
2018-08-22 16:50:23 +02:00
|
|
|
|
|
|
|
|
2019-05-06 13:00:40 +02:00
|
|
|
# Module-level logger; connection lifecycle events below are logged at DEBUG.
logger = logging.getLogger("openslides.websocket")
|
|
|
|
|
|
|
|
|
2018-08-22 16:50:23 +02:00
|
|
|
class SiteConsumer(ProtocollAsyncJsonWebsocketConsumer):
    """
    Websocket Consumer for the site.
    """

    # Channel-layer groups every site consumer belongs to.
    groups = ["site"]

    # Class-wide counter; incremented once per created consumer in __init__.
    ID_COUNTER = 0
    """
    ID counter for assigning each instance of this class an unique id.
    """

    # Holds the from_change_id of the first autoupdate that was skipped
    # because it carried no data; None when no autoupdate has been skipped.
    # Consumed and reset by send_autoupdate.
    skipped_autoupdate_from_change_id: Optional[int] = None
|
|
|
|
|
2018-12-23 11:05:38 +01:00
|
|
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
|
|
self.projector_hash: Dict[int, int] = {}
|
2019-05-06 13:00:40 +02:00
|
|
|
SiteConsumer.ID_COUNTER += 1
|
|
|
|
self._id = get_worker_id() + "-" + str(SiteConsumer.ID_COUNTER)
|
2018-12-23 11:05:38 +01:00
|
|
|
super().__init__(*args, **kwargs)
|
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
async def connect(self) -> None:
|
|
|
|
"""
|
|
|
|
A user connects to the site.
|
|
|
|
|
|
|
|
If it is an anonymous user and anonymous is disabled, the connection is closed.
|
|
|
|
|
|
|
|
Sends the startup data to the user.
|
|
|
|
"""
|
2019-05-06 13:00:40 +02:00
|
|
|
self.connect_time = time.time()
|
2018-11-03 23:40:20 +01:00
|
|
|
# self.scope['user'] is the full_data dict of the user. For an
|
|
|
|
# anonymous user is it the dict {'id': 0}
|
2018-10-19 16:32:48 +02:00
|
|
|
change_id = None
|
2019-01-06 16:22:33 +01:00
|
|
|
if not await async_anonymous_is_enabled() and not self.scope["user"]["id"]:
|
2019-01-24 16:47:53 +01:00
|
|
|
await self.accept() # workaround for #4009
|
2018-07-09 23:22:26 +02:00
|
|
|
await self.close()
|
2019-05-06 13:00:40 +02:00
|
|
|
logger.debug(f"connect: denied ({self._id})")
|
2018-10-19 16:32:48 +02:00
|
|
|
return
|
|
|
|
|
2019-01-06 16:22:33 +01:00
|
|
|
query_string = parse_qs(self.scope["query_string"])
|
|
|
|
if b"change_id" in query_string:
|
2018-10-19 16:32:48 +02:00
|
|
|
try:
|
2019-01-06 16:22:33 +01:00
|
|
|
change_id = int(query_string[b"change_id"][0])
|
2018-10-19 16:32:48 +02:00
|
|
|
except ValueError:
|
2019-01-24 16:47:53 +01:00
|
|
|
await self.accept() # workaround for #4009
|
2018-10-19 16:32:48 +02:00
|
|
|
await self.close() # TODO: Find a way to send an error code
|
2019-05-06 13:00:40 +02:00
|
|
|
logger.debug(f"connect: wrong change id ({self._id})")
|
2018-10-19 16:32:48 +02:00
|
|
|
return
|
|
|
|
|
2019-01-06 16:22:33 +01:00
|
|
|
if b"autoupdate" in query_string and query_string[b"autoupdate"][
|
|
|
|
0
|
|
|
|
].lower() not in [b"0", b"off", b"false"]:
|
2018-10-19 16:32:48 +02:00
|
|
|
# a positive value in autoupdate. Start autoupdate
|
2019-01-06 16:22:33 +01:00
|
|
|
await self.channel_layer.group_add("autoupdate", self.channel_name)
|
2018-10-19 16:32:48 +02:00
|
|
|
|
|
|
|
await self.accept()
|
2018-11-02 05:10:49 +01:00
|
|
|
|
2018-10-19 16:32:48 +02:00
|
|
|
if change_id is not None:
|
2019-05-06 13:00:40 +02:00
|
|
|
logger.debug(f"connect: change id {change_id} ({self._id})")
|
2019-07-29 15:19:59 +02:00
|
|
|
await self.send_autoupdate(change_id)
|
2019-05-06 13:00:40 +02:00
|
|
|
else:
|
|
|
|
logger.debug(f"connect: no change id ({self._id})")
|
2018-07-09 23:22:26 +02:00
|
|
|
|
2018-10-19 16:32:48 +02:00
|
|
|
async def disconnect(self, close_code: int) -> None:
|
|
|
|
"""
|
|
|
|
A user disconnects. Remove it from autoupdate.
|
|
|
|
"""
|
2019-01-06 16:22:33 +01:00
|
|
|
await self.channel_layer.group_discard("autoupdate", self.channel_name)
|
2019-05-06 13:00:40 +02:00
|
|
|
active_seconds = int(time.time() - self.connect_time)
|
|
|
|
logger.debug(
|
|
|
|
f"disconnect code={close_code} active_secs={active_seconds} ({self._id})"
|
|
|
|
)
|
2018-10-19 16:32:48 +02:00
|
|
|
|
2018-07-09 23:22:26 +02:00
|
|
|
async def send_notify(self, event: Dict[str, Any]) -> None:
|
|
|
|
"""
|
|
|
|
Send a notify message to the user.
|
|
|
|
"""
|
2019-01-06 16:22:33 +01:00
|
|
|
user_id = self.scope["user"]["id"]
|
2018-10-17 18:04:06 +02:00
|
|
|
item = event["incomming"]
|
|
|
|
|
|
|
|
users = item.get("users")
|
|
|
|
reply_channels = item.get("replyChannels")
|
|
|
|
if (
|
|
|
|
(isinstance(users, bool) and users)
|
|
|
|
or (isinstance(users, list) and user_id in users)
|
|
|
|
or (
|
|
|
|
isinstance(reply_channels, list) and self.channel_name in reply_channels
|
|
|
|
)
|
|
|
|
or (users is None and reply_channels is None)
|
|
|
|
):
|
|
|
|
item["senderChannelName"] = event["senderChannelName"]
|
|
|
|
item["senderUserId"] = event["senderUserId"]
|
|
|
|
await self.send_json(type="notify", content=item)
|
2018-07-09 23:22:26 +02:00
|
|
|
|
2019-07-29 15:19:59 +02:00
|
|
|
async def send_autoupdate(
|
|
|
|
self,
|
|
|
|
change_id: int,
|
|
|
|
max_change_id: Optional[int] = None,
|
|
|
|
in_response: Optional[str] = None,
|
|
|
|
) -> None:
|
2018-07-09 23:22:26 +02:00
|
|
|
"""
|
2019-07-29 15:19:59 +02:00
|
|
|
Sends an autoupdate to the client from change_id to max_change_id.
|
|
|
|
If max_change_id is None, the current change id will be used.
|
2018-07-09 23:22:26 +02:00
|
|
|
"""
|
2019-07-29 15:19:59 +02:00
|
|
|
user_id = self.scope["user"]["id"]
|
|
|
|
|
|
|
|
if max_change_id is None:
|
|
|
|
max_change_id = await element_cache.get_current_change_id()
|
|
|
|
|
|
|
|
if change_id == max_change_id + 1:
|
|
|
|
# The client is up-to-date, so nothing will be done
|
|
|
|
return
|
|
|
|
|
|
|
|
if change_id > max_change_id:
|
|
|
|
message = f"Requested change_id {change_id} is higher this highest change_id {max_change_id}."
|
|
|
|
await self.send_error(
|
|
|
|
code=WEBSOCKET_CHANGE_ID_TOO_HIGH,
|
|
|
|
message=message,
|
|
|
|
in_response=in_response,
|
|
|
|
)
|
|
|
|
return
|
|
|
|
|
|
|
|
try:
|
|
|
|
changed_elements, deleted_element_ids = await element_cache.get_data_since(
|
|
|
|
user_id, change_id, max_change_id
|
|
|
|
)
|
2019-09-02 08:50:22 +02:00
|
|
|
except ChangeIdTooLowError:
|
2019-07-29 15:19:59 +02:00
|
|
|
# The change_id is lower the the lowerst change_id in redis. Return all data
|
|
|
|
changed_elements = await element_cache.get_all_data_list(user_id)
|
|
|
|
all_data = True
|
|
|
|
deleted_elements: Dict[str, List[int]] = {}
|
|
|
|
else:
|
|
|
|
all_data = False
|
|
|
|
deleted_elements = defaultdict(list)
|
|
|
|
for element_id in deleted_element_ids:
|
|
|
|
collection_string, id = split_element_id(element_id)
|
|
|
|
deleted_elements[collection_string].append(id)
|
2018-10-14 08:26:51 +02:00
|
|
|
|
2019-09-02 13:57:12 +02:00
|
|
|
# Check, if the autoupdate has any data.
|
|
|
|
if not changed_elements and not deleted_element_ids:
|
|
|
|
# Set the current from_change_id, if it is the first skipped autoupdate
|
|
|
|
if not self.skipped_autoupdate_from_change_id:
|
|
|
|
self.skipped_autoupdate_from_change_id = change_id
|
|
|
|
else:
|
|
|
|
# Normal autoupdate with data
|
|
|
|
from_change_id = change_id
|
|
|
|
|
|
|
|
# If there is at least one skipped autoupdate, take the saved from_change_id
|
|
|
|
if self.skipped_autoupdate_from_change_id:
|
|
|
|
from_change_id = self.skipped_autoupdate_from_change_id
|
|
|
|
self.skipped_autoupdate_from_change_id = None
|
|
|
|
|
|
|
|
await self.send_json(
|
|
|
|
type="autoupdate",
|
|
|
|
content=AutoupdateFormat(
|
|
|
|
changed=changed_elements,
|
|
|
|
deleted=deleted_elements,
|
|
|
|
from_change_id=from_change_id,
|
|
|
|
to_change_id=max_change_id,
|
|
|
|
all_data=all_data,
|
|
|
|
),
|
|
|
|
in_response=in_response,
|
|
|
|
)
|
2018-12-23 11:05:38 +01:00
|
|
|
|
2019-07-29 15:19:59 +02:00
|
|
|
async def send_data(self, event: Dict[str, Any]) -> None:
|
|
|
|
"""
|
|
|
|
Send changed or deleted elements to the user.
|
|
|
|
"""
|
|
|
|
change_id = event["change_id"]
|
|
|
|
await self.send_autoupdate(change_id, max_change_id=change_id)
|
|
|
|
|
2018-12-23 11:05:38 +01:00
|
|
|
async def projector_changed(self, event: Dict[str, Any]) -> None:
|
|
|
|
"""
|
|
|
|
The projector has changed.
|
|
|
|
"""
|
|
|
|
all_projector_data = event["data"]
|
2019-10-01 15:36:59 +02:00
|
|
|
change_id = event["change_id"]
|
|
|
|
|
2018-12-23 11:05:38 +01:00
|
|
|
projector_data: Dict[int, Dict[str, Any]] = {}
|
|
|
|
for projector_id in self.listen_projector_ids:
|
2019-02-01 13:56:08 +01:00
|
|
|
data = all_projector_data.get(projector_id, [])
|
2018-12-23 11:05:38 +01:00
|
|
|
new_hash = hash(str(data))
|
2019-01-18 19:11:22 +01:00
|
|
|
if new_hash != self.projector_hash.get(projector_id):
|
2018-12-23 11:05:38 +01:00
|
|
|
projector_data[projector_id] = data
|
|
|
|
self.projector_hash[projector_id] = new_hash
|
|
|
|
|
|
|
|
if projector_data:
|
2019-10-01 15:36:59 +02:00
|
|
|
await self.send_projector_data(projector_data, change_id=change_id)
|
|
|
|
|
|
|
|
async def send_projector_data(
|
|
|
|
self,
|
|
|
|
data: Dict[int, Dict[str, Any]],
|
|
|
|
change_id: Optional[int] = None,
|
|
|
|
in_response: Optional[str] = None,
|
|
|
|
) -> None:
|
|
|
|
"""
|
|
|
|
Sends projector data to the consumer.
|
|
|
|
"""
|
|
|
|
if change_id is None:
|
|
|
|
change_id = await element_cache.get_current_change_id()
|
|
|
|
|
|
|
|
content = {"change_id": change_id, "data": data}
|
|
|
|
await self.send_json(type="projector", content=content, in_response=in_response)
|