generate fewer queries in the autoupdate system
Parent: 6605934a33
Commit: 046a152ec5
@@ -12,20 +12,24 @@ from openslides.core.config import config
 from openslides.core.models import Countdown
 from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.exceptions import OpenSlidesError
-from openslides.utils.models import RESTModelMixin
+from openslides.utils.manager import BaseManager
+from openslides.utils.models import (
+    CASCADE_AND_AUTOUPDATE,
+    SET_NULL_AND_AUTOUPDATE,
+    RESTModelMixin,
+)
 from openslides.utils.utils import to_roman

-from ..utils.models import CASCADE_AND_AUTOUPDATE, SET_NULL_AND_AUTOUPDATE
 from .access_permissions import ItemAccessPermissions, ListOfSpeakersAccessPermissions


-class ItemManager(models.Manager):
+class ItemManager(BaseManager):
     """
     Customized model manager with special methods for agenda tree and
     numbering.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all items. In the background all
         related items (topics, motions, assignments) are prefetched from the database.
@@ -34,7 +38,11 @@ class ItemManager(models.Manager):
         # because this is some kind of cyclic lookup. The _prefetched_objects_cache of every
         # content object will hold wrong values for the agenda item.
         # See issue #4738
-        return self.get_queryset().prefetch_related("content_object")
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related("content_object", "parent")
+        )

     def get_only_non_public_items(self):
         """
@@ -331,17 +339,18 @@ class Item(RESTModelMixin, models.Model):
         return self.parent.level + 1


-class ListOfSpeakersManager(models.Manager):
-    """
-    """
-
-    def get_full_queryset(self):
+class ListOfSpeakersManager(BaseManager):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all items. In the background all
         speakers and related items (topics, motions, assignments) are
         prefetched from the database.
         """
-        return self.get_queryset().prefetch_related("speakers", "content_object")
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related("speakers", "content_object")
+        )


 class ListOfSpeakers(RESTModelMixin, models.Model):
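Every manager touched by this commit follows the same shape: inherit from the new `BaseManager` and implement `get_prefetched_queryset(*args, **kwargs)` by chaining `super().get_prefetched_queryset(...)` before adding the model-specific prefetches, so that the id filtering done in `BaseManager.get_queryset()` applies everywhere. Below is a minimal, framework-free sketch of that delegation pattern (plain Python only; `FakeQuerySet` and the class names are invented for illustration and are not the project's API):

```python
# Illustrative only: a list stands in for a Django QuerySet.
class FakeQuerySet(list):
    def filter_ids(self, ids):
        return FakeQuerySet(row for row in self if row["id"] in ids)

    def prefetch(self, *relations):
        qs = FakeQuerySet(self)
        qs.prefetched = relations  # record what would have been prefetched
        return qs


class BaseManagerSketch:
    """Mirrors the idea of BaseManager: optional id filtering lives in one place."""

    def __init__(self, rows):
        self._rows = FakeQuerySet(rows)

    def get_queryset(self, ids=None):
        return self._rows.filter_ids(ids) if ids else self._rows

    def get_prefetched_queryset(self, *args, **kwargs):
        return self.get_queryset(*args, **kwargs)


class ItemManagerSketch(BaseManagerSketch):
    def get_prefetched_queryset(self, *args, **kwargs):
        # Subclasses only add their prefetches; the id filtering is inherited.
        return (
            super()
            .get_prefetched_queryset(*args, **kwargs)
            .prefetch("content_object", "parent")
        )


manager = ItemManagerSketch([{"id": 1}, {"id": 2}, {"id": 3}])
print(manager.get_prefetched_queryset(ids=[1, 3]))          # [{'id': 1}, {'id': 3}]
print(manager.get_prefetched_queryset(ids=[2]).prefetched)  # ('content_object', 'parent')
```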
@@ -12,6 +12,7 @@ from openslides.mediafiles.models import Mediafile
 from openslides.poll.models import BaseOption, BasePoll, BaseVote
 from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.exceptions import OpenSlidesError
+from openslides.utils.manager import BaseManager
 from openslides.utils.models import RESTModelMixin

 from ..utils.models import CASCADE_AND_AUTOUPDATE, SET_NULL_AND_AUTOUPDATE
@@ -63,25 +64,29 @@ class AssignmentRelatedUser(RESTModelMixin, models.Model):
         return self.assignment


-class AssignmentManager(models.Manager):
+class AssignmentManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all assignments. In the background
         all related users (candidates), the related agenda item and all
         polls are prefetched from the database.
         """
-        return self.get_queryset().prefetch_related(
-            "related_users",
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related(
+                "assignment_related_users",
                 "agenda_items",
                 "lists_of_speakers",
-            "polls",
                 "tags",
                 "attachments",
             )
+        )


 class Assignment(RESTModelMixin, AgendaItemWithListOfSpeakersMixin, models.Model):
@@ -233,17 +238,21 @@ class Assignment(RESTModelMixin, AgendaItemWithListOfSpeakersMixin, models.Model
         return {"title": self.title}


-class AssignmentVoteManager(models.Manager):
+class AssignmentVoteManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all assignment votes. In the background we
         join and prefetch all related models.
         """
-        return self.get_queryset().select_related("user", "option", "option__poll")
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .select_related("user", "option", "option__poll")
+        )


 class AssignmentVote(RESTModelMixin, BaseVote):
@@ -276,18 +285,19 @@ class AssignmentOption(RESTModelMixin, BaseOption):
         return self.poll


-class AssignmentPollManager(models.Manager):
+class AssignmentPollManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all assignment polls. In the background we
         join and prefetch all related models.
         """
         return (
-            self.get_queryset()
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
             .select_related("assignment")
             .prefetch_related(
                 "options", "options__user", "options__votes", "groups", "voted"
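The point of these prefetch chains is to avoid the N+1 query pattern: without them, serializing each assignment would roughly cost one extra query per related object. The toy example below is not OpenSlides code (plain Python, invented data, no ORM); it only illustrates why batching related lookups keeps the query count constant:

```python
# Toy illustration of the N+1 problem this commit is attacking.
QUERY_LOG = []

FAKE_DB = {
    "assignment": {1: {"id": 1, "tag_ids": [10, 11]}, 2: {"id": 2, "tag_ids": [11]}},
    "tag": {10: {"id": 10, "name": "urgent"}, 11: {"id": 11, "name": "public"}},
}


def query(collection, ids):
    QUERY_LOG.append((collection, tuple(ids)))
    return [FAKE_DB[collection][i] for i in ids]


def naive(assignment_ids):
    assignments = query("assignment", assignment_ids)
    for a in assignments:
        a["tags"] = query("tag", a["tag_ids"])  # one extra query per assignment
    return assignments


def prefetched(assignment_ids):
    assignments = query("assignment", assignment_ids)
    tag_ids = {t for a in assignments for t in a["tag_ids"]}
    tags = {t["id"]: t for t in query("tag", tag_ids)}  # one query for all tags
    for a in assignments:
        a["tags"] = [tags[t] for t in a["tag_ids"]]
    return assignments


naive([1, 2])
print(len(QUERY_LOG))   # 3 queries: 1 + one per assignment
QUERY_LOG.clear()
prefetched([1, 2])
print(len(QUERY_LOG))   # 2 queries, independent of the number of assignments
```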
@@ -1,13 +1,16 @@
+from typing import Iterable
+
 from asgiref.sync import async_to_sync
 from django.conf import settings
 from django.db import models, transaction
 from django.utils.timezone import now
 from jsonfield import JSONField

-from ..utils.autoupdate import Element
-from ..utils.cache import element_cache, get_element_id
-from ..utils.locking import locking
-from ..utils.models import SET_NULL_AND_AUTOUPDATE, RESTModelMixin
+from openslides.utils.autoupdate import AutoupdateElement
+from openslides.utils.cache import element_cache, get_element_id
+from openslides.utils.manager import BaseManager
+from openslides.utils.models import SET_NULL_AND_AUTOUPDATE, RESTModelMixin

 from .access_permissions import (
     ConfigAccessPermissions,
     CountdownAccessPermissions,
@@ -18,17 +21,21 @@ from .access_permissions import (
 )


-class ProjectorManager(models.Manager):
+class ProjectorManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all projectors. In the background
         projector defaults are prefetched from the database.
         """
-        return self.get_queryset().prefetch_related("projectiondefaults")
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related("projectiondefaults")
+        )


 class Projector(RESTModelMixin, models.Model):
@@ -249,12 +256,12 @@ class HistoryData(models.Model):
         default_permissions = ()


-class HistoryManager(models.Manager):
+class HistoryManager(BaseManager):
     """
     Customized model manager for the history model.
     """

-    def add_elements(self, elements):
+    def add_elements(self, elements: Iterable[AutoupdateElement]):
         """
         Method to add elements to the history. This does not trigger autoupdate.
         """
@@ -266,7 +273,7 @@ class HistoryManager(models.Manager):
                     # Do not update history if history is disabled.
                     continue
                 # HistoryData is not a root rest element so there is no autoupdate and not history saving here.
-                data = HistoryData.objects.create(full_data=element["full_data"])
+                data = HistoryData.objects.create(full_data=element.get("full_data"))
                 instance = self.model(
                     element_id=get_element_id(
                         element["collection_string"], element["id"]
@@ -296,7 +303,7 @@ class HistoryManager(models.Manager):
             for collection_string, data in all_full_data.items():
                 for full_data in data:
                     elements.append(
-                        Element(
+                        AutoupdateElement(
                             id=full_data["id"],
                             collection_string=collection_string,
                             full_data=full_data,
@@ -9,7 +9,7 @@ from django.db.models import Q
 from django.dispatch import Signal

 from ..utils import logging
-from ..utils.autoupdate import Element, inform_changed_elements
+from ..utils.autoupdate import AutoupdateElement, inform_elements


 # This signal is send when the migrate command is done. That means it is sent
@@ -100,18 +100,16 @@ def autoupdate_for_many_to_many_relations(sender, instance, **kwargs):
     )
     for field in m2m_fields:
         queryset = getattr(instance, field.get_accessor_name()).all()
+        elements = []
         for related_instance in queryset:
             if hasattr(related_instance, "get_root_rest_element"):
                 # The related instance is or has a root rest element.
                 # So lets send it via autoupdate.
                 root_rest_element = related_instance.get_root_rest_element()
-                inform_changed_elements(
-                    [
-                        Element(
+                elements.append(
+                    AutoupdateElement(
                         collection_string=root_rest_element.get_collection_string(),
                         id=root_rest_element.pk,
-                            full_data=None,
-                            reload=True,
                     )
-                    ]
                 )
+        inform_elements(elements)
@@ -12,12 +12,12 @@ def watch_and_update_configs():
     are updated.
     """
    # 1) map logo and font config keys to mediafile ids
-    mediafiles = Mediafile.objects.get_full_queryset().all()
+    mediafiles = Mediafile.objects.get_prefetched_queryset().all()
     logos = build_mapping("logos_available", mediafiles)
     fonts = build_mapping("fonts_available", mediafiles)
     yield
     # 2) update changed paths/urls
-    mediafiles = Mediafile.objects.get_full_queryset().all()
+    mediafiles = Mediafile.objects.get_prefetched_queryset().all()
     update_mapping(logos, mediafiles)
     update_mapping(fonts, mediafiles)

@@ -8,6 +8,8 @@ from jsonfield import JSONField

 from openslides.utils import logging

+from openslides.utils.manager import BaseManager
+
 from ..agenda.mixins import ListOfSpeakersMixin
 from ..core.config import config
 from ..utils.models import RESTModelMixin
@@ -23,18 +25,20 @@ if "mediafiles" in connections:
     logger.info("Using a standalone mediafile database")


-class MediafileManager(models.Manager):
+class MediafileManager(BaseManager):
     """
     Customized model manager to support our get_full_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all mediafiles. In the background
         all related list of speakers are prefetched from the database.
         """
-        return self.get_queryset().prefetch_related(
-            "lists_of_speakers", "parent", "access_groups"
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related("lists_of_speakers", "parent", "access_groups")
         )

     def delete(self, *args, **kwargs):
@@ -11,6 +11,7 @@ from openslides.mediafiles.models import Mediafile
 from openslides.poll.models import BaseOption, BasePoll, BaseVote
 from openslides.utils.autoupdate import inform_changed_data
 from openslides.utils.exceptions import OpenSlidesError
+from openslides.utils.manager import BaseManager
 from openslides.utils.models import RESTModelMixin
 from openslides.utils.rest_api import ValidationError

@@ -56,18 +57,19 @@ class StatuteParagraph(RESTModelMixin, models.Model):
         return self.title


-class MotionManager(models.Manager):
+class MotionManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all motions. In the background we
         join and prefetch all related models.
         """
         return (
-            self.get_queryset()
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
             .select_related("state")
             .prefetch_related(
                 "state__workflow",
@@ -76,12 +78,6 @@ class MotionManager(models.Manager):
                 "comments__section__read_groups",
                 "agenda_items",
                 "lists_of_speakers",
-                "polls",
-                "polls__groups",
-                "polls__voted",
-                "polls__options",
-                "polls__options__votes",
-                "polls__options__votes__user",
                 "attachments",
                 "tags",
                 "submitters",
@@ -669,19 +665,6 @@ class Submitter(RESTModelMixin, models.Model):
         return self.motion


-class MotionChangeRecommendationManager(models.Manager):
-    """
-    Customized model manager to support our get_full_queryset method.
-    """
-
-    def get_full_queryset(self):
-        """
-        Returns the normal queryset with all change recommendations. In the background we
-        join and prefetch all related models.
-        """
-        return self.get_queryset()
-
-
 class MotionChangeRecommendation(RESTModelMixin, models.Model):
     """
     A MotionChangeRecommendation object saves change recommendations for a specific Motion
@@ -689,8 +672,6 @@ class MotionChangeRecommendation(RESTModelMixin, models.Model):

     access_permissions = MotionChangeRecommendationAccessPermissions()

-    objects = MotionChangeRecommendationManager()
-
     motion = models.ForeignKey(
         Motion, on_delete=CASCADE_AND_AUTOUPDATE, related_name="change_recommendations"
     )
@@ -826,17 +807,21 @@ class Category(RESTModelMixin, models.Model):
         return self.parent.level + 1


-class MotionBlockManager(models.Manager):
+class MotionBlockManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all motion blocks. In the
         background the related agenda item is prefetched from the database.
         """
-        return self.get_queryset().prefetch_related("agenda_items", "lists_of_speakers")
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related("agenda_items", "lists_of_speakers")
+        )


 class MotionBlock(RESTModelMixin, AgendaItemWithListOfSpeakersMixin, models.Model):
@@ -867,17 +852,21 @@ class MotionBlock(RESTModelMixin, AgendaItemWithListOfSpeakersMixin, models.Mode
         return {"title": self.title}


-class MotionVoteManager(models.Manager):
+class MotionVoteManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all motion votes. In the background we
         join and prefetch all related models.
         """
-        return self.get_queryset().select_related("user", "option", "option__poll")
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .select_related("user", "option", "option__poll")
+        )


 class MotionVote(RESTModelMixin, BaseVote):
@@ -906,18 +895,19 @@ class MotionOption(RESTModelMixin, BaseOption):
         return self.poll


-class MotionPollManager(models.Manager):
+class MotionPollManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all motion polls. In the background we
         join and prefetch all related models.
         """
         return (
-            self.get_queryset()
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
             .select_related("motion")
             .prefetch_related("options", "options__votes", "groups", "voted")
         )
@@ -1084,21 +1074,18 @@ class State(RESTModelMixin, models.Model):
         return state_id in next_state_ids or state_id in previous_state_ids


-class WorkflowManager(models.Manager):
+class WorkflowManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all workflows. In the background
-        the first state is joined and all states and next states are
-        prefetched from the database.
+        all states are prefetched from the database.
         """
         return (
-            self.get_queryset()
-            .select_related("first_state")
-            .prefetch_related("states", "states__next_states")
+            super().get_prefetched_queryset(*args, **kwargs).prefetch_related("states")
         )

@@ -1,24 +1,28 @@
 from django.db import models

+from openslides.utils.manager import BaseManager
+
 from ..agenda.mixins import AgendaItemWithListOfSpeakersMixin
 from ..mediafiles.models import Mediafile
 from ..utils.models import RESTModelMixin
 from .access_permissions import TopicAccessPermissions


-class TopicManager(models.Manager):
+class TopicManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all topics. In the background all
         attachments and the related agenda item are prefetched from the
         database.
         """
-        return self.get_queryset().prefetch_related(
-            "attachments", "lists_of_speakers", "agenda_items"
+        return (
+            super()
+            .get_prefetched_queryset(*args, **kwargs)
+            .prefetch_related("attachments", "lists_of_speakers", "agenda_items")
         )

@@ -17,6 +17,8 @@ from django.db.models import Prefetch
 from django.utils import timezone
 from jsonfield import JSONField

+from openslides.utils.manager import BaseManager
+
 from ..core.config import config
 from ..utils.auth import GROUP_ADMIN_PK
 from ..utils.models import CASCADE_AND_AUTOUPDATE, RESTModelMixin
@@ -30,16 +32,19 @@ from .access_permissions import (
 class UserManager(BaseUserManager):
     """
     Customized manager that creates new users only with a password and a
-    username. It also supports our get_full_queryset method.
+    username. It also supports our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, ids=None):
         """
         Returns the normal queryset with all users. In the background all
         groups are prefetched from the database together with all permissions
         and content types.
         """
-        return self.get_queryset().prefetch_related(
+        queryset = self.get_queryset()
+        if ids:
+            queryset = queryset.filter(pk__in=ids)
+        return queryset.prefetch_related(
             Prefetch(
                 "groups",
                 queryset=Group.objects.select_related("group_ptr").prefetch_related(
@@ -293,24 +298,23 @@ class User(RESTModelMixin, PermissionsMixin, AbstractBaseUser):

 class GroupManager(_GroupManager):
     """
-    Customized manager that supports our get_full_queryset method.
+    Customized manager that supports our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, ids=None):
         """
         Returns the normal queryset with all groups. In the background all
         permissions with the content types are prefetched from the database.
         """
-        return (
-            self.get_queryset()
-            .select_related("group_ptr")
-            .prefetch_related(
+        queryset = self.get_queryset()
+        if ids:
+            queryset = queryset.filter(pk__in=ids)
+        return queryset.select_related("group_ptr").prefetch_related(
             Prefetch(
                 "permissions",
                 queryset=Permission.objects.select_related("content_type"),
             )
         )
-        )


 class Group(RESTModelMixin, DjangoGroup):
@@ -325,17 +329,17 @@ class Group(RESTModelMixin, DjangoGroup):
         default_permissions = ()


-class PersonalNoteManager(models.Manager):
+class PersonalNoteManager(BaseManager):
     """
-    Customized model manager to support our get_full_queryset method.
+    Customized model manager to support our get_prefetched_queryset method.
     """

-    def get_full_queryset(self):
+    def get_prefetched_queryset(self, *args, **kwargs):
         """
         Returns the normal queryset with all personal notes. In the background all
         users are prefetched from the database.
         """
-        return self.get_queryset().select_related("user")
+        return super().get_prefetched_queryset(*args, **kwargs).select_related("user")


 class PersonalNote(RESTModelMixin, models.Model):
@@ -32,7 +32,7 @@ from ..utils.auth import (
     anonymous_is_enabled,
     has_perm,
 )
-from ..utils.autoupdate import Element, inform_changed_data, inform_changed_elements
+from ..utils.autoupdate import AutoupdateElement, inform_changed_data, inform_elements
 from ..utils.cache import element_cache
 from ..utils.rest_api import (
     ModelViewSet,
@@ -613,8 +613,7 @@ class GroupViewSet(ModelViewSet):
         """
         Updates every users, if some permission changes. For this, every affected collection
         is fetched via the permission_change signal and every object of the collection passed
-        into the cache/autoupdate system. Also the personal (restrcited) cache of every affected
-        user (all users of the group) will be deleted, so it is rebuild after this permission change.
+        into the cache/autoupdate system.
         """
         if isinstance(changed_permissions, Permission):
             changed_permissions = [changed_permissions]
@@ -622,7 +621,7 @@ class GroupViewSet(ModelViewSet):
         if not changed_permissions:
             return  # either None or empty list.

-        elements: List[Element] = []
+        elements: List[AutoupdateElement] = []
         signal_results = permission_change.send(None, permissions=changed_permissions)
         all_full_data = async_to_sync(element_cache.get_all_data_list)()
         for _, signal_collections in signal_results:
@@ -631,14 +630,14 @@ class GroupViewSet(ModelViewSet):
                     cachable.get_collection_string(), {}
                 ):
                     elements.append(
-                        Element(
+                        AutoupdateElement(
                             id=full_data["id"],
                             collection_string=cachable.get_collection_string(),
                             full_data=full_data,
                             disable_history=True,
                         )
                     )
-        inform_changed_elements(elements)
+        inform_elements(elements)


 class PersonalNoteViewSet(ModelViewSet):
@@ -1,4 +1,5 @@
 import threading
+from collections import defaultdict
 from typing import Any, Dict, Iterable, List, Optional, Tuple, Union

 from asgiref.sync import async_to_sync
@@ -11,23 +12,28 @@ from .projector import get_projector_data
 from .utils import get_model_from_collection_string


-class ElementBase(TypedDict):
+class AutoupdateElementBase(TypedDict):
     id: int
     collection_string: str
-    full_data: Optional[Dict[str, Any]]


-class Element(ElementBase, total=False):
+class AutoupdateElement(AutoupdateElementBase, total=False):
     """
     Data container to handle one root rest element for the autoupdate, history
     and caching process.

-    The fields `id`, `collection_string` and `full_data` are required, the other
-    fields are optional.
+    The fields `id` and `collection_string` are required to identify the element. All
+    other fields are optional:

-    if full_data is None, it means, that the element was deleted. If reload is
-    True, full_data is ignored and reloaded from the database later in the
-    process.
+    full_data: If a value is given (dict or None), it won't be loaded from the DB.
+    If otherwise no value is given, the AutoupdateBundle will try to resolve the object
+    from the DB and serialize it into the full_data.
+
+    information and user_id: These fields are for the history indicating what and who
+    made changes.
+
+    disable_history: If this is True, the element (and the containing full_data) won't
+    be saved into the history. Information and user_id is then irrelevant.

     no_delete_on_restriction is a flag, which is saved into the models in the cache
     as the _no_delete_on_restriction key. If this is true, there should neither be an
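The key behavioral change in `AutoupdateElement` is that `full_data` moved from a required field to an optional one, so the code can now distinguish three states: key absent (resolve from the DB later), key present with `None` (the element was deleted), and key present with a dict (already serialized). A small self-contained sketch of that distinction (it uses `typing.TypedDict` purely for illustration; the project itself imports `TypedDict` from `mypy_extensions`):

```python
from typing import Any, Dict, Optional, TypedDict


class ElementSketch(TypedDict, total=False):
    id: int
    collection_string: str
    full_data: Optional[Dict[str, Any]]


def describe(element: ElementSketch) -> str:
    if "full_data" not in element:
        return "no value given: resolve and serialize from the DB later"
    if element["full_data"] is None:
        return "explicit None: the element was deleted"
    return "already serialized: use as-is"


print(describe({"id": 1, "collection_string": "motions/motion"}))
print(describe({"id": 1, "collection_string": "motions/motion", "full_data": None}))
print(describe({"id": 1, "collection_string": "motions/motion", "full_data": {"id": 1}}))
```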
@@ -38,18 +44,104 @@ class Element(ElementBase, total=False):
     information: List[str]
     user_id: Optional[int]
     disable_history: bool
-    reload: bool
     no_delete_on_restriction: bool
+    full_data: Optional[Dict[str, Any]]


-AutoupdateFormat = TypedDict(
-    "AutoupdateFormat",
+class AutoupdateBundle:
+    """
+    Collects changed elements via inform*_data. After the collecting-step is finished,
+    the bundle releases all changes to the history and element cache via `.done()`.
+    """
+
+    def __init__(self) -> None:
+        self.autoupdate_elements: Dict[str, Dict[int, AutoupdateElement]] = defaultdict(
+            dict
+        )
+
+    def add(self, elements: Iterable[AutoupdateElement]) -> None:
+        """ Adds the elements to the bundle """
+        for element in elements:
+            self.autoupdate_elements[element["collection_string"]][
+                element["id"]
+            ] = element
+
+    def done(self) -> None:
+        """
+        Finishes the bundle by resolving all missing data and passing it to
+        the history and element cache.
+        """
+        if not self.autoupdate_elements:
+            return
+
+        for collection, elements in self.autoupdate_elements.items():
+            # Get all ids, that do not have a full_data key
+            # (element["full_data"]=None will not be resolved again!)
+            ids = [
+                element["id"]
+                for element in elements.values()
+                if "full_data" not in element
+            ]
+            if ids:
+                # Get all missing models. If e.g. an id could not be found it
+                # means, it was deleted. Since there is not full_data entry
+                # for the element, the data will be interpreted as None, which
+                # is correct for deleted elements.
+                model_class = get_model_from_collection_string(collection)
+                for full_data in model_class.get_elements(ids):
+                    elements[full_data["id"]]["full_data"] = full_data
+
+        # Save histroy here using sync code.
+        save_history(self.elements)
+
+        # Update cache and send autoupdate using async code.
+        async_to_sync(self.async_handle_collection_elements)()
+
+    @property
+    def elements(self) -> Iterable[AutoupdateElement]:
+        """ Iterator for all elements in this bundle """
+        for elements in self.autoupdate_elements.values():
+            yield from elements.values()
+
+    async def update_cache(self) -> int:
+        """
+        Async helper function to update the cache.
+
+        Returns the change_id
+        """
+        cache_elements: Dict[str, Optional[Dict[str, Any]]] = {}
+        for element in self.elements:
+            element_id = get_element_id(element["collection_string"], element["id"])
+            full_data = element.get("full_data")
+            if full_data:
+                full_data["_no_delete_on_restriction"] = element.get(
+                    "no_delete_on_restriction", False
+                )
+            cache_elements[element_id] = full_data
+        return await element_cache.change_elements(cache_elements)
+
+    async def async_handle_collection_elements(self) -> None:
+        """
+        Async helper function to update cache and send autoupdate.
+        """
+        # Update cache
+        change_id = await self.update_cache()
+
+        # Send autoupdate
+        channel_layer = get_channel_layer()
+        await channel_layer.group_send(
+            "autoupdate", {"type": "send_data", "change_id": change_id}
+        )
+
+        projector_data = await get_projector_data()
+        # Send projector
+        channel_layer = get_channel_layer()
+        await channel_layer.group_send(
+            "projector",
             {
-        "changed": Dict[str, List[Dict[str, Any]]],
-        "deleted": Dict[str, List[int]],
-        "from_change_id": int,
-        "to_change_id": int,
-        "all_data": bool,
+                "type": "projector_changed",
+                "data": projector_data,
+                "change_id": change_id,
             },
         )

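`AutoupdateBundle` is the piece that actually reduces the query count: elements collected during one request are deduplicated per `(collection_string, id)`, and only the ids whose `full_data` is still missing are resolved, with one `get_elements(ids)` call per collection, when `done()` runs. Below is a stripped-down, framework-free sketch of that collect/deduplicate/flush flow; the `resolve` function is an invented stand-in for the model lookup, and the real bundle additionally writes the history and notifies the websocket layer:

```python
from collections import defaultdict


def resolve(collection, ids):
    # Stand-in for model_class.get_elements(ids): one batched lookup per collection.
    print(f"resolving {collection}: {sorted(ids)}")
    return [{"id": i, "title": f"{collection} {i}"} for i in ids]


class BundleSketch:
    def __init__(self):
        self.elements = defaultdict(dict)

    def add(self, elements):
        for element in elements:
            # Later additions for the same element overwrite earlier ones.
            self.elements[element["collection_string"]][element["id"]] = element

    def done(self):
        for collection, by_id in self.elements.items():
            missing = [i for i, e in by_id.items() if "full_data" not in e]
            if missing:
                for full_data in resolve(collection, missing):
                    by_id[full_data["id"]]["full_data"] = full_data
        # ...here the real bundle saves the history, updates the element cache
        # and sends the autoupdate/projector messages exactly once.


bundle = BundleSketch()
bundle.add([{"collection_string": "motions/motion", "id": 1}])
bundle.add([{"collection_string": "motions/motion", "id": 1},   # duplicate, merged away
            {"collection_string": "motions/motion", "id": 2, "full_data": None}])
bundle.done()   # resolves only id 1; id 2 keeps its explicit None (deleted)
```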
@@ -70,36 +162,21 @@ def inform_changed_data(
     """
     if information is None:
         information = []
-    root_instances = set()
     if not isinstance(instances, Iterable):
         instances = (instances,)

-    for instance in instances:
-        try:
-            root_instances.add(instance.get_root_rest_element())
-        except AttributeError:
-            # Instance has no method get_root_rest_element. Just ignore it.
-            pass
-
-    elements: Dict[str, Element] = {}
-    for root_instance in root_instances:
-        key = root_instance.get_collection_string() + str(root_instance.get_rest_pk())
-        elements[key] = Element(
+    root_instances = set(instance.get_root_rest_element() for instance in instances)
+    elements = [
+        AutoupdateElement(
             id=root_instance.get_rest_pk(),
             collection_string=root_instance.get_collection_string(),
-            full_data=root_instance.get_full_data(),
             information=information,
             user_id=user_id,
             no_delete_on_restriction=no_delete_on_restriction,
         )
-
-    bundle = autoupdate_bundle.get(threading.get_ident())
-    if bundle is not None:
-        # Put all elements into the autoupdate_bundle.
-        bundle.update(elements)
-    else:
-        # Send autoupdate directly
-        handle_changed_elements(elements.values())
+        for root_instance in root_instances
+    ]
+    inform_elements(elements)


 def inform_deleted_data(
@@ -115,27 +192,21 @@ def inform_deleted_data(
     """
     if information is None:
         information = []
-    elements: Dict[str, Element] = {}
-    for deleted_element in deleted_elements:
-        key = deleted_element[0] + str(deleted_element[1])
-        elements[key] = Element(
+    elements = [
+        AutoupdateElement(
             id=deleted_element[1],
             collection_string=deleted_element[0],
             full_data=None,
             information=information,
             user_id=user_id,
         )
-
-    bundle = autoupdate_bundle.get(threading.get_ident())
-    if bundle is not None:
-        # Put all elements into the autoupdate_bundle.
-        bundle.update(elements)
-    else:
-        # Send autoupdate directly
-        handle_changed_elements(elements.values())
+        for deleted_element in deleted_elements
+    ]
+    inform_elements(elements)


-def inform_changed_elements(changed_elements: Iterable[Element]) -> None:
+def inform_elements(elements: Iterable[AutoupdateElement]) -> None:
     """
     Informs the autoupdate system about some elements. This is used just to send
     some data to all users.
@@ -143,24 +214,21 @@ def inform_changed_elements(changed_elements: Iterable[Element]) -> None:
     If you want to save history information, user id or disable history you
     have to put information or flag inside the elements.
     """
-    elements = {}
-    for changed_element in changed_elements:
-        key = changed_element["collection_string"] + str(changed_element["id"])
-        elements[key] = changed_element
-
     bundle = autoupdate_bundle.get(threading.get_ident())
     if bundle is not None:
-        # Put all collection elements into the autoupdate_bundle.
-        bundle.update(elements)
+        # Put all elements into the autoupdate_bundle.
+        bundle.add(elements)
     else:
         # Send autoupdate directly
-        handle_changed_elements(elements.values())
+        bundle = AutoupdateBundle()
+        bundle.add(elements)
+        bundle.done()


 """
 Global container for autoupdate bundles
 """
-autoupdate_bundle: Dict[int, Dict[str, Element]] = {}
+autoupdate_bundle: Dict[int, AutoupdateBundle] = {}


 class AutoupdateBundleMiddleware:
@@ -174,86 +242,16 @@ class AutoupdateBundleMiddleware:

     def __call__(self, request: Any) -> Any:
         thread_id = threading.get_ident()
-        autoupdate_bundle[thread_id] = {}
+        autoupdate_bundle[thread_id] = AutoupdateBundle()

         response = self.get_response(request)

-        bundle: Dict[str, Element] = autoupdate_bundle.pop(thread_id)
-        handle_changed_elements(bundle.values())
+        bundle: AutoupdateBundle = autoupdate_bundle.pop(thread_id)
+        bundle.done()
         return response


-def handle_changed_elements(elements: Iterable[Element]) -> None:
-    """
-    Helper function, that sends elements through a channel to the
-    autoupdate system and updates the cache.
-
-    Does nothing if elements is empty.
-    """
-
-    async def update_cache(elements: Iterable[Element]) -> int:
-        """
-        Async helper function to update the cache.
-
-        Returns the change_id
-        """
-        cache_elements: Dict[str, Optional[Dict[str, Any]]] = {}
-        for element in elements:
-            element_id = get_element_id(element["collection_string"], element["id"])
-            full_data = element["full_data"]
-            if full_data:
-                full_data["_no_delete_on_restriction"] = element.get(
-                    "no_delete_on_restriction", False
-                )
-            cache_elements[element_id] = full_data
-        return await element_cache.change_elements(cache_elements)
-
-    async def async_handle_collection_elements(elements: Iterable[Element]) -> None:
-        """
-        Async helper function to update cache and send autoupdate.
-        """
-        # Update cache
-        change_id = await update_cache(elements)
-
-        # Send autoupdate
-        channel_layer = get_channel_layer()
-        await channel_layer.group_send(
-            "autoupdate", {"type": "send_data", "change_id": change_id}
-        )
-
-        projector_data = await get_projector_data()
-        # Send projector
-        channel_layer = get_channel_layer()
-        await channel_layer.group_send(
-            "projector",
-            {
-                "type": "projector_changed",
-                "data": projector_data,
-                "change_id": change_id,
-            },
-        )
-
-    if elements:
-        for element in elements:
-            if element.get("reload"):
-                model = get_model_from_collection_string(element["collection_string"])
-                try:
-                    instance = model.objects.get(pk=element["id"])
-                except model.DoesNotExist:
-                    # The instance was deleted so we set full_data explicitly to None.
-                    element["full_data"] = None
-                else:
-                    element["full_data"] = instance.get_full_data()
-
-        # Save histroy here using sync code.
-        save_history(elements)
-
-        # Update cache and send autoupdate using async code.
-        async_to_sync(async_handle_collection_elements)(elements)
-
-
-def save_history(elements: Iterable[Element]) -> Iterable:
-    # TODO: Try to write Iterable[History] here
+def save_history(elements: Iterable[AutoupdateElement]) -> Iterable:
     """
     Thin wrapper around the call of history saving manager method.

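With `handle_changed_elements` gone, all flushing now goes through a bundle registered per thread by `AutoupdateBundleMiddleware`: `inform_elements` adds to the current thread's bundle if one exists, and the middleware calls `done()` once per request; outside a request a throwaway bundle is created and flushed immediately. A compact, runnable sketch of that thread-keyed lifecycle (no Django involved; just the dictionary-keyed-by-thread-id idea, with lists standing in for bundles):

```python
import threading

bundles = {}   # thread id -> list of collected elements (stand-in for AutoupdateBundle)
FLUSHES = []   # records how often a flush ("done()") happens


def inform(elements):
    bundle = bundles.get(threading.get_ident())
    if bundle is not None:
        bundle.extend(elements)         # inside a request: just collect
    else:
        FLUSHES.append(list(elements))  # outside a request: flush immediately


def handle_request(view):
    # What the middleware does: open a bundle, run the view, flush exactly once.
    thread_id = threading.get_ident()
    bundles[thread_id] = []
    try:
        view()
    finally:
        FLUSHES.append(bundles.pop(thread_id))


def view():
    inform([{"collection_string": "motions/motion", "id": 1}])
    inform([{"collection_string": "agenda/item", "id": 7}])


handle_request(view)
print(len(FLUSHES), len(FLUSHES[0]))  # 1 flush containing 2 elements
```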
@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, cast
 from urllib.parse import parse_qs

 from channels.generic.websocket import AsyncWebsocketConsumer
+from mypy_extensions import TypedDict

 from ..utils.websocket import WEBSOCKET_CHANGE_ID_TOO_HIGH
 from . import logging
@@ -16,6 +17,17 @@ from .websocket import ProtocollAsyncJsonWebsocketConsumer

 logger = logging.getLogger("openslides.websocket")

+AutoupdateFormat = TypedDict(
+    "AutoupdateFormat",
+    {
+        "changed": Dict[str, List[Dict[str, Any]]],
+        "deleted": Dict[str, List[int]],
+        "from_change_id": int,
+        "to_change_id": int,
+        "all_data": bool,
+    },
+)
+

 class SiteConsumer(ProtocollAsyncJsonWebsocketConsumer):
     """
openslides/utils/manager.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+from typing import Any, List, Optional
+
+from django.db.models import Manager, QuerySet
+
+
+class BaseManager(Manager):
+    """
+    A base manager for all REST-models.
+    Provides a base implementation for `get_prefetched_queryset` and
+    allows filtering of the queryset by ids.
+    """
+
+    def get_queryset(self, ids: Optional[List[int]] = None) -> QuerySet:
+        queryset = super().get_queryset()
+        if ids:
+            queryset = queryset.filter(pk__in=ids)
+        return queryset
+
+    def get_prefetched_queryset(self, *args: Any, **kwargs: Any) -> QuerySet:
+        return self.get_queryset(*args, **kwargs)
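For context, this is roughly how the new base class is consumed by the app-level managers shown above. The snippet is a hedged illustration only: it assumes the OpenSlides/Django environment (an installed app with the `Topic` model) and is not runnable standalone:

```python
# Assumed to run inside OpenSlides, e.g. in a management command or a shell.
from openslides.topics.models import Topic

# Full collection, with related objects prefetched in a handful of queries:
topics = Topic.objects.get_prefetched_queryset()

# Only specific elements, e.g. when the autoupdate bundle resolves missing ids:
some_topics = Topic.objects.get_prefetched_queryset(ids=[1, 5, 7])

for topic in some_topics:
    # Accessing prefetched relations does not hit the database again.
    print(topic.title, len(topic.attachments.all()))
```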
@@ -6,7 +6,7 @@ from django.db import models

 from . import logging
 from .access_permissions import BaseAccessPermissions
-from .autoupdate import Element, inform_changed_data, inform_changed_elements
+from .autoupdate import AutoupdateElement, inform_changed_data, inform_elements
 from .rest_api import model_serializer_classes
 from .utils import convert_camel_case_to_pseudo_snake_case, get_element_id

@@ -142,18 +142,20 @@ class RESTModelMixin:
         return return_value

     @classmethod
-    def get_elements(cls) -> List[Dict[str, Any]]:
+    def get_elements(cls, ids: Optional[List[int]] = None) -> List[Dict[str, Any]]:
         """
         Returns all elements as full_data.
         """
         logger.info(f"Loading {cls.get_collection_string()}")
         # Get the query to receive all data from the database.
         try:
-            query = cls.objects.get_full_queryset()  # type: ignore
+            query = cls.objects.get_prefetched_queryset(ids=ids)  # type: ignore
         except AttributeError:
-            # If the model des not have to method get_full_queryset(), then use
+            # If the model des not have to method get_prefetched_queryset(), then use
             # the default queryset from django.
             query = cls.objects  # type: ignore
+            if ids:
+                query = query.filter(pk__in=ids)

         # Build a dict from the instance id to the full_data
         instances = query.all()
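`RESTModelMixin.get_elements()` gaining an optional `ids` argument is what lets the bundle ask for just the missing rows instead of reloading whole collections. Roughly, again assuming the OpenSlides environment, so treat this as a sketch rather than an exact call site:

```python
from openslides.motions.models import Motion

# Old behaviour: always serialize the whole collection.
all_motions = Motion.get_elements()

# New behaviour: serialize only the elements the autoupdate bundle is missing.
changed = Motion.get_elements(ids=[4, 9])
assert all(full_data["id"] in (4, 9) for full_data in changed)
```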
@@ -223,12 +225,10 @@ def CASCADE_AND_AUTOUPDATE(
     for sub_obj in sub_objs:
         root_rest_element = sub_obj.get_root_rest_element()
         elements.append(
-            Element(
+            AutoupdateElement(
                 collection_string=root_rest_element.get_collection_string(),
-                id=root_rest_element.pk,
-                full_data=None,
-                reload=True,
+                id=root_rest_element.get_rest_pk(),
             )
         )
-    inform_changed_elements(elements)
+    inform_elements(elements)
     models.CASCADE(collector, field, sub_objs, using)
@ -1,32 +1,46 @@
+from typing import Callable
+
from django.db import DEFAULT_DB_ALIAS, connections
from django.test.utils import CaptureQueriesContext


-def count_queries(func, verbose=False, *args, **kwargs) -> int:
-    context = CaptureQueriesContext(connections[DEFAULT_DB_ALIAS])
-    with context:
-        func(*args, **kwargs)
-
-    if verbose:
-        queries = "\n".join(
-            f"{i}. {query['sql']}"
-            for i, query in enumerate(context.captured_queries, start=1)
-        )
-        print(f"{len(context)} queries executed\nCaptured queries were:\n{queries}")
-
-    return len(context)
-
-
-def assert_query_count(should_be, verbose=False):
-    """
-    Decorator to easily count queries on any test you want.
-    should_be defines how many queries are to be expected
-    """
-
-    def outer(func):
-        def inner(*args, **kwargs):
-            assert count_queries(func, verbose, *args, **kwargs) == should_be
-
-        return inner
-
-    return outer
+def count_queries(func, verbose=False) -> Callable[..., int]:
+    def wrapper(*args, **kwargs) -> int:
+        context = CaptureQueriesContext(connections[DEFAULT_DB_ALIAS])
+        with context:
+            func(*args, **kwargs)
+
+        if verbose:
+            print(get_verbose_queries(context))
+
+        return len(context)
+
+    return wrapper
+
+
+class AssertNumQueriesContext(CaptureQueriesContext):
+    def __init__(self, test_case, num, verbose):
+        self.test_case = test_case
+        self.num = num
+        self.verbose = verbose
+        super().__init__(connections[DEFAULT_DB_ALIAS])
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        super().__exit__(exc_type, exc_value, traceback)
+        if exc_type is not None:
+            return
+        executed = len(self)
+        verbose_queries = get_verbose_queries(self)
+        if self.verbose:
+            print(verbose_queries)
+            self.test_case.assertEqual(executed, self.num)
+        else:
+            self.test_case.assertEqual(executed, self.num, verbose_queries)
+
+
+def get_verbose_queries(context):
+    queries = "\n".join(
+        f"{i}. {query['sql']}"
+        for i, query in enumerate(context.captured_queries, start=1)
+    )
+    return f"{len(context)} queries executed\nCaptured queries were:\n{queries}"

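Usage note on the rewritten helper (illustrative, not part of the diff): count_queries no longer executes the function itself but returns a wrapper, which is why every assertion in the tests below changes from count_queries(f) == n to count_queries(f)() == n.

# Illustrative only; assumes a pytest/Django test database.
from openslides.topics.models import Topic
from tests.count_queries import count_queries

counter = count_queries(Topic.get_elements, verbose=True)
assert counter() == 4  # expected count taken from the topics test further below
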
@ -12,7 +12,7 @@ from openslides.assignments.models import Assignment
from openslides.core.config import config
from openslides.core.models import Countdown
from openslides.mediafiles.models import Mediafile
-from openslides.motions.models import Motion
+from openslides.motions.models import Motion, MotionBlock
from openslides.topics.models import Topic
from openslides.users.models import Group
from openslides.utils.autoupdate import inform_changed_data

@ -22,6 +22,56 @@ from tests.test_case import TestCase
from ...common_groups import GROUP_DEFAULT_PK


+@pytest.mark.django_db(transaction=False)
+def test_agenda_item_db_queries():
+    """
+    Tests that only the following db queries are done:
+    * 1 request to get the list of all agenda items,
+    * 1 request to get all assignments,
+    * 1 request to get all motions,
+    * 1 request to get all topics,
+    * 1 request to get all motion blocks and
+    * 1 request to get all parents
+    """
+    parent = Topic.objects.create(title="parent").agenda_item
+    for index in range(10):
+        item = Topic.objects.create(title=f"topic{index}").agenda_item
+        item.parent = parent
+        item.save()
+    Motion.objects.create(title="motion1")
+    Motion.objects.create(title="motion2")
+    Assignment.objects.create(title="assignment1", open_posts=5)
+    Assignment.objects.create(title="assignment2", open_posts=5)
+    MotionBlock.objects.create(title="block1")
+    MotionBlock.objects.create(title="block1")
+
+    assert count_queries(Item.get_elements)() == 6
+
+
+@pytest.mark.django_db(transaction=False)
+def test_list_of_speakers_db_queries():
+    """
+    Tests that only the following db queries are done:
+    * 1 request to get the list of all lists of speakers,
+    * 1 request to get all speakers and
+    * 4 requests to get the assignments, motions, topics and mediafiles
+    """
+    for index in range(10):
+        Topic.objects.create(title=f"topic{index}")
+    parent = Topic.objects.create(title="parent").agenda_item
+    child = Topic.objects.create(title="child").agenda_item
+    child.parent = parent
+    child.save()
+    Motion.objects.create(title="motion1")
+    Motion.objects.create(title="motion2")
+    Assignment.objects.create(title="assignment", open_posts=5)
+    Mediafile.objects.create(
+        title=f"mediafile", mediafile=SimpleUploadedFile(f"some_file", b"some content.")
+    )
+
+    assert count_queries(ListOfSpeakers.get_elements)() == 6
+
+
class ContentObjects(TestCase):
    """
    Tests content objects with Topic as a content object of items and

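A reading of the new expected count (my interpretation, not stated in the diff): one query loads all agenda items, the prefetch over the generic content_object relation costs one query per content type (topics, motions, assignments, motion blocks), and one more query fetches the parents, so ten items still settle at six queries.

# Sketch of the mechanism, assuming the prefetched manager from this commit.
items = Item.objects.get_prefetched_queryset()
for item in items:
    item.content_object  # served from the prefetch cache, no per-item query
    item.parent          # likewise prefetched
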
@ -233,52 +283,6 @@ class RetrieveListOfSpeakers(TestCase):
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)


-@pytest.mark.django_db(transaction=False)
-def test_agenda_item_db_queries():
-    """
-    Tests that only the following db queries are done:
-    * 1 requests to get the list of all agenda items,
-    * 3 requests to get the assignments, motions and topics and
-    * 1 request to get an agenda item (why?)
-    TODO: The last three request are a bug.
-    """
-    for index in range(10):
-        Topic.objects.create(title=f"topic{index}")
-    parent = Topic.objects.create(title="parent").agenda_item
-    child = Topic.objects.create(title="child").agenda_item
-    child.parent = parent
-    child.save()
-    Motion.objects.create(title="motion1")
-    Motion.objects.create(title="motion2")
-    Assignment.objects.create(title="assignment", open_posts=5)
-
-    assert count_queries(Item.get_elements) == 5
-
-
-@pytest.mark.django_db(transaction=False)
-def test_list_of_speakers_db_queries():
-    """
-    Tests that only the following db queries are done:
-    * 1 requests to get the list of all lists of speakers
-    * 1 request to get all speakers
-    * 4 requests to get the assignments, motions, topics and mediafiles and
-    """
-    for index in range(10):
-        Topic.objects.create(title=f"topic{index}")
-    parent = Topic.objects.create(title="parent").agenda_item
-    child = Topic.objects.create(title="child").agenda_item
-    child.parent = parent
-    child.save()
-    Motion.objects.create(title="motion1")
-    Motion.objects.create(title="motion2")
-    Assignment.objects.create(title="assignment", open_posts=5)
-    Mediafile.objects.create(
-        title=f"mediafile", mediafile=SimpleUploadedFile(f"some_file", b"some content.")
-    )
-
-    assert count_queries(ListOfSpeakers.get_elements) == 6
-
-
class ManageSpeaker(TestCase):
    """
    Tests managing speakers.

@ -18,7 +18,7 @@ from openslides.poll.models import BasePoll
from openslides.utils.auth import get_group_model
from openslides.utils.autoupdate import inform_changed_data
from tests.common_groups import GROUP_ADMIN_PK, GROUP_DELEGATE_PK
-from tests.count_queries import assert_query_count, count_queries
+from tests.count_queries import count_queries
from tests.test_case import TestCase

@ -35,7 +35,7 @@ def test_assignment_poll_db_queries():
    = 6 queries
    """
    create_assignment_polls()
-    assert count_queries(AssignmentPoll.get_elements) == 6
+    assert count_queries(AssignmentPoll.get_elements)() == 6


@pytest.mark.django_db(transaction=False)

@ -44,7 +44,7 @@ def test_assignment_vote_db_queries():
    Tests that only 1 query is done when fetching AssignmentVotes
    """
    create_assignment_polls()
-    assert count_queries(AssignmentVote.get_elements) == 1
+    assert count_queries(AssignmentVote.get_elements)() == 1


def create_assignment_polls():

@ -94,9 +94,8 @@ class CreateAssignmentPoll(TestCase):
        )
        self.assignment.add_candidate(self.admin)

-    # TODO lower query count
-    @assert_query_count(47, True)
    def test_simple(self):
+        with self.assertNumQueries(35):
            response = self.client.post(
                reverse("assignmentpoll-list"),
                {

@ -5,7 +5,7 @@ from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient

-from openslides.assignments.models import Assignment
+from openslides.assignments.models import Assignment, AssignmentPoll
from openslides.core.models import Tag
from openslides.mediafiles.models import Mediafile
from openslides.utils.autoupdate import inform_changed_data

@ -21,18 +21,20 @@ def test_assignment_db_queries():
    * 1 request to get all related users,
    * 1 request to get the agenda item,
    * 1 request to get the list of speakers,
-    * 1 request to get the polls,
    * 1 request to get the tags,
    * 1 request to get the attachments and
-
-    * 10 request to fetch each related user again.
-
-    TODO: The last requests are a bug.
    """
    for index in range(10):
-        Assignment.objects.create(title=f"assignment{index}", open_posts=1)
+        assignment = Assignment.objects.create(title=f"assignment{index}", open_posts=1)
+        for i in range(2):
+            AssignmentPoll.objects.create(
+                assignment=assignment,
+                title="test_title_nah5Ahh6IkeeM8rah3ai",
+                pollmethod=AssignmentPoll.POLLMETHOD_YN,
+                type=AssignmentPoll.TYPE_NAMED,
+            )

-    assert count_queries(Assignment.get_elements) == 17
+    assert count_queries(Assignment.get_elements)() == 6


class CreateAssignment(TestCase):

@ -28,7 +28,7 @@ def test_projector_db_queries():
    for index in range(10):
        Projector.objects.create(name=f"Projector{index}")

-    assert count_queries(Projector.get_elements) == 2
+    assert count_queries(Projector.get_elements)() == 2


@pytest.mark.django_db(transaction=False)

@ -40,7 +40,7 @@ def test_tag_db_queries():
    for index in range(10):
        Tag.objects.create(name=f"tag{index}")

-    assert count_queries(Tag.get_elements) == 1
+    assert count_queries(Tag.get_elements)() == 1


@pytest.mark.django_db(transaction=False)

@ -51,7 +51,7 @@ def test_config_db_queries():
    """
    config.save_default_values()

-    assert count_queries(Tag.get_elements) == 1
+    assert count_queries(Tag.get_elements)() == 1


class ProjectorViewSet(TestCase):

@ -27,7 +27,7 @@ def test_mediafiles_db_queries():
            mediafile=SimpleUploadedFile(f"some_file{index}", b"some content."),
        )

-    assert count_queries(Mediafile.get_elements) == 4
+    assert count_queries(Mediafile.get_elements)() == 4


class TestCreation(TestCase):

@ -1,5 +1,4 @@
import json
-from decimal import Decimal

import pytest
from django.contrib.auth import get_user_model

@ -12,11 +11,11 @@ from openslides.core.models import Tag
from openslides.motions.models import (
    Category,
    Motion,
+    MotionBlock,
    MotionChangeRecommendation,
    MotionComment,
    MotionCommentSection,
    MotionPoll,
-    MotionVote,
    Submitter,
    Workflow,
)

@ -24,7 +23,7 @@ from openslides.poll.models import BasePoll
from openslides.utils.auth import get_group_model
from openslides.utils.autoupdate import inform_changed_data
from tests.common_groups import GROUP_ADMIN_PK, GROUP_DEFAULT_PK, GROUP_DELEGATE_PK
-from tests.count_queries import assert_query_count, count_queries
+from tests.count_queries import count_queries
from tests.test_case import TestCase

@ -39,12 +38,6 @@ def test_motion_db_queries():
    * 1 request for all users required for the read_groups of the sections
    * 1 request to get the agenda item,
    * 1 request to get the list of speakers,
-    * 1 request to get the polls,
-    * 1 request to get all poll groups,
-    * 1 request to get all poll voted users,
-    * 1 request to get all options for all polls,
-    * 1 request to get all votes for all options,
-    * 1 request to get all users for all votes,
    * 1 request to get the attachments,
    * 1 request to get the tags,
    * 2 requests to get the submitters and supporters,

@ -71,6 +64,10 @@ def test_motion_db_queries():
    for index in range(10):
        motion = Motion.objects.create(title=f"motion{index}")

+        motion.supporters.add(user1, user2)
+        Submitter.objects.add(user2, motion)
+        Submitter.objects.add(user3, motion)
+
        MotionComment.objects.create(
            comment="test_comment", motion=motion, section=section1
        )

@ -78,46 +75,22 @@ def test_motion_db_queries():
            comment="test_comment2", motion=motion, section=section2
        )

-        get_user_model().objects.create_user(
-            username=f"user_{index}", password="password"
-        )
+        block = MotionBlock.objects.create(title=f"block_{index}")
+        motion.motion_block = block
+        category = Category.objects.create(name=f"category_{index}")
+        motion.category = category
+        motion.save()

-        # Create some fully populated polls:
-        poll1 = MotionPoll.objects.create(
+        # Create a poll:
+        poll = MotionPoll.objects.create(
            motion=motion,
            title="test_title_XeejaeFez3chahpei9qu",
            pollmethod="YNA",
            type=BasePoll.TYPE_NAMED,
        )
-        poll1.create_options()
-        option = poll1.options.get()
-        MotionVote.objects.create(
-            user=user1, option=option, value="Y", weight=Decimal(1)
-        )
-        poll1.voted.add(user1)
-        MotionVote.objects.create(
-            user=user2, option=option, value="N", weight=Decimal(1)
-        )
-        poll1.voted.add(user2)
-
-        poll2 = MotionPoll.objects.create(
-            motion=motion,
-            title="test_title_iecuW7eekeGh4uunow1e",
-            pollmethod="YNA",
-            type=BasePoll.TYPE_NAMED,
-        )
-        poll2.create_options()
-        option = poll2.options.get()
-        MotionVote.objects.create(
-            user=user2, option=option, value="Y", weight=Decimal(1)
-        )
-        poll2.voted.add(user2)
-        MotionVote.objects.create(
-            user=user3, option=option, value="N", weight=Decimal(1)
-        )
-        poll2.voted.add(user3)
+        poll.create_options()

-    assert count_queries(Motion.get_elements) == 18
+    assert count_queries(Motion.get_elements)() == 12


class CreateMotion(TestCase):

@ -125,13 +98,10 @@ class CreateMotion(TestCase):
    Tests motion creation.
    """

-    maxDiff = None
-
    def setUp(self):
        self.client = APIClient()
        self.client.login(username="admin", password="admin")

-    @assert_query_count(85, True)
    def test_simple(self):
        """
        Tests that a motion is created with a specific title and text.

@ -139,6 +109,7 @@ class CreateMotion(TestCase):
        The created motion should have an identifier and the admin user should
        be the submitter.
        """
+        with self.assertNumQueries(51, verbose=True):
            response = self.client.post(
                reverse("motion-list"),
                {

@ -28,7 +28,7 @@ def test_motion_poll_db_queries():
    = 5 queries
    """
    create_motion_polls()
-    assert count_queries(MotionPoll.get_elements) == 5
+    assert count_queries(MotionPoll.get_elements)() == 5


@pytest.mark.django_db(transaction=False)

@ -37,7 +37,7 @@ def test_motion_vote_db_queries():
    Tests that only 1 query is done when fetching MotionVotes
    """
    create_motion_polls()
-    assert count_queries(MotionVote.get_elements) == 1
+    assert count_queries(MotionVote.get_elements)() == 1


def create_motion_polls():

@ -32,7 +32,7 @@ def test_category_db_queries():
    for index in range(10):
        Category.objects.create(name=f"category{index}")

-    assert count_queries(Category.get_elements) == 1
+    assert count_queries(Category.get_elements)() == 1


@pytest.mark.django_db(transaction=False)

@ -46,19 +46,18 @@ def test_statute_paragraph_db_queries():
            title=f"statute_paragraph{index}", text=f"text{index}"
        )

-    assert count_queries(StatuteParagraph.get_elements) == 1
+    assert count_queries(StatuteParagraph.get_elements)() == 1


@pytest.mark.django_db(transaction=False)
def test_workflow_db_queries():
    """
    Tests that only the following db queries are done:
-    * 1 requests to get the list of all workflows,
-    * 1 request to get all states and
-    * 1 request to get the next states of all states.
+    * 1 request to get the list of all workflows and
+    * 1 request to get all states.
    """

-    assert count_queries(Workflow.get_elements) == 3
+    assert count_queries(Workflow.get_elements)() == 2


class TestStatuteParagraphs(TestCase):

@ -20,7 +20,7 @@ def test_topic_item_db_queries():
    for index in range(10):
        Topic.objects.create(title=f"topic-{index}")

-    assert count_queries(Topic.get_elements) == 4
+    assert count_queries(Topic.get_elements)() == 4


class TopicCreate(TestCase):

@ -29,7 +29,7 @@ def test_user_db_queries():
    for index in range(10):
        User.objects.create(username=f"user{index}")

-    assert count_queries(User.get_elements) == 3
+    assert count_queries(User.get_elements)() == 3


@pytest.mark.django_db(transaction=False)

@ -42,7 +42,7 @@ def test_group_db_queries():
    for index in range(10):
        Group.objects.create(name=f"group{index}")

-    assert count_queries(Group.get_elements) == 2
+    assert count_queries(Group.get_elements)() == 2


class UserGetTest(TestCase):

@ -11,9 +11,9 @@ from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_K
from openslides.asgi import application
from openslides.core.config import config
from openslides.utils.autoupdate import (
-    Element,
-    inform_changed_elements,
+    AutoupdateElement,
    inform_deleted_data,
+    inform_elements,
)
from openslides.utils.cache import element_cache
from openslides.utils.websocket import (

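For reference, the renamed autoupdate API as exercised by the call sites below (illustrative sketch; the collection string and payload are placeholders):

from openslides.utils.autoupdate import AutoupdateElement, inform_elements

inform_elements(
    [
        AutoupdateElement(
            id=1,
            collection_string="core/config",  # placeholder collection string
            full_data={"id": 1, "key": "some_key", "value": "some_value"},
        )
    ]
)
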
@ -93,9 +93,9 @@ async def set_config():
    collection_string = config.get_collection_string()
    config_id = config.key_to_id[key]  # type: ignore
    full_data = {"id": config_id, "key": key, "value": value}
-    await sync_to_async(inform_changed_elements)(
+    await sync_to_async(inform_elements)(
        [
-            Element(
+            AutoupdateElement(
                id=config_id,
                collection_string=collection_string,
                full_data=full_data,

@ -227,9 +227,9 @@ async def test_skipping_autoupdate(set_config, get_communicator):
    await communicator.connect()

    with patch("openslides.utils.autoupdate.save_history"):
-        await sync_to_async(inform_changed_elements)(
+        await sync_to_async(inform_elements)(
            [
-                Element(
+                AutoupdateElement(
                    id=2,
                    collection_string=PersonalizedCollection().get_collection_string(),
                    full_data={"id": 2, "value": "new value 1", "user_id": 2},

@ -237,9 +237,9 @@ async def test_skipping_autoupdate(set_config, get_communicator):
                )
            ]
        )
-        await sync_to_async(inform_changed_elements)(
+        await sync_to_async(inform_elements)(
            [
-                Element(
+                AutoupdateElement(
                    id=2,
                    collection_string=PersonalizedCollection().get_collection_string(),
                    full_data={"id": 2, "value": "new value 2", "user_id": 2},

@ -11,6 +11,7 @@ from openslides.utils.autoupdate import inform_changed_data
from openslides.utils.cache import element_cache
from openslides.utils.utils import get_element_id
from tests.common_groups import GROUP_ADMIN_PK, GROUP_DELEGATE_PK
+from tests.count_queries import AssertNumQueriesContext


class TestCase(_TestCase):

@ -70,6 +71,14 @@ class TestCase(_TestCase):
            model.get_element_id() in self.get_last_autoupdate(user=user)[1]
        )

+    def assertNumQueries(self, num, func=None, *args, verbose=False, **kwargs):
+        context = AssertNumQueriesContext(self, num, verbose)
+        if func is None:
+            return context
+
+        with context:
+            func(*args, **kwargs)
+
    """
    Create Helper functions
    """

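Usage note (illustrative, not part of the diff): the overridden assertNumQueries keeps Django's context-manager form, optionally accepts a callable, and with verbose=True prints the captured queries on exit via AssertNumQueriesContext. Endpoint and count below are placeholders.

# Illustrative only; endpoint and expected count are placeholders.
from tests.test_case import TestCase


class ExampleTest(TestCase):
    def test_as_context_manager(self):
        with self.assertNumQueries(35):
            self.client.get("/rest/users/user/")

    def test_with_callable(self):
        self.assertNumQueries(35, self.client.get, "/rest/users/user/", verbose=True)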