diff --git a/client/src/app/base.component.ts b/client/src/app/base.component.ts
index e83b01003..74da24608 100644
--- a/client/src/app/base.component.ts
+++ b/client/src/app/base.component.ts
@@ -112,6 +112,5 @@ export abstract class BaseComponent {
     protected onLeaveTinyMce(event: any): void {
         console.log('tinyevent:', event.event.type);
         this.saveHint = false;
-        // console.log("event: ", event.event.type);
     }
 }
diff --git a/client/src/app/core/core-services/autoupdate-throttle.service.ts b/client/src/app/core/core-services/autoupdate-throttle.service.ts
index f4dc66faa..e0127b9ef 100644
--- a/client/src/app/core/core-services/autoupdate-throttle.service.ts
+++ b/client/src/app/core/core-services/autoupdate-throttle.service.ts
@@ -76,7 +76,6 @@ export class AutoupdateThrottleService {
     public disableUntil(changeId: number): void {
         // Wait for an autoupdate with to_id >= changeId.
-        console.log(this.delay);
         if (!this.isActive) {
             return;
         }
@@ -99,6 +98,7 @@ export class AutoupdateThrottleService {
         }
 
         this.pendingAutoupdates = [];
+        console.log(`Processing ${autoupdates.length} pending autoupdates`);
         const autoupdate = this.mergeAutoupdates(autoupdates);
         this._autoupdatesToInject.next(autoupdate);
     }
diff --git a/client/src/app/core/core-services/constants.service.ts b/client/src/app/core/core-services/constants.service.ts
index 1faea4de3..7ea0315b1 100644
--- a/client/src/app/core/core-services/constants.service.ts
+++ b/client/src/app/core/core-services/constants.service.ts
@@ -40,9 +40,7 @@ export class ConstantsService {
     public constructor(communicationManager: CommunicationManagerService, private http: HttpService) {
         communicationManager.startCommunicationEvent.subscribe(async () => {
-            console.log('start communication');
             this.constants = await this.http.get(environment.urlPrefix + '/core/constants/');
-            console.log('constants:', this.constants);
             Object.keys(this.subjects).forEach(key => {
                 this.subjects[key].next(this.constants[key]);
             });
diff --git a/client/src/app/core/core-services/notify.service.ts b/client/src/app/core/core-services/notify.service.ts
index b19e1c633..e3f6cc969 100644
--- a/client/src/app/core/core-services/notify.service.ts
+++ b/client/src/app/core/core-services/notify.service.ts
@@ -133,7 +133,7 @@ export class NotifyService {
             );
         } catch (e) {
             if (!(e instanceof OfflineError)) {
-                console.log(e);
+                console.error(e);
             }
         }
     }
diff --git a/client/src/app/core/core-services/projector-data.service.ts b/client/src/app/core/core-services/projector-data.service.ts
index f12e459b3..cd072e8c5 100644
--- a/client/src/app/core/core-services/projector-data.service.ts
+++ b/client/src/app/core/core-services/projector-data.service.ts
@@ -94,7 +94,7 @@ export class ProjectorDataService {
             );
         } catch (e) {
             if (!(e instanceof OfflineError)) {
-                console.log(e);
+                console.error(e);
             }
         }
     }
diff --git a/client/src/app/core/core-services/relation-manager.service.ts b/client/src/app/core/core-services/relation-manager.service.ts
index aee102088..93e62b6cc 100644
--- a/client/src/app/core/core-services/relation-manager.service.ts
+++ b/client/src/app/core/core-services/relation-manager.service.ts
@@ -215,7 +215,6 @@ export class RelationManagerService {
                 result = descriptor.get.bind(viewModel)();
             } else {
                 result = target[property];
-                // console.log(property, target);
             }
         } else if (property in _model) {
             result = _model[property];
diff --git a/client/src/app/core/repositories/users/user-repository.service.ts b/client/src/app/core/repositories/users/user-repository.service.ts
index 8a86bc553..5189f00e3 100644
--- a/client/src/app/core/repositories/users/user-repository.service.ts
+++ b/client/src/app/core/repositories/users/user-repository.service.ts
@@ -269,8 +269,6 @@ export class UserRepositoryService extends BaseRepository
     public async update(update: Partial<User>, viewModel: ViewUser): Promise<void> {
         this.preventAlterationOnDemoUsers(viewModel);
-        console.log('update: ', update);
-
         return super.update(update, viewModel);
     }
diff --git a/client/src/app/site/users/components/user-import/user-import-list.component.ts b/client/src/app/site/users/components/user-import/user-import-list.component.ts
index 874d61457..1b17053dc 100644
--- a/client/src/app/site/users/components/user-import/user-import-list.component.ts
+++ b/client/src/app/site/users/components/user-import/user-import-list.component.ts
@@ -54,7 +54,6 @@ export class UserImportListComponent extends BaseImportListComponentDirective
+					# and their durations, > 0 logs only
+					# statements running at least this number
+					# of milliseconds
+
+
+# - What to Log -
+
+#debug_print_parse = off
+#debug_print_rewritten = off
+#debug_print_plan = off
+#debug_pretty_print = on
+#log_checkpoints = off
+#log_connections = off
+#log_disconnections = off
+#log_duration = off
+#log_error_verbosity = default		# terse, default, or verbose messages
+#log_hostname = off
+#log_line_prefix = '%m [%p] '		# special values:
+					#   %a = application name
+					#   %u = user name
+					#   %d = database name
+					#   %r = remote host and port
+					#   %h = remote host
+					#   %p = process ID
+					#   %t = timestamp without milliseconds
+					#   %m = timestamp with milliseconds
+					#   %n = timestamp with milliseconds (as a Unix epoch)
+					#   %i = command tag
+					#   %e = SQL state
+					#   %c = session ID
+					#   %l = session line number
+					#   %s = session start timestamp
+					#   %v = virtual transaction ID
+					#   %x = transaction ID (0 if none)
+					#   %q = stop here in non-session
+					#        processes
+					#   %% = '%'
+					# e.g. '<%u%%%d> '
+#log_lock_waits = off			# log lock waits >= deadlock_timeout
+log_statement = 'all'			# none, ddl, mod, all
+#log_replication_commands = off
+#log_temp_files = -1			# log temporary files equal or larger
+					# than the specified size in kilobytes;
+					# -1 disables, 0 logs all temp files
+#log_timezone = 'GMT'
+
+#------------------------------------------------------------------------------
+# PROCESS TITLE
+#------------------------------------------------------------------------------
+
+#cluster_name = ''			# added to process titles if nonempty
+					# (change requires restart)
+#update_process_title = on
+
+
+#------------------------------------------------------------------------------
+# STATISTICS
+#------------------------------------------------------------------------------
+
+# - Query and Index Statistics Collector -
+
+#track_activities = on
+#track_counts = on
+#track_io_timing = off
+#track_functions = none			# none, pl, all
+#track_activity_query_size = 1024	# (change requires restart)
+#stats_temp_directory = 'pg_stat_tmp'
+
+
+# - Monitoring -
+
+#log_parser_stats = off
+#log_planner_stats = off
+#log_executor_stats = off
+#log_statement_stats = off
+
+
+#------------------------------------------------------------------------------
+# AUTOVACUUM
+#------------------------------------------------------------------------------
+
+#autovacuum = on			# Enable autovacuum subprocess?  'on'
+					# requires track_counts to also be on.
+#log_autovacuum_min_duration = -1	# -1 disables, 0 logs all actions and
+					# their durations, > 0 logs only
+					# actions running at least this number
+					# of milliseconds.
+#autovacuum_max_workers = 3		# max number of autovacuum subprocesses
+					# (change requires restart)
+#autovacuum_naptime = 1min		# time between autovacuum runs
+#autovacuum_vacuum_threshold = 50	# min number of row updates before
+					# vacuum
+#autovacuum_analyze_threshold = 50	# min number of row updates before
+					# analyze
+#autovacuum_vacuum_scale_factor = 0.2	# fraction of table size before vacuum
+#autovacuum_analyze_scale_factor = 0.1	# fraction of table size before analyze
+#autovacuum_freeze_max_age = 200000000	# maximum XID age before forced vacuum
+					# (change requires restart)
+#autovacuum_multixact_freeze_max_age = 400000000	# maximum multixact age
+							# before forced vacuum
+							# (change requires restart)
+#autovacuum_vacuum_cost_delay = 20ms	# default vacuum cost delay for
+					# autovacuum, in milliseconds;
+					# -1 means use vacuum_cost_delay
+#autovacuum_vacuum_cost_limit = -1	# default vacuum cost limit for
+					# autovacuum, -1 means use
+					# vacuum_cost_limit
+
+
+#------------------------------------------------------------------------------
+# CLIENT CONNECTION DEFAULTS
+#------------------------------------------------------------------------------
+
+# - Statement Behavior -
+
+#client_min_messages = notice		# values in order of decreasing detail:
+					#   debug5
+					#   debug4
+					#   debug3
+					#   debug2
+					#   debug1
+					#   log
+					#   notice
+					#   warning
+					#   error
+#search_path = '"$user", public'	# schema names
+#row_security = on
+#default_tablespace = ''		# a tablespace name, '' uses the default
+#temp_tablespaces = ''			# a list of tablespace names, '' uses
+					# only default tablespace
+#check_function_bodies = on
+#default_transaction_isolation = 'read committed'
+#default_transaction_read_only = off
+#default_transaction_deferrable = off
+#session_replication_role = 'origin'
+#statement_timeout = 0			# in milliseconds, 0 is disabled
+#lock_timeout = 0			# in milliseconds, 0 is disabled
+#idle_in_transaction_session_timeout = 0	# in milliseconds, 0 is disabled
+#vacuum_freeze_min_age = 50000000
+#vacuum_freeze_table_age = 150000000
+#vacuum_multixact_freeze_min_age = 5000000
+#vacuum_multixact_freeze_table_age = 150000000
+#vacuum_cleanup_index_scale_factor = 0.1	# fraction of total number of tuples
+						# before index cleanup, 0 always performs
+						# index cleanup
+#bytea_output = 'hex'			# hex, escape
+#xmlbinary = 'base64'
+#xmloption = 'content'
+#gin_fuzzy_search_limit = 0
+#gin_pending_list_limit = 4MB
+
+# - Locale and Formatting -
+
+#datestyle = 'iso, mdy'
+#intervalstyle = 'postgres'
+#timezone = 'GMT'
+#timezone_abbreviations = 'Default'	# Select the set of available time zone
+					# abbreviations.  Currently, there are
+					#   Default
+					#   Australia (historical usage)
+					#   India
+					# You can create your own file in
+					# share/timezonesets/.
+#extra_float_digits = 0			# min -15, max 3
+#client_encoding = sql_ascii		# actually, defaults to database
+					# encoding
+
+# These settings are initialized by initdb, but they can be changed.
+#lc_messages = 'C'			# locale for system error message
+					# strings
+#lc_monetary = 'C'			# locale for monetary formatting
+#lc_numeric = 'C'			# locale for number formatting
+#lc_time = 'C'				# locale for time formatting
+
+# default configuration for text search
+#default_text_search_config = 'pg_catalog.simple'
+
+# - Shared Library Preloading -
+
+#shared_preload_libraries = ''		# (change requires restart)
+#local_preload_libraries = ''
+#session_preload_libraries = ''
+#jit_provider = 'llvmjit'		# JIT library to use
+
+# - Other Defaults -
+
+#dynamic_library_path = '$libdir'
+
+
+#------------------------------------------------------------------------------
+# LOCK MANAGEMENT
+#------------------------------------------------------------------------------
+
+#deadlock_timeout = 1s
+#max_locks_per_transaction = 64		# min 10
+					# (change requires restart)
+#max_pred_locks_per_transaction = 64	# min 10
+					# (change requires restart)
+#max_pred_locks_per_relation = -2	# negative values mean
+					# (max_pred_locks_per_transaction
+					#  / -max_pred_locks_per_relation) - 1
+#max_pred_locks_per_page = 2		# min 0
+
+
+#------------------------------------------------------------------------------
+# VERSION AND PLATFORM COMPATIBILITY
+#------------------------------------------------------------------------------
+
+# - Previous PostgreSQL Versions -
+
+#array_nulls = on
+#backslash_quote = safe_encoding	# on, off, or safe_encoding
+#default_with_oids = off
+#escape_string_warning = on
+#lo_compat_privileges = off
+#operator_precedence_warning = off
+#quote_all_identifiers = off
+#standard_conforming_strings = on
+#synchronize_seqscans = on
+
+# - Other Platforms and Clients -
+
+#transform_null_equals = off
+
+
+#------------------------------------------------------------------------------
+# ERROR HANDLING
+#------------------------------------------------------------------------------
+
+#exit_on_error = off			# terminate session on any error?
+#restart_after_crash = on		# reinitialize after backend crash?
+#data_sync_retry = off			# retry or panic on failure to fsync
+					# data?
+					# (change requires restart)
+
+
+#------------------------------------------------------------------------------
+# CONFIG FILE INCLUDES
+#------------------------------------------------------------------------------
+
+# These options allow settings to be loaded from files other than the
+# default postgresql.conf.  Note that these are directives, not variable
+# assignments, so they can usefully be given more than once.
+
+#include_dir = '...'			# include files ending in '.conf' from
+					# a directory, e.g., 'conf.d'
+#include_if_exists = '...'		# include file only if it exists
+#include = '...'			# include file
+
+
+#------------------------------------------------------------------------------
+# CUSTOMIZED OPTIONS
+#------------------------------------------------------------------------------
+
+# Add settings for extensions here
diff --git a/server/docker/Dockerfile.dev b/server/docker/Dockerfile.dev
index b5318d528..fb782abaa 100644
--- a/server/docker/Dockerfile.dev
+++ b/server/docker/Dockerfile.dev
@@ -10,6 +10,7 @@ RUN apt-get -y update && apt-get install --no-install-recommends -y \
     wait-for-it \
     gcc \
     git \
+    vim \
     libxml2-dev \
     libxmlsec1-dev \
     libxmlsec1-openssl \
@@ -27,4 +28,4 @@ EXPOSE 8000
 COPY docker/entrypoint-dev /usr/local/sbin/
 COPY . .
 ENTRYPOINT ["/usr/local/sbin/entrypoint-dev"]
-CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
+CMD ["python", "-u", "manage.py", "runserver", "0.0.0.0:8000"]
diff --git a/server/docker/entrypoint-db-setup b/server/docker/entrypoint-db-setup
index 155c2c703..f534ef05f 100755
--- a/server/docker/entrypoint-db-setup
+++ b/server/docker/entrypoint-db-setup
@@ -39,7 +39,7 @@ wait-for-it redis:6379
 wait-for-it redis-slave:6379
 
 echo 'running migrations'
-python manage.py migrate
+python -u manage.py migrate
 
 # Admin
 if [[ -f /run/secrets/os_admin ]]; then
diff --git a/server/docker/entrypoint-dev b/server/docker/entrypoint-dev
index 94833dd72..25632f973 100755
--- a/server/docker/entrypoint-dev
+++ b/server/docker/entrypoint-dev
@@ -4,6 +4,11 @@ set -e
 
 wait-for-it -t 0 redis:6379
 
+until pg_isready -h postgres -p 5432 -U openslides; do
+    echo "Waiting for Postgres to become available..."
+    sleep 3
+done
+
 if [[ ! -f "/app/personal_data/var/settings.py" ]]; then
     echo "Create settings"
     python manage.py createsettings
diff --git a/server/docker/settings.py b/server/docker/settings.py
index b69274321..fb4ed16de 100644
--- a/server/docker/settings.py
+++ b/server/docker/settings.py
@@ -83,6 +83,8 @@ DATABASES = {
         "PASSWORD": get_env("DATABASE_PASSWORD", "openslides"),
         "HOST": get_env("DATABASE_HOST", "db"),
         "PORT": get_env("DATABASE_PORT", "5432"),
+        "USE_TZ": False,  # Requires postgresql to have UTC set as default
+        "DISABLE_SERVER_SIDE_CURSORS": True,
     },
     "mediafiles": {
         "ENGINE": "django.db.backends.postgresql",
diff --git a/server/openslides/core/models.py b/server/openslides/core/models.py
index 18f829ccc..fb871c0dc 100644
--- a/server/openslides/core/models.py
+++ b/server/openslides/core/models.py
@@ -11,6 +11,7 @@ from openslides.utils.cache import element_cache, get_element_id
 from openslides.utils.locking import locking
 from openslides.utils.manager import BaseManager
 from openslides.utils.models import SET_NULL_AND_AUTOUPDATE, RESTModelMixin
+from openslides.utils.postgres import is_postgres
 
 from .access_permissions import (
     ConfigAccessPermissions,
@@ -268,31 +269,60 @@ class HistoryManager(BaseManager):
         """
         Method to add elements to the history. This does not trigger autoupdate.
         """
+        history_time = now()
+        elements = [
+            element for element in elements if not element.get("disable_history", False)
+        ]
+
         with transaction.atomic():
-            instances = []
-            history_time = now()
-            for element in elements:
-                if element.get("disable_history"):
-                    # Do not update history if history is disabled.
-                    continue
-                # HistoryData is not a root rest element so there is no autoupdate and not history saving here.
-                data = HistoryData.objects.create(full_data=element.get("full_data"))
-                instance = self.model(
-                    element_id=get_element_id(
-                        element["collection_string"], element["id"]
-                    ),
-                    now=history_time,
-                    information=element.get("information", []),
-                    user_id=element.get("user_id"),
-                    full_data=data,
-                )
-                instance.save()
-                instances.append(instance)
-            return instances
+            if is_postgres():
+                return self._add_elements_postgres(elements, history_time)
+            else:
+                return self._add_elements_other_dbs(elements, history_time)
+
+    def _add_elements_postgres(self, elements, history_time):
+        """
+        Postgres supports returning ids from bulk requests, so after doing `bulk_create`
+        every HistoryData has an id. This can be used to bulk_create History models in a
+        second step.
+ """ + history_data = [ + HistoryData(full_data=element.get("full_data")) for element in elements + ] + HistoryData.objects.bulk_create(history_data) + + history_entries = [ + self.model( + element_id=get_element_id(element["collection_string"], element["id"]), + now=history_time, + information=element.get("information", []), + user_id=element.get("user_id"), + full_data_id=hd.id, + ) + for element, hd in zip(elements, history_data) + ] + self.bulk_create(history_entries) + return history_entries + + def _add_elements_other_dbs(self, elements, history_time): + history_entries = [] + for element in elements: + # HistoryData is not a root rest element so there is no autoupdate and not history saving here. + data = HistoryData.objects.create(full_data=element.get("full_data")) + instance = self.model( + element_id=get_element_id(element["collection_string"], element["id"]), + now=history_time, + information=element.get("information", []), + user_id=element.get("user_id"), + full_data=data, + ) + instance.save() + history_entries.append(instance) + return history_entries def build_history(self): """ - Method to add all cachables to the history. + Method to add all cacheables to the history. """ async_to_sync(self.async_build_history)() diff --git a/server/openslides/global_settings.py b/server/openslides/global_settings.py index 1130593d0..625ce5560 100644 --- a/server/openslides/global_settings.py +++ b/server/openslides/global_settings.py @@ -23,7 +23,7 @@ INSTALLED_APPS = [ "openslides.mediafiles", ] -INSTALLED_PLUGINS = collect_plugins() # Adds all automaticly collected plugins +INSTALLED_PLUGINS = collect_plugins() # Adds all automatically collected plugins MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", @@ -33,7 +33,7 @@ MIDDLEWARE = [ "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", - "openslides.utils.autoupdate.AutoupdateBundleMiddleware", + "openslides.utils.autoupdate_bundle.AutoupdateBundleMiddleware", ] ROOT_URLCONF = "openslides.urls" diff --git a/server/openslides/motions/views.py b/server/openslides/motions/views.py index ff3895569..1db1fcd90 100644 --- a/server/openslides/motions/views.py +++ b/server/openslides/motions/views.py @@ -1260,15 +1260,18 @@ class MotionPollViewSet(BasePollViewSet): self, data, poll, weight_user, vote_user, request_user ): option = poll.options.get() - vote = MotionVote.objects.create( - user=vote_user, delegated_user=request_user, option=option - ) - vote.value = data - vote.weight = ( + weight = ( weight_user.vote_weight if config["users_activate_vote_weight"] else Decimal(1) ) + vote = MotionVote( + user=vote_user, + delegated_user=request_user, + option=option, + value=data, + weight=weight, + ) vote.save(no_delete_on_restriction=True) inform_changed_data(option) diff --git a/server/openslides/poll/views.py b/server/openslides/poll/views.py index 33867d861..069386286 100644 --- a/server/openslides/poll/views.py +++ b/server/openslides/poll/views.py @@ -7,7 +7,7 @@ from django.db.utils import IntegrityError from rest_framework import status from openslides.utils.auth import in_some_groups -from openslides.utils.autoupdate import inform_changed_data +from openslides.utils.autoupdate import disable_history, inform_changed_data from openslides.utils.rest_api import ( DecimalField, GenericViewSet, @@ -158,10 +158,14 @@ class BasePollViewSet(ModelViewSet): poll.state = BasePoll.STATE_PUBLISHED poll.save() 
         inform_changed_data(
-            (vote.user for vote in poll.get_votes().all() if vote.user), final_data=True
+            (
+                vote.user
+                for vote in poll.get_votes().prefetch_related("user").all()
+                if vote.user
+            )
         )
-        inform_changed_data(poll.get_votes(), final_data=True)
-        inform_changed_data(poll.get_options(), final_data=True)
+        inform_changed_data(poll.get_votes())
+        inform_changed_data(poll.get_options())
         return Response()
 
     @detail_route(methods=["POST"])
@@ -194,6 +198,9 @@
         """
         poll = self.get_object()
 
+        # Disable history for these requests
+        disable_history()
+
         if isinstance(request.user, AnonymousUser):
             self.permission_denied(request)
@@ -205,7 +212,11 @@
         if "data" not in data:
             raise ValidationError({"detail": "No data provided."})
         vote_data = data["data"]
-        if "user_id" in data and poll.type != BasePoll.TYPE_ANALOG:
+        if (
+            "user_id" in data
+            and data["user_id"] != request.user.id
+            and poll.type != BasePoll.TYPE_ANALOG
+        ):
             try:
                 vote_user = get_user_model().objects.get(pk=data["user_id"])
             except get_user_model().DoesNotExist:
@@ -241,9 +252,9 @@
     @transaction.atomic
     def refresh(self, request, pk):
         poll = self.get_object()
-        inform_changed_data(poll, final_data=True)
-        inform_changed_data(poll.get_options(), final_data=True)
-        inform_changed_data(poll.get_votes(), final_data=True)
+        inform_changed_data(poll)
+        inform_changed_data(poll.get_options())
+        inform_changed_data(poll.get_votes())
         return Response()
 
     def assert_can_vote(self, poll, request, vote_user):
diff --git a/server/openslides/users/migrations/0016_remove_user_ordering.py b/server/openslides/users/migrations/0016_remove_user_ordering.py
new file mode 100644
index 000000000..0cac0d1e2
--- /dev/null
+++ b/server/openslides/users/migrations/0016_remove_user_ordering.py
@@ -0,0 +1,28 @@
+# Generated by Django 2.2.17 on 2021-01-20 14:33
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("users", "0015_user_vote_delegated_to"),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name="user",
+            options={
+                "default_permissions": (),
+                "permissions": (
+                    ("can_see_name", "Can see names of users"),
+                    (
+                        "can_see_extra_data",
+                        "Can see extra data of users (e.g. email and comment)",
+                    ),
+                    ("can_change_password", "Can change its own password"),
+                    ("can_manage", "Can manage users"),
+                ),
+            },
+        ),
+    ]
diff --git a/server/openslides/users/models.py b/server/openslides/users/models.py
index 087d196ec..dcf5984b2 100644
--- a/server/openslides/users/models.py
+++ b/server/openslides/users/models.py
@@ -193,7 +193,6 @@ class User(RESTModelMixin, PermissionsMixin, AbstractBaseUser):
             ("can_change_password", "Can change its own password"),
             ("can_manage", "Can manage users"),
         )
-        ordering = ("last_name", "first_name", "username")
 
     def __str__(self):
         # Strip white spaces from the name parts
diff --git a/server/openslides/utils/__init__.py b/server/openslides/utils/__init__.py
index e69de29bb..d5f8e4b54 100644
--- a/server/openslides/utils/__init__.py
+++ b/server/openslides/utils/__init__.py
@@ -0,0 +1,11 @@
+# overwrite the builtin print to always flush
+def flushprint(*args, **kwargs):  # type: ignore
+    if "flush" not in kwargs:
+        kwargs["flush"] = True
+
+    __builtins__["oldprint"](*args, **kwargs)  # type: ignore
+
+
+if "oldprint" not in __builtins__:  # type: ignore
+    __builtins__["oldprint"] = __builtins__["print"]  # type: ignore
+__builtins__["print"] = flushprint  # type: ignore
diff --git a/server/openslides/utils/autoupdate.py b/server/openslides/utils/autoupdate.py
index 6c5d6ae8e..aee885544 100644
--- a/server/openslides/utils/autoupdate.py
+++ b/server/openslides/utils/autoupdate.py
@@ -1,17 +1,13 @@
-import json
-import threading
 from collections import defaultdict
 from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
 
-from asgiref.sync import async_to_sync
 from django.db.models import Model
 from mypy_extensions import TypedDict
 
 from .auth import UserDoesNotExist
-from .cache import ChangeIdTooLowError, element_cache, get_element_id
-from .stream import stream
-from .timing import Timing
-from .utils import get_model_from_collection_string, is_iterable, split_element_id
+from .autoupdate_bundle import AutoupdateElement, autoupdate_bundle
+from .cache import ChangeIdTooLowError, element_cache
+from .utils import is_iterable, split_element_id
 
 
 AutoupdateFormat = TypedDict(
@@ -26,131 +22,10 @@
 
 
-class AutoupdateElementBase(TypedDict):
-    id: int
-    collection_string: str
-
-
-class AutoupdateElement(AutoupdateElementBase, total=False):
-    """
-    Data container to handle one root rest element for the autoupdate, history
-    and caching process.
-
-    The fields `id` and `collection_string` are required to identify the element. All
-    other fields are optional:
-
-    full_data: If a value is given (dict or None), it won't be loaded from the DB.
-    If otherwise no value is given, the AutoupdateBundle will try to resolve the object
-    from the DB and serialize it into the full_data.
-
-    information and user_id: These fields are for the history indicating what and who
-    made changes.
-
-    disable_history: If this is True, the element (and the containing full_data) won't
-    be saved into the history. Information and user_id is then irrelevant.
-
-    no_delete_on_restriction is a flag, which is saved into the models in the cache
-    as the _no_delete_on_restriction key. If this is true, there should neither be an
-    entry for one specific model in the changed *nor the deleted* part of the
-    autoupdate, if the model was restricted.
- """ - - information: List[str] - user_id: Optional[int] - disable_history: bool - no_delete_on_restriction: bool - full_data: Optional[Dict[str, Any]] - - -class AutoupdateBundle: - """ - Collects changed elements via inform*_data. After the collecting-step is finished, - the bundle releases all changes to the history and element cache via `.done()`. - """ - - def __init__(self) -> None: - self.autoupdate_elements: Dict[str, Dict[int, AutoupdateElement]] = defaultdict( - dict - ) - - def add(self, elements: Iterable[AutoupdateElement]) -> None: - """ Adds the elements to the bundle """ - for element in elements: - self.autoupdate_elements[element["collection_string"]][ - element["id"] - ] = element - - def done(self) -> Optional[int]: - """ - Finishes the bundle by resolving all missing data and passing it to - the history and element cache. - - Returns the change id, if there are autoupdate elements. Otherwise none. - """ - if not self.autoupdate_elements: - return None - - for collection, elements in self.autoupdate_elements.items(): - # Get all ids, that do not have a full_data key - # (element["full_data"]=None will not be resolved again!) - ids = [ - element["id"] - for element in elements.values() - if "full_data" not in element - ] - if ids: - # Get all missing models. If e.g. an id could not be found it - # means, it was deleted. Since there is not full_data entry - # for the element, the data will be interpreted as None, which - # is correct for deleted elements. - model_class = get_model_from_collection_string(collection) - for full_data in model_class.get_elements(ids): - elements[full_data["id"]]["full_data"] = full_data - - # Save histroy here using sync code. - save_history(self.element_iterator) - - # Update cache and send autoupdate using async code. - return async_to_sync(self.dispatch_autoupdate)() - - @property - def element_iterator(self) -> Iterable[AutoupdateElement]: - """ Iterator for all elements in this bundle """ - for elements in self.autoupdate_elements.values(): - yield from elements.values() - - async def get_data_for_cache(self) -> Dict[str, Optional[Dict[str, Any]]]: - """ - Async helper function to update the cache. - - Returns the change_id - """ - cache_elements: Dict[str, Optional[Dict[str, Any]]] = {} - for element in self.element_iterator: - element_id = get_element_id(element["collection_string"], element["id"]) - full_data = element.get("full_data") - if full_data: - full_data["_no_delete_on_restriction"] = element.get( - "no_delete_on_restriction", False - ) - cache_elements[element_id] = full_data - return cache_elements - - async def dispatch_autoupdate(self) -> int: - """ - Async helper function to update cache and send autoupdate. 
-
-        Return the change_id
-        """
-        # Update cache
-        cache_elements = await self.get_data_for_cache()
-        change_id = await element_cache.change_elements(cache_elements)
-
-        # Send autoupdate
-        autoupdate_payload = {"elements": cache_elements, "change_id": change_id}
-        await stream.send("autoupdate", autoupdate_payload)
-
-        return change_id
+def disable_history() -> None:
+    """ Disables history saving for the current autoupdate bundle """
+    with autoupdate_bundle() as bundle:
+        bundle.disable_history()
 
 
 def inform_changed_data(
@@ -159,7 +34,6 @@
     user_id: Optional[int] = None,
     disable_history: bool = False,
     no_delete_on_restriction: bool = False,
-    final_data: bool = False,
 ) -> None:
     """
     Informs the autoupdate system and the caching system about the creation or
@@ -186,8 +60,6 @@
             user_id=user_id,
             no_delete_on_restriction=no_delete_on_restriction,
         )
-        if final_data:
-            element["full_data"] = root_instance.get_full_data()
         elements.append(element)
     inform_elements(elements)
@@ -227,62 +99,8 @@
     If you want to save history information, user id or disable history you
     have to put information or flag inside the elements.
     """
-    bundle = autoupdate_bundle.get(threading.get_ident())
-    if bundle is not None:
-        # Put all elements into the autoupdate_bundle.
+    with autoupdate_bundle() as bundle:
         bundle.add(elements)
-    else:
-        # Send autoupdate directly
-        bundle = AutoupdateBundle()
-        bundle.add(elements)
-        bundle.done()
-
-
-"""
-Global container for autoupdate bundles
-"""
-autoupdate_bundle: Dict[int, AutoupdateBundle] = {}
-
-
-class AutoupdateBundleMiddleware:
-    """
-    Middleware to handle autoupdate bundling.
-    """
-
-    def __init__(self, get_response: Any) -> None:
-        self.get_response = get_response
-        # One-time configuration and initialization.
-
-    def __call__(self, request: Any) -> Any:
-        thread_id = threading.get_ident()
-        autoupdate_bundle[thread_id] = AutoupdateBundle()
-
-        timing = Timing("request")
-
-        response = self.get_response(request)
-
-        timing()
-
-        status_ok = response.status_code >= 200 and response.status_code < 300
-        status_redirect = response.status_code >= 300 and response.status_code < 400
-
-        # rewrite the response by adding the autoupdate on any success-case (2xx status)
-        bundle: AutoupdateBundle = autoupdate_bundle.pop(thread_id)
-        if status_ok or status_redirect:
-            change_id = bundle.done()
-
-            # inject the change id, if there was an autoupdate and the response status is
-            # ok (and not redirect; redirects do not have a useful content)
-            if change_id is not None and status_ok:
-                # Inject the autoupdate in the response.
-                # The complete response body will be overwritten!
-                content = {"change_id": change_id, "data": response.data}
-                # Note: autoupdate may be none on skipped ones (which should not happen
-                # since the user has made the request....)
-                response.content = json.dumps(content)
-
-        timing(True)
-        return response
 
 
 async def get_autoupdate_data(
@@ -337,14 +155,3 @@
             all_data=all_data,
         ),
     )
-
-
-def save_history(element_iterator: Iterable[AutoupdateElement]) -> Iterable:
-    """
-    Thin wrapper around the call of history saving manager method.
-
-    This is separated to patch it during tests.
- """ - from ..core.models import History - - return History.objects.add_elements(element_iterator) diff --git a/server/openslides/utils/autoupdate_bundle.py b/server/openslides/utils/autoupdate_bundle.py new file mode 100644 index 000000000..3f168d85b --- /dev/null +++ b/server/openslides/utils/autoupdate_bundle.py @@ -0,0 +1,219 @@ +import json +import threading +from collections import defaultdict +from contextlib import contextmanager +from typing import Any, Dict, Iterable, Iterator, List, Optional + +from asgiref.sync import async_to_sync +from mypy_extensions import TypedDict + +from .cache import element_cache, get_element_id +from .stream import stream +from .timing import Timing +from .utils import get_model_from_collection_string + + +class AutoupdateElementBase(TypedDict): + id: int + collection_string: str + + +class AutoupdateElement(AutoupdateElementBase, total=False): + """ + Data container to handle one root rest element for the autoupdate, history + and caching process. + + The fields `id` and `collection_string` are required to identify the element. All + other fields are optional: + + full_data: If a value is given (dict or None), it won't be loaded from the DB. + If otherwise no value is given, the AutoupdateBundle will try to resolve the object + from the DB and serialize it into the full_data. + + information and user_id: These fields are for the history indicating what and who + made changes. + + disable_history: If this is True, the element (and the containing full_data) won't + be saved into the history. Information and user_id is then irrelevant. + + no_delete_on_restriction is a flag, which is saved into the models in the cache + as the _no_delete_on_restriction key. If this is true, there should neither be an + entry for one specific model in the changed *nor the deleted* part of the + autoupdate, if the model was restricted. + """ + + information: List[str] + user_id: Optional[int] + disable_history: bool + no_delete_on_restriction: bool + full_data: Optional[Dict[str, Any]] + + +class AutoupdateBundle: + """ + Collects changed elements via inform*_data. After the collecting-step is finished, + the bundle releases all changes to the history and element cache via `.done()`. + """ + + def __init__(self) -> None: + self.autoupdate_elements: Dict[str, Dict[int, AutoupdateElement]] = defaultdict( + dict + ) + self._disable_history = False + + def add(self, elements: Iterable[AutoupdateElement]) -> None: + """ Adds the elements to the bundle """ + for element in elements: + self.autoupdate_elements[element["collection_string"]][ + element["id"] + ] = element + + def disable_history(self) -> None: + self._disable_history = True + + def done(self) -> Optional[int]: + """ + Finishes the bundle by resolving all missing data and passing it to + the history and element cache. + + Returns the change id, if there are autoupdate elements. Otherwise none. + """ + if not self.autoupdate_elements: + return None + + for collection, elements in self.autoupdate_elements.items(): + # Get all ids, that do not have a full_data key + # (element["full_data"]=None will not be resolved again!) + ids = [ + element["id"] + for element in elements.values() + if "full_data" not in element + ] + if ids: + # Get all missing models. If e.g. an id could not be found it + # means, it was deleted. Since there is not full_data entry + # for the element, the data will be interpreted as None, which + # is correct for deleted elements. 
+                model_class = get_model_from_collection_string(collection)
+                for full_data in model_class.get_elements(ids):
+                    elements[full_data["id"]]["full_data"] = full_data
+
+        # Save history here using sync code.
+        if not self._disable_history:
+            save_history(self.element_iterator)
+
+        # Update cache and send autoupdate using async code.
+        change_id = async_to_sync(self.dispatch_autoupdate)()
+
+        return change_id
+
+    @property
+    def element_iterator(self) -> Iterable[AutoupdateElement]:
+        """ Iterator for all elements in this bundle """
+        for elements in self.autoupdate_elements.values():
+            yield from elements.values()
+
+    async def get_data_for_cache(self) -> Dict[str, Optional[Dict[str, Any]]]:
+        """
+        Async helper function to collect the data for the cache.
+
+        Returns the cache elements.
+        """
+        cache_elements: Dict[str, Optional[Dict[str, Any]]] = {}
+        for element in self.element_iterator:
+            element_id = get_element_id(element["collection_string"], element["id"])
+            full_data = element.get("full_data")
+            if full_data:
+                full_data["_no_delete_on_restriction"] = element.get(
+                    "no_delete_on_restriction", False
+                )
+            cache_elements[element_id] = full_data
+        return cache_elements
+
+    async def dispatch_autoupdate(self) -> int:
+        """
+        Async helper function to update cache and send autoupdate.
+
+        Return the change_id
+        """
+        # Update cache
+        cache_elements = await self.get_data_for_cache()
+        change_id = await element_cache.change_elements(cache_elements)
+
+        # Send autoupdate
+        autoupdate_payload = {"elements": cache_elements, "change_id": change_id}
+        await stream.send("autoupdate", autoupdate_payload)
+
+        return change_id
+
+
+@contextmanager
+def autoupdate_bundle() -> Iterator[AutoupdateBundle]:
+    bundle = _autoupdate_bundle.get(threading.get_ident())
+    autodone = False
+    if bundle is None:
+        bundle = AutoupdateBundle()
+        autodone = True
+
+    yield bundle
+
+    if autodone:
+        bundle.done()
+
+
+"""
+Global container for autoupdate bundles
+"""
+_autoupdate_bundle: Dict[int, AutoupdateBundle] = {}
+
+
+class AutoupdateBundleMiddleware:
+    """
+    Middleware to handle autoupdate bundling.
+    """
+
+    def __init__(self, get_response: Any) -> None:
+        self.get_response = get_response
+        # One-time configuration and initialization.
+
+    def __call__(self, request: Any) -> Any:
+        thread_id = threading.get_ident()
+        _autoupdate_bundle[thread_id] = AutoupdateBundle()
+
+        timing = Timing("request")
+
+        response = self.get_response(request)
+
+        timing()
+
+        status_ok = response.status_code >= 200 and response.status_code < 300
+        status_redirect = response.status_code >= 300 and response.status_code < 400
+
+        # rewrite the response by adding the autoupdate on any success-case (2xx status)
+        bundle: AutoupdateBundle = _autoupdate_bundle.pop(thread_id)
+        if status_ok or status_redirect:
+            change_id = bundle.done()
+
+            # inject the change id, if there was an autoupdate and the response status is
+            # ok (and not redirect; redirects do not have a useful content)
+            if change_id is not None and status_ok:
+                # Inject the autoupdate in the response.
+                # The complete response body will be overwritten!
+                content = {"change_id": change_id, "data": response.data}
+                # Note: autoupdate may be none on skipped ones (which should not happen
+                # since the user has made the request....)
+                response.content = json.dumps(content)
+
+        timing(True)
+        return response
+
+
+def save_history(elements: Iterable[AutoupdateElement]) -> Iterable:
+    """
+    Thin wrapper around the call of history saving manager method.
+
+    This is separated to patch it during tests.
+ """ + from ..core.models import History + + return History.objects.add_elements(elements) diff --git a/server/openslides/utils/models.py b/server/openslides/utils/models.py index 4ecc1ab12..febee7a03 100644 --- a/server/openslides/utils/models.py +++ b/server/openslides/utils/models.py @@ -167,7 +167,8 @@ class RESTModelMixin: # For logging the progress last_time = time.time() - instances_length = len(instances) + instances_length = len(instances) # this evaluates the query + for i, instance in enumerate(instances): # Append full data from this instance full_data.append(instance.get_full_data()) @@ -177,6 +178,7 @@ class RESTModelMixin: if current_time > last_time + 5: last_time = current_time logger.info(f" {i+1}/{instances_length}...") + return full_data @classmethod diff --git a/server/openslides/utils/postgres.py b/server/openslides/utils/postgres.py index c52010ed1..61c95800e 100644 --- a/server/openslides/utils/postgres.py +++ b/server/openslides/utils/postgres.py @@ -1,17 +1,21 @@ from django.db import connection +def is_postgres() -> bool: + return connection.vendor == "postgresql" + + def restart_id_sequence(table_name: str) -> None: """ This function resets the id sequence from the given table (the current auto increment value for the id field) to the max_id+1. This is needed, when manually inserting object id, because Postgresql does not update the id sequence in this case. """ - if connection.vendor == "postgresql": - with connection.cursor() as cursor: - cursor.execute(f"SELECT max(id) + 1 as max FROM {table_name};") - max_id = cursor.fetchone()[0] - if max_id is not None: - cursor.execute( - f"ALTER SEQUENCE {table_name}_id_seq RESTART WITH {max_id};" - ) + if not is_postgres(): + return + + with connection.cursor() as cursor: + cursor.execute(f"SELECT max(id) + 1 as max FROM {table_name};") + max_id = cursor.fetchone()[0] + if max_id is not None: + cursor.execute(f"ALTER SEQUENCE {table_name}_id_seq RESTART WITH {max_id};") diff --git a/server/openslides/utils/redis_connection_pool.py b/server/openslides/utils/redis_connection_pool.py index 1f71b1e34..48042907e 100644 --- a/server/openslides/utils/redis_connection_pool.py +++ b/server/openslides/utils/redis_connection_pool.py @@ -11,7 +11,7 @@ from . import logging logger = logging.getLogger(__name__) -connection_pool_limit = getattr(settings, "CONNECTION_POOL_LIMIT", 100) +connection_pool_limit = getattr(settings, "CONNECTION_POOL_LIMIT", 10) logger.info(f"CONNECTION_POOL_LIMIT={connection_pool_limit}")