diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml
index 1a61d179d905..c537a5a60f9f 100644
--- a/.github/workflows/latest_deps.yml
+++ b/.github/workflows/latest_deps.yml
@@ -32,12 +32,15 @@ jobs:
         with:
           python-version: "3.x"
           poetry-version: "1.2.0b1"
+          extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
       - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy
   trial:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml
index 8fc1affb7746..5f0671f3503a 100644
--- a/.github/workflows/twisted_trunk.yml
+++ b/.github/workflows/twisted_trunk.yml
@@ -24,6 +24,8 @@ jobs:
           poetry remove twisted
           poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
           poetry install --no-interaction --extras "all test"
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy
 
   trial:
diff --git a/CHANGES.md b/CHANGES.md
index 1fbe0815dedf..31f156127424 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,11 @@
+Synapse 1.58.0 (2022-05-03)
+===========================
+
+As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61.
+
+No significant changes since 1.58.0rc2.
+
+
 Synapse 1.58.0rc2 (2022-04-26)
 ==============================
 
@@ -19,8 +27,6 @@ Internal Changes
 Synapse 1.58.0rc1 (2022-04-26)
 ==============================
 
-As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61.
-
 Features
 --------
diff --git a/changelog.d/12556.misc b/changelog.d/12556.misc
new file mode 100644
index 000000000000..dc245397fbcd
--- /dev/null
+++ b/changelog.d/12556.misc
@@ -0,0 +1 @@
+Release script: confirm the commit to be tagged before tagging.
diff --git a/changelog.d/12570.bugfix b/changelog.d/12570.bugfix
new file mode 100644
index 000000000000..1038646f358d
--- /dev/null
+++ b/changelog.d/12570.bugfix
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.57 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation.
diff --git a/changelog.d/12576.misc b/changelog.d/12576.misc
new file mode 100644
index 000000000000..71022c86337f
--- /dev/null
+++ b/changelog.d/12576.misc
@@ -0,0 +1 @@
+Allow unused `# type: ignore` comments in bleeding edge CI jobs.
diff --git a/changelog.d/12589.misc b/changelog.d/12589.misc
new file mode 100644
index 000000000000..d362828d2e59
--- /dev/null
+++ b/changelog.d/12589.misc
@@ -0,0 +1 @@
+Remove special-case for `twisted` logger from default log config.
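For context on the `sed` steps above: mypy's `warn_unused_ignores = True` turns any `# type: ignore` comment that is no longer needed into a hard error. A suppression that is required against today's pinned dependencies may become unnecessary once a newer release grows type hints, so the bleeding-edge jobs (latest deps, Twisted trunk) strip the option from `mypy.ini` rather than fail. A minimal, runnable illustration (the file name `demo.py` is invented for this sketch):

    # demo.py -- check with: mypy --warn-unused-ignores demo.py
    import json  # type: ignore  # superfluous: the stdlib json module is fully typed

    # With the option enabled, mypy reports something like:
    #     demo.py:2: error: Unused "type: ignore" comment
    # Without it, the stale suppression is tolerated silently -- the behaviour
    # the bleeding-edge CI jobs opt into via the sed step above.
    print(json.dumps({"ok": True}))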
diff --git a/changelog.d/12594.bugfix b/changelog.d/12594.bugfix
new file mode 100644
index 000000000000..7411d9c07934
--- /dev/null
+++ b/changelog.d/12594.bugfix
@@ -0,0 +1 @@
+Fix race when persisting an event and deleting a room that could lead to outbound federation breaking.
diff --git a/changelog.d/12596.removal b/changelog.d/12596.removal
new file mode 100644
index 000000000000..14fbfb39540a
--- /dev/null
+++ b/changelog.d/12596.removal
@@ -0,0 +1 @@
+Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069).
diff --git a/changelog.d/12608.misc b/changelog.d/12608.misc
new file mode 100644
index 000000000000..38272118fbe8
--- /dev/null
+++ b/changelog.d/12608.misc
@@ -0,0 +1 @@
+Remove redundant lines of config from `mypy.ini`.
\ No newline at end of file
diff --git a/changelog.d/12612.bugfix b/changelog.d/12612.bugfix
new file mode 100644
index 000000000000..c39e97f0cbaa
--- /dev/null
+++ b/changelog.d/12612.bugfix
@@ -0,0 +1 @@
+Fix a typo in the announcement text generated by the Synapse release development script.
\ No newline at end of file
diff --git a/changelog.d/12613.removal b/changelog.d/12613.removal
new file mode 100644
index 000000000000..b1a9e207b00f
--- /dev/null
+++ b/changelog.d/12613.removal
@@ -0,0 +1 @@
+Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk.
diff --git a/changelog.d/12614.misc b/changelog.d/12614.misc
new file mode 100644
index 000000000000..79022df127d0
--- /dev/null
+++ b/changelog.d/12614.misc
@@ -0,0 +1 @@
+Add extra debug logging to federation sender.
diff --git a/changelog.d/12620.misc b/changelog.d/12620.misc
new file mode 100644
index 000000000000..63f8e540c37c
--- /dev/null
+++ b/changelog.d/12620.misc
@@ -0,0 +1 @@
+Add a consistency check on events which we read from the database.
diff --git a/debian/changelog b/debian/changelog
index 5f1bf872bbcc..53b2387776f5 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.58.0) stable; urgency=medium
+
+  * New Synapse release 1.58.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 03 May 2022 10:52:58 +0100
+
 matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium
 
   * New Synapse release 1.58.0rc2.
diff --git a/docs/sample_log_config.yaml b/docs/sample_log_config.yaml
index 2485ad25edfc..3065a0e2d986 100644
--- a/docs/sample_log_config.yaml
+++ b/docs/sample_log_config.yaml
@@ -62,13 +62,6 @@ loggers:
         # information such as access tokens.
         level: INFO
 
-    twisted:
-        # We send the twisted logging directly to the file handler,
-        # to work around https://github.com/matrix-org/synapse/issues/3471
-        # when using "buffer" logger. Use "console" to log to stderr instead.
-        handlers: [file]
-        propagate: false
-
 root:
     level: INFO
 
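The `twisted` logger block removed here (and from `synapse/config/logger.py` below) existed only to point Twisted's log records directly at the `file` handler, as a workaround for https://github.com/matrix-org/synapse/issues/3471. With the block gone, records from the `twisted` logger propagate to the root logger like any other. This YAML is fed to Python's standard `logging.config.dictConfig`; a minimal sketch of the propagation behaviour (handler and formatter names are illustrative, not Synapse's):

    import logging
    import logging.config

    logging.config.dictConfig(
        {
            "version": 1,
            "formatters": {"plain": {"format": "%(name)s: %(message)s"}},
            "handlers": {
                "console": {"class": "logging.StreamHandler", "formatter": "plain"}
            },
            # No dedicated entry for "twisted": with no logger-level handlers
            # and propagate defaulting to True, twisted.* records bubble up to
            # the root logger's handlers.
            "root": {"level": "INFO", "handlers": ["console"]},
        }
    )

    logging.getLogger("twisted").info("delivered via the root logger")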
diff --git a/mypy.ini b/mypy.ini
index ef28216418a5..78699e318704 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -241,98 +241,49 @@ disallow_untyped_defs = True
 [mypy-authlib.*]
 ignore_missing_imports = True
 
-[mypy-bcrypt]
-ignore_missing_imports = True
-
 [mypy-canonicaljson]
 ignore_missing_imports = True
 
 [mypy-constantly]
 ignore_missing_imports = True
 
-[mypy-daemonize]
-ignore_missing_imports = True
-
-[mypy-h11]
-ignore_missing_imports = True
-
-[mypy-hiredis]
-ignore_missing_imports = True
-
-[mypy-hyperlink]
-ignore_missing_imports = True
-
 [mypy-ijson.*]
 ignore_missing_imports = True
 
-[mypy-importlib_metadata.*]
-ignore_missing_imports = True
-
-[mypy-jaeger_client.*]
-ignore_missing_imports = True
-
-[mypy-josepy.*]
-ignore_missing_imports = True
-
-[mypy-jwt.*]
-ignore_missing_imports = True
-
 [mypy-lxml]
 ignore_missing_imports = True
 
 [mypy-msgpack]
 ignore_missing_imports = True
 
-[mypy-nacl.*]
-ignore_missing_imports = True
-
+# Note: WIP stubs available at
+# https://github.com/microsoft/python-type-stubs/tree/64934207f523ad6b611e6cfe039d85d7175d7d0d/netaddr
 [mypy-netaddr]
 ignore_missing_imports = True
 
 [mypy-parameterized.*]
 ignore_missing_imports = True
 
-[mypy-phonenumbers.*]
-ignore_missing_imports = True
-
-[mypy-prometheus_client.*]
-ignore_missing_imports = True
-
 [mypy-pymacaroons.*]
 ignore_missing_imports = True
 
 [mypy-pympler.*]
 ignore_missing_imports = True
 
-[mypy-redbaron.*]
-ignore_missing_imports = True
-
 [mypy-rust_python_jaeger_reporter.*]
 ignore_missing_imports = True
 
 [mypy-saml2.*]
 ignore_missing_imports = True
 
-[mypy-sentry_sdk]
-ignore_missing_imports = True
-
 [mypy-service_identity.*]
 ignore_missing_imports = True
 
-[mypy-signedjson.*]
-ignore_missing_imports = True
-
 [mypy-srvlookup.*]
 ignore_missing_imports = True
 
 [mypy-treq.*]
 ignore_missing_imports = True
 
-[mypy-twisted.*]
-ignore_missing_imports = True
-
-[mypy-zope]
-ignore_missing_imports = True
-
 [mypy-incremental.*]
 ignore_missing_imports = True
diff --git a/poetry.lock b/poetry.lock
index e27a44989cd4..89e78576b7b6 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1561,8 +1561,8 @@ url_preview = ["lxml"]
 
 [metadata]
 lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "3825cef058b8c9f520ef4b7acb92519be95db9a663a61c2e89a5fe431ed55655"
+python-versions = "^3.7.1"
+content-hash = "2bda1a7cfc8cc02832b4a7d16bf7e1615cb05e0639bdb30688aadf692d851942"
 
 [metadata.files]
 attrs = [
diff --git a/pyproject.toml b/pyproject.toml
index e6f2dc16cd16..cf3982d12467 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -54,7 +54,7 @@ skip_gitignore = true
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.58.0rc2"
+version = "1.58.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
@@ -100,7 +100,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
 update_synapse_database = "synapse._scripts.update_synapse_database:main"
 
 [tool.poetry.dependencies]
-python = "^3.7"
+python = "^3.7.1"
 
 # Mandatory Dependencies
 # ----------------------
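The `^3.7` -> `^3.7.1` bumps in `poetry.lock` and `pyproject.toml` are what actually drop Python 3.7.0: Poetry's caret operator anchors the lower bound at exactly the version written, so `^3.7` means `>=3.7.0,<4.0.0` while `^3.7.1` means `>=3.7.1,<4.0.0`. A quick sanity check of the equivalent specifier sets, here using the `packaging` library (an assumption for the sketch; any PEP 440 implementation would do):

    from packaging.specifiers import SpecifierSet

    old = SpecifierSet(">=3.7.0,<4.0.0")  # what python = "^3.7" expands to
    new = SpecifierSet(">=3.7.1,<4.0.0")  # what python = "^3.7.1" expands to

    assert "3.7.0" in old
    assert "3.7.0" not in new  # 3.7.0 no longer satisfies the constraint
    assert "3.7.1" in new
    print("3.7.0 excluded; 3.7.1 and later still accepted")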
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
index 14f3f3a45d8a..0031ba3e4b2f 100755
--- a/scripts-dev/release.py
+++ b/scripts-dev/release.py
@@ -89,13 +89,7 @@ def prepare() -> None:
     """
 
     # Make sure we're in a git repo.
-    try:
-        repo = git.Repo()
-    except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
-
-    if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+    repo = get_repo_and_check_clean_checkout()
 
     click.secho("Updating git repo...")
     repo.remote().fetch()
@@ -171,9 +165,7 @@ def prepare() -> None:
     assert not parsed_new_version.is_devrelease
     assert not parsed_new_version.is_postrelease
 
-    release_branch_name = (
-        f"release-v{parsed_new_version.major}.{parsed_new_version.minor}"
-    )
+    release_branch_name = get_release_branch_name(parsed_new_version)
     release_branch = find_ref(repo, release_branch_name)
     if release_branch:
         if release_branch.is_remote():
@@ -274,13 +266,7 @@ def tag(gh_token: Optional[str]) -> None:
    """Tags the release and generates a draft GitHub release"""
 
     # Make sure we're in a git repo.
-    try:
-        repo = git.Repo()
-    except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
-
-    if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+    repo = get_repo_and_check_clean_checkout()
 
     click.secho("Updating git repo...")
     repo.remote().fetch()
@@ -293,6 +279,15 @@ def tag(gh_token: Optional[str]) -> None:
     if tag_name in repo.tags:
         raise click.ClickException(f"Tag {tag_name} already exists!\n")
 
+    # Check we're on the right release branch
+    release_branch = get_release_branch_name(current_version)
+    if repo.active_branch.name != release_branch:
+        click.echo(
+            f"Need to be on the release branch ({release_branch}) before tagging. "
+            f"Currently on ({repo.active_branch.name})."
+        )
+        click.get_current_context().abort()
+
     # Get the appropriate changelogs and tag.
     changes = get_changes_for_version(current_version)
 
@@ -358,21 +353,15 @@ def tag(gh_token: Optional[str]) -> None:
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
 def publish(gh_token: str) -> None:
-    """Publish release."""
+    """Publish release on GitHub."""
 
     # Make sure we're in a git repo.
-    try:
-        repo = git.Repo()
-    except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
-
-    if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+    get_repo_and_check_clean_checkout()
 
     current_version = get_package_version()
     tag_name = f"v{current_version}"
 
-    if not click.confirm(f"Publish {tag_name}?", default=True):
+    if not click.confirm(f"Publish release {tag_name} on GitHub?", default=True):
         return
 
     # Publish the draft release
@@ -406,6 +395,13 @@ def upload() -> None:
     current_version = get_package_version()
     tag_name = f"v{current_version}"
 
+    # Check we have the right tag checked out.
+    repo = get_repo_and_check_clean_checkout()
+    tag = repo.tag(f"refs/tags/{tag_name}")
+    if repo.head.commit != tag.commit:
+        click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
+        click.get_current_context().abort()
+
     pypi_asset_names = [
         f"matrix_synapse-{current_version}-py3-none-any.whl",
         f"matrix-synapse-{current_version}.tar.gz",
@@ -438,7 +434,7 @@ def announce() -> None:
         f"""
 Hi everyone. Synapse {current_version} has just been released.
 
-[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) |\
+[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) | \
 [docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
 [debs](https://packages.matrix.org/debian/) | \
 [pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
@@ -469,6 +465,21 @@ def get_package_version() -> version.Version:
     return version.Version(version_string)
 
 
+def get_release_branch_name(version_number: version.Version) -> str:
+    return f"release-v{version_number.major}.{version_number.minor}"
+
+
+def get_repo_and_check_clean_checkout() -> git.Repo:
+    """Get the project repo and check it's not got any uncommitted changes."""
+    try:
+        repo = git.Repo()
+    except git.InvalidGitRepositoryError:
+        raise click.ClickException("Not in Synapse repo.")
+    if repo.is_dirty():
+        raise click.ClickException("Uncommitted changes exist.")
+    return repo
+
+
 def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
     """Find the branch/ref, looking first locally then in the remote."""
     if ref_name in repo.references:
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 99db9e1e3910..470b8b44929c 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -110,13 +110,6 @@
         # information such as access tokens.
         level: INFO
 
-    twisted:
-        # We send the twisted logging directly to the file handler,
-        # to work around https://github.com/matrix-org/synapse/issues/3471
-        # when using "buffer" logger. Use "console" to log to stderr instead.
-        handlers: [file]
-        propagate: false
-
 root:
     level: INFO
 
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 9acb3c0cc454..c238376caf62 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -213,10 +213,17 @@ def is_outlier(self) -> bool:
         return self.outlier
 
     def is_out_of_band_membership(self) -> bool:
-        """Whether this is an out of band membership, like an invite or an invite
-        rejection. This is needed as those events are marked as outliers, but
-        they still need to be processed as if they're new events (e.g. updating
-        invite state in the database, relaying to clients, etc).
+        """Whether this event is an out-of-band membership.
+
+        OOB memberships are a special case of outlier events: they are membership events
+        for federated rooms that we aren't full members of. Examples include invites
+        received over federation, and rejections for such invites.
+
+        The concept of an OOB membership is needed because these events need to be
+        processed as if they're new regular events (e.g. updating membership state in
+        the database, relaying to clients via /sync, etc) despite being outliers.
+
+        See also https://matrix-org.github.io/synapse/develop/development/room-dag-concepts.html#out-of-band-membership-events.
 
         (Added in synapse 0.99.0, so may be unreliable for events received before that)
         """
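To make the reworked docstring concrete: both flags live in the event's internal metadata, and an OOB membership is (as the federation sender below puts it) in practice always also an outlier. A hedged sketch using `make_event_from_dict`, the same helper the tests at the end of this diff use; the `internal_metadata_dict` keys reflect my reading of `_EventInternalMetadata` and are illustrative rather than authoritative:

    from synapse.api.room_versions import RoomVersions
    from synapse.events import make_event_from_dict

    # An invite received over federation: persisted as an outlier, and flagged
    # as an out-of-band membership so that invite state still reaches the
    # database and /sync despite the event sitting outside our copy of the DAG.
    invite = make_event_from_dict(
        {
            "type": "m.room.member",
            "state_key": "@alice:example.com",
            "sender": "@bob:remote.example.com",
            "room_id": "!room:remote.example.com",
            "content": {"membership": "invite"},
        },
        room_version=RoomVersions.V4,
        internal_metadata_dict={"outlier": True, "out_of_band_membership": True},
    )

    assert invite.internal_metadata.is_outlier()
    assert invite.internal_metadata.is_out_of_band_membership()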
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 30e2421efc6d..6d2f46318bea 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -343,9 +343,16 @@ async def _process_event_queue_loop(self) -> None:
                     last_token, self._last_poked_id, limit=100
                 )
 
-                logger.debug("Handling %s -> %s", last_token, next_token)
+                logger.debug(
+                    "Handling %i -> %i: %i events to send (current id %i)",
+                    last_token,
+                    next_token,
+                    len(events),
+                    self._last_poked_id,
+                )
 
                 if not events and next_token >= self._last_poked_id:
+                    logger.debug("All events processed")
                     break
 
                 async def handle_event(event: EventBase) -> None:
@@ -353,9 +360,53 @@ async def handle_event(event: EventBase) -> None:
                     send_on_behalf_of = event.internal_metadata.get_send_on_behalf_of()
                     is_mine = self.is_mine_id(event.sender)
                     if not is_mine and send_on_behalf_of is None:
+                        logger.debug("Not sending remote-origin event %s", event)
+                        return
+
+                    # We also want to not send out-of-band membership events.
+                    #
+                    # OOB memberships are used in three (and a half) situations:
+                    #
+                    # (1) invite events which we have received over federation. Those
+                    #     will have a `sender` on a different server, so will be
+                    #     skipped by the "is_mine" test above anyway.
+                    #
+                    # (2) rejections of invites to federated rooms - either remotely
+                    #     or locally generated. (Such rejections are normally
+                    #     created via federation, in which case the remote server is
+                    #     responsible for sending out the rejection. If that fails,
+                    #     we'll create a leave event locally, but that's only really
+                    #     for the benefit of the invited user - we don't have enough
+                    #     information to send it out over federation).
+                    #
+                    # (2a) rescinded knocks. These are identical to rejected invites.
+                    #
+                    # (3) knock events which we have sent over federation. As with
+                    #     invite rejections, the remote server should send them out to
+                    #     the federation.
+                    #
+                    # So, in all the above cases, we want to ignore such events.
+                    #
+                    # OOB memberships are always(?) outliers anyway, so if we *don't*
+                    # ignore them, we'll get an exception further down when we try to
+                    # fetch the membership list for the room.
+                    #
+                    # Arguably, we could equivalently ignore all outliers here, since
+                    # in theory the only way for an outlier with a local `sender` to
+                    # exist is by being an OOB membership (via one of (2), (2a) or (3)
+                    # above).
+                    #
+                    if event.internal_metadata.is_out_of_band_membership():
+                        logger.debug("Not sending OOB membership event %s", event)
                         return
 
+                    # Finally, there are some other events that we should not send out
+                    # until someone asks for them. They are explicitly flagged as such
+                    # with `proactively_send: False`.
                     if not event.internal_metadata.should_proactively_send():
+                        logger.debug(
+                            "Not sending event with proactively_send=false: %s", event
+                        )
                         return
 
                     destinations: Optional[Set[str]] = None
@@ -419,7 +470,10 @@ async def handle_event(event: EventBase) -> None:
                             "federation_sender"
                         ).observe((now - ts) / 1000)
 
-                async def handle_room_events(events: Iterable[EventBase]) -> None:
+                async def handle_room_events(events: List[EventBase]) -> None:
+                    logger.debug(
+                        "Handling %i events in room %s", len(events), events[0].room_id
+                    )
                     with Measure(self.clock, "handle_room_events"):
                         for event in events:
                             await handle_event(event)
@@ -438,6 +492,7 @@ async def handle_room_events(events: Iterable[EventBase]) -> None:
                     )
                 )
 
+                logger.debug("Successfully handled up to %i", next_token)
                 await self.store.update_federation_out_pos("events", next_token)
 
                 if events:
diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index 5587cae98a61..bdc4a9c0683d 100644
--- a/synapse/rest/client/account.py
+++ b/synapse/rest/client/account.py
@@ -882,9 +882,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
 
         response = {
             "user_id": requester.user.to_string(),
-            # MSC: https://github.com/matrix-org/matrix-doc/pull/3069
             # Entered spec in Matrix 1.2
-            "org.matrix.msc3069.is_guest": bool(requester.is_guest),
             "is_guest": bool(requester.is_guest),
         }
 
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index 2a1e567ce08e..9a6c2fd47a55 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -47,6 +47,7 @@
 )
 from synapse.storage.databases.main.events_worker import EventCacheEntry
 from synapse.storage.databases.main.search import SearchEntry
+from synapse.storage.engines.postgres import PostgresEngine
 from synapse.storage.util.id_generators import AbstractStreamIdGenerator
 from synapse.storage.util.sequence import SequenceGenerator
 from synapse.types import StateMap, get_domain_from_id
@@ -364,6 +365,20 @@ def _persist_events_txn(
         min_stream_order = events_and_contexts[0][0].internal_metadata.stream_ordering
         max_stream_order = events_and_contexts[-1][0].internal_metadata.stream_ordering
 
+        # We check that the room still exists for events we're trying to
+        # persist. This is to protect against races with deleting a room.
+        #
+        # Annoyingly SQLite doesn't support row level locking.
+        if isinstance(self.database_engine, PostgresEngine):
+            for room_id in {e.room_id for e, _ in events_and_contexts}:
+                txn.execute(
+                    "SELECT room_version FROM rooms WHERE room_id = ? FOR SHARE",
+                    (room_id,),
+                )
+                row = txn.fetchone()
+                if row is None:
+                    raise Exception(f"Room does not exist {room_id}")
+
         # stream orderings should have been assigned by now
         assert min_stream_order
         assert max_stream_order
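The `FOR SHARE` guard above is one half of the race fix; the other half is the `purge_events.py` change below, which deletes the `rooms` row before anything else. On PostgreSQL, `SELECT ... FOR SHARE` takes a shared lock on the selected row, so a concurrent `DELETE` (which needs an exclusive lock) must wait for the persisting transaction to finish; if the purge commits first, the `SELECT` finds no row and persistence aborts instead of writing orphaned events. A sketch of the two sides using psycopg2 (the DSN and room ID are placeholders):

    import psycopg2

    conn_persist = psycopg2.connect("dbname=synapse")  # placeholder DSN
    conn_purge = psycopg2.connect("dbname=synapse")

    # Transaction A: event persistence takes a shared lock on the room row.
    cur_a = conn_persist.cursor()
    cur_a.execute(
        "SELECT room_version FROM rooms WHERE room_id = %s FOR SHARE",
        ("!room:example.com",),
    )
    if cur_a.fetchone() is None:
        # The room was already purged: bail out rather than persist events
        # for a room that no longer exists.
        raise Exception("Room does not exist !room:example.com")
    # ... the event rows would be written here ...
    conn_persist.commit()  # releases the shared lock

    # Transaction B: the purge. Its DELETE needs an exclusive row lock, so had
    # it started while transaction A still held the shared lock, it would have
    # blocked until A's commit above.
    cur_b = conn_purge.cursor()
    cur_b.execute("DELETE FROM rooms WHERE room_id = %s", ("!room:example.com",))
    conn_purge.commit()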
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index c31fc00eaace..0a48e5d29f18 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -1094,6 +1094,18 @@ async def _get_events_from_db(
             original_ev.internal_metadata.stream_ordering = row.stream_ordering
             original_ev.internal_metadata.outlier = row.outlier
 
+            # Consistency check: if the content of the event has been modified in the
+            # database, then the calculated event ID will not match the event id in the
+            # database.
+            if original_ev.event_id != event_id:
+                # it's difficult to see what to do here. Pretty much all bets are off
+                # if Synapse cannot rely on the consistency of its database.
+                raise RuntimeError(
+                    f"Database corruption: Event {event_id} in room {d['room_id']} "
+                    f"from the database appears to have been modified (calculated "
+                    f"event id {original_ev.event_id})"
+                )
+
             event_map[event_id] = original_ev
 
         # finally, we can decide whether each one needs redacting, and build
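The consistency check above works because, from room version 3 onwards, an event's ID is not stored in its JSON at all: it is derived from a hash over the event's content. Recomputing the ID when reading a row therefore exposes any modification of the stored JSON. A small demonstration mirroring what `_get_events_from_db` now does (the field values are illustrative):

    from synapse.api.room_versions import RoomVersions
    from synapse.events import make_event_from_dict

    # The JSON we originally persisted, and the event ID derived from it.
    original_json = {"type": "m.room.message", "room_id": "!r:example.com"}
    stored_id = make_event_from_dict(
        original_json, room_version=RoomVersions.V4
    ).event_id

    # Someone edits the row behind Synapse's back...
    tampered_json = dict(original_json, type="m.room.evil")
    recalculated_id = make_event_from_dict(
        tampered_json, room_version=RoomVersions.V4
    ).event_id

    # ...and the recomputed ID no longer matches the one in the events table,
    # which is exactly the condition that now raises the "Database corruption"
    # RuntimeError above.
    assert stored_id != recalculated_id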
diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py
index 2e3818e43244..bfc85b3add98 100644
--- a/synapse/storage/databases/main/purge_events.py
+++ b/synapse/storage/databases/main/purge_events.py
@@ -324,7 +324,12 @@ async def purge_room(self, room_id: str) -> List[int]:
         )
 
     def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]:
-        # First we fetch all the state groups that should be deleted, before
+        # We *immediately* delete the room from the rooms table. This ensures
+        # that we don't race when persisting events (as that transaction checks
+        # that the room exists).
+        txn.execute("DELETE FROM rooms WHERE room_id = ?", (room_id,))
+
+        # Next, we fetch all the state groups that should be deleted, before
         # we delete that information.
         txn.execute(
             """
@@ -403,7 +408,6 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]:
             "room_stats_state",
             "room_stats_current",
             "room_stats_earliest_token",
-            "rooms",
             "stream_ordering_to_exterm",
             "users_in_public_rooms",
             "users_who_share_private_rooms",
diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py
index e00b5c171c78..e0a11da97b67 100644
--- a/tests/rest/client/test_account.py
+++ b/tests/rest/client/test_account.py
@@ -520,8 +520,6 @@ def test_GET_whoami(self) -> None:
             {
                 "user_id": user_id,
                 "device_id": device_id,
-                # MSC3069 entered spec in Matrix 1.2 but maintained compatibility
-                "org.matrix.msc3069.is_guest": False,
                 "is_guest": False,
             },
         )
@@ -540,8 +538,6 @@ def test_GET_whoami_guests(self) -> None:
             {
                 "user_id": user_id,
                 "device_id": device_id,
-                # MSC3069 entered spec in Matrix 1.2 but maintained compatibility
-                "org.matrix.msc3069.is_guest": True,
                 "is_guest": True,
             },
         )
@@ -564,8 +560,6 @@ def test_GET_whoami_appservices(self) -> None:
             whoami,
             {
                 "user_id": user_id,
-                # MSC3069 entered spec in Matrix 1.2 but maintained compatibility
-                "org.matrix.msc3069.is_guest": False,
                 "is_guest": False,
             },
         )
diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py
index bf6374f93d52..c237a8c7e228 100644
--- a/tests/storage/databases/main/test_events_worker.py
+++ b/tests/storage/databases/main/test_events_worker.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 import json
 from contextlib import contextmanager
-from typing import Generator, Tuple
+from typing import Generator, List, Tuple
 from unittest import mock
 
 from twisted.enterprise.adbapi import ConnectionPool
@@ -21,6 +21,7 @@
 from twisted.test.proto_helpers import MemoryReactor
 
 from synapse.api.room_versions import EventFormatVersions, RoomVersions
+from synapse.events import make_event_from_dict
 from synapse.logging.context import LoggingContext
 from synapse.rest import admin
 from synapse.rest.client import login, room
@@ -49,23 +50,28 @@ def prepare(self, reactor, clock, hs):
             )
         )
 
-        for idx, (rid, eid) in enumerate(
+        self.event_ids: List[str] = []
+        for idx, rid in enumerate(
             (
-                ("room1", "event10"),
-                ("room1", "event11"),
-                ("room1", "event12"),
-                ("room2", "event20"),
+                "room1",
+                "room1",
+                "room1",
+                "room2",
             )
         ):
+            event_json = {"type": f"test {idx}", "room_id": rid}
+            event = make_event_from_dict(event_json, room_version=RoomVersions.V4)
+            event_id = event.event_id
+
             self.get_success(
                 self.store.db_pool.simple_insert(
                     "events",
                     {
-                        "event_id": eid,
+                        "event_id": event_id,
                         "room_id": rid,
                         "topological_ordering": idx,
                         "stream_ordering": idx,
-                        "type": "test",
+                        "type": event.type,
                         "processed": True,
                         "outlier": False,
                     },
@@ -75,21 +81,22 @@ def prepare(self, reactor, clock, hs):
                 self.store.db_pool.simple_insert(
                     "event_json",
                     {
-                        "event_id": eid,
+                        "event_id": event_id,
                         "room_id": rid,
-                        "json": json.dumps({"type": "test", "room_id": rid}),
+                        "json": json.dumps(event_json),
                         "internal_metadata": "{}",
                         "format_version": 3,
                     },
                 )
             )
+            self.event_ids.append(event_id)
 
     def test_simple(self):
         with LoggingContext(name="test") as ctx:
             res = self.get_success(
-                self.store.have_seen_events("room1", ["event10", "event19"])
+                self.store.have_seen_events("room1", [self.event_ids[0], "event19"])
             )
-            self.assertEqual(res, {"event10"})
+            self.assertEqual(res, {self.event_ids[0]})
 
             # that should result in a single db query
             self.assertEqual(ctx.get_resource_usage().db_txn_count, 1)
@@ -97,19 +104,21 @@ def test_simple(self):
         # a second lookup of the same events should cause no queries
         with LoggingContext(name="test") as ctx:
             res = self.get_success(
-                self.store.have_seen_events("room1", ["event10", "event19"])
+                self.store.have_seen_events("room1", [self.event_ids[0], "event19"])
             )
-            self.assertEqual(res, {"event10"})
+            self.assertEqual(res, {self.event_ids[0]})
             self.assertEqual(ctx.get_resource_usage().db_txn_count, 0)
 
     def test_query_via_event_cache(self):
         # fetch an event into the event cache
-        self.get_success(self.store.get_event("event10"))
+        self.get_success(self.store.get_event(self.event_ids[0]))
 
         # looking it up should now cause no db hits
         with LoggingContext(name="test") as ctx:
-            res = self.get_success(self.store.have_seen_events("room1", ["event10"]))
-            self.assertEqual(res, {"event10"})
+            res = self.get_success(
+                self.store.have_seen_events("room1", [self.event_ids[0]])
+            )
+            self.assertEqual(res, {self.event_ids[0]})
             self.assertEqual(ctx.get_resource_usage().db_txn_count, 0)
 
 
@@ -167,7 +176,6 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer):
         self.store: EventsWorkerStore = hs.get_datastores().main
 
         self.room_id = f"!room:{hs.hostname}"
-        self.event_ids = [f"event{i}" for i in range(20)]
 
         self._populate_events()
 
@@ -190,8 +198,14 @@ def _populate_events(self) -> None:
             )
         )
 
-        self.event_ids = [f"event{i}" for i in range(20)]
-        for idx, event_id in enumerate(self.event_ids):
+        self.event_ids: List[str] = []
+        for idx in range(20):
+            event_json = {
+                "type": f"test {idx}",
{idx}", + "room_id": self.room_id, + } + event = make_event_from_dict(event_json, room_version=RoomVersions.V4) + event_id = event.event_id self.get_success( self.store.db_pool.simple_upsert( "events", @@ -201,7 +215,7 @@ def _populate_events(self) -> None: "room_id": self.room_id, "topological_ordering": idx, "stream_ordering": idx, - "type": "test", + "type": event.type, "processed": True, "outlier": False, }, @@ -213,12 +227,13 @@ def _populate_events(self) -> None: {"event_id": event_id}, { "room_id": self.room_id, - "json": json.dumps({"type": "test", "room_id": self.room_id}), + "json": json.dumps(event_json), "internal_metadata": "{}", "format_version": EventFormatVersions.V3, }, ) ) + self.event_ids.append(event_id) @contextmanager def _outage(self) -> Generator[None, None, None]: