Fix (final) Bugbear violations #9838

Merged · 2 commits · Apr 20, 2021
1 change: 1 addition & 0 deletions changelog.d/9838.misc
@@ -0,0 +1 @@
+ Introduce flake8-bugbear to the test suite and fix some of its lint violations.
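
For context: flake8-bugbear's B007 check flags loop control variables that are bound but never used inside the loop body. A minimal, hypothetical sketch of the pattern fixed throughout this PR (illustrative only, not code taken from the diff):

# Hypothetical illustration of a B007 violation and its fix.
mapping = {"a": 1, "b": 2}

# Before: "value" is bound but never used, so flake8-bugbear reports B007.
for key, value in mapping.items():
    print(key)

# After: rename the unused variable to "_", or iterate only what is needed.
for key in mapping.keys():
    print(key)
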
2 changes: 1 addition & 1 deletion scripts-dev/definitions.py
@@ -140,7 +140,7 @@ def used_names(prefix, item, defs, names):

definitions = {}
for directory in args.directories:
- for root, dirs, files in os.walk(directory):
+ for root, _, files in os.walk(directory):
for filename in files:
if filename.endswith(".py"):
filepath = os.path.join(root, filename)

2 changes: 1 addition & 1 deletion scripts-dev/list_url_patterns.py
@@ -48,7 +48,7 @@ def find_patterns_in_file(filepath):


for directory in args.directories:
- for root, dirs, files in os.walk(directory):
+ for root, _, files in os.walk(directory):
for filename in files:
if filename.endswith(".py"):
filepath = os.path.join(root, filename)

3 changes: 1 addition & 2 deletions setup.cfg
@@ -18,8 +18,7 @@ ignore =
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
- # B007: Subsection of the bugbear suite (TODO: add in remaining fixes)
- ignore=W503,W504,E203,E731,E501,B007
+ ignore=W503,W504,E203,E731,E501

[isort]
line_length = 88

2 changes: 1 addition & 1 deletion synapse/event_auth.py
@@ -670,7 +670,7 @@ def _verify_third_party_invite(event: EventBase, auth_events: StateMap[EventBase
public_key = public_key_object["public_key"]
try:
for server, signature_block in signed["signatures"].items():
- for key_name, encoded_signature in signature_block.items():
+ for key_name in signature_block.keys():
if not key_name.startswith("ed25519:"):
continue
verify_key = decode_verify_key_bytes(

4 changes: 2 additions & 2 deletions synapse/federation/send_queue.py
@@ -567,10 +567,10 @@ def process_rows_for_federation(
states=[state], destinations=destinations
)

- for destination, edu_map in buff.keyed_edus.items():
+ for edu_map in buff.keyed_edus.values():
for key, edu in edu_map.items():
transaction_queue.send_edu(edu, key)

- for destination, edu_list in buff.edus.items():
+ for edu_list in buff.edus.values():
for edu in edu_list:
transaction_queue.send_edu(edu, None)
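
The .items() → .values() / .keys() rewrites above follow from the same lint pass: once the unused half of the key/value pair is identified, iterating only the half that is actually needed is the cleaner fix. A short, hypothetical sketch (names invented for illustration, not code from this diff):

# Hypothetical illustration of the dict-iteration cleanups in this PR.
edus_by_destination = {"example.org": ["edu-1", "edu-2"]}

# Before: "destination" is unpacked but never used (flake8-bugbear B007).
for destination, edu_list in edus_by_destination.items():
    for edu in edu_list:
        print(edu)

# After: iterate only the values, since the keys are not needed.
for edu_list in edus_by_destination.values():
    for edu in edu_list:
        print(edu)
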
2 changes: 1 addition & 1 deletion synapse/handlers/auth.py
@@ -1248,7 +1248,7 @@ async def delete_access_tokens_for_user(

# see if any of our auth providers want to know about this
for provider in self.password_providers:
- for token, token_id, device_id in tokens_and_devices:
+ for token, _, device_id in tokens_and_devices:
await provider.on_logged_out(
user_id=user_id, device_id=device_id, access_token=token
)

13 changes: 5 additions & 8 deletions synapse/handlers/device.py
@@ -156,8 +156,7 @@ async def get_user_ids_changed(
# The user may have left the room
# TODO: Check if they actually did or if we were just invited.
if room_id not in room_ids:
- for key, event_id in current_state_ids.items():
-     etype, state_key = key
+ for etype, state_key in current_state_ids.keys():
if etype != EventTypes.Member:
continue
possibly_left.add(state_key)
@@ -179,8 +178,7 @@ async def get_user_ids_changed(
log_kv(
{"event": "encountered empty previous state", "room_id": room_id}
)
- for key, event_id in current_state_ids.items():
-     etype, state_key = key
+ for etype, state_key in current_state_ids.keys():
if etype != EventTypes.Member:
continue
possibly_changed.add(state_key)
@@ -198,8 +196,7 @@ async def get_user_ids_changed(
for state_dict in prev_state_ids.values():
member_event = state_dict.get((EventTypes.Member, user_id), None)
if not member_event or member_event != current_member_id:
- for key, event_id in current_state_ids.items():
-     etype, state_key = key
+ for etype, state_key in current_state_ids.keys():
if etype != EventTypes.Member:
continue
possibly_changed.add(state_key)
@@ -714,7 +711,7 @@ async def _handle_device_updates(self, user_id: str) -> None:
# This can happen since we batch updates
return

- for device_id, stream_id, prev_ids, content in pending_updates:
+ for device_id, stream_id, prev_ids, _ in pending_updates:
logger.debug(
"Handling update %r/%r, ID: %r, prev: %r ",
user_id,
@@ -740,7 +737,7 @@ async def _handle_device_updates(self, user_id: str) -> None:
else:
# Simply update the single device, since we know that is the only
# change (because of the single prev_id matching the current cache)
- for device_id, stream_id, prev_ids, content in pending_updates:
+ for device_id, stream_id, _, content in pending_updates:
await self.store.update_remote_device_list_cache_entry(
user_id, device_id, content, stream_id
)

2 changes: 1 addition & 1 deletion synapse/handlers/federation.py
@@ -2956,7 +2956,7 @@ async def _check_signature(self, event: EventBase, context: EventContext) -> Non
try:
# for each sig on the third_party_invite block of the actual invite
for server, signature_block in signed["signatures"].items():
- for key_name, encoded_signature in signature_block.items():
+ for key_name in signature_block.keys():
if not key_name.startswith("ed25519:"):
continue


4 changes: 2 additions & 2 deletions synapse/logging/_remote.py
@@ -226,11 +226,11 @@ def _handle_pressure(self) -> None:
old_buffer = self._buffer
self._buffer = deque()

- for i in range(buffer_split):
+ for _ in range(buffer_split):
self._buffer.append(old_buffer.popleft())

end_buffer = []
- for i in range(buffer_split):
+ for _ in range(buffer_split):
end_buffer.append(old_buffer.pop())

self._buffer.extend(reversed(end_buffer))

4 changes: 2 additions & 2 deletions synapse/rest/key/v2/remote_key_resource.py
@@ -144,7 +144,7 @@ async def query_keys(self, request, query, query_remote_on_cache_miss=False):

# Note that the value is unused.
cache_misses = {} # type: Dict[str, Dict[str, int]]
- for (server_name, key_id, from_server), results in cached.items():
+ for (server_name, key_id, _), results in cached.items():
results = [(result["ts_added_ms"], result) for result in results]

if not results and key_id is not None:
@@ -206,7 +206,7 @@ async def query_keys(self, request, query, query_remote_on_cache_miss=False):
# Cast to bytes since postgresql returns a memoryview.
json_results.add(bytes(most_recent_result["key_json"]))
else:
- for ts_added, result in results:
+ for _, result in results:
# Cast to bytes since postgresql returns a memoryview.
json_results.add(bytes(result["key_json"]))


10 changes: 5 additions & 5 deletions synapse/storage/databases/main/events.py
@@ -170,7 +170,7 @@ async def _persist_events_and_state_updates(
)

async with stream_ordering_manager as stream_orderings:
- for (event, context), stream in zip(events_and_contexts, stream_orderings):
+ for (event, _), stream in zip(events_and_contexts, stream_orderings):
event.internal_metadata.stream_ordering = stream

await self.db_pool.runInteraction(
@@ -297,7 +297,7 @@ def _get_prevs_before_rejected_txn(txn, batch):
txn.execute(sql + clause, args)
to_recursively_check = []

- for event_id, prev_event_id, metadata, rejected in txn:
+ for _, prev_event_id, metadata, rejected in txn:
if prev_event_id in existing_prevs:
continue

@@ -1127,7 +1127,7 @@ def _upsert_room_version_txn(self, txn: LoggingTransaction, room_id: str):
def _update_forward_extremities_txn(
self, txn, new_forward_extremities, max_stream_order
):
- for room_id, new_extrem in new_forward_extremities.items():
+ for room_id in new_forward_extremities.keys():
self.db_pool.simple_delete_txn(
txn, table="event_forward_extremities", keyvalues={"room_id": room_id}
)
@@ -1399,7 +1399,7 @@ def get_internal_metadata(event):
]

state_values = []
- for event, context in state_events_and_contexts:
+ for event, _ in state_events_and_contexts:
vals = {
"event_id": event.event_id,
"room_id": event.room_id,
@@ -1468,7 +1468,7 @@ def _update_metadata_tables_txn(
# nothing to do here
return

- for event, context in events_and_contexts:
+ for event, _ in events_and_contexts:
if event.type == EventTypes.Redaction and event.redacts is not None:
# Remove the entries in the event_push_actions table for the
# redacted event.

2 changes: 1 addition & 1 deletion tests/handlers/test_federation.py
@@ -222,7 +222,7 @@ def create_invite():
room_version,
)

- for i in range(3):
+ for _ in range(3):
event = create_invite()
self.get_success(
self.handler.on_invite_request(

4 changes: 2 additions & 2 deletions tests/replication/tcp/streams/test_events.py
@@ -239,7 +239,7 @@ def test_update_function_huge_state_change(self):

# the state rows are unsorted
state_rows = [] # type: List[EventsStreamCurrentStateRow]
- for stream_name, token, row in received_rows:
+ for stream_name, _, row in received_rows:
self.assertEqual("events", stream_name)
self.assertIsInstance(row, EventsStreamRow)
self.assertEqual(row.type, "state")
@@ -356,7 +356,7 @@ def test_update_function_state_row_limit(self):

# the state rows are unsorted
state_rows = [] # type: List[EventsStreamCurrentStateRow]
- for j in range(STATES_PER_USER + 1):
+ for _ in range(STATES_PER_USER + 1):
stream_name, token, row = received_rows.pop(0)
self.assertEqual("events", stream_name)
self.assertIsInstance(row, EventsStreamRow)

4 changes: 2 additions & 2 deletions tests/rest/admin/test_device.py
@@ -430,7 +430,7 @@ def test_get_devices(self):
"""
# Create devices
number_devices = 5
- for n in range(number_devices):
+ for _ in range(number_devices):
self.login("user", "pass")

# Get devices
@@ -547,7 +547,7 @@ def test_delete_devices(self):

# Create devices
number_devices = 5
- for n in range(number_devices):
+ for _ in range(number_devices):
self.login("user", "pass")

# Get devices

8 changes: 4 additions & 4 deletions tests/rest/admin/test_event_reports.py
@@ -48,22 +48,22 @@ def prepare(self, reactor, clock, hs):
self.helper.join(self.room_id2, user=self.admin_user, tok=self.admin_user_tok)

# Two rooms and two users. Every user sends and reports every room event
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id1,
user_tok=self.other_user_tok,
)
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id2,
user_tok=self.other_user_tok,
)
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id1,
user_tok=self.admin_user_tok,
)
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id2,
user_tok=self.admin_user_tok,

8 changes: 4 additions & 4 deletions tests/rest/admin/test_room.py
@@ -615,7 +615,7 @@ def test_list_rooms(self):
# Create 3 test rooms
total_rooms = 3
room_ids = []
- for x in range(total_rooms):
+ for _ in range(total_rooms):
room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
@@ -679,7 +679,7 @@ def test_list_rooms_pagination(self):
# Create 5 test rooms
total_rooms = 5
room_ids = []
- for x in range(total_rooms):
+ for _ in range(total_rooms):
room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
@@ -1577,15 +1577,15 @@ def test_context_as_admin(self):
channel.json_body["event"]["event_id"], events[midway]["event_id"]
)

- for i, found_event in enumerate(channel.json_body["events_before"]):
+ for found_event in channel.json_body["events_before"]:
for j, posted_event in enumerate(events):
if found_event["event_id"] == posted_event["event_id"]:
self.assertTrue(j < midway)
break
else:
self.fail("Event %s from events_before not found" % j)

- for i, found_event in enumerate(channel.json_body["events_after"]):
+ for found_event in channel.json_body["events_after"]:
for j, posted_event in enumerate(events):
if found_event["event_id"] == posted_event["event_id"]:
self.assertTrue(j > midway)

2 changes: 1 addition & 1 deletion tests/rest/admin/test_statistics.py
@@ -467,7 +467,7 @@ def _create_media(self, user_token: str, number_media: int):
number_media: Number of media to be created for the user
"""
upload_resource = self.media_repo.children[b"upload"]
- for i in range(number_media):
+ for _ in range(number_media):
# file size is 67 Byte
image_data = unhexlify(
b"89504e470d0a1a0a0000000d4948445200000001000000010806"

4 changes: 2 additions & 2 deletions tests/rest/admin/test_user.py
@@ -1937,7 +1937,7 @@ def test_get_rooms(self):
# Create rooms and join
other_user_tok = self.login("user", "pass")
number_rooms = 5
- for n in range(number_rooms):
+ for _ in range(number_rooms):
self.helper.create_room_as(self.other_user, tok=other_user_tok)

# Get rooms
@@ -2517,7 +2517,7 @@ def _create_media_for_user(self, user_token: str, number_media: int):
user_token: Access token of the user
number_media: Number of media to be created for the user
"""
- for i in range(number_media):
+ for _ in range(number_media):
# file size is 67 Byte
image_data = unhexlify(
b"89504e470d0a1a0a0000000d4948445200000001000000010806"

6 changes: 3 additions & 3 deletions tests/rest/client/v1/test_rooms.py
@@ -646,7 +646,7 @@ def test_invites_by_rooms_ratelimit(self):
def test_invites_by_users_ratelimit(self):
"""Tests that invites to a specific user are actually rate-limited."""

- for i in range(3):
+ for _ in range(3):
room_id = self.helper.create_room_as(self.user_id)
self.helper.invite(room_id, self.user_id, "@other-users:red")

@@ -668,7 +668,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
)
def test_join_local_ratelimit(self):
"""Tests that local joins are actually rate-limited."""
- for i in range(3):
+ for _ in range(3):
self.helper.create_room_as(self.user_id)

self.helper.create_room_as(self.user_id, expect_code=429)
@@ -733,7 +733,7 @@ def test_join_local_ratelimit_idempotent(self):
for path in paths_to_test:
# Make sure we send more requests than the rate-limiting config would allow
# if all of these requests ended up joining the user to a room.
- for i in range(4):
+ for _ in range(4):
channel = self.make_request("POST", path % room_id, {})
self.assertEquals(channel.code, 200)


4 changes: 2 additions & 2 deletions tests/storage/test_event_metrics.py
@@ -38,12 +38,12 @@ def test_exposed_to_prometheus(self):
last_event = None

# Make a real event chain
- for i in range(event_count):
+ for _ in range(event_count):
ev = self.create_and_send_event(room_id, user, False, last_event)
last_event = [ev]

# Sprinkle in some extremities
- for i in range(extrems):
+ for _ in range(extrems):
ev = self.create_and_send_event(room_id, user, False, last_event)

# Let it run for a while, then pull out the statistics from the

2 changes: 1 addition & 1 deletion tests/unittest.py
@@ -133,7 +133,7 @@ def tearDown(orig):
def assertObjectHasAttributes(self, attrs, obj):
"""Asserts that the given object has each of the attributes given, and
that the value of each matches according to assertEquals."""
- for (key, value) in attrs.items():
+ for key in attrs.keys():
if not hasattr(obj, key):
raise AssertionError("Expected obj to have a '.%s'" % key)
try:

2 changes: 1 addition & 1 deletion tests/utils.py
@@ -303,7 +303,7 @@ def cleanup():
# database for a few more seconds due to flakiness, preventing
# us from dropping it when the test is over. If we can't drop
# it, warn and move on.
- for x in range(5):
+ for _ in range(5):
try:
cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
db_conn.commit()