Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Permalink
Clarify list/set/dict/tuple comprehensions and enforce via flake8 (#6957)
Browse files Browse the repository at this point in the history

Ensure good comprehension hygiene using flake8-comprehensions.
  • Loading branch information
clokep authored Feb 21, 2020
1 parent 272eee1 commit 509e381
Show file tree
Hide file tree
Showing 73 changed files with 251 additions and 276 deletions.
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ python 3.6 and to install each tool:

```
# Install the dependencies
pip install -U black flake8 isort
pip install -U black flake8 flake8-comprehensions isort
# Run the linter script
./scripts-dev/lint.sh
Expand Down
1 change: 1 addition & 0 deletions changelog.d/6957.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions.
2 changes: 1 addition & 1 deletion docs/code_style.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ The necessary tools are detailed below.

Install `flake8` with:

pip install --upgrade flake8
pip install --upgrade flake8 flake8-comprehensions

Check all application and test code with:

Expand Down
2 changes: 1 addition & 1 deletion scripts-dev/convert_server_keys.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def main():

yaml.safe_dump(result, sys.stdout, default_flow_style=False)

rows = list(row for server, json in result.items() for row in rows_v2(server, json))
rows = [row for server, json in result.items() for row in rows_v2(server, json)]

cursor = connection.cursor()
cursor.executemany(
Expand Down
2 changes: 1 addition & 1 deletion synapse/app/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def run():

def quit_with_error(error_string):
message_lines = error_string.split("\n")
line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
line_length = max(len(l) for l in message_lines if len(l) < 80) + 2
sys.stderr.write("*" * line_length + "\n")
for line in message_lines:
sys.stderr.write(" %s\n" % (line.rstrip(),))
Expand Down
4 changes: 2 additions & 2 deletions synapse/app/federation_sender.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,15 +262,15 @@ def process_replication_rows(self, stream_name, token, rows):

# ... as well as device updates and messages
elif stream_name == DeviceListsStream.NAME:
hosts = set(row.destination for row in rows)
hosts = {row.destination for row in rows}
for host in hosts:
self.federation_sender.send_device_messages(host)

elif stream_name == ToDeviceStream.NAME:
# The to_device stream includes stuff to be pushed to both local
# clients and remote servers, so we ignore entities that start with
# '@' (since they'll be local users rather than destinations).
hosts = set(row.entity for row in rows if not row.entity.startswith("@"))
hosts = {row.entity for row in rows if not row.entity.startswith("@")}
for host in hosts:
self.federation_sender.send_device_messages(host)

Expand Down
2 changes: 1 addition & 1 deletion synapse/app/pusher.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ def poke_pushers(self, stream_name, token, rows):
yield self.pusher_pool.on_new_notifications(token, token)
elif stream_name == "receipts":
yield self.pusher_pool.on_new_receipts(
token, token, set(row.room_id for row in rows)
token, token, {row.room_id for row in rows}
)
except Exception:
logger.exception("Error poking pushers")
Expand Down
4 changes: 2 additions & 2 deletions synapse/config/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -1066,12 +1066,12 @@ def _warn_if_webclient_configured(listeners):


def _check_resource_config(listeners):
resource_names = set(
resource_names = {
res_name
for listener in listeners
for res in listener.get("resources", [])
for res_name in res.get("names", [])
)
}

for resource in resource_names:
if resource not in KNOWN_RESOURCES:
Expand Down
2 changes: 1 addition & 1 deletion synapse/config/tls.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ def read_certificate_from_disk(self, require_cert_and_key):
crypto.FILETYPE_ASN1, self.tls_certificate
)
sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest())
sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints)
sha256_fingerprints = {f["sha256"] for f in self.tls_fingerprints}
if sha256_fingerprint not in sha256_fingerprints:
self.tls_fingerprints.append({"sha256": sha256_fingerprint})

Expand Down
6 changes: 2 additions & 4 deletions synapse/crypto/keyring.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,9 +326,7 @@ def _get_server_verify_keys(self, verify_requests):
verify_requests (list[VerifyJsonRequest]): list of verify requests
"""

remaining_requests = set(
(rq for rq in verify_requests if not rq.key_ready.called)
)
remaining_requests = {rq for rq in verify_requests if not rq.key_ready.called}

@defer.inlineCallbacks
def do_iterations():
Expand Down Expand Up @@ -396,7 +394,7 @@ def _attempt_key_fetches_with_fetcher(self, fetcher, remaining_requests):

results = yield fetcher.get_keys(missing_keys)

completed = list()
completed = []
for verify_request in remaining_requests:
server_name = verify_request.server_name

Expand Down
4 changes: 2 additions & 2 deletions synapse/federation/send_queue.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,9 @@ def _clear_queue_before_pos(self, position_to_delete):
for key in keys[:i]:
del self.presence_changed[key]

user_ids = set(
user_ids = {
user_id for uids in self.presence_changed.values() for user_id in uids
)
}

keys = self.presence_destinations.keys()
i = self.presence_destinations.bisect_left(position_to_delete)
Expand Down
2 changes: 1 addition & 1 deletion synapse/groups/groups_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -608,7 +608,7 @@ def invite_to_group(self, group_id, user_id, requester_user_id, content):
user_results = yield self.store.get_users_in_group(
group_id, include_private=True
)
if user_id in [user_result["user_id"] for user_result in user_results]:
if user_id in (user_result["user_id"] for user_result in user_results):
raise SynapseError(400, "User already in group")

content = {
Expand Down
2 changes: 1 addition & 1 deletion synapse/handlers/device.py
Original file line number Diff line number Diff line change
Expand Up @@ -742,6 +742,6 @@ def user_device_resync(self, user_id):

# We clobber the seen updates since we've re-synced from a given
# point.
self._seen_updates[user_id] = set([stream_id])
self._seen_updates[user_id] = {stream_id}

defer.returnValue(result)
4 changes: 2 additions & 2 deletions synapse/handlers/directory.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def _create_association(self, room_alias, room_id, servers=None, creator=None):
# TODO(erikj): Check if there is a current association.
if not servers:
users = yield self.state.get_current_users_in_room(room_id)
servers = set(get_domain_from_id(u) for u in users)
servers = {get_domain_from_id(u) for u in users}

if not servers:
raise SynapseError(400, "Failed to get server list")
Expand Down Expand Up @@ -255,7 +255,7 @@ def get_association(self, room_alias):
)

users = yield self.state.get_current_users_in_room(room_id)
extra_servers = set(get_domain_from_id(u) for u in users)
extra_servers = {get_domain_from_id(u) for u in users}
servers = set(extra_servers) | set(servers)

# If this server is in the list of servers, return it first.
Expand Down
18 changes: 9 additions & 9 deletions synapse/handlers/federation.py
Original file line number Diff line number Diff line change
Expand Up @@ -659,11 +659,11 @@ async def _get_events_from_store_or_dest(
# this can happen if a remote server claims that the state or
# auth_events at an event in room A are actually events in room B

bad_events = list(
bad_events = [
(event_id, event.room_id)
for event_id, event in fetched_events.items()
if event.room_id != room_id
)
]

for bad_event_id, bad_room_id in bad_events:
# This is a bogus situation, but since we may only discover it a long time
Expand Down Expand Up @@ -856,7 +856,7 @@ async def backfill(self, dest, room_id, limit, extremities):

# Don't bother processing events we already have.
seen_events = await self.store.have_events_in_timeline(
set(e.event_id for e in events)
{e.event_id for e in events}
)

events = [e for e in events if e.event_id not in seen_events]
Expand All @@ -866,7 +866,7 @@ async def backfill(self, dest, room_id, limit, extremities):

event_map = {e.event_id: e for e in events}

event_ids = set(e.event_id for e in events)
event_ids = {e.event_id for e in events}

# build a list of events whose prev_events weren't in the batch.
# (XXX: this will include events whose prev_events we already have; that doesn't
Expand All @@ -892,13 +892,13 @@ async def backfill(self, dest, room_id, limit, extremities):
state_events.update({s.event_id: s for s in state})
events_to_state[e_id] = state

required_auth = set(
required_auth = {
a_id
for event in events
+ list(state_events.values())
+ list(auth_events.values())
for a_id in event.auth_event_ids()
)
}
auth_events.update(
{e_id: event_map[e_id] for e_id in required_auth if e_id in event_map}
)
Expand Down Expand Up @@ -1247,7 +1247,7 @@ async def send_invite(self, target_host, event):
async def on_event_auth(self, event_id: str) -> List[EventBase]:
event = await self.store.get_event(event_id)
auth = await self.store.get_auth_chain(
[auth_id for auth_id in event.auth_event_ids()], include_given=True
list(event.auth_event_ids()), include_given=True
)
return list(auth)

Expand Down Expand Up @@ -2152,7 +2152,7 @@ async def on_query_auth(

# Now get the current auth_chain for the event.
local_auth_chain = await self.store.get_auth_chain(
[auth_id for auth_id in event.auth_event_ids()], include_given=True
list(event.auth_event_ids()), include_given=True
)

# TODO: Check if we would now reject event_id. If so we need to tell
Expand Down Expand Up @@ -2654,7 +2654,7 @@ def exchange_third_party_invite(
member_handler = self.hs.get_room_member_handler()
yield member_handler.send_membership_event(None, event, context)
else:
destinations = set(x.split(":", 1)[-1] for x in (sender_user_id, room_id))
destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)}
yield self.federation_client.forward_third_party_invite(
destinations, room_id, event_dict
)
Expand Down
6 changes: 3 additions & 3 deletions synapse/handlers/presence.py
Original file line number Diff line number Diff line change
Expand Up @@ -313,7 +313,7 @@ def _update_states(self, new_states):
notified_presence_counter.inc(len(to_notify))
yield self._persist_and_notify(list(to_notify.values()))

self.unpersisted_users_changes |= set(s.user_id for s in new_states)
self.unpersisted_users_changes |= {s.user_id for s in new_states}
self.unpersisted_users_changes -= set(to_notify.keys())

to_federation_ping = {
Expand Down Expand Up @@ -698,7 +698,7 @@ def get_states(self, target_user_ids, as_event=False):
updates = yield self.current_state_for_users(target_user_ids)
updates = list(updates.values())

for user_id in set(target_user_ids) - set(u.user_id for u in updates):
for user_id in set(target_user_ids) - {u.user_id for u in updates}:
updates.append(UserPresenceState.default(user_id))

now = self.clock.time_msec()
Expand Down Expand Up @@ -886,7 +886,7 @@ def _on_user_joined_room(self, room_id, user_id):
hosts = yield self.state.get_current_hosts_in_room(room_id)

# Filter out ourselves.
hosts = set(host for host in hosts if host != self.server_name)
hosts = {host for host in hosts if host != self.server_name}

self.federation.send_presence_to_destinations(
states=[state], destinations=hosts
Expand Down
2 changes: 1 addition & 1 deletion synapse/handlers/receipts.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ async def _handle_new_receipts(self, receipts):
# no new receipts
return False

affected_room_ids = list(set([r.room_id for r in receipts]))
affected_room_ids = list({r.room_id for r in receipts})

self.notifier.on_new_event("receipt_key", max_batch_id, rooms=affected_room_ids)
# Note that the min here shouldn't be relied upon to be accurate.
Expand Down
2 changes: 1 addition & 1 deletion synapse/handlers/room.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,7 +355,7 @@ def clone_existing_room(
# If so, mark the new room as non-federatable as well
creation_content["m.federate"] = False

initial_state = dict()
initial_state = {}

# Replicate relevant room events
types_to_copy = (
Expand Down
8 changes: 4 additions & 4 deletions synapse/handlers/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def search(self, user, content, batch=None):
membership_list=[Membership.JOIN],
# membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban],
)
room_ids = set(r.room_id for r in rooms)
room_ids = {r.room_id for r in rooms}

        # If doing a subset of all rooms search, check if any of the rooms
# are from an upgraded room, and search their contents as well
Expand Down Expand Up @@ -374,12 +374,12 @@ def search(self, user, content, batch=None):
).to_string()

if include_profile:
senders = set(
senders = {
ev.sender
for ev in itertools.chain(
res["events_before"], [event], res["events_after"]
)
)
}

if res["events_after"]:
last_event_id = res["events_after"][-1].event_id
Expand Down Expand Up @@ -421,7 +421,7 @@ def search(self, user, content, batch=None):

state_results = {}
if include_state:
rooms = set(e.room_id for e in allowed_events)
rooms = {e.room_id for e in allowed_events}
for room_id in rooms:
state = yield self.state_handler.get_current_state(room_id)
state_results[room_id] = list(state.values())
Expand Down
22 changes: 10 additions & 12 deletions synapse/handlers/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -682,11 +682,9 @@ async def compute_summary(

# FIXME: order by stream ordering rather than as returned by SQL
if joined_user_ids or invited_user_ids:
summary["m.heroes"] = sorted(
[user_id for user_id in (joined_user_ids + invited_user_ids)]
)[0:5]
summary["m.heroes"] = sorted(joined_user_ids + invited_user_ids)[0:5]
else:
summary["m.heroes"] = sorted([user_id for user_id in gone_user_ids])[0:5]
summary["m.heroes"] = sorted(gone_user_ids)[0:5]

if not sync_config.filter_collection.lazy_load_members():
return summary
Expand All @@ -697,9 +695,9 @@ async def compute_summary(

# track which members the client should already know about via LL:
# Ones which are already in state...
existing_members = set(
existing_members = {
user_id for (typ, user_id) in state.keys() if typ == EventTypes.Member
)
}

# ...or ones which are in the timeline...
for ev in batch.events:
Expand Down Expand Up @@ -773,10 +771,10 @@ async def compute_state_delta(
# We only request state for the members needed to display the
# timeline:

members_to_fetch = set(
members_to_fetch = {
event.sender # FIXME: we also care about invite targets etc.
for event in batch.events
)
}

if full_state:
# always make sure we LL ourselves so we know we're in the room
Expand Down Expand Up @@ -1993,10 +1991,10 @@ def _calculate_state(
)
}

c_ids = set(e for e in itervalues(current))
ts_ids = set(e for e in itervalues(timeline_start))
p_ids = set(e for e in itervalues(previous))
tc_ids = set(e for e in itervalues(timeline_contains))
c_ids = set(itervalues(current))
ts_ids = set(itervalues(timeline_start))
p_ids = set(itervalues(previous))
tc_ids = set(itervalues(timeline_contains))

# If we are lazyloading room members, we explicitly add the membership events
# for the senders in the timeline into the state block returned by /sync,
Expand Down
4 changes: 2 additions & 2 deletions synapse/handlers/typing.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def _push_remote(self, member, typing):
now=now, obj=member, then=now + FEDERATION_PING_INTERVAL
)

for domain in set(get_domain_from_id(u) for u in users):
for domain in {get_domain_from_id(u) for u in users}:
if domain != self.server_name:
logger.debug("sending typing update to %s", domain)
self.federation.build_and_send_edu(
Expand Down Expand Up @@ -231,7 +231,7 @@ def _recv_edu(self, origin, content):
return

users = yield self.state.get_current_users_in_room(room_id)
domains = set(get_domain_from_id(u) for u in users)
domains = {get_domain_from_id(u) for u in users}

if self.server_name in domains:
logger.info("Got typing update from %s: %r", user_id, content)
Expand Down
2 changes: 1 addition & 1 deletion synapse/logging/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def wrapped(*args, **kwargs):
pathname=pathname,
lineno=lineno,
msg=msg,
args=tuple(),
args=(),
exc_info=None,
)

Expand Down
2 changes: 1 addition & 1 deletion synapse/metrics/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,7 @@ def collect(self):
res.append(["+Inf", sum(data.values())])

metric = HistogramMetricFamily(
self.name, "", buckets=res, sum_value=sum([x * y for x, y in data.items()])
self.name, "", buckets=res, sum_value=sum(x * y for x, y in data.items())
)
yield metric

Expand Down
Loading

0 comments on commit 509e381

Please sign in to comment.