From 09cf3c47f447b4de56d6e45cee5e8fac6bf3f7ab Mon Sep 17 00:00:00 2001
From: Deimos
Date: Mon, 13 Aug 2018 00:00:34 -0600
Subject: [PATCH] Apply Black code formatter

This commit contains only changes that were made automatically by Black
(except for some minor fixes to string un-wrapping and two format-disabling
blocks in the user and group schemas). Some manual cleanup/adjustments will
probably need to be made in a follow-up commit, but this one contains the
result of running Black on the codebase without significant further manual
tweaking.
---
 tildes/alembic/env.py | 22 +--
 ...b3_add_setting_to_open_links_in_new_tab.py | 26 ++-
 ..._add_setting_to_open_text_links_in_new_.py | 13 +-
 ...dded_user_tag_type_comment_notification.py | 35 ++--
 ...04_update_comment_triggers_for_removals.py | 124 ++++++++----
 .../comment_user_mentions_generator.py | 21 +-
 tildes/consumers/topic_metadata_generator.py | 29 ++-
 tildes/scripts/breached_passwords.py | 57 +++---
 tildes/scripts/clean_private_data.py | 51 +++--
 tildes/scripts/initialize_db.py | 50 ++---
 tildes/setup.py | 6 +-
 tildes/tests/conftest.py | 71 ++++---
 tildes/tests/fixtures.py | 8 +-
 tildes/tests/test_comment.py | 64 +++----
 tildes/tests/test_comment_user_mentions.py | 72 ++++---
 tildes/tests/test_datetime.py | 14 +-
 tildes/tests/test_group.py | 41 ++--
 tildes/tests/test_id.py | 10 +-
 tildes/tests/test_markdown.py | 167 ++++++++--------
 tildes/tests/test_markdown_field.py | 22 +--
 tildes/tests/test_messages.py | 36 ++--
 tildes/tests/test_metrics.py | 2 +-
 tildes/tests/test_ratelimit.py | 36 ++--
 tildes/tests/test_simplestring_field.py | 22 +--
 tildes/tests/test_string.py | 58 +++---
 tildes/tests/test_title.py | 36 ++--
 tildes/tests/test_topic.py | 44 ++---
 tildes/tests/test_topic_permissions.py | 37 ++--
 tildes/tests/test_topic_tags.py | 22 +--
 tildes/tests/test_triggers_comments.py | 6 +-
 tildes/tests/test_url.py | 22 +--
 tildes/tests/test_user.py | 51 +++--
 tildes/tests/test_username.py | 16 +-
 tildes/tests/test_webassets.py | 10 +-
 tildes/tests/webtests/test_user_page.py | 6 +-
 tildes/tildes/__init__.py | 93 ++++-----
 tildes/tildes/api.py | 6 +-
 tildes/tildes/auth.py | 46 ++---
 tildes/tildes/database.py | 27 ++-
 tildes/tildes/enums.py | 20 +-
 tildes/tildes/jinja.py | 26 ++-
 tildes/tildes/json.py | 6 +-
 tildes/tildes/lib/amqp.py | 16 +-
 tildes/tildes/lib/cmark.py | 12 +-
 tildes/tildes/lib/database.py | 26 ++-
 tildes/tildes/lib/datetime.py | 30 +--
 tildes/tildes/lib/hash.py | 3 +-
 tildes/tildes/lib/id.py | 10 +-
 tildes/tildes/lib/markdown.py | 181 +++++++++---------
 tildes/tildes/lib/message.py | 2 +-
 tildes/tildes/lib/password.py | 11 +-
 tildes/tildes/lib/ratelimit.py | 95 ++++-----
 tildes/tildes/lib/string.py | 45 ++---
 tildes/tildes/lib/url.py | 4 +-
 tildes/tildes/metrics.py | 60 ++----
 tildes/tildes/models/comment/comment.py | 97 ++++------
 .../models/comment/comment_notification.py | 71 +++----
 .../comment/comment_notification_query.py | 10 +-
 tildes/tildes/models/comment/comment_query.py | 6 +-
 tildes/tildes/models/comment/comment_tag.py | 29 +--
 tildes/tildes/models/comment/comment_tree.py | 23 +--
 tildes/tildes/models/comment/comment_vote.py | 20 +-
 tildes/tildes/models/database_model.py | 34 ++--
 tildes/tildes/models/group/group.py | 44 ++---
 tildes/tildes/models/group/group_query.py | 6 +-
 .../tildes/models/group/group_subscription.py | 20 +-
 tildes/tildes/models/log/log.py | 135 ++++++-------
 tildes/tildes/models/message/message.py | 85 ++++----
 tildes/tildes/models/model_query.py | 37 ++--
 tildes/tildes/models/pagination.py | 14
+- tildes/tildes/models/topic/topic.py | 174 +++++++---------- tildes/tildes/models/topic/topic_query.py | 45 ++--- tildes/tildes/models/topic/topic_visit.py | 26 +-- tildes/tildes/models/topic/topic_vote.py | 20 +- tildes/tildes/models/user/user.py | 70 +++---- .../tildes/models/user/user_group_settings.py | 16 +- tildes/tildes/models/user/user_invite_code.py | 37 ++-- tildes/tildes/resources/__init__.py | 6 +- tildes/tildes/resources/comment.py | 16 +- tildes/tildes/resources/group.py | 11 +- tildes/tildes/resources/message.py | 11 +- tildes/tildes/resources/topic.py | 9 +- tildes/tildes/resources/user.py | 5 +- tildes/tildes/routes.py | 160 ++++++---------- tildes/tildes/schemas/fields.py | 45 ++--- tildes/tildes/schemas/group.py | 27 +-- tildes/tildes/schemas/topic.py | 58 +++--- tildes/tildes/schemas/topic_listing.py | 13 +- tildes/tildes/schemas/user.py | 46 ++--- tildes/tildes/views/__init__.py | 2 +- tildes/tildes/views/api/v0/group.py | 2 +- tildes/tildes/views/api/v0/topic.py | 4 +- tildes/tildes/views/api/v0/user.py | 2 +- tildes/tildes/views/api/web/comment.py | 167 +++++++--------- tildes/tildes/views/api/web/exceptions.py | 21 +- tildes/tildes/views/api/web/group.py | 41 ++-- tildes/tildes/views/api/web/message.py | 16 +- tildes/tildes/views/api/web/topic.py | 172 ++++++++--------- tildes/tildes/views/api/web/user.py | 139 +++++++------- tildes/tildes/views/decorators.py | 16 +- tildes/tildes/views/donate.py | 36 ++-- tildes/tildes/views/exceptions.py | 2 +- tildes/tildes/views/group.py | 4 +- tildes/tildes/views/login.py | 33 ++-- tildes/tildes/views/message.py | 101 +++++----- tildes/tildes/views/metrics.py | 8 +- tildes/tildes/views/notifications.py | 20 +- tildes/tildes/views/register.py | 65 +++---- tildes/tildes/views/settings.py | 56 +++--- tildes/tildes/views/topic.py | 135 +++++-------- tildes/tildes/views/user.py | 38 ++-- 111 files changed, 2009 insertions(+), 2523 deletions(-) diff --git a/tildes/alembic/env.py b/tildes/alembic/env.py index 226a22e..050df0c 100644 --- a/tildes/alembic/env.py +++ b/tildes/alembic/env.py @@ -12,12 +12,7 @@ config = context.config fileConfig(config.config_file_name) # import all DatabaseModel subclasses here for autogenerate support -from tildes.models.comment import ( - Comment, - CommentNotification, - CommentTag, - CommentVote, -) +from tildes.models.comment import Comment, CommentNotification, CommentTag, CommentVote from tildes.models.group import Group, GroupSubscription from tildes.models.log import Log from tildes.models.message import MessageConversation, MessageReply @@ -25,6 +20,7 @@ from tildes.models.topic import Topic, TopicVisit, TopicVote from tildes.models.user import User, UserGroupSettings, UserInviteCode from tildes.models import DatabaseModel + target_metadata = DatabaseModel.metadata # other values from the config, defined by the needs of env.py, @@ -46,8 +42,7 @@ def run_migrations_offline(): """ url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=target_metadata, literal_binds=True) + context.configure(url=url, target_metadata=target_metadata, literal_binds=True) with context.begin_transaction(): context.run_migrations() @@ -62,18 +57,17 @@ def run_migrations_online(): """ connectable = engine_from_config( config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) with connectable.connect() as connection: - context.configure( - connection=connection, - 
target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() + if context.is_offline_mode(): run_migrations_offline() else: diff --git a/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py b/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py index a05b76b..d04050b 100644 --- a/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py +++ b/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py @@ -10,17 +10,33 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '2512581c91b3' +revision = "2512581c91b3" down_revision = None branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('open_new_tab_external', sa.Boolean(), server_default='false', nullable=False)) - op.add_column('users', sa.Column('open_new_tab_internal', sa.Boolean(), server_default='false', nullable=False)) + op.add_column( + "users", + sa.Column( + "open_new_tab_external", + sa.Boolean(), + server_default="false", + nullable=False, + ), + ) + op.add_column( + "users", + sa.Column( + "open_new_tab_internal", + sa.Boolean(), + server_default="false", + nullable=False, + ), + ) def downgrade(): - op.drop_column('users', 'open_new_tab_internal') - op.drop_column('users', 'open_new_tab_external') + op.drop_column("users", "open_new_tab_internal") + op.drop_column("users", "open_new_tab_external") diff --git a/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py b/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py index e8705db..8653976 100644 --- a/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py +++ b/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py @@ -10,15 +10,20 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = 'de83b8750123' -down_revision = '2512581c91b3' +revision = "de83b8750123" +down_revision = "2512581c91b3" branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('open_new_tab_text', sa.Boolean(), server_default='false', nullable=False)) + op.add_column( + "users", + sa.Column( + "open_new_tab_text", sa.Boolean(), server_default="false", nullable=False + ), + ) def downgrade(): - op.drop_column('users', 'open_new_tab_text') + op.drop_column("users", "open_new_tab_text") diff --git a/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py b/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py index a675a0e..e3ba0ae 100644 --- a/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py +++ b/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py @@ -9,8 +9,8 @@ from alembic import op # revision identifiers, used by Alembic. 
-revision = 'f1ecbf24c212' -down_revision = 'de83b8750123' +revision = "f1ecbf24c212" +down_revision = "de83b8750123" branch_labels = None depends_on = None @@ -20,18 +20,18 @@ def upgrade(): connection = None if not op.get_context().as_sql: connection = op.get_bind() - connection.execution_options(isolation_level='AUTOCOMMIT') + connection.execution_options(isolation_level="AUTOCOMMIT") op.execute( - "ALTER TYPE commentnotificationtype " - "ADD VALUE IF NOT EXISTS 'USER_MENTION'" + "ALTER TYPE commentnotificationtype ADD VALUE IF NOT EXISTS 'USER_MENTION'" ) # re-activate the transaction for any future migrations if connection is not None: - connection.execution_options(isolation_level='READ_COMMITTED') + connection.execution_options(isolation_level="READ_COMMITTED") - op.execute(''' + op.execute( + """ CREATE OR REPLACE FUNCTION send_rabbitmq_message_for_comment() RETURNS TRIGGER AS $$ DECLARE affected_row RECORD; @@ -50,23 +50,28 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - ''') - op.execute(''' + """ + ) + op.execute( + """ CREATE TRIGGER send_rabbitmq_message_for_comment_insert AFTER INSERT ON comments FOR EACH ROW EXECUTE PROCEDURE send_rabbitmq_message_for_comment('created'); - ''') - op.execute(''' + """ + ) + op.execute( + """ CREATE TRIGGER send_rabbitmq_message_for_comment_edit AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.markdown IS DISTINCT FROM NEW.markdown) EXECUTE PROCEDURE send_rabbitmq_message_for_comment('edited'); - ''') + """ + ) def downgrade(): - op.execute('DROP TRIGGER send_rabbitmq_message_for_comment_insert ON comments') - op.execute('DROP TRIGGER send_rabbitmq_message_for_comment_edit ON comments') - op.execute('DROP FUNCTION send_rabbitmq_message_for_comment') + op.execute("DROP TRIGGER send_rabbitmq_message_for_comment_insert ON comments") + op.execute("DROP TRIGGER send_rabbitmq_message_for_comment_edit ON comments") + op.execute("DROP FUNCTION send_rabbitmq_message_for_comment") diff --git a/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py b/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py index 2851c1d..6a4b666 100644 --- a/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py +++ b/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py @@ -10,8 +10,8 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = 'fab922a8bb04' -down_revision = 'f1ecbf24c212' +revision = "fab922a8bb04" +down_revision = "f1ecbf24c212" branch_labels = None depends_on = None @@ -19,17 +19,20 @@ depends_on = None def upgrade(): # comment_notifications op.execute("DROP TRIGGER delete_comment_notifications_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_notifications_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted = false AND NEW.is_deleted = true) OR (OLD.is_removed = false AND NEW.is_removed = true)) EXECUTE PROCEDURE delete_comment_notifications(); - """) + """ + ) # comments - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION set_comment_deleted_time() RETURNS TRIGGER AS $$ BEGIN IF (NEW.is_deleted = TRUE) THEN @@ -41,17 +44,21 @@ def upgrade(): RETURN NEW; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER delete_comment_set_deleted_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_set_deleted_time_update BEFORE UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) EXECUTE PROCEDURE set_comment_deleted_time(); - """) + """ + ) - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION set_comment_removed_time() RETURNS TRIGGER AS $$ BEGIN IF (NEW.is_removed = TRUE) THEN @@ -63,19 +70,23 @@ def upgrade(): RETURN NEW; END; $$ LANGUAGE plpgsql; - """) - op.execute(""" + """ + ) + op.execute( + """ CREATE TRIGGER remove_comment_set_removed_time_update BEFORE UPDATE ON comments FOR EACH ROW WHEN (OLD.is_removed IS DISTINCT FROM NEW.is_removed) EXECUTE PROCEDURE set_comment_removed_time(); - """) + """ + ) # topic_visits op.execute("DROP TRIGGER update_topic_visits_num_comments_update ON comments") op.execute("DROP FUNCTION decrement_all_topic_visit_num_comments()") - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_all_topic_visit_num_comments() RETURNS TRIGGER AS $$ DECLARE old_visible BOOLEAN := NOT (OLD.is_deleted OR OLD.is_removed); @@ -96,18 +107,22 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) - op.execute(""" + """ + ) + op.execute( + """ CREATE TRIGGER update_topic_visits_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) OR (OLD.is_removed IS DISTINCT FROM NEW.is_removed)) EXECUTE PROCEDURE update_all_topic_visit_num_comments(); - """) + """ + ) # topics - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_num_comments() RETURNS TRIGGER AS $$ BEGIN IF (TG_OP = 'INSERT') THEN @@ -140,18 +155,22 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_num_comments_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) OR (OLD.is_removed IS DISTINCT FROM NEW.is_removed)) EXECUTE PROCEDURE update_topics_num_comments(); - """) + """ + ) - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_last_activity_time() RETURNS TRIGGER AS $$ DECLARE most_recent_comment RECORD; @@ -182,31 +201,37 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_last_activity_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_last_activity_time_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted IS DISTINCT 
FROM NEW.is_deleted) OR (OLD.is_removed IS DISTINCT FROM NEW.is_removed)) EXECUTE PROCEDURE update_topics_last_activity_time(); - """) + """ + ) def downgrade(): # comment_notifications op.execute("DROP TRIGGER delete_comment_notifications_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_notifications_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted = false AND NEW.is_deleted = true) EXECUTE PROCEDURE delete_comment_notifications(); - """) + """ + ) # comments - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION set_comment_deleted_time() RETURNS TRIGGER AS $$ BEGIN NEW.deleted_time := current_timestamp; @@ -214,15 +239,18 @@ def downgrade(): RETURN NEW; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER delete_comment_set_deleted_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_set_deleted_time_update BEFORE UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted = false AND NEW.is_deleted = true) EXECUTE PROCEDURE set_comment_deleted_time(); - """) + """ + ) op.execute("DROP TRIGGER remove_comment_set_removed_time_update ON comments") op.execute("DROP FUNCTION set_comment_removed_time()") @@ -230,7 +258,8 @@ def downgrade(): # topic_visits op.execute("DROP TRIGGER update_topic_visits_num_comments_update ON comments") op.execute("DROP FUNCTION update_all_topic_visit_num_comments()") - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION decrement_all_topic_visit_num_comments() RETURNS TRIGGER AS $$ BEGIN UPDATE topic_visits @@ -241,17 +270,21 @@ def downgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) - op.execute(""" + """ + ) + op.execute( + """ CREATE TRIGGER update_topic_visits_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted = false AND NEW.is_deleted = true) EXECUTE PROCEDURE decrement_all_topic_visit_num_comments(); - """) + """ + ) # topics - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_num_comments() RETURNS TRIGGER AS $$ BEGIN IF (TG_OP = 'INSERT' AND NEW.is_deleted = FALSE) THEN @@ -277,17 +310,21 @@ def downgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_num_comments_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) EXECUTE PROCEDURE update_topics_num_comments(); - """) + """ + ) - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_last_activity_time() RETURNS TRIGGER AS $$ DECLARE most_recent_comment RECORD; @@ -317,12 +354,15 @@ def downgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_last_activity_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_last_activity_time_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) EXECUTE PROCEDURE update_topics_last_activity_time(); - """) + """ + ) diff --git a/tildes/consumers/comment_user_mentions_generator.py b/tildes/consumers/comment_user_mentions_generator.py index 265bde8..bb6691b 100644 --- a/tildes/consumers/comment_user_mentions_generator.py +++ b/tildes/consumers/comment_user_mentions_generator.py @@ -13,7 +13,7 @@ class CommentUserMentionGenerator(PgsqlQueueConsumer): """Process a delivered message.""" comment = ( self.db_session.query(Comment) - 
.filter_by(comment_id=msg.body['comment_id']) + .filter_by(comment_id=msg.body["comment_id"]) .one() ) @@ -22,15 +22,16 @@ class CommentUserMentionGenerator(PgsqlQueueConsumer): return new_mentions = CommentNotification.get_mentions_for_comment( - self.db_session, comment) + self.db_session, comment + ) - if msg.delivery_info['routing_key'] == 'comment.created': + if msg.delivery_info["routing_key"] == "comment.created": for user_mention in new_mentions: self.db_session.add(user_mention) - elif msg.delivery_info['routing_key'] == 'comment.edited': - to_delete, to_add = ( - CommentNotification.prevent_duplicate_notifications( - self.db_session, comment, new_mentions)) + elif msg.delivery_info["routing_key"] == "comment.edited": + to_delete, to_add = CommentNotification.prevent_duplicate_notifications( + self.db_session, comment, new_mentions + ) for user_mention in to_delete: self.db_session.delete(user_mention) @@ -39,8 +40,8 @@ class CommentUserMentionGenerator(PgsqlQueueConsumer): self.db_session.add(user_mention) -if __name__ == '__main__': +if __name__ == "__main__": CommentUserMentionGenerator( - queue_name='comment_user_mentions_generator.q', - routing_keys=['comment.created', 'comment.edited'], + queue_name="comment_user_mentions_generator.q", + routing_keys=["comment.created", "comment.edited"], ).consume_queue() diff --git a/tildes/consumers/topic_metadata_generator.py b/tildes/consumers/topic_metadata_generator.py index 23593b7..7075f9f 100644 --- a/tildes/consumers/topic_metadata_generator.py +++ b/tildes/consumers/topic_metadata_generator.py @@ -26,9 +26,7 @@ class TopicMetadataGenerator(PgsqlQueueConsumer): def run(self, msg: Message) -> None: """Process a delivered message.""" topic = ( - self.db_session.query(Topic) - .filter_by(topic_id=msg.body['topic_id']) - .one() + self.db_session.query(Topic).filter_by(topic_id=msg.body["topic_id"]).one() ) if topic.is_text_type: @@ -42,22 +40,19 @@ class TopicMetadataGenerator(PgsqlQueueConsumer): html_tree = HTMLParser().parseFragment(topic.rendered_html) # extract the text from all of the HTML elements - extracted_text = ''.join( - [element_text for element_text in html_tree.itertext()]) + extracted_text = "".join( + [element_text for element_text in html_tree.itertext()] + ) # sanitize unicode, remove leading/trailing whitespace, etc. 
extracted_text = simplify_string(extracted_text) # create a short excerpt by truncating the simplified string - excerpt = truncate_string( - extracted_text, - length=200, - truncate_at_chars=' ', - ) + excerpt = truncate_string(extracted_text, length=200, truncate_at_chars=" ") topic.content_metadata = { - 'word_count': word_count(extracted_text), - 'excerpt': excerpt, + "word_count": word_count(extracted_text), + "excerpt": excerpt, } def _generate_link_metadata(self, topic: Topic) -> None: @@ -68,13 +63,11 @@ class TopicMetadataGenerator(PgsqlQueueConsumer): parsed_domain = get_domain_from_url(topic.link) domain = self.public_suffix_list.get_public_suffix(parsed_domain) - topic.content_metadata = { - 'domain': domain, - } + topic.content_metadata = {"domain": domain} -if __name__ == '__main__': +if __name__ == "__main__": TopicMetadataGenerator( - queue_name='topic_metadata_generator.q', - routing_keys=['topic.created', 'topic.edited'], + queue_name="topic_metadata_generator.q", + routing_keys=["topic.created", "topic.edited"], ).consume_queue() diff --git a/tildes/scripts/breached_passwords.py b/tildes/scripts/breached_passwords.py index f588809..2ac941d 100644 --- a/tildes/scripts/breached_passwords.py +++ b/tildes/scripts/breached_passwords.py @@ -46,11 +46,11 @@ def generate_redis_protocol(*elements: Any) -> str: Based on the example Ruby code from https://redis.io/topics/mass-insert#generating-redis-protocol """ - command = f'*{len(elements)}\r\n' + command = f"*{len(elements)}\r\n" for element in elements: element = str(element) - command += f'${len(element)}\r\n{element}\r\n' + command += f"${len(element)}\r\n{element}\r\n" return command @@ -65,27 +65,27 @@ def validate_init_error_rate(ctx: Any, param: Any, value: Any) -> float: """Validate the --error-rate arg for the init command.""" # pylint: disable=unused-argument if not 0 < value < 1: - raise click.BadParameter('error rate must be a float between 0 and 1') + raise click.BadParameter("error rate must be a float between 0 and 1") return value -@cli.command(help='Initialize a new empty bloom filter') +@cli.command(help="Initialize a new empty bloom filter") @click.option( - '--estimate', + "--estimate", required=True, type=int, - help='Expected number of passwords that will be added', + help="Expected number of passwords that will be added", ) @click.option( - '--error-rate', + "--error-rate", default=0.01, show_default=True, - help='Bloom filter desired false positive ratio', + help="Bloom filter desired false positive ratio", callback=validate_init_error_rate, ) @click.confirmation_option( - prompt='Are you sure you want to clear any existing bloom filter?', + prompt="Are you sure you want to clear any existing bloom filter?" ) def init(estimate: int, error_rate: float) -> None: """Initialize a new bloom filter (destroying any existing one). 
@@ -102,22 +102,16 @@ def init(estimate: int, error_rate: float) -> None: REDIS.delete(BREACHED_PASSWORDS_BF_KEY) # BF.RESERVE {key} {error_rate} {size} - REDIS.execute_command( - 'BF.RESERVE', - BREACHED_PASSWORDS_BF_KEY, - error_rate, - estimate, - ) + REDIS.execute_command("BF.RESERVE", BREACHED_PASSWORDS_BF_KEY, error_rate, estimate) click.echo( - 'Initialized bloom filter with expected size of {:,} and false ' - 'positive rate of {}%' - .format(estimate, error_rate * 100) + "Initialized bloom filter with expected size of {:,} and false " + "positive rate of {}%".format(estimate, error_rate * 100) ) -@cli.command(help='Add hashes from a file to the bloom filter') -@click.argument('filename', type=click.Path(exists=True, dir_okay=False)) +@cli.command(help="Add hashes from a file to the bloom filter") +@click.argument("filename", type=click.Path(exists=True, dir_okay=False)) def addhashes(filename: str) -> None: """Add all hashes from a file to the bloom filter. @@ -127,26 +121,26 @@ def addhashes(filename: str) -> None: """ # make sure the key exists and is a bloom filter try: - REDIS.execute_command('BF.DEBUG', BREACHED_PASSWORDS_BF_KEY) + REDIS.execute_command("BF.DEBUG", BREACHED_PASSWORDS_BF_KEY) except ResponseError: - click.echo('Bloom filter is not set up properly - run init first.') + click.echo("Bloom filter is not set up properly - run init first.") raise click.Abort # call wc to count the number of lines in the file for the progress bar - click.echo('Determining hash count...') - result = subprocess.run(['wc', '-l', filename], stdout=subprocess.PIPE) - line_count = int(result.stdout.split(b' ')[0]) + click.echo("Determining hash count...") + result = subprocess.run(["wc", "-l", filename], stdout=subprocess.PIPE) + line_count = int(result.stdout.split(b" ")[0]) progress_bar: Any = click.progressbar(length=line_count) update_interval = 100_000 - click.echo('Adding {:,} hashes to bloom filter...'.format(line_count)) + click.echo("Adding {:,} hashes to bloom filter...".format(line_count)) redis_pipe = subprocess.Popen( - ['redis-cli', '-s', BREACHED_PASSWORDS_REDIS_SOCKET, '--pipe'], + ["redis-cli", "-s", BREACHED_PASSWORDS_REDIS_SOCKET, "--pipe"], stdin=subprocess.PIPE, stdout=subprocess.DEVNULL, - encoding='utf-8', + encoding="utf-8", ) for count, line in enumerate(open(filename), start=1): @@ -155,10 +149,9 @@ def addhashes(filename: str) -> None: # the Pwned Passwords hash lists now have a frequency count for each # hash, which is separated from the hash with a colon, so we need to # handle that if it's present - hashval = hashval.split(':')[0] + hashval = hashval.split(":")[0] - command = generate_redis_protocol( - 'BF.ADD', BREACHED_PASSWORDS_BF_KEY, hashval) + command = generate_redis_protocol("BF.ADD", BREACHED_PASSWORDS_BF_KEY, hashval) redis_pipe.stdin.write(command) if count % update_interval == 0: @@ -173,5 +166,5 @@ def addhashes(filename: str) -> None: progress_bar.render_finish() -if __name__ == '__main__': +if __name__ == "__main__": cli() diff --git a/tildes/scripts/clean_private_data.py b/tildes/scripts/clean_private_data.py index 5f8a3f2..e745f35 100644 --- a/tildes/scripts/clean_private_data.py +++ b/tildes/scripts/clean_private_data.py @@ -33,22 +33,17 @@ def clean_all_data(config_path: str) -> None: cleaner.clean_all() -class DataCleaner(): +class DataCleaner: """Container class for all methods related to cleaning up old data.""" - def __init__( - self, - db_session: Session, - retention_period: timedelta, - ) -> None: + def __init__(self, db_session: 
Session, retention_period: timedelta) -> None: """Create a new DataCleaner.""" self.db_session = db_session self.retention_cutoff = datetime.now() - retention_period def clean_all(self) -> None: """Call all the cleanup functions.""" - logging.info( - f'Cleaning up all data (retention cutoff {self.retention_cutoff})') + logging.info(f"Cleaning up all data (retention cutoff {self.retention_cutoff})") self.delete_old_log_entries() self.delete_old_topic_visits() @@ -68,7 +63,7 @@ class DataCleaner(): .delete(synchronize_session=False) ) self.db_session.commit() - logging.info(f'Deleted {deleted} old log entries.') + logging.info(f"Deleted {deleted} old log entries.") def delete_old_topic_visits(self) -> None: """Delete all topic visits older than the retention cutoff.""" @@ -78,7 +73,7 @@ class DataCleaner(): .delete(synchronize_session=False) ) self.db_session.commit() - logging.info(f'Deleted {deleted} old topic visits.') + logging.info(f"Deleted {deleted} old topic visits.") def clean_old_deleted_comments(self) -> None: """Clean the data of old deleted comments. @@ -92,14 +87,13 @@ class DataCleaner(): Comment.deleted_time <= self.retention_cutoff, # type: ignore Comment.user_id != 0, ) - .update({ - 'user_id': 0, - 'markdown': '', - 'rendered_html': '', - }, synchronize_session=False) + .update( + {"user_id": 0, "markdown": "", "rendered_html": ""}, + synchronize_session=False, + ) ) self.db_session.commit() - logging.info(f'Cleaned {updated} old deleted comments.') + logging.info(f"Cleaned {updated} old deleted comments.") def clean_old_deleted_topics(self) -> None: """Clean the data of old deleted topics. @@ -113,16 +107,19 @@ class DataCleaner(): Topic.deleted_time <= self.retention_cutoff, # type: ignore Topic.user_id != 0, ) - .update({ - 'user_id': 0, - 'title': '', - 'topic_type': 'TEXT', - 'markdown': None, - 'rendered_html': None, - 'link': None, - 'content_metadata': None, - '_tags': [], - }, synchronize_session=False) + .update( + { + "user_id": 0, + "title": "", + "topic_type": "TEXT", + "markdown": None, + "rendered_html": None, + "link": None, + "content_metadata": None, + "_tags": [], + }, + synchronize_session=False, + ) ) self.db_session.commit() - logging.info(f'Cleaned {updated} old deleted topics.') + logging.info(f"Cleaned {updated} old deleted topics.") diff --git a/tildes/scripts/initialize_db.py b/tildes/scripts/initialize_db.py index 5b5cb58..3565b39 100644 --- a/tildes/scripts/initialize_db.py +++ b/tildes/scripts/initialize_db.py @@ -15,27 +15,24 @@ from tildes.models.log import Log from tildes.models.user import User -def initialize_db( - config_path: str, - alembic_config_path: Optional[str] = None, -) -> None: +def initialize_db(config_path: str, alembic_config_path: Optional[str] = None) -> None: """Load the app config and create the database tables.""" db_session = get_session_from_config(config_path) engine = db_session.bind create_tables(engine) - run_sql_scripts_in_dir('sql/init/', engine) + run_sql_scripts_in_dir("sql/init/", engine) # if an Alembic config file wasn't specified, assume it's alembic.ini in # the same directory if not alembic_config_path: path = os.path.split(config_path)[0] - alembic_config_path = os.path.join(path, 'alembic.ini') + alembic_config_path = os.path.join(path, "alembic.ini") # mark current Alembic revision in db so migrations start from this point alembic_cfg = Config(alembic_config_path) - command.stamp(alembic_cfg, 'head') + command.stamp(alembic_cfg, "head") def create_tables(connectable: Connectable) -> None: @@ -44,7 
+41,8 @@ def create_tables(connectable: Connectable) -> None: excluded_tables = Log.INHERITED_TABLES tables = [ - table for table in DatabaseModel.metadata.tables.values() + table + for table in DatabaseModel.metadata.tables.values() if table.name not in excluded_tables ] DatabaseModel.metadata.create_all(connectable, tables=tables) @@ -53,29 +51,31 @@ def create_tables(connectable: Connectable) -> None: def run_sql_scripts_in_dir(path: str, engine: Engine) -> None: """Run all sql scripts in a directory.""" for root, _, files in os.walk(path): - sql_files = [ - filename for filename in files - if filename.endswith('.sql') - ] + sql_files = [filename for filename in files if filename.endswith(".sql")] for sql_file in sql_files: - subprocess.call([ - 'psql', - '-U', engine.url.username, - '-f', os.path.join(root, sql_file), - engine.url.database, - ]) + subprocess.call( + [ + "psql", + "-U", + engine.url.username, + "-f", + os.path.join(root, sql_file), + engine.url.database, + ] + ) def insert_dev_data(config_path: str) -> None: """Load the app config and insert some "starter" data for a dev version.""" session = get_session_from_config(config_path) - session.add_all([ - User('TestUser', 'password'), - Group( - 'testing', - 'An automatically created group to use for testing purposes', - ), - ]) + session.add_all( + [ + User("TestUser", "password"), + Group( + "testing", "An automatically created group to use for testing purposes" + ), + ] + ) session.commit() diff --git a/tildes/setup.py b/tildes/setup.py index 422012a..c0f0748 100644 --- a/tildes/setup.py +++ b/tildes/setup.py @@ -4,11 +4,11 @@ from setuptools import find_packages, setup setup( - name='tildes', - version='0.1', + name="tildes", + version="0.1", packages=find_packages(), entry_points=""" [paste.app_factory] main = tildes:main - """ + """, ) diff --git a/tildes/tests/conftest.py b/tildes/tests/conftest.py index 9e65784..e96cf74 100644 --- a/tildes/tests/conftest.py +++ b/tildes/tests/conftest.py @@ -18,7 +18,7 @@ from tildes.models.user import User # include the fixtures defined in fixtures.py -pytest_plugins = ['tests.fixtures'] +pytest_plugins = ["tests.fixtures"] class NestedSessionWrapper(Session): @@ -40,25 +40,25 @@ class NestedSessionWrapper(Session): super().rollback() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def pyramid_config(): """Set up the Pyramid environment.""" - settings = get_appsettings('development.ini') + settings = get_appsettings("development.ini") config = testing.setUp(settings=settings) - config.include('tildes.auth') + config.include("tildes.auth") yield config testing.tearDown() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def overall_db_session(pyramid_config): """Handle setup and teardown of test database for testing session.""" # read the database url from the pyramid INI file, and replace the db name - sqlalchemy_url = pyramid_config.registry.settings['sqlalchemy.url'] + sqlalchemy_url = pyramid_config.registry.settings["sqlalchemy.url"] parsed_url = make_url(sqlalchemy_url) - parsed_url.database = 'tildes_test' + parsed_url.database = "tildes_test" engine = create_engine(parsed_url) session_factory = sessionmaker(bind=engine) @@ -69,12 +69,9 @@ def overall_db_session(pyramid_config): # SQL init scripts need to be executed "manually" instead of using psql # like the normal database init process does, since the tables only exist # inside this transaction - init_scripts_dir = 'sql/init/' + init_scripts_dir = "sql/init/" 
for root, _, files in os.walk(init_scripts_dir): - sql_files = [ - filename for filename in files - if filename.endswith('.sql') - ] + sql_files = [filename for filename in files if filename.endswith(".sql")] for sql_file in sql_files: with open(os.path.join(root, sql_file)) as current_file: session.execute(current_file.read()) @@ -90,7 +87,7 @@ def overall_db_session(pyramid_config): session.rollback() -@fixture(scope='session') +@fixture(scope="session") def sdb(overall_db_session): """Testing-session-level db session with a nested transaction.""" overall_db_session.begin_nested() @@ -100,7 +97,7 @@ def sdb(overall_db_session): overall_db_session.rollback_all_nested() -@fixture(scope='function') +@fixture(scope="function") def db(overall_db_session): """Function-level db session with a nested transaction.""" overall_db_session.begin_nested() @@ -110,25 +107,23 @@ def db(overall_db_session): overall_db_session.rollback_all_nested() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def overall_redis_session(): """Create a session-level connection to a temporary redis server.""" # list of redis modules that need to be loaded (would be much nicer to do # this automatically somehow, maybe reading from the real redis.conf?) - redis_modules = [ - '/opt/redis-cell/libredis_cell.so', - ] + redis_modules = ["/opt/redis-cell/libredis_cell.so"] with RedisServer() as temp_redis_server: redis = StrictRedis(**temp_redis_server.dsn()) for module in redis_modules: - redis.execute_command('MODULE LOAD', module) + redis.execute_command("MODULE LOAD", module) yield redis -@fixture(scope='function') +@fixture(scope="function") def redis(overall_redis_session): """Create a function-level redis connection that wipes the db after use.""" yield overall_redis_session @@ -136,47 +131,47 @@ def redis(overall_redis_session): overall_redis_session.flushdb() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def session_user(sdb): """Create a user named 'SessionUser' in the db for test session.""" # note that some tests may depend on this username/password having these # specific values, so make sure to search for and update those tests if you # change the username or password for any reason - user = User('SessionUser', 'session user password') + user = User("SessionUser", "session user password") sdb.add(user) sdb.commit() yield user -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def session_user2(sdb): """Create a second user named 'OtherUser' in the db for test session. This is useful for cases where two different users are needed, such as when testing private messages. 
""" - user = User('OtherUser', 'other user password') + user = User("OtherUser", "other user password") sdb.add(user) sdb.commit() yield user -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def session_group(sdb): """Create a group named 'sessiongroup' in the db for test session.""" - group = Group('sessiongroup') + group = Group("sessiongroup") sdb.add(group) sdb.commit() yield group -@fixture(scope='session') +@fixture(scope="session") def base_app(overall_redis_session, sdb): """Configure a base WSGI app that webtest can create TestApps based on.""" - testing_app = get_app('development.ini') + testing_app = get_app("development.ini") # replace the redis connection used by the redis-sessions library with a # connection to the temporary server for this test session @@ -185,38 +180,38 @@ def base_app(overall_redis_session, sdb): def redis_factory(request): # pylint: disable=unused-argument return overall_redis_session - testing_app.app.registry['redis_connection_factory'] = redis_factory + + testing_app.app.registry["redis_connection_factory"] = redis_factory # replace the session factory function with one that will return the # testing db session (inside a nested transaction) def session_factory(): return sdb - testing_app.app.registry['db_session_factory'] = session_factory + + testing_app.app.registry["db_session_factory"] = session_factory yield testing_app -@fixture(scope='session') +@fixture(scope="session") def webtest(base_app): """Create a webtest TestApp and log in as the SessionUser account in it.""" # create the TestApp - note that specifying wsgi.url_scheme is necessary # so that the secure cookies from the session library will work app = TestApp( - base_app, - extra_environ={'wsgi.url_scheme': 'https'}, - cookiejar=CookieJar(), + base_app, extra_environ={"wsgi.url_scheme": "https"}, cookiejar=CookieJar() ) # fetch the login page, fill in the form, and submit it (sets the cookie) - login_page = app.get('/login') - login_page.form['username'] = 'SessionUser' - login_page.form['password'] = 'session user password' + login_page = app.get("/login") + login_page.form["username"] = "SessionUser" + login_page.form["password"] = "session user password" login_page.form.submit() yield app -@fixture(scope='session') +@fixture(scope="session") def webtest_loggedout(base_app): """Create a logged-out webtest TestApp (no cookies retained).""" yield TestApp(base_app) diff --git a/tildes/tests/fixtures.py b/tildes/tests/fixtures.py index 26cb8f8..abda44c 100644 --- a/tildes/tests/fixtures.py +++ b/tildes/tests/fixtures.py @@ -8,7 +8,8 @@ from tildes.models.topic import Topic def text_topic(db, session_group, session_user): """Create a text topic, delete it as teardown (including comments).""" new_topic = Topic.create_text_topic( - session_group, session_user, 'A Text Topic', 'the text') + session_group, session_user, "A Text Topic", "the text" + ) db.add(new_topic) db.commit() @@ -23,7 +24,8 @@ def text_topic(db, session_group, session_user): def link_topic(db, session_group, session_user): """Create a link topic, delete it as teardown (including comments).""" new_topic = Topic.create_link_topic( - session_group, session_user, 'A Link Topic', 'http://example.com') + session_group, session_user, "A Link Topic", "http://example.com" + ) db.add(new_topic) db.commit() @@ -43,7 +45,7 @@ def topic(text_topic): @fixture def comment(db, session_user, topic): """Create a comment in the database, delete it as teardown.""" - new_comment = Comment(topic, session_user, 'A 
comment') + new_comment = Comment(topic, session_user, "A comment") db.add(new_comment) db.commit() diff --git a/tildes/tests/test_comment.py b/tildes/tests/test_comment.py index e1128f6..633bdb1 100644 --- a/tildes/tests/test_comment.py +++ b/tildes/tests/test_comment.py @@ -1,81 +1,73 @@ from datetime import timedelta from freezegun import freeze_time -from pyramid.security import ( - Authenticated, - Everyone, - principals_allowed_by_permission, -) +from pyramid.security import Authenticated, Everyone, principals_allowed_by_permission from tildes.enums import CommentSortOption from tildes.lib.datetime import utc_now -from tildes.models.comment import ( - Comment, - CommentTree, - EDIT_GRACE_PERIOD, -) +from tildes.models.comment import Comment, CommentTree, EDIT_GRACE_PERIOD from tildes.schemas.comment import CommentSchema from tildes.schemas.fields import Markdown def test_comment_creation_validates_schema(mocker, session_user, topic): """Ensure that comment creation goes through schema validation.""" - mocker.spy(CommentSchema, 'load') + mocker.spy(CommentSchema, "load") - Comment(topic, session_user, 'A test comment') + Comment(topic, session_user, "A test comment") call_args = CommentSchema.load.call_args[0] - assert {'markdown': 'A test comment'} in call_args + assert {"markdown": "A test comment"} in call_args def test_comment_creation_uses_markdown_field(mocker, session_user, topic): """Ensure the Markdown field class is validating new comments.""" - mocker.spy(Markdown, '_validate') + mocker.spy(Markdown, "_validate") - Comment(topic, session_user, 'A test comment') + Comment(topic, session_user, "A test comment") assert Markdown._validate.called def test_comment_edit_uses_markdown_field(mocker, comment): """Ensure editing a comment is validated by the Markdown field class.""" - mocker.spy(Markdown, '_validate') + mocker.spy(Markdown, "_validate") - comment.markdown = 'Some new text after edit' + comment.markdown = "Some new text after edit" assert Markdown._validate.called def test_edit_markdown_updates_html(comment): """Ensure editing a comment works and the markdown and HTML update.""" - comment.markdown = 'Updated comment' - assert 'Updated' in comment.markdown - assert 'Updated' in comment.rendered_html + comment.markdown = "Updated comment" + assert "Updated" in comment.markdown + assert "Updated" in comment.rendered_html def test_comment_viewing_permission(comment): """Ensure that anyone can view a comment by default.""" - assert Everyone in principals_allowed_by_permission(comment, 'view') + assert Everyone in principals_allowed_by_permission(comment, "view") def test_comment_editing_permission(comment): """Ensure that only the comment's author can edit it.""" - principals = principals_allowed_by_permission(comment, 'edit') + principals = principals_allowed_by_permission(comment, "edit") assert principals == {comment.user_id} def test_comment_deleting_permission(comment): """Ensure that only the comment's author can delete it.""" - principals = principals_allowed_by_permission(comment, 'delete') + principals = principals_allowed_by_permission(comment, "delete") assert principals == {comment.user_id} def test_comment_replying_permission(comment): """Ensure that any authenticated user can reply to a comment.""" - assert Authenticated in principals_allowed_by_permission(comment, 'reply') + assert Authenticated in principals_allowed_by_permission(comment, "reply") def test_comment_reply_locked_thread_permission(comment): """Ensure that only admins can reply in locked 
threads.""" comment.topic.is_locked = True - assert principals_allowed_by_permission(comment, 'reply') == {'admin'} + assert principals_allowed_by_permission(comment, "reply") == {"admin"} def test_deleted_comment_permissions_removed(comment): @@ -90,8 +82,8 @@ def test_deleted_comment_permissions_removed(comment): def test_removed_comment_view_permission(comment): """Ensure a removed comment can only be viewed by its author and admins.""" comment.is_removed = True - principals = principals_allowed_by_permission(comment, 'view') - assert principals == {'admin', comment.user_id} + principals = principals_allowed_by_permission(comment, "view") + assert principals == {"admin", comment.user_id} def test_edit_grace_period(comment): @@ -100,7 +92,7 @@ def test_edit_grace_period(comment): edit_time = comment.created_time + EDIT_GRACE_PERIOD - one_sec with freeze_time(edit_time): - comment.markdown = 'some new markdown' + comment.markdown = "some new markdown" assert not comment.last_edited_time @@ -111,7 +103,7 @@ def test_edit_after_grace_period(comment): edit_time = comment.created_time + EDIT_GRACE_PERIOD + one_sec with freeze_time(edit_time): - comment.markdown = 'some new markdown' + comment.markdown = "some new markdown" assert comment.last_edited_time == utc_now() @@ -123,7 +115,7 @@ def test_multiple_edits_update_time(comment): for minutes in range(0, 4): edit_time = initial_time + timedelta(minutes=minutes) with freeze_time(edit_time): - comment.markdown = f'edit #{minutes}' + comment.markdown = f"edit #{minutes}" assert comment.last_edited_time == utc_now() @@ -134,8 +126,8 @@ def test_comment_tree(db, topic, session_user): sort = CommentSortOption.POSTED # add two root comments - root = Comment(topic, session_user, 'root') - root2 = Comment(topic, session_user, 'root2') + root = Comment(topic, session_user, "root") + root2 = Comment(topic, session_user, "root2") all_comments.extend([root, root2]) db.add_all(all_comments) db.commit() @@ -151,8 +143,8 @@ def test_comment_tree(db, topic, session_user): assert tree == [root] # add two replies to the remaining root comment - child = Comment(topic, session_user, '1', parent_comment=root) - child2 = Comment(topic, session_user, '2', parent_comment=root) + child = Comment(topic, session_user, "1", parent_comment=root) + child2 = Comment(topic, session_user, "2", parent_comment=root) all_comments.extend([child, child2]) db.add_all(all_comments) db.commit() @@ -165,8 +157,8 @@ def test_comment_tree(db, topic, session_user): assert child2.replies == [] # add two more replies to the second depth-1 comment - subchild = Comment(topic, session_user, '2a', parent_comment=child2) - subchild2 = Comment(topic, session_user, '2b', parent_comment=child2) + subchild = Comment(topic, session_user, "2a", parent_comment=child2) + subchild2 = Comment(topic, session_user, "2b", parent_comment=child2) all_comments.extend([subchild, subchild2]) db.add_all(all_comments) db.commit() diff --git a/tildes/tests/test_comment_user_mentions.py b/tildes/tests/test_comment_user_mentions.py index 51ec837..ef4e1e8 100644 --- a/tildes/tests/test_comment_user_mentions.py +++ b/tildes/tests/test_comment_user_mentions.py @@ -3,10 +3,7 @@ from pytest import fixture from sqlalchemy import and_ from tildes.enums import CommentNotificationType -from tildes.models.comment import ( - Comment, - CommentNotification, -) +from tildes.models.comment import Comment, CommentNotification from tildes.models.topic import Topic from tildes.models.user import User @@ -15,8 +12,8 @@ from 
tildes.models.user import User def user_list(db): """Create several users.""" users = [] - for name in ['foo', 'bar', 'baz']: - user = User(name, 'password') + for name in ["foo", "bar", "baz"]: + user = User(name, "password") users.append(user) db.add(user) db.commit() @@ -30,44 +27,40 @@ def user_list(db): def test_get_mentions_for_comment(db, user_list, comment): """Test that notifications are generated and returned.""" - comment.markdown = '@foo @bar. @baz!' - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + comment.markdown = "@foo @bar. @baz!" + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 3 for index, user in enumerate(user_list): assert mentions[index].user == user -def test_mention_filtering_parent_comment( - mocker, db, topic, user_list): +def test_mention_filtering_parent_comment(mocker, db, topic, user_list): """Test notification filtering for parent comments.""" - parent_comment = Comment(topic, user_list[0], 'Comment content.') + parent_comment = Comment(topic, user_list[0], "Comment content.") parent_comment.user_id = user_list[0].user_id comment = mocker.Mock( user_id=user_list[1].user_id, - markdown=f'@{user_list[0].username}', + markdown=f"@{user_list[0].username}", parent_comment=parent_comment, ) - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert not mentions def test_mention_filtering_self_mention(db, user_list, topic): """Test notification filtering for self-mentions.""" - comment = Comment(topic, user_list[0], f'@{user_list[0]}') - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + comment = Comment(topic, user_list[0], f"@{user_list[0]}") + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert not mentions def test_mention_filtering_top_level(db, user_list, session_group): """Test notification filtering for top-level comments.""" topic = Topic.create_text_topic( - session_group, user_list[0], 'Some title', 'some text') - comment = Comment(topic, user_list[1], f'@{user_list[0].username}') - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + session_group, user_list[0], "Some title", "some text" + ) + comment = Comment(topic, user_list[1], f"@{user_list[0].username}") + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert not mentions @@ -82,36 +75,35 @@ def test_prevent_duplicate_notifications(db, user_list, topic): 4. The comment is deleted. 
""" # 1 - comment = Comment(topic, user_list[0], f'@{user_list[1].username}') + comment = Comment(topic, user_list[0], f"@{user_list[1].username}") db.add(comment) db.commit() - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 1 assert mentions[0].user == user_list[1] db.add_all(mentions) db.commit() # 2 - comment.markdown = f'@{user_list[2].username}' + comment.markdown = f"@{user_list[2].username}" db.commit() - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 1 to_delete, to_add = CommentNotification.prevent_duplicate_notifications( - db, comment, mentions) + db, comment, mentions + ) assert len(to_delete) == 1 assert mentions == to_add assert to_delete[0].user.username == user_list[1].username # 3 - comment.markdown = f'@{user_list[1].username} @{user_list[2].username}' + comment.markdown = f"@{user_list[1].username} @{user_list[2].username}" db.commit() - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 2 to_delete, to_add = CommentNotification.prevent_duplicate_notifications( - db, comment, mentions) + db, comment, mentions + ) assert not to_delete assert len(to_add) == 1 @@ -120,9 +112,13 @@ def test_prevent_duplicate_notifications(db, user_list, topic): db.commit() notifications = ( db.query(CommentNotification.user_id) - .filter(and_( - CommentNotification.comment_id == comment.comment_id, - CommentNotification.notification_type == - CommentNotificationType.USER_MENTION, - )).all()) + .filter( + and_( + CommentNotification.comment_id == comment.comment_id, + CommentNotification.notification_type + == CommentNotificationType.USER_MENTION, + ) + ) + .all() + ) assert not notifications diff --git a/tildes/tests/test_datetime.py b/tildes/tests/test_datetime.py index 1a359d1..04355e4 100644 --- a/tildes/tests/test_datetime.py +++ b/tildes/tests/test_datetime.py @@ -20,40 +20,40 @@ def test_utc_now_accurate(): def test_descriptive_timedelta_basic(): """Ensure a simple descriptive timedelta works correctly.""" test_time = utc_now() - timedelta(hours=3) - assert descriptive_timedelta(test_time) == '3 hours ago' + assert descriptive_timedelta(test_time) == "3 hours ago" def test_more_precise_longer_descriptive_timedelta(): """Ensure a longer time period gets the extra precision level.""" test_time = utc_now() - timedelta(days=2, hours=5) - assert descriptive_timedelta(test_time) == '2 days, 5 hours ago' + assert descriptive_timedelta(test_time) == "2 days, 5 hours ago" def test_no_small_precision_descriptive_timedelta(): """Ensure the extra precision doesn't apply to small units.""" test_time = utc_now() - timedelta(days=6, minutes=10) - assert descriptive_timedelta(test_time) == '6 days ago' + assert descriptive_timedelta(test_time) == "6 days ago" def test_single_precision_below_an_hour(): """Ensure times under an hour only have one precision level.""" test_time = utc_now() - timedelta(minutes=59, seconds=59) - assert descriptive_timedelta(test_time) == '59 minutes ago' + assert descriptive_timedelta(test_time) == "59 minutes ago" def test_more_precision_above_an_hour(): """Ensure the second precision level gets added just above an hour.""" test_time = utc_now() - timedelta(hours=1, minutes=1) - assert descriptive_timedelta(test_time) == '1 
hour, 1 minute ago' + assert descriptive_timedelta(test_time) == "1 hour, 1 minute ago" def test_subsecond_descriptive_timedelta(): """Ensure time less than a second returns the special phrase.""" test_time = utc_now() - timedelta(microseconds=100) - assert descriptive_timedelta(test_time) == 'a moment ago' + assert descriptive_timedelta(test_time) == "a moment ago" def test_above_second_descriptive_timedelta(): """Ensure it starts describing time in seconds above 1 second.""" test_time = utc_now() - timedelta(seconds=1, microseconds=100) - assert descriptive_timedelta(test_time) == '1 second ago' + assert descriptive_timedelta(test_time) == "1 second ago" diff --git a/tildes/tests/test_group.py b/tildes/tests/test_group.py index 47fc8ad..99a8b59 100644 --- a/tildes/tests/test_group.py +++ b/tildes/tests/test_group.py @@ -3,46 +3,43 @@ from sqlalchemy.exc import IntegrityError from tildes.models.group import Group from tildes.schemas.fields import Ltree, SimpleString -from tildes.schemas.group import ( - GroupSchema, - is_valid_group_path, -) +from tildes.schemas.group import GroupSchema, is_valid_group_path def test_empty_path_invalid(): """Ensure empty group path is invalid.""" - assert not is_valid_group_path('') + assert not is_valid_group_path("") def test_typical_path_valid(): """Ensure a "normal-looking" group path is valid.""" - assert is_valid_group_path('games.video.nintendo_3ds') + assert is_valid_group_path("games.video.nintendo_3ds") def test_start_with_underscore(): """Ensure you can't start a path with an underscore.""" - assert not is_valid_group_path('_x.y.z') + assert not is_valid_group_path("_x.y.z") def test_middle_element_start_with_underscore(): """Ensure a middle path element can't start with an underscore.""" - assert not is_valid_group_path('x._y.z') + assert not is_valid_group_path("x._y.z") def test_end_with_underscore(): """Ensure you can't end a path with an underscore.""" - assert not is_valid_group_path('x.y.z_') + assert not is_valid_group_path("x.y.z_") def test_middle_element_end_with_underscore(): """Ensure a middle path element can't end with an underscore.""" - assert not is_valid_group_path('x.y_.z') + assert not is_valid_group_path("x.y_.z") def test_uppercase_letters_invalid(): """Ensure a group path can't contain uppercase chars.""" - assert is_valid_group_path('comp.lang.c') - assert not is_valid_group_path('comp.lang.C') + assert is_valid_group_path("comp.lang.c") + assert not is_valid_group_path("comp.lang.C") def test_paths_with_invalid_characters(): @@ -50,34 +47,34 @@ def test_paths_with_invalid_characters(): invalid_chars = ' ~!@#$%^&*()+={}[]|\\:;"<>,?/' for char in invalid_chars: - path = f'abc{char}xyz' + path = f"abc{char}xyz" assert not is_valid_group_path(path) def test_paths_with_unicode_characters(): """Ensure that paths can't use unicode chars (not comprehensive).""" - for path in ('games.pokémon', 'ポケモン', 'bites.møøse'): + for path in ("games.pokémon", "ポケモン", "bites.møøse"): assert not is_valid_group_path(path) def test_creation_validates_schema(mocker): """Ensure that group creation goes through expected validation.""" - mocker.spy(GroupSchema, 'load') - mocker.spy(Ltree, '_validate') - mocker.spy(SimpleString, '_validate') + mocker.spy(GroupSchema, "load") + mocker.spy(Ltree, "_validate") + mocker.spy(SimpleString, "_validate") - Group('testing', 'with a short description') + Group("testing", "with a short description") assert GroupSchema.load.called - assert Ltree._validate.call_args[0][1] == 'testing' - assert 
SimpleString._validate.call_args[0][1] == 'with a short description' + assert Ltree._validate.call_args[0][1] == "testing" + assert SimpleString._validate.call_args[0][1] == "with a short description" def test_duplicate_group(db): """Ensure groups with duplicate paths can't be created.""" - original = Group('twins') + original = Group("twins") db.add(original) - duplicate = Group('twins') + duplicate = Group("twins") db.add(duplicate) with raises(IntegrityError): diff --git a/tildes/tests/test_id.py b/tildes/tests/test_id.py index 4b214c7..b4fb6a8 100644 --- a/tildes/tests/test_id.py +++ b/tildes/tests/test_id.py @@ -7,12 +7,12 @@ from tildes.lib.id import id_to_id36, id36_to_id def test_id_to_id36(): """Make sure an ID->ID36 conversion is correct.""" - assert id_to_id36(571049189) == '9fzkdh' + assert id_to_id36(571049189) == "9fzkdh" def test_id36_to_id(): """Make sure an ID36->ID conversion is correct.""" - assert id36_to_id('x48l4z') == 2002502915 + assert id36_to_id("x48l4z") == 2002502915 def test_reversed_conversion_from_id(): @@ -23,7 +23,7 @@ def test_reversed_conversion_from_id(): def test_reversed_conversion_from_id36(): """Make sure an ID36->ID->ID36 conversion returns to original value.""" - original = 'h2l4pe' + original = "h2l4pe" assert id_to_id36(id36_to_id(original)) == original @@ -36,7 +36,7 @@ def test_zero_id_conversion_blocked(): def test_zero_id36_conversion_blocked(): """Ensure the ID36 conversion function doesn't accept zero.""" with raises(ValueError): - id36_to_id('0') + id36_to_id("0") def test_negative_id_conversion_blocked(): @@ -48,4 +48,4 @@ def test_negative_id_conversion_blocked(): def test_negative_id36_conversion_blocked(): """Ensure the ID36 conversion function doesn't accept negative numbers.""" with raises(ValueError): - id36_to_id('-1') + id36_to_id("-1") diff --git a/tildes/tests/test_markdown.py b/tildes/tests/test_markdown.py index f96c9ff..5ac9979 100644 --- a/tildes/tests/test_markdown.py +++ b/tildes/tests/test_markdown.py @@ -3,29 +3,29 @@ from tildes.lib.markdown import convert_markdown_to_safe_html def test_script_tag_escaped(): """Ensure that a ' + markdown = "" sanitized = convert_markdown_to_safe_html(markdown) - assert '