diff --git a/git_hooks/pre-commit b/git_hooks/pre-commit index 1aa452c..5f6c465 100755 --- a/git_hooks/pre-commit +++ b/git_hooks/pre-commit @@ -4,4 +4,5 @@ vagrant ssh -c ". activate \ && echo 'Checking mypy type annotations...' && mypy . \ + && echo 'Checking if Black would reformat any code...' && black --check . \ && echo -n 'Running tests: ' && pytest -q" diff --git a/git_hooks/pre-push b/git_hooks/pre-push index dafcae3..5595417 100755 --- a/git_hooks/pre-push +++ b/git_hooks/pre-push @@ -4,5 +4,6 @@ vagrant ssh -c ". activate \ && echo 'Checking mypy type annotations...' && mypy . \ + && echo 'Checking if Black would reformat any code...' && black --check . \ && echo -n 'Running tests: ' && pytest -q \ - && echo 'Checking code style (takes a while)...' && pylama" + && echo 'Checking code style fully (takes a while)...' && pylama" diff --git a/salt/salt/boussole.service.jinja2 b/salt/salt/boussole.service.jinja2 index 96132ea..85c35ae 100644 --- a/salt/salt/boussole.service.jinja2 +++ b/salt/salt/boussole.service.jinja2 @@ -1,11 +1,11 @@ -{% from 'common.jinja2' import app_dir, bin_dir -%} +{% from 'common.jinja2' import app_dir -%} [Unit] Description=Boussole - auto-compile SCSS files on change [Service] WorkingDirectory={{ app_dir }} Environment="LC_ALL=C.UTF-8" "LANG=C.UTF-8" -ExecStart={{ bin_dir }}/boussole watch --backend=yaml --config=boussole.yaml --poll +ExecStart=/opt/venvs/boussole/bin/boussole watch --backend=yaml --config=boussole.yaml --poll Restart=always RestartSec=5 diff --git a/salt/salt/boussole.sls b/salt/salt/boussole.sls index 28d7c69..b10ee0c 100644 --- a/salt/salt/boussole.sls +++ b/salt/salt/boussole.sls @@ -1,4 +1,20 @@ -{% from 'common.jinja2' import app_dir, bin_dir %} +{% from 'common.jinja2' import app_dir, python_version %} + +{% set boussole_venv_dir = '/opt/venvs/boussole' %} + +# Salt seems to use the deprecated pyvenv script, manual for now +boussole-venv-setup: + cmd.run: + - name: /usr/local/pyenv/versions/{{ python_version }}/bin/python -m venv {{ boussole_venv_dir }} + - creates: {{ boussole_venv_dir }} + - require: + - pkg: python3-venv + - pyenv: {{ python_version }} + +boussole-pip-installs: + cmd.run: + - name: {{ boussole_venv_dir }}/bin/pip install boussole + - unless: ls {{ boussole_venv_dir }}/lib/python3.6/site-packages/boussole /etc/systemd/system/boussole.service: file.managed: @@ -22,7 +38,7 @@ create-css-directory: initial-boussole-run: cmd.run: - - name: {{ bin_dir }}/boussole compile --backend=yaml --config=boussole.yaml + - name: {{ boussole_venv_dir }}/bin/boussole compile --backend=yaml --config=boussole.yaml - cwd: {{ app_dir }} - env: - LC_ALL: C.UTF-8 diff --git a/tildes/alembic/env.py b/tildes/alembic/env.py index 226a22e..050df0c 100644 --- a/tildes/alembic/env.py +++ b/tildes/alembic/env.py @@ -12,12 +12,7 @@ config = context.config fileConfig(config.config_file_name) # import all DatabaseModel subclasses here for autogenerate support -from tildes.models.comment import ( - Comment, - CommentNotification, - CommentTag, - CommentVote, -) +from tildes.models.comment import Comment, CommentNotification, CommentTag, CommentVote from tildes.models.group import Group, GroupSubscription from tildes.models.log import Log from tildes.models.message import MessageConversation, MessageReply @@ -25,6 +20,7 @@ from tildes.models.topic import Topic, TopicVisit, TopicVote from tildes.models.user import User, UserGroupSettings, UserInviteCode from tildes.models import DatabaseModel + target_metadata = DatabaseModel.metadata # other values 
from the config, defined by the needs of env.py, @@ -46,8 +42,7 @@ def run_migrations_offline(): """ url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=target_metadata, literal_binds=True) + context.configure(url=url, target_metadata=target_metadata, literal_binds=True) with context.begin_transaction(): context.run_migrations() @@ -62,18 +57,17 @@ def run_migrations_online(): """ connectable = engine_from_config( config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() + if context.is_offline_mode(): run_migrations_offline() else: diff --git a/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py b/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py index a05b76b..d04050b 100644 --- a/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py +++ b/tildes/alembic/versions/2512581c91b3_add_setting_to_open_links_in_new_tab.py @@ -10,17 +10,33 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '2512581c91b3' +revision = "2512581c91b3" down_revision = None branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('open_new_tab_external', sa.Boolean(), server_default='false', nullable=False)) - op.add_column('users', sa.Column('open_new_tab_internal', sa.Boolean(), server_default='false', nullable=False)) + op.add_column( + "users", + sa.Column( + "open_new_tab_external", + sa.Boolean(), + server_default="false", + nullable=False, + ), + ) + op.add_column( + "users", + sa.Column( + "open_new_tab_internal", + sa.Boolean(), + server_default="false", + nullable=False, + ), + ) def downgrade(): - op.drop_column('users', 'open_new_tab_internal') - op.drop_column('users', 'open_new_tab_external') + op.drop_column("users", "open_new_tab_internal") + op.drop_column("users", "open_new_tab_external") diff --git a/tildes/alembic/versions/347859b0355e_added_account_default_theme_setting.py b/tildes/alembic/versions/347859b0355e_added_account_default_theme_setting.py index 4195207..b188a0c 100644 --- a/tildes/alembic/versions/347859b0355e_added_account_default_theme_setting.py +++ b/tildes/alembic/versions/347859b0355e_added_account_default_theme_setting.py @@ -10,15 +10,20 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = '347859b0355e' -down_revision = 'fab922a8bb04' +revision = "347859b0355e" +down_revision = "fab922a8bb04" branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('theme_account_default', sa.Text(), server_default='', nullable=False)) + op.add_column( + "users", + sa.Column( + "theme_account_default", sa.Text(), server_default="", nullable=False + ), + ) def downgrade(): - op.drop_column('users', 'theme_account_default') + op.drop_column("users", "theme_account_default") diff --git a/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py b/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py index e8705db..8653976 100644 --- a/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py +++ b/tildes/alembic/versions/de83b8750123_add_setting_to_open_text_links_in_new_.py @@ -10,15 +10,20 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = 'de83b8750123' -down_revision = '2512581c91b3' +revision = "de83b8750123" +down_revision = "2512581c91b3" branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('open_new_tab_text', sa.Boolean(), server_default='false', nullable=False)) + op.add_column( + "users", + sa.Column( + "open_new_tab_text", sa.Boolean(), server_default="false", nullable=False + ), + ) def downgrade(): - op.drop_column('users', 'open_new_tab_text') + op.drop_column("users", "open_new_tab_text") diff --git a/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py b/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py index a675a0e..e3ba0ae 100644 --- a/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py +++ b/tildes/alembic/versions/f1ecbf24c212_added_user_tag_type_comment_notification.py @@ -9,8 +9,8 @@ from alembic import op # revision identifiers, used by Alembic. 
-revision = 'f1ecbf24c212' -down_revision = 'de83b8750123' +revision = "f1ecbf24c212" +down_revision = "de83b8750123" branch_labels = None depends_on = None @@ -20,18 +20,18 @@ def upgrade(): connection = None if not op.get_context().as_sql: connection = op.get_bind() - connection.execution_options(isolation_level='AUTOCOMMIT') + connection.execution_options(isolation_level="AUTOCOMMIT") op.execute( - "ALTER TYPE commentnotificationtype " - "ADD VALUE IF NOT EXISTS 'USER_MENTION'" + "ALTER TYPE commentnotificationtype ADD VALUE IF NOT EXISTS 'USER_MENTION'" ) # re-activate the transaction for any future migrations if connection is not None: - connection.execution_options(isolation_level='READ_COMMITTED') + connection.execution_options(isolation_level="READ_COMMITTED") - op.execute(''' + op.execute( + """ CREATE OR REPLACE FUNCTION send_rabbitmq_message_for_comment() RETURNS TRIGGER AS $$ DECLARE affected_row RECORD; @@ -50,23 +50,28 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - ''') - op.execute(''' + """ + ) + op.execute( + """ CREATE TRIGGER send_rabbitmq_message_for_comment_insert AFTER INSERT ON comments FOR EACH ROW EXECUTE PROCEDURE send_rabbitmq_message_for_comment('created'); - ''') - op.execute(''' + """ + ) + op.execute( + """ CREATE TRIGGER send_rabbitmq_message_for_comment_edit AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.markdown IS DISTINCT FROM NEW.markdown) EXECUTE PROCEDURE send_rabbitmq_message_for_comment('edited'); - ''') + """ + ) def downgrade(): - op.execute('DROP TRIGGER send_rabbitmq_message_for_comment_insert ON comments') - op.execute('DROP TRIGGER send_rabbitmq_message_for_comment_edit ON comments') - op.execute('DROP FUNCTION send_rabbitmq_message_for_comment') + op.execute("DROP TRIGGER send_rabbitmq_message_for_comment_insert ON comments") + op.execute("DROP TRIGGER send_rabbitmq_message_for_comment_edit ON comments") + op.execute("DROP FUNCTION send_rabbitmq_message_for_comment") diff --git a/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py b/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py index 2851c1d..6a4b666 100644 --- a/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py +++ b/tildes/alembic/versions/fab922a8bb04_update_comment_triggers_for_removals.py @@ -10,8 +10,8 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = 'fab922a8bb04' -down_revision = 'f1ecbf24c212' +revision = "fab922a8bb04" +down_revision = "f1ecbf24c212" branch_labels = None depends_on = None @@ -19,17 +19,20 @@ depends_on = None def upgrade(): # comment_notifications op.execute("DROP TRIGGER delete_comment_notifications_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_notifications_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted = false AND NEW.is_deleted = true) OR (OLD.is_removed = false AND NEW.is_removed = true)) EXECUTE PROCEDURE delete_comment_notifications(); - """) + """ + ) # comments - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION set_comment_deleted_time() RETURNS TRIGGER AS $$ BEGIN IF (NEW.is_deleted = TRUE) THEN @@ -41,17 +44,21 @@ def upgrade(): RETURN NEW; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER delete_comment_set_deleted_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_set_deleted_time_update BEFORE UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) EXECUTE PROCEDURE set_comment_deleted_time(); - """) + """ + ) - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION set_comment_removed_time() RETURNS TRIGGER AS $$ BEGIN IF (NEW.is_removed = TRUE) THEN @@ -63,19 +70,23 @@ def upgrade(): RETURN NEW; END; $$ LANGUAGE plpgsql; - """) - op.execute(""" + """ + ) + op.execute( + """ CREATE TRIGGER remove_comment_set_removed_time_update BEFORE UPDATE ON comments FOR EACH ROW WHEN (OLD.is_removed IS DISTINCT FROM NEW.is_removed) EXECUTE PROCEDURE set_comment_removed_time(); - """) + """ + ) # topic_visits op.execute("DROP TRIGGER update_topic_visits_num_comments_update ON comments") op.execute("DROP FUNCTION decrement_all_topic_visit_num_comments()") - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_all_topic_visit_num_comments() RETURNS TRIGGER AS $$ DECLARE old_visible BOOLEAN := NOT (OLD.is_deleted OR OLD.is_removed); @@ -96,18 +107,22 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) - op.execute(""" + """ + ) + op.execute( + """ CREATE TRIGGER update_topic_visits_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) OR (OLD.is_removed IS DISTINCT FROM NEW.is_removed)) EXECUTE PROCEDURE update_all_topic_visit_num_comments(); - """) + """ + ) # topics - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_num_comments() RETURNS TRIGGER AS $$ BEGIN IF (TG_OP = 'INSERT') THEN @@ -140,18 +155,22 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_num_comments_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) OR (OLD.is_removed IS DISTINCT FROM NEW.is_removed)) EXECUTE PROCEDURE update_topics_num_comments(); - """) + """ + ) - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_last_activity_time() RETURNS TRIGGER AS $$ DECLARE most_recent_comment RECORD; @@ -182,31 +201,37 @@ def upgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_last_activity_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_last_activity_time_update AFTER UPDATE ON comments FOR EACH ROW WHEN ((OLD.is_deleted IS DISTINCT 
FROM NEW.is_deleted) OR (OLD.is_removed IS DISTINCT FROM NEW.is_removed)) EXECUTE PROCEDURE update_topics_last_activity_time(); - """) + """ + ) def downgrade(): # comment_notifications op.execute("DROP TRIGGER delete_comment_notifications_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_notifications_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted = false AND NEW.is_deleted = true) EXECUTE PROCEDURE delete_comment_notifications(); - """) + """ + ) # comments - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION set_comment_deleted_time() RETURNS TRIGGER AS $$ BEGIN NEW.deleted_time := current_timestamp; @@ -214,15 +239,18 @@ def downgrade(): RETURN NEW; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER delete_comment_set_deleted_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER delete_comment_set_deleted_time_update BEFORE UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted = false AND NEW.is_deleted = true) EXECUTE PROCEDURE set_comment_deleted_time(); - """) + """ + ) op.execute("DROP TRIGGER remove_comment_set_removed_time_update ON comments") op.execute("DROP FUNCTION set_comment_removed_time()") @@ -230,7 +258,8 @@ def downgrade(): # topic_visits op.execute("DROP TRIGGER update_topic_visits_num_comments_update ON comments") op.execute("DROP FUNCTION update_all_topic_visit_num_comments()") - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION decrement_all_topic_visit_num_comments() RETURNS TRIGGER AS $$ BEGIN UPDATE topic_visits @@ -241,17 +270,21 @@ def downgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) - op.execute(""" + """ + ) + op.execute( + """ CREATE TRIGGER update_topic_visits_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted = false AND NEW.is_deleted = true) EXECUTE PROCEDURE decrement_all_topic_visit_num_comments(); - """) + """ + ) # topics - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_num_comments() RETURNS TRIGGER AS $$ BEGIN IF (TG_OP = 'INSERT' AND NEW.is_deleted = FALSE) THEN @@ -277,17 +310,21 @@ def downgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_num_comments_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_num_comments_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) EXECUTE PROCEDURE update_topics_num_comments(); - """) + """ + ) - op.execute(""" + op.execute( + """ CREATE OR REPLACE FUNCTION update_topics_last_activity_time() RETURNS TRIGGER AS $$ DECLARE most_recent_comment RECORD; @@ -317,12 +354,15 @@ def downgrade(): RETURN NULL; END; $$ LANGUAGE plpgsql; - """) + """ + ) op.execute("DROP TRIGGER update_topics_last_activity_time_update ON comments") - op.execute(""" + op.execute( + """ CREATE TRIGGER update_topics_last_activity_time_update AFTER UPDATE ON comments FOR EACH ROW WHEN (OLD.is_deleted IS DISTINCT FROM NEW.is_deleted) EXECUTE PROCEDURE update_topics_last_activity_time(); - """) + """ + ) diff --git a/tildes/consumers/comment_user_mentions_generator.py b/tildes/consumers/comment_user_mentions_generator.py index 265bde8..bb6691b 100644 --- a/tildes/consumers/comment_user_mentions_generator.py +++ b/tildes/consumers/comment_user_mentions_generator.py @@ -13,7 +13,7 @@ class CommentUserMentionGenerator(PgsqlQueueConsumer): """Process a delivered message.""" comment = ( self.db_session.query(Comment) - 
.filter_by(comment_id=msg.body['comment_id']) + .filter_by(comment_id=msg.body["comment_id"]) .one() ) @@ -22,15 +22,16 @@ class CommentUserMentionGenerator(PgsqlQueueConsumer): return new_mentions = CommentNotification.get_mentions_for_comment( - self.db_session, comment) + self.db_session, comment + ) - if msg.delivery_info['routing_key'] == 'comment.created': + if msg.delivery_info["routing_key"] == "comment.created": for user_mention in new_mentions: self.db_session.add(user_mention) - elif msg.delivery_info['routing_key'] == 'comment.edited': - to_delete, to_add = ( - CommentNotification.prevent_duplicate_notifications( - self.db_session, comment, new_mentions)) + elif msg.delivery_info["routing_key"] == "comment.edited": + to_delete, to_add = CommentNotification.prevent_duplicate_notifications( + self.db_session, comment, new_mentions + ) for user_mention in to_delete: self.db_session.delete(user_mention) @@ -39,8 +40,8 @@ class CommentUserMentionGenerator(PgsqlQueueConsumer): self.db_session.add(user_mention) -if __name__ == '__main__': +if __name__ == "__main__": CommentUserMentionGenerator( - queue_name='comment_user_mentions_generator.q', - routing_keys=['comment.created', 'comment.edited'], + queue_name="comment_user_mentions_generator.q", + routing_keys=["comment.created", "comment.edited"], ).consume_queue() diff --git a/tildes/consumers/topic_metadata_generator.py b/tildes/consumers/topic_metadata_generator.py index 23593b7..7075f9f 100644 --- a/tildes/consumers/topic_metadata_generator.py +++ b/tildes/consumers/topic_metadata_generator.py @@ -26,9 +26,7 @@ class TopicMetadataGenerator(PgsqlQueueConsumer): def run(self, msg: Message) -> None: """Process a delivered message.""" topic = ( - self.db_session.query(Topic) - .filter_by(topic_id=msg.body['topic_id']) - .one() + self.db_session.query(Topic).filter_by(topic_id=msg.body["topic_id"]).one() ) if topic.is_text_type: @@ -42,22 +40,19 @@ class TopicMetadataGenerator(PgsqlQueueConsumer): html_tree = HTMLParser().parseFragment(topic.rendered_html) # extract the text from all of the HTML elements - extracted_text = ''.join( - [element_text for element_text in html_tree.itertext()]) + extracted_text = "".join( + [element_text for element_text in html_tree.itertext()] + ) # sanitize unicode, remove leading/trailing whitespace, etc. 
extracted_text = simplify_string(extracted_text) # create a short excerpt by truncating the simplified string - excerpt = truncate_string( - extracted_text, - length=200, - truncate_at_chars=' ', - ) + excerpt = truncate_string(extracted_text, length=200, truncate_at_chars=" ") topic.content_metadata = { - 'word_count': word_count(extracted_text), - 'excerpt': excerpt, + "word_count": word_count(extracted_text), + "excerpt": excerpt, } def _generate_link_metadata(self, topic: Topic) -> None: @@ -68,13 +63,11 @@ class TopicMetadataGenerator(PgsqlQueueConsumer): parsed_domain = get_domain_from_url(topic.link) domain = self.public_suffix_list.get_public_suffix(parsed_domain) - topic.content_metadata = { - 'domain': domain, - } + topic.content_metadata = {"domain": domain} -if __name__ == '__main__': +if __name__ == "__main__": TopicMetadataGenerator( - queue_name='topic_metadata_generator.q', - routing_keys=['topic.created', 'topic.edited'], + queue_name="topic_metadata_generator.q", + routing_keys=["topic.created", "topic.edited"], ).consume_queue() diff --git a/tildes/gunicorn_config.py b/tildes/gunicorn_config.py index adf43e7..8f09a46 100644 --- a/tildes/gunicorn_config.py +++ b/tildes/gunicorn_config.py @@ -6,9 +6,8 @@ from prometheus_client import multiprocess def child_exit(server, worker): # type: ignore """Mark worker processes as dead for Prometheus when the worker exits. - Note that this uses the child_exit hook instead of worker_exit so that - it's handled by the master process (and will still be called if a worker - crashes). + Note that this uses the child_exit hook instead of worker_exit so that it's handled + by the master process (and will still be called if a worker crashes). """ # pylint: disable=unused-argument multiprocess.mark_process_dead(worker.pid) diff --git a/tildes/pylama.ini b/tildes/pylama.ini index 6ffcd2b..866ed4f 100644 --- a/tildes/pylama.ini +++ b/tildes/pylama.ini @@ -3,6 +3,9 @@ linters = mccabe,pycodestyle,pydocstyle,pyflakes,pylint skip = alembic/* # ignored checks: +# - D202 - pydocstyle check for blank lines after a function docstring, but +# Black will add one when the first code in the function is another +# function definition. # - D203 - pydocstyle has two mutually exclusive checks (D203/D211) # for whether a class docstring should have a blank line before # it or not. I don't want a blank line, so D203 is disabled. @@ -10,12 +13,18 @@ skip = alembic/* # time for whether a multi-line docstring's summary line should be # on the first or second line. I want it to be on the first line, # so D213 needs to be disabled. -ignore = D203,D213 +# - E203 - checks for whitespace around : in slices, but Black adds it +# in some cases. +ignore = D202,D203,D213,E203 + +[pylama:pycodestyle] +max_line_length = 88 [pylama:pylint] enable = all # disabled pylint checks: +# - bad-continuation (Black will handle wrapping lines properly) # - missing-docstring (already reported by pydocstyle) # - too-few-public-methods (more annoying than helpful, especially early on) # - too-many-instance-attributes (overly-picky when models need many) @@ -23,6 +32,7 @@ enable = all # - locally-enabled (or when checks are (re-)enabled) # - suppressed-message (...a different message when I disable one?) 
disable = + bad-continuation, missing-docstring, too-few-public-methods, too-many-instance-attributes, @@ -40,6 +50,11 @@ ignored-classes = APIv0, venusian.AttachInfo # - R0201 - method could be a function (for @pre_load-type methods) ignore = R0201 +[pylama:tildes/views/api/web/*] +# ignored checks for web API specifically: +# - C0103 - invalid function names (endpoints can have very long ones) +ignore = C0103 + [pylama:tests/*] # ignored checks for tests specifically: # - D100 - missing module-level docstrings diff --git a/tildes/requirements-to-freeze.txt b/tildes/requirements-to-freeze.txt index 5c14cb1..69c181d 100644 --- a/tildes/requirements-to-freeze.txt +++ b/tildes/requirements-to-freeze.txt @@ -3,9 +3,9 @@ alembic amqpy argon2_cffi astroid==1.5.3 # pylama has issues with pylint 1.8.1 +black bleach -boussole -click==5.1 # boussole needs < 6.0 +click cornice freezegun gunicorn @@ -29,6 +29,7 @@ pyramid-tm pyramid-webassets pytest pytest-mock +PyYAML # needs to be installed separately for webassets SQLAlchemy SQLAlchemy-Utils stripe diff --git a/tildes/requirements.txt b/tildes/requirements.txt index 9dc2c76..2047941 100644 --- a/tildes/requirements.txt +++ b/tildes/requirements.txt @@ -1,21 +1,19 @@ ago==0.0.92 alembic==1.0.0 amqpy==0.13.1 -argh==0.26.2 +appdirs==1.4.3 argon2-cffi==18.1.0 astroid==1.5.3 atomicwrites==1.1.5 attrs==18.1.0 backcall==0.1.0 -beautifulsoup4==4.6.0 +beautifulsoup4==4.6.3 +black==18.6b4 bleach==2.1.3 -boussole==1.2.3 certifi==2018.4.16 cffi==1.11.5 chardet==3.0.4 -click==5.1 -colorama==0.3.9 -colorlog==3.1.4 +click==6.7 cornice==3.4.0 decorator==4.3.0 freezegun==0.3.10 @@ -23,35 +21,32 @@ gunicorn==19.9.0 html5lib==1.0.1 hupper==1.3 idna==2.7 -ipython==6.4.0 +ipython==6.5.0 ipython-genutils==0.2.0 isort==4.3.4 jedi==0.12.1 Jinja2==2.10 lazy-object-proxy==1.3.1 -libsass==0.14.5 Mako==1.0.7 MarkupSafe==1.0 -marshmallow==2.15.3 +marshmallow==2.15.4 mccabe==0.6.1 -more-itertools==4.2.0 +more-itertools==4.3.0 mypy==0.620 mypy-extensions==0.3.0 parso==0.3.1 PasteDeploy==1.5.2 -pathtools==0.1.2 pexpect==4.6.0 pickleshare==0.7.4 plaster==1.0 plaster-pastedeploy==0.6 -pluggy==0.6.0 -prometheus-client==0.3.0 +pluggy==0.7.1 +prometheus-client==0.3.1 prompt-toolkit==1.0.15 psycopg2==2.7.5 ptyprocess==0.6.0 publicsuffix2==2.20160818 py==1.5.4 -pyaml==17.12.1 pycodestyle==2.4.0 pycparser==2.18 pydocstyle==2.1.1 @@ -68,7 +63,7 @@ pyramid-mako==1.0.2 pyramid-session-redis==1.4.1 pyramid-tm==2.2 pyramid-webassets==0.9 -pytest==3.6.3 +pytest==3.7.1 pytest-mock==1.10.0 python-dateutil==2.7.3 python-editor==1.0.3 @@ -82,9 +77,10 @@ six==1.11.0 snowballstemmer==1.2.1 SQLAlchemy==1.2.10 SQLAlchemy-Utils==0.33.3 -stripe==2.0.1 +stripe==2.4.0 testing.common.database==2.0.3 testing.redis==1.1.1 +toml==0.9.4 traitlets==4.3.2 transaction==2.2.1 translationstring==1.3 @@ -92,7 +88,6 @@ typed-ast==1.1.0 urllib3==1.23 venusian==1.1.0 waitress==1.1.0 -watchdog==0.8.3 wcwidth==0.1.7 webargs==4.0.0 webassets==0.12.1 diff --git a/tildes/scripts/breached_passwords.py b/tildes/scripts/breached_passwords.py index f588809..aaa025d 100644 --- a/tildes/scripts/breached_passwords.py +++ b/tildes/scripts/breached_passwords.py @@ -1,17 +1,15 @@ """Command-line tools for managing a breached-passwords bloom filter. -This tool will help with creating and updating a bloom filter in Redis (using -ReBloom: https://github.com/RedisLabsModules/rebloom) to hold hashes for -passwords that have been revealed through data breaches (to prevent users from -using these passwords here). 
The dumps are likely primarily sourced from Troy -Hunt's "Pwned Passwords" files: +This tool will help with creating and updating a bloom filter in Redis (using ReBloom: +https://github.com/RedisLabsModules/rebloom) to hold hashes for passwords that have been +revealed through data breaches (to prevent users from using these passwords here). The +dumps are likely primarily sourced from Troy Hunt's "Pwned Passwords" files: https://haveibeenpwned.com/Passwords -Specifically, the commands in this tool allow building the bloom filter -somewhere else, then the RDB file can be transferred to the production server. -Note that it is expected that a separate redis server instance is running -solely for holding this bloom filter. Replacing the RDB file will result in all -other keys being lost. +Specifically, the commands in this tool allow building the bloom filter somewhere else, +then the RDB file can be transferred to the production server. Note that it is expected +that a separate redis server instance is running solely for holding this bloom filter. +Replacing the RDB file will result in all other keys being lost. Expected usage of this tool should look something like: @@ -20,8 +18,8 @@ On the machine building the bloom filter: python breached_passwords.py addhashes pwned-passwords-1.0.txt python breached_passwords.py addhashes pwned-passwords-update-1.txt -Then the RDB file can simply be transferred to the production server, -overwriting any previous RDB file. +Then the RDB file can simply be transferred to the production server, overwriting any +previous RDB file. """ @@ -46,11 +44,11 @@ def generate_redis_protocol(*elements: Any) -> str: Based on the example Ruby code from https://redis.io/topics/mass-insert#generating-redis-protocol """ - command = f'*{len(elements)}\r\n' + command = f"*{len(elements)}\r\n" for element in elements: element = str(element) - command += f'${len(element)}\r\n{element}\r\n' + command += f"${len(element)}\r\n{element}\r\n" return command @@ -65,100 +63,92 @@ def validate_init_error_rate(ctx: Any, param: Any, value: Any) -> float: """Validate the --error-rate arg for the init command.""" # pylint: disable=unused-argument if not 0 < value < 1: - raise click.BadParameter('error rate must be a float between 0 and 1') + raise click.BadParameter("error rate must be a float between 0 and 1") return value -@cli.command(help='Initialize a new empty bloom filter') +@cli.command(help="Initialize a new empty bloom filter") @click.option( - '--estimate', + "--estimate", required=True, type=int, - help='Expected number of passwords that will be added', + help="Expected number of passwords that will be added", ) @click.option( - '--error-rate', + "--error-rate", default=0.01, show_default=True, - help='Bloom filter desired false positive ratio', + help="Bloom filter desired false positive ratio", callback=validate_init_error_rate, ) @click.confirmation_option( - prompt='Are you sure you want to clear any existing bloom filter?', + prompt="Are you sure you want to clear any existing bloom filter?" ) def init(estimate: int, error_rate: float) -> None: """Initialize a new bloom filter (destroying any existing one). - It generally shouldn't be necessary to re-init a new bloom filter very - often with this command, only if the previous one was created with too low - of an estimate for number of passwords, or to change to a different false - positive rate. 
For choosing an estimate value, according to the ReBloom - documentation: "Performance will begin to degrade after adding more items - than this number. The actual degradation will depend on how far the limit - has been exceeded. Performance will degrade linearly as the number of - entries grow exponentially." + It generally shouldn't be necessary to re-init a new bloom filter very often with + this command, only if the previous one was created with too low of an estimate for + number of passwords, or to change to a different false positive rate. For choosing + an estimate value, according to the ReBloom documentation: "Performance will begin + to degrade after adding more items than this number. The actual degradation will + depend on how far the limit has been exceeded. Performance will degrade linearly as + the number of entries grow exponentially." """ REDIS.delete(BREACHED_PASSWORDS_BF_KEY) # BF.RESERVE {key} {error_rate} {size} - REDIS.execute_command( - 'BF.RESERVE', - BREACHED_PASSWORDS_BF_KEY, - error_rate, - estimate, - ) + REDIS.execute_command("BF.RESERVE", BREACHED_PASSWORDS_BF_KEY, error_rate, estimate) click.echo( - 'Initialized bloom filter with expected size of {:,} and false ' - 'positive rate of {}%' - .format(estimate, error_rate * 100) + "Initialized bloom filter with expected size of {:,} and false " + "positive rate of {}%".format(estimate, error_rate * 100) ) -@cli.command(help='Add hashes from a file to the bloom filter') -@click.argument('filename', type=click.Path(exists=True, dir_okay=False)) +@cli.command(help="Add hashes from a file to the bloom filter") +@click.argument("filename", type=click.Path(exists=True, dir_okay=False)) def addhashes(filename: str) -> None: """Add all hashes from a file to the bloom filter. - This uses the method of generating commands in Redis protocol and feeding - them into an instance of `redis-cli --pipe`, as recommended in + This uses the method of generating commands in Redis protocol and feeding them into + an instance of `redis-cli --pipe`, as recommended in https://redis.io/topics/mass-insert """ # make sure the key exists and is a bloom filter try: - REDIS.execute_command('BF.DEBUG', BREACHED_PASSWORDS_BF_KEY) + REDIS.execute_command("BF.DEBUG", BREACHED_PASSWORDS_BF_KEY) except ResponseError: - click.echo('Bloom filter is not set up properly - run init first.') + click.echo("Bloom filter is not set up properly - run init first.") raise click.Abort # call wc to count the number of lines in the file for the progress bar - click.echo('Determining hash count...') - result = subprocess.run(['wc', '-l', filename], stdout=subprocess.PIPE) - line_count = int(result.stdout.split(b' ')[0]) + click.echo("Determining hash count...") + result = subprocess.run(["wc", "-l", filename], stdout=subprocess.PIPE) + line_count = int(result.stdout.split(b" ")[0]) progress_bar: Any = click.progressbar(length=line_count) update_interval = 100_000 - click.echo('Adding {:,} hashes to bloom filter...'.format(line_count)) + click.echo("Adding {:,} hashes to bloom filter...".format(line_count)) redis_pipe = subprocess.Popen( - ['redis-cli', '-s', BREACHED_PASSWORDS_REDIS_SOCKET, '--pipe'], + ["redis-cli", "-s", BREACHED_PASSWORDS_REDIS_SOCKET, "--pipe"], stdin=subprocess.PIPE, stdout=subprocess.DEVNULL, - encoding='utf-8', + encoding="utf-8", ) for count, line in enumerate(open(filename), start=1): hashval = line.strip().lower() - # the Pwned Passwords hash lists now have a frequency count for each - # hash, which is separated from the hash with a 
colon, so we need to - # handle that if it's present - hashval = hashval.split(':')[0] + # the Pwned Passwords hash lists now have a frequency count for each hash, which + # is separated from the hash with a colon, so we need to handle that if it's + # present + hashval = hashval.split(":")[0] - command = generate_redis_protocol( - 'BF.ADD', BREACHED_PASSWORDS_BF_KEY, hashval) + command = generate_redis_protocol("BF.ADD", BREACHED_PASSWORDS_BF_KEY, hashval) redis_pipe.stdin.write(command) if count % update_interval == 0: @@ -173,5 +163,5 @@ def addhashes(filename: str) -> None: progress_bar.render_finish() -if __name__ == '__main__': +if __name__ == "__main__": cli() diff --git a/tildes/scripts/clean_private_data.py b/tildes/scripts/clean_private_data.py index 5f8a3f2..cb0d84c 100644 --- a/tildes/scripts/clean_private_data.py +++ b/tildes/scripts/clean_private_data.py @@ -24,8 +24,8 @@ RETENTION_PERIOD = timedelta(days=30) def clean_all_data(config_path: str) -> None: """Clean all private/deleted data. - This should generally be the only function called in most cases, and will - initiate the full cleanup process. + This should generally be the only function called in most cases, and will initiate + the full cleanup process. """ db_session = get_session_from_config(config_path) @@ -33,22 +33,17 @@ def clean_all_data(config_path: str) -> None: cleaner.clean_all() -class DataCleaner(): +class DataCleaner: """Container class for all methods related to cleaning up old data.""" - def __init__( - self, - db_session: Session, - retention_period: timedelta, - ) -> None: + def __init__(self, db_session: Session, retention_period: timedelta) -> None: """Create a new DataCleaner.""" self.db_session = db_session self.retention_cutoff = datetime.now() - retention_period def clean_all(self) -> None: """Call all the cleanup functions.""" - logging.info( - f'Cleaning up all data (retention cutoff {self.retention_cutoff})') + logging.info(f"Cleaning up all data (retention cutoff {self.retention_cutoff})") self.delete_old_log_entries() self.delete_old_topic_visits() @@ -59,8 +54,8 @@ class DataCleaner(): def delete_old_log_entries(self) -> None: """Delete all log entries older than the retention cutoff. - Note that this will also delete all entries from the child tables that - inherit from Log (LogTopics, etc.). + Note that this will also delete all entries from the child tables that inherit + from Log (LogTopics, etc.). """ deleted = ( self.db_session.query(Log) @@ -68,7 +63,7 @@ class DataCleaner(): .delete(synchronize_session=False) ) self.db_session.commit() - logging.info(f'Deleted {deleted} old log entries.') + logging.info(f"Deleted {deleted} old log entries.") def delete_old_topic_visits(self) -> None: """Delete all topic visits older than the retention cutoff.""" @@ -78,13 +73,13 @@ class DataCleaner(): .delete(synchronize_session=False) ) self.db_session.commit() - logging.info(f'Deleted {deleted} old topic visits.') + logging.info(f"Deleted {deleted} old topic visits.") def clean_old_deleted_comments(self) -> None: """Clean the data of old deleted comments. - Change the comment's author to the "unknown user" (id 0), and delete - its contents. + Change the comment's author to the "unknown user" (id 0), and delete its + contents. 
""" updated = ( self.db_session.query(Comment) @@ -92,20 +87,19 @@ class DataCleaner(): Comment.deleted_time <= self.retention_cutoff, # type: ignore Comment.user_id != 0, ) - .update({ - 'user_id': 0, - 'markdown': '', - 'rendered_html': '', - }, synchronize_session=False) + .update( + {"user_id": 0, "markdown": "", "rendered_html": ""}, + synchronize_session=False, + ) ) self.db_session.commit() - logging.info(f'Cleaned {updated} old deleted comments.') + logging.info(f"Cleaned {updated} old deleted comments.") def clean_old_deleted_topics(self) -> None: """Clean the data of old deleted topics. - Change the topic's author to the "unknown user" (id 0), and delete its - title, contents, tags, and metadata. + Change the topic's author to the "unknown user" (id 0), and delete its title, + contents, tags, and metadata. """ updated = ( self.db_session.query(Topic) @@ -113,16 +107,19 @@ class DataCleaner(): Topic.deleted_time <= self.retention_cutoff, # type: ignore Topic.user_id != 0, ) - .update({ - 'user_id': 0, - 'title': '', - 'topic_type': 'TEXT', - 'markdown': None, - 'rendered_html': None, - 'link': None, - 'content_metadata': None, - '_tags': [], - }, synchronize_session=False) + .update( + { + "user_id": 0, + "title": "", + "topic_type": "TEXT", + "markdown": None, + "rendered_html": None, + "link": None, + "content_metadata": None, + "_tags": [], + }, + synchronize_session=False, + ) ) self.db_session.commit() - logging.info(f'Cleaned {updated} old deleted topics.') + logging.info(f"Cleaned {updated} old deleted topics.") diff --git a/tildes/scripts/initialize_db.py b/tildes/scripts/initialize_db.py index 5b5cb58..6f4f472 100644 --- a/tildes/scripts/initialize_db.py +++ b/tildes/scripts/initialize_db.py @@ -15,27 +15,24 @@ from tildes.models.log import Log from tildes.models.user import User -def initialize_db( - config_path: str, - alembic_config_path: Optional[str] = None, -) -> None: +def initialize_db(config_path: str, alembic_config_path: Optional[str] = None) -> None: """Load the app config and create the database tables.""" db_session = get_session_from_config(config_path) engine = db_session.bind create_tables(engine) - run_sql_scripts_in_dir('sql/init/', engine) + run_sql_scripts_in_dir("sql/init/", engine) - # if an Alembic config file wasn't specified, assume it's alembic.ini in - # the same directory + # if an Alembic config file wasn't specified, assume it's alembic.ini in the same + # directory if not alembic_config_path: path = os.path.split(config_path)[0] - alembic_config_path = os.path.join(path, 'alembic.ini') + alembic_config_path = os.path.join(path, "alembic.ini") # mark current Alembic revision in db so migrations start from this point alembic_cfg = Config(alembic_config_path) - command.stamp(alembic_cfg, 'head') + command.stamp(alembic_cfg, "head") def create_tables(connectable: Connectable) -> None: @@ -44,7 +41,8 @@ def create_tables(connectable: Connectable) -> None: excluded_tables = Log.INHERITED_TABLES tables = [ - table for table in DatabaseModel.metadata.tables.values() + table + for table in DatabaseModel.metadata.tables.values() if table.name not in excluded_tables ] DatabaseModel.metadata.create_all(connectable, tables=tables) @@ -53,29 +51,31 @@ def create_tables(connectable: Connectable) -> None: def run_sql_scripts_in_dir(path: str, engine: Engine) -> None: """Run all sql scripts in a directory.""" for root, _, files in os.walk(path): - sql_files = [ - filename for filename in files - if filename.endswith('.sql') - ] + sql_files = [filename for 
filename in files if filename.endswith(".sql")] for sql_file in sql_files: - subprocess.call([ - 'psql', - '-U', engine.url.username, - '-f', os.path.join(root, sql_file), - engine.url.database, - ]) + subprocess.call( + [ + "psql", + "-U", + engine.url.username, + "-f", + os.path.join(root, sql_file), + engine.url.database, + ] + ) def insert_dev_data(config_path: str) -> None: """Load the app config and insert some "starter" data for a dev version.""" session = get_session_from_config(config_path) - session.add_all([ - User('TestUser', 'password'), - Group( - 'testing', - 'An automatically created group to use for testing purposes', - ), - ]) + session.add_all( + [ + User("TestUser", "password"), + Group( + "testing", "An automatically created group to use for testing purposes" + ), + ] + ) session.commit() diff --git a/tildes/setup.py b/tildes/setup.py index 422012a..c0f0748 100644 --- a/tildes/setup.py +++ b/tildes/setup.py @@ -4,11 +4,11 @@ from setuptools import find_packages, setup setup( - name='tildes', - version='0.1', + name="tildes", + version="0.1", packages=find_packages(), entry_points=""" [paste.app_factory] main = tildes:main - """ + """, ) diff --git a/tildes/tests/conftest.py b/tildes/tests/conftest.py index 9e65784..ea6da3d 100644 --- a/tildes/tests/conftest.py +++ b/tildes/tests/conftest.py @@ -18,7 +18,7 @@ from tildes.models.user import User # include the fixtures defined in fixtures.py -pytest_plugins = ['tests.fixtures'] +pytest_plugins = ["tests.fixtures"] class NestedSessionWrapper(Session): @@ -40,25 +40,25 @@ class NestedSessionWrapper(Session): super().rollback() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def pyramid_config(): """Set up the Pyramid environment.""" - settings = get_appsettings('development.ini') + settings = get_appsettings("development.ini") config = testing.setUp(settings=settings) - config.include('tildes.auth') + config.include("tildes.auth") yield config testing.tearDown() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def overall_db_session(pyramid_config): """Handle setup and teardown of test database for testing session.""" # read the database url from the pyramid INI file, and replace the db name - sqlalchemy_url = pyramid_config.registry.settings['sqlalchemy.url'] + sqlalchemy_url = pyramid_config.registry.settings["sqlalchemy.url"] parsed_url = make_url(sqlalchemy_url) - parsed_url.database = 'tildes_test' + parsed_url.database = "tildes_test" engine = create_engine(parsed_url) session_factory = sessionmaker(bind=engine) @@ -66,21 +66,18 @@ def overall_db_session(pyramid_config): create_tables(session.connection()) - # SQL init scripts need to be executed "manually" instead of using psql - # like the normal database init process does, since the tables only exist - # inside this transaction - init_scripts_dir = 'sql/init/' + # SQL init scripts need to be executed "manually" instead of using psql like the + # normal database init process does, since the tables only exist inside this + # transaction + init_scripts_dir = "sql/init/" for root, _, files in os.walk(init_scripts_dir): - sql_files = [ - filename for filename in files - if filename.endswith('.sql') - ] + sql_files = [filename for filename in files if filename.endswith(".sql")] for sql_file in sql_files: with open(os.path.join(root, sql_file)) as current_file: session.execute(current_file.read()) - # convert the Session to the wrapper class to enforce staying inside - # nested transactions in the test 
functions + # convert the Session to the wrapper class to enforce staying inside nested + # transactions in the test functions session.__class__ = NestedSessionWrapper yield session @@ -90,7 +87,7 @@ def overall_db_session(pyramid_config): session.rollback() -@fixture(scope='session') +@fixture(scope="session") def sdb(overall_db_session): """Testing-session-level db session with a nested transaction.""" overall_db_session.begin_nested() @@ -100,7 +97,7 @@ def sdb(overall_db_session): overall_db_session.rollback_all_nested() -@fixture(scope='function') +@fixture(scope="function") def db(overall_db_session): """Function-level db session with a nested transaction.""" overall_db_session.begin_nested() @@ -110,25 +107,23 @@ def db(overall_db_session): overall_db_session.rollback_all_nested() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def overall_redis_session(): """Create a session-level connection to a temporary redis server.""" - # list of redis modules that need to be loaded (would be much nicer to do - # this automatically somehow, maybe reading from the real redis.conf?) - redis_modules = [ - '/opt/redis-cell/libredis_cell.so', - ] + # list of redis modules that need to be loaded (would be much nicer to do this + # automatically somehow, maybe reading from the real redis.conf?) + redis_modules = ["/opt/redis-cell/libredis_cell.so"] with RedisServer() as temp_redis_server: redis = StrictRedis(**temp_redis_server.dsn()) for module in redis_modules: - redis.execute_command('MODULE LOAD', module) + redis.execute_command("MODULE LOAD", module) yield redis -@fixture(scope='function') +@fixture(scope="function") def redis(overall_redis_session): """Create a function-level redis connection that wipes the db after use.""" yield overall_redis_session @@ -136,87 +131,87 @@ def redis(overall_redis_session): overall_redis_session.flushdb() -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def session_user(sdb): """Create a user named 'SessionUser' in the db for test session.""" - # note that some tests may depend on this username/password having these - # specific values, so make sure to search for and update those tests if you - # change the username or password for any reason - user = User('SessionUser', 'session user password') + # note that some tests may depend on this username/password having these specific + # values, so make sure to search for and update those tests if you change the + # username or password for any reason + user = User("SessionUser", "session user password") sdb.add(user) sdb.commit() yield user -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def session_user2(sdb): """Create a second user named 'OtherUser' in the db for test session. - This is useful for cases where two different users are needed, such as - when testing private messages. + This is useful for cases where two different users are needed, such as when testing + private messages. 
""" - user = User('OtherUser', 'other user password') + user = User("OtherUser", "other user password") sdb.add(user) sdb.commit() yield user -@fixture(scope='session', autouse=True) +@fixture(scope="session", autouse=True) def session_group(sdb): """Create a group named 'sessiongroup' in the db for test session.""" - group = Group('sessiongroup') + group = Group("sessiongroup") sdb.add(group) sdb.commit() yield group -@fixture(scope='session') +@fixture(scope="session") def base_app(overall_redis_session, sdb): """Configure a base WSGI app that webtest can create TestApps based on.""" - testing_app = get_app('development.ini') + testing_app = get_app("development.ini") - # replace the redis connection used by the redis-sessions library with a - # connection to the temporary server for this test session + # replace the redis connection used by the redis-sessions library with a connection + # to the temporary server for this test session testing_app.app.registry._redis_sessions = overall_redis_session def redis_factory(request): # pylint: disable=unused-argument return overall_redis_session - testing_app.app.registry['redis_connection_factory'] = redis_factory - # replace the session factory function with one that will return the - # testing db session (inside a nested transaction) + testing_app.app.registry["redis_connection_factory"] = redis_factory + + # replace the session factory function with one that will return the testing db + # session (inside a nested transaction) def session_factory(): return sdb - testing_app.app.registry['db_session_factory'] = session_factory + + testing_app.app.registry["db_session_factory"] = session_factory yield testing_app -@fixture(scope='session') +@fixture(scope="session") def webtest(base_app): """Create a webtest TestApp and log in as the SessionUser account in it.""" - # create the TestApp - note that specifying wsgi.url_scheme is necessary - # so that the secure cookies from the session library will work + # create the TestApp - note that specifying wsgi.url_scheme is necessary so that the + # secure cookies from the session library will work app = TestApp( - base_app, - extra_environ={'wsgi.url_scheme': 'https'}, - cookiejar=CookieJar(), + base_app, extra_environ={"wsgi.url_scheme": "https"}, cookiejar=CookieJar() ) # fetch the login page, fill in the form, and submit it (sets the cookie) - login_page = app.get('/login') - login_page.form['username'] = 'SessionUser' - login_page.form['password'] = 'session user password' + login_page = app.get("/login") + login_page.form["username"] = "SessionUser" + login_page.form["password"] = "session user password" login_page.form.submit() yield app -@fixture(scope='session') +@fixture(scope="session") def webtest_loggedout(base_app): """Create a logged-out webtest TestApp (no cookies retained).""" yield TestApp(base_app) diff --git a/tildes/tests/fixtures.py b/tildes/tests/fixtures.py index 26cb8f8..abda44c 100644 --- a/tildes/tests/fixtures.py +++ b/tildes/tests/fixtures.py @@ -8,7 +8,8 @@ from tildes.models.topic import Topic def text_topic(db, session_group, session_user): """Create a text topic, delete it as teardown (including comments).""" new_topic = Topic.create_text_topic( - session_group, session_user, 'A Text Topic', 'the text') + session_group, session_user, "A Text Topic", "the text" + ) db.add(new_topic) db.commit() @@ -23,7 +24,8 @@ def text_topic(db, session_group, session_user): def link_topic(db, session_group, session_user): """Create a link topic, delete it as teardown (including 
comments).""" new_topic = Topic.create_link_topic( - session_group, session_user, 'A Link Topic', 'http://example.com') + session_group, session_user, "A Link Topic", "http://example.com" + ) db.add(new_topic) db.commit() @@ -43,7 +45,7 @@ def topic(text_topic): @fixture def comment(db, session_user, topic): """Create a comment in the database, delete it as teardown.""" - new_comment = Comment(topic, session_user, 'A comment') + new_comment = Comment(topic, session_user, "A comment") db.add(new_comment) db.commit() diff --git a/tildes/tests/test_comment.py b/tildes/tests/test_comment.py index e1128f6..633bdb1 100644 --- a/tildes/tests/test_comment.py +++ b/tildes/tests/test_comment.py @@ -1,81 +1,73 @@ from datetime import timedelta from freezegun import freeze_time -from pyramid.security import ( - Authenticated, - Everyone, - principals_allowed_by_permission, -) +from pyramid.security import Authenticated, Everyone, principals_allowed_by_permission from tildes.enums import CommentSortOption from tildes.lib.datetime import utc_now -from tildes.models.comment import ( - Comment, - CommentTree, - EDIT_GRACE_PERIOD, -) +from tildes.models.comment import Comment, CommentTree, EDIT_GRACE_PERIOD from tildes.schemas.comment import CommentSchema from tildes.schemas.fields import Markdown def test_comment_creation_validates_schema(mocker, session_user, topic): """Ensure that comment creation goes through schema validation.""" - mocker.spy(CommentSchema, 'load') + mocker.spy(CommentSchema, "load") - Comment(topic, session_user, 'A test comment') + Comment(topic, session_user, "A test comment") call_args = CommentSchema.load.call_args[0] - assert {'markdown': 'A test comment'} in call_args + assert {"markdown": "A test comment"} in call_args def test_comment_creation_uses_markdown_field(mocker, session_user, topic): """Ensure the Markdown field class is validating new comments.""" - mocker.spy(Markdown, '_validate') + mocker.spy(Markdown, "_validate") - Comment(topic, session_user, 'A test comment') + Comment(topic, session_user, "A test comment") assert Markdown._validate.called def test_comment_edit_uses_markdown_field(mocker, comment): """Ensure editing a comment is validated by the Markdown field class.""" - mocker.spy(Markdown, '_validate') + mocker.spy(Markdown, "_validate") - comment.markdown = 'Some new text after edit' + comment.markdown = "Some new text after edit" assert Markdown._validate.called def test_edit_markdown_updates_html(comment): """Ensure editing a comment works and the markdown and HTML update.""" - comment.markdown = 'Updated comment' - assert 'Updated' in comment.markdown - assert 'Updated' in comment.rendered_html + comment.markdown = "Updated comment" + assert "Updated" in comment.markdown + assert "Updated" in comment.rendered_html def test_comment_viewing_permission(comment): """Ensure that anyone can view a comment by default.""" - assert Everyone in principals_allowed_by_permission(comment, 'view') + assert Everyone in principals_allowed_by_permission(comment, "view") def test_comment_editing_permission(comment): """Ensure that only the comment's author can edit it.""" - principals = principals_allowed_by_permission(comment, 'edit') + principals = principals_allowed_by_permission(comment, "edit") assert principals == {comment.user_id} def test_comment_deleting_permission(comment): """Ensure that only the comment's author can delete it.""" - principals = principals_allowed_by_permission(comment, 'delete') + principals = principals_allowed_by_permission(comment, 
"delete") assert principals == {comment.user_id} def test_comment_replying_permission(comment): """Ensure that any authenticated user can reply to a comment.""" - assert Authenticated in principals_allowed_by_permission(comment, 'reply') + assert Authenticated in principals_allowed_by_permission(comment, "reply") def test_comment_reply_locked_thread_permission(comment): """Ensure that only admins can reply in locked threads.""" comment.topic.is_locked = True - assert principals_allowed_by_permission(comment, 'reply') == {'admin'} + assert principals_allowed_by_permission(comment, "reply") == {"admin"} def test_deleted_comment_permissions_removed(comment): @@ -90,8 +82,8 @@ def test_deleted_comment_permissions_removed(comment): def test_removed_comment_view_permission(comment): """Ensure a removed comment can only be viewed by its author and admins.""" comment.is_removed = True - principals = principals_allowed_by_permission(comment, 'view') - assert principals == {'admin', comment.user_id} + principals = principals_allowed_by_permission(comment, "view") + assert principals == {"admin", comment.user_id} def test_edit_grace_period(comment): @@ -100,7 +92,7 @@ def test_edit_grace_period(comment): edit_time = comment.created_time + EDIT_GRACE_PERIOD - one_sec with freeze_time(edit_time): - comment.markdown = 'some new markdown' + comment.markdown = "some new markdown" assert not comment.last_edited_time @@ -111,7 +103,7 @@ def test_edit_after_grace_period(comment): edit_time = comment.created_time + EDIT_GRACE_PERIOD + one_sec with freeze_time(edit_time): - comment.markdown = 'some new markdown' + comment.markdown = "some new markdown" assert comment.last_edited_time == utc_now() @@ -123,7 +115,7 @@ def test_multiple_edits_update_time(comment): for minutes in range(0, 4): edit_time = initial_time + timedelta(minutes=minutes) with freeze_time(edit_time): - comment.markdown = f'edit #{minutes}' + comment.markdown = f"edit #{minutes}" assert comment.last_edited_time == utc_now() @@ -134,8 +126,8 @@ def test_comment_tree(db, topic, session_user): sort = CommentSortOption.POSTED # add two root comments - root = Comment(topic, session_user, 'root') - root2 = Comment(topic, session_user, 'root2') + root = Comment(topic, session_user, "root") + root2 = Comment(topic, session_user, "root2") all_comments.extend([root, root2]) db.add_all(all_comments) db.commit() @@ -151,8 +143,8 @@ def test_comment_tree(db, topic, session_user): assert tree == [root] # add two replies to the remaining root comment - child = Comment(topic, session_user, '1', parent_comment=root) - child2 = Comment(topic, session_user, '2', parent_comment=root) + child = Comment(topic, session_user, "1", parent_comment=root) + child2 = Comment(topic, session_user, "2", parent_comment=root) all_comments.extend([child, child2]) db.add_all(all_comments) db.commit() @@ -165,8 +157,8 @@ def test_comment_tree(db, topic, session_user): assert child2.replies == [] # add two more replies to the second depth-1 comment - subchild = Comment(topic, session_user, '2a', parent_comment=child2) - subchild2 = Comment(topic, session_user, '2b', parent_comment=child2) + subchild = Comment(topic, session_user, "2a", parent_comment=child2) + subchild2 = Comment(topic, session_user, "2b", parent_comment=child2) all_comments.extend([subchild, subchild2]) db.add_all(all_comments) db.commit() diff --git a/tildes/tests/test_comment_user_mentions.py b/tildes/tests/test_comment_user_mentions.py index 51ec837..5bd2b2e 100644 --- 
a/tildes/tests/test_comment_user_mentions.py +++ b/tildes/tests/test_comment_user_mentions.py @@ -3,10 +3,7 @@ from pytest import fixture from sqlalchemy import and_ from tildes.enums import CommentNotificationType -from tildes.models.comment import ( - Comment, - CommentNotification, -) +from tildes.models.comment import Comment, CommentNotification from tildes.models.topic import Topic from tildes.models.user import User @@ -15,8 +12,8 @@ from tildes.models.user import User def user_list(db): """Create several users.""" users = [] - for name in ['foo', 'bar', 'baz']: - user = User(name, 'password') + for name in ["foo", "bar", "baz"]: + user = User(name, "password") users.append(user) db.add(user) db.commit() @@ -30,44 +27,40 @@ def user_list(db): def test_get_mentions_for_comment(db, user_list, comment): """Test that notifications are generated and returned.""" - comment.markdown = '@foo @bar. @baz!' - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + comment.markdown = "@foo @bar. @baz!" + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 3 for index, user in enumerate(user_list): assert mentions[index].user == user -def test_mention_filtering_parent_comment( - mocker, db, topic, user_list): +def test_mention_filtering_parent_comment(mocker, db, topic, user_list): """Test notification filtering for parent comments.""" - parent_comment = Comment(topic, user_list[0], 'Comment content.') + parent_comment = Comment(topic, user_list[0], "Comment content.") parent_comment.user_id = user_list[0].user_id comment = mocker.Mock( user_id=user_list[1].user_id, - markdown=f'@{user_list[0].username}', + markdown=f"@{user_list[0].username}", parent_comment=parent_comment, ) - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert not mentions def test_mention_filtering_self_mention(db, user_list, topic): """Test notification filtering for self-mentions.""" - comment = Comment(topic, user_list[0], f'@{user_list[0]}') - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + comment = Comment(topic, user_list[0], f"@{user_list[0]}") + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert not mentions def test_mention_filtering_top_level(db, user_list, session_group): """Test notification filtering for top-level comments.""" topic = Topic.create_text_topic( - session_group, user_list[0], 'Some title', 'some text') - comment = Comment(topic, user_list[1], f'@{user_list[0].username}') - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + session_group, user_list[0], "Some title", "some text" + ) + comment = Comment(topic, user_list[1], f"@{user_list[0].username}") + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert not mentions @@ -75,43 +68,42 @@ def test_prevent_duplicate_notifications(db, user_list, topic): """Test that notifications are cleaned up for edits. Flow: - 1. A comment is created by user A that mentions user B. Notifications - are generated, and yield A mentioning B. + 1. A comment is created by user A that mentions user B. Notifications are + generated, and yield A mentioning B. 2. The comment is edited to mention C and not B. 3. The comment is edited to mention B and C. 4. The comment is deleted. 
""" # 1 - comment = Comment(topic, user_list[0], f'@{user_list[1].username}') + comment = Comment(topic, user_list[0], f"@{user_list[1].username}") db.add(comment) db.commit() - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 1 assert mentions[0].user == user_list[1] db.add_all(mentions) db.commit() # 2 - comment.markdown = f'@{user_list[2].username}' + comment.markdown = f"@{user_list[2].username}" db.commit() - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 1 to_delete, to_add = CommentNotification.prevent_duplicate_notifications( - db, comment, mentions) + db, comment, mentions + ) assert len(to_delete) == 1 assert mentions == to_add assert to_delete[0].user.username == user_list[1].username # 3 - comment.markdown = f'@{user_list[1].username} @{user_list[2].username}' + comment.markdown = f"@{user_list[1].username} @{user_list[2].username}" db.commit() - mentions = CommentNotification.get_mentions_for_comment( - db, comment) + mentions = CommentNotification.get_mentions_for_comment(db, comment) assert len(mentions) == 2 to_delete, to_add = CommentNotification.prevent_duplicate_notifications( - db, comment, mentions) + db, comment, mentions + ) assert not to_delete assert len(to_add) == 1 @@ -120,9 +112,13 @@ def test_prevent_duplicate_notifications(db, user_list, topic): db.commit() notifications = ( db.query(CommentNotification.user_id) - .filter(and_( - CommentNotification.comment_id == comment.comment_id, - CommentNotification.notification_type == - CommentNotificationType.USER_MENTION, - )).all()) + .filter( + and_( + CommentNotification.comment_id == comment.comment_id, + CommentNotification.notification_type + == CommentNotificationType.USER_MENTION, + ) + ) + .all() + ) assert not notifications diff --git a/tildes/tests/test_datetime.py b/tildes/tests/test_datetime.py index 1a359d1..04355e4 100644 --- a/tildes/tests/test_datetime.py +++ b/tildes/tests/test_datetime.py @@ -20,40 +20,40 @@ def test_utc_now_accurate(): def test_descriptive_timedelta_basic(): """Ensure a simple descriptive timedelta works correctly.""" test_time = utc_now() - timedelta(hours=3) - assert descriptive_timedelta(test_time) == '3 hours ago' + assert descriptive_timedelta(test_time) == "3 hours ago" def test_more_precise_longer_descriptive_timedelta(): """Ensure a longer time period gets the extra precision level.""" test_time = utc_now() - timedelta(days=2, hours=5) - assert descriptive_timedelta(test_time) == '2 days, 5 hours ago' + assert descriptive_timedelta(test_time) == "2 days, 5 hours ago" def test_no_small_precision_descriptive_timedelta(): """Ensure the extra precision doesn't apply to small units.""" test_time = utc_now() - timedelta(days=6, minutes=10) - assert descriptive_timedelta(test_time) == '6 days ago' + assert descriptive_timedelta(test_time) == "6 days ago" def test_single_precision_below_an_hour(): """Ensure times under an hour only have one precision level.""" test_time = utc_now() - timedelta(minutes=59, seconds=59) - assert descriptive_timedelta(test_time) == '59 minutes ago' + assert descriptive_timedelta(test_time) == "59 minutes ago" def test_more_precision_above_an_hour(): """Ensure the second precision level gets added just above an hour.""" test_time = utc_now() - timedelta(hours=1, minutes=1) - assert descriptive_timedelta(test_time) == '1 
hour, 1 minute ago' + assert descriptive_timedelta(test_time) == "1 hour, 1 minute ago" def test_subsecond_descriptive_timedelta(): """Ensure time less than a second returns the special phrase.""" test_time = utc_now() - timedelta(microseconds=100) - assert descriptive_timedelta(test_time) == 'a moment ago' + assert descriptive_timedelta(test_time) == "a moment ago" def test_above_second_descriptive_timedelta(): """Ensure it starts describing time in seconds above 1 second.""" test_time = utc_now() - timedelta(seconds=1, microseconds=100) - assert descriptive_timedelta(test_time) == '1 second ago' + assert descriptive_timedelta(test_time) == "1 second ago" diff --git a/tildes/tests/test_group.py b/tildes/tests/test_group.py index 47fc8ad..99a8b59 100644 --- a/tildes/tests/test_group.py +++ b/tildes/tests/test_group.py @@ -3,46 +3,43 @@ from sqlalchemy.exc import IntegrityError from tildes.models.group import Group from tildes.schemas.fields import Ltree, SimpleString -from tildes.schemas.group import ( - GroupSchema, - is_valid_group_path, -) +from tildes.schemas.group import GroupSchema, is_valid_group_path def test_empty_path_invalid(): """Ensure empty group path is invalid.""" - assert not is_valid_group_path('') + assert not is_valid_group_path("") def test_typical_path_valid(): """Ensure a "normal-looking" group path is valid.""" - assert is_valid_group_path('games.video.nintendo_3ds') + assert is_valid_group_path("games.video.nintendo_3ds") def test_start_with_underscore(): """Ensure you can't start a path with an underscore.""" - assert not is_valid_group_path('_x.y.z') + assert not is_valid_group_path("_x.y.z") def test_middle_element_start_with_underscore(): """Ensure a middle path element can't start with an underscore.""" - assert not is_valid_group_path('x._y.z') + assert not is_valid_group_path("x._y.z") def test_end_with_underscore(): """Ensure you can't end a path with an underscore.""" - assert not is_valid_group_path('x.y.z_') + assert not is_valid_group_path("x.y.z_") def test_middle_element_end_with_underscore(): """Ensure a middle path element can't end with an underscore.""" - assert not is_valid_group_path('x.y_.z') + assert not is_valid_group_path("x.y_.z") def test_uppercase_letters_invalid(): """Ensure a group path can't contain uppercase chars.""" - assert is_valid_group_path('comp.lang.c') - assert not is_valid_group_path('comp.lang.C') + assert is_valid_group_path("comp.lang.c") + assert not is_valid_group_path("comp.lang.C") def test_paths_with_invalid_characters(): @@ -50,34 +47,34 @@ def test_paths_with_invalid_characters(): invalid_chars = ' ~!@#$%^&*()+={}[]|\\:;"<>,?/' for char in invalid_chars: - path = f'abc{char}xyz' + path = f"abc{char}xyz" assert not is_valid_group_path(path) def test_paths_with_unicode_characters(): """Ensure that paths can't use unicode chars (not comprehensive).""" - for path in ('games.pokémon', 'ポケモン', 'bites.møøse'): + for path in ("games.pokémon", "ポケモン", "bites.møøse"): assert not is_valid_group_path(path) def test_creation_validates_schema(mocker): """Ensure that group creation goes through expected validation.""" - mocker.spy(GroupSchema, 'load') - mocker.spy(Ltree, '_validate') - mocker.spy(SimpleString, '_validate') + mocker.spy(GroupSchema, "load") + mocker.spy(Ltree, "_validate") + mocker.spy(SimpleString, "_validate") - Group('testing', 'with a short description') + Group("testing", "with a short description") assert GroupSchema.load.called - assert Ltree._validate.call_args[0][1] == 'testing' - assert 
SimpleString._validate.call_args[0][1] == 'with a short description' + assert Ltree._validate.call_args[0][1] == "testing" + assert SimpleString._validate.call_args[0][1] == "with a short description" def test_duplicate_group(db): """Ensure groups with duplicate paths can't be created.""" - original = Group('twins') + original = Group("twins") db.add(original) - duplicate = Group('twins') + duplicate = Group("twins") db.add(duplicate) with raises(IntegrityError): diff --git a/tildes/tests/test_id.py b/tildes/tests/test_id.py index 4b214c7..b4fb6a8 100644 --- a/tildes/tests/test_id.py +++ b/tildes/tests/test_id.py @@ -7,12 +7,12 @@ from tildes.lib.id import id_to_id36, id36_to_id def test_id_to_id36(): """Make sure an ID->ID36 conversion is correct.""" - assert id_to_id36(571049189) == '9fzkdh' + assert id_to_id36(571049189) == "9fzkdh" def test_id36_to_id(): """Make sure an ID36->ID conversion is correct.""" - assert id36_to_id('x48l4z') == 2002502915 + assert id36_to_id("x48l4z") == 2002502915 def test_reversed_conversion_from_id(): @@ -23,7 +23,7 @@ def test_reversed_conversion_from_id(): def test_reversed_conversion_from_id36(): """Make sure an ID36->ID->ID36 conversion returns to original value.""" - original = 'h2l4pe' + original = "h2l4pe" assert id_to_id36(id36_to_id(original)) == original @@ -36,7 +36,7 @@ def test_zero_id_conversion_blocked(): def test_zero_id36_conversion_blocked(): """Ensure the ID36 conversion function doesn't accept zero.""" with raises(ValueError): - id36_to_id('0') + id36_to_id("0") def test_negative_id_conversion_blocked(): @@ -48,4 +48,4 @@ def test_negative_id_conversion_blocked(): def test_negative_id36_conversion_blocked(): """Ensure the ID36 conversion function doesn't accept negative numbers.""" with raises(ValueError): - id36_to_id('-1') + id36_to_id("-1") diff --git a/tildes/tests/test_markdown.py b/tildes/tests/test_markdown.py index f96c9ff..5ac9979 100644 --- a/tildes/tests/test_markdown.py +++ b/tildes/tests/test_markdown.py @@ -3,29 +3,29 @@ from tildes.lib.markdown import convert_markdown_to_safe_html def test_script_tag_escaped(): """Ensure that a ' + markdown = "" sanitized = convert_markdown_to_safe_html(markdown) - assert '