diff --git a/.docker-compose.gv.env b/.docker-compose.gv.env index ae087e676ca..8d2e87aa645 100644 --- a/.docker-compose.gv.env +++ b/.docker-compose.gv.env @@ -1,4 +1,4 @@ -ALLOWED_HOSTS="localhost,192.168.168.167" +ALLOWED_HOSTS="localhost,192.168.168.167,127.0.0.1" CORS_ALLOWED_ORIGINS="http://localhost:5000,http://192.168.168.167:5000" OSFDB_HOST=192.168.168.167 POSTGRES_HOST="$OSFDB_HOST" @@ -7,5 +7,10 @@ OSF_API_BASE_URL="http://192.168.168.167:8000" POSTGRES_USER="postgres" POSTGRES_DB="gravyvalet" SECRET_KEY="secret" -PYTHONUNBUFFERED=0 # This when set to 0 will allow print statements to be visible in the Docker logs +PYTHONUNBUFFERED=1 # Any non-empty value makes output unbuffered so print statements are visible in the Docker logs +OSF_AUTH_COOKIE_NAME=osf +OSF_SENSITIVE_DATA_SECRET="TrainglesAre5Squares" +OSF_SENSITIVE_DATA_SALT="yusaltydough" +DEBUG=1 + diff --git a/README-docker-compose.md b/README-docker-compose.md index c3ff7aed542..16d8ecf4b09 100644 --- a/README-docker-compose.md +++ b/README-docker-compose.md @@ -350,12 +350,12 @@ docker compose run --rm --service-ports web - Test a Specific Class ```bash - docker compose run --rm web python3 -m pytest tests/test_conferences.py::TestProvisionNode + docker compose run --rm web python3 -m pytest tests/test_node.py::TestExample ``` - Test a Specific Method ```bash - docker compose run --rm web python3 -m pytest tests/test_conferences.py::TestProvisionNode::test_upload + docker compose run --rm web python3 -m pytest tests/test_node.py::TestExample::test_example ``` ## Managing Container State diff --git a/addons/base/views.py b/addons/base/views.py index 6f22c71f3e3..b71b5a10709 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -36,7 +36,6 @@ from framework.sentry import log_exception from framework.routing import proxy_url from framework.transactions.handlers import no_auto_transaction -from website import mails from website import settings from addons.base import signals as file_signals 
from addons.base.utils import format_last_known_metadata, get_mfr_url @@ -54,7 +53,8 @@ DraftRegistration, Guid, FileVersionUserMetadata, - FileVersion + FileVersion, + NotificationType ) from osf.metrics import PreprintView, PreprintDownload from osf.utils import permissions @@ -431,11 +431,7 @@ def _enqueue_metrics(file_version, file_node, action, auth, from_mfr=False): def _construct_payload(auth, resource, credentials, waterbutler_settings): if isinstance(resource, Registration): - callback_url = resource.api_url_for( - 'registration_callbacks', - _absolute=True, - _internal=True - ) + callback_url = resource.callbacks_url else: callback_url = resource.api_url_for( 'create_waterbutler_log', @@ -583,17 +579,17 @@ def create_waterbutler_log(payload, **kwargs): ) if payload.get('email') is True or payload.get('errors'): - mails.send_mail( - user.username, - mails.FILE_OPERATION_FAILED if payload.get('errors') - else mails.FILE_OPERATION_SUCCESS, - action=payload['action'], - source_node=source_node, - destination_node=destination_node, - source_path=payload['source']['materialized'], - source_addon=payload['source']['addon'], - destination_addon=payload['destination']['addon'], - osf_support_email=settings.OSF_SUPPORT_EMAIL + if payload.get('errors'): + notification_type = NotificationType.Type.FILE_OPERATION_FAILED + else: + notification_type = NotificationType.Type.FILE_OPERATION_SUCCESS + + NotificationType.objects.get( + name=notification_type.value, + ).emit( + user=user, + subscribed_object=destination_node or node, + event_context=payload, ) if payload.get('errors'): @@ -613,6 +609,14 @@ def create_waterbutler_log(payload, **kwargs): with transaction.atomic(): file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload) + NotificationType.objects.get( + name=action, + ).emit( + user=user, + subscribed_object=target_node or node, + event_context=metadata, + ) + return {'status': 'success'} diff --git 
a/addons/wiki/tests/test_views.py b/addons/wiki/tests/test_views.py index 646d03c5e15..6b2488aaf47 100644 --- a/addons/wiki/tests/test_views.py +++ b/addons/wiki/tests/test_views.py @@ -111,66 +111,6 @@ def test_update_name_invalid(self): with pytest.raises(NameInvalidError): WikiPage.objects.create_for_node(self.project, invalid_name, 'more valid content', self.auth) - def test_update_wiki_updates_comments_and_user_comments_viewed_timestamp(self): - project = ProjectFactory(creator=self.user, is_public=True) - wiki_page = WikiFactory(node=project, page_name='test') - wiki = WikiVersionFactory(wiki_page=wiki_page) - comment = CommentFactory(node=project, target=Guid.load(wiki_page._id), user=UserFactory()) - - # user views comments -- sets user.comments_viewed_timestamp - url = project.api_url_for('update_comments_timestamp') - res = self.app.put(url, json={ - 'page': 'wiki', - 'rootId': wiki_page._id - }, auth=self.user.auth) - assert res.status_code == 200 - self.user.reload() - assert wiki_page._id in self.user.comments_viewed_timestamp - - # user updates the wiki - wiki_page.update(self.user, 'Updating wiki') - comment.reload() - self.user.reload() - assert wiki_page._id in self.user.comments_viewed_timestamp - assert comment.target.referent._id == wiki_page._id - - # Regression test for https://openscience.atlassian.net/browse/OSF-6138 - def test_update_wiki_updates_contributor_comments_viewed_timestamp(self): - contributor = AuthUserFactory() - project = ProjectFactory(creator=self.user, is_public=True) - project.add_contributor(contributor) - project.save() - wiki_page = WikiFactory(node=project, page_name='test') - wiki = WikiVersionFactory(wiki_page=wiki_page) - comment = CommentFactory(node=project, target=Guid.load(wiki_page._id), user=self.user) - - # user views comments -- sets user.comments_viewed_timestamp - url = project.api_url_for('update_comments_timestamp') - res = self.app.put(url, json={ - 'page': 'wiki', - 'rootId': wiki_page._id - }, 
auth=self.user.auth) - assert res.status_code == 200 - self.user.reload() - assert wiki_page._id in self.user.comments_viewed_timestamp - - # contributor views comments -- sets contributor.comments_viewed_timestamp - res = self.app.put(url, json={ - 'page': 'wiki', - 'rootId': wiki_page._id - }, auth=contributor.auth) - contributor.reload() - assert wiki_page._id in contributor.comments_viewed_timestamp - - # user updates the wiki - wiki_page.update(self.user, 'Updating wiki') - comment.reload() - contributor.reload() - - new_version_id = WikiVersion.objects.get_for_node(project, 'test')._id - assert wiki_page._id in contributor.comments_viewed_timestamp - assert comment.target.referent._id == wiki_page._id - # Regression test for https://openscience.atlassian.net/browse/OSF-8584 def test_no_read_more_when_less_than_400_character(self): wiki_content = '1234567' diff --git a/admin/nodes/urls.py b/admin/nodes/urls.py index d28a73e2c51..c2704ee95b2 100644 --- a/admin/nodes/urls.py +++ b/admin/nodes/urls.py @@ -27,10 +27,12 @@ re_path(r'^(?P[a-z0-9]+)/reindex_share_node/$', views.NodeReindexShare.as_view(), name='reindex-share-node'), re_path(r'^(?P[a-z0-9]+)/reindex_elastic_node/$', views.NodeReindexElastic.as_view(), name='reindex-elastic-node'), - re_path(r'^(?P[a-z0-9]+)/restart_stuck_registrations/$', views.RestartStuckRegistrationsView.as_view(), - name='restart-stuck-registrations'), re_path(r'^(?P[a-z0-9]+)/remove_stuck_registrations/$', views.RemoveStuckRegistrationsView.as_view(), name='remove-stuck-registrations'), + re_path(r'^(?P[a-z0-9]+)/check_archive_status/$', views.CheckArchiveStatusRegistrationsView.as_view(), + name='check-archive-status'), + re_path(r'^(?P[a-z0-9]+)/force_archive_registration/$', views.ForceArchiveRegistrationsView.as_view(), + name='force-archive-registration'), re_path(r'^(?P[a-z0-9]+)/remove_user/(?P[a-z0-9]+)/$', views.NodeRemoveContributorView.as_view(), name='remove-user'), re_path(r'^(?P[a-z0-9]+)/modify_storage_usage/$', 
views.NodeModifyStorageUsage.as_view(), diff --git a/admin/nodes/views.py b/admin/nodes/views.py index fc16a3b0d05..2d4f0c1194f 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -1,4 +1,5 @@ import pytz +from enum import Enum from datetime import datetime from framework import status @@ -26,7 +27,7 @@ from api.share.utils import update_share from api.caching.tasks import update_storage_usage_cache -from osf.exceptions import NodeStateError +from osf.exceptions import NodeStateError, RegistrationStuckError from osf.models import ( OSFUser, NodeLog, @@ -672,23 +673,16 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) -class RestartStuckRegistrationsView(NodeMixin, TemplateView): - """ Allows an authorized user to restart a registrations archive process. +class RemoveStuckRegistrationsView(NodeMixin, View): + """ Allows an authorized user to remove a registrations if it's stuck in the archiving process. """ - template_name = 'nodes/restart_registrations_modal.html' permission_required = ('osf.view_node', 'osf.change_node') def post(self, request, *args, **kwargs): - # Prevents circular imports that cause admin app to hang at startup - from osf.management.commands.force_archive import archive, verify stuck_reg = self.get_object() - if verify(stuck_reg): - try: - archive(stuck_reg) - messages.success(request, 'Registration archive processes has restarted') - except Exception as exc: - messages.error(request, f'This registration cannot be unstuck due to {exc.__class__.__name__} ' - f'if the problem persists get a developer to fix it.') + if Registration.find_failed_registrations().filter(id=stuck_reg.id).exists(): + stuck_reg.delete_registration_tree(save=True) + messages.success(request, 'The registration has been deleted') else: messages.error(request, 'This registration may not technically be stuck,' ' if the problem persists get a developer to fix it.') @@ -696,20 +690,80 @@ def post(self, request, *args, **kwargs): 
return redirect(self.get_success_url()) -class RemoveStuckRegistrationsView(NodeMixin, TemplateView): - """ Allows an authorized user to remove a registrations if it's stuck in the archiving process. +class CheckArchiveStatusRegistrationsView(NodeMixin, View): + """Allows an authorized user to check a registration archive status. + """ + permission_required = ('osf.view_node', 'osf.change_node') + + def get(self, request, *args, **kwargs): + # Prevents circular imports that cause admin app to hang at startup + from osf.management.commands.force_archive import check + + registration = self.get_object() + + if registration.archived: + messages.success(request, f"Registration {registration._id} is archived.") + return redirect(self.get_success_url()) + + try: + archive_status = check(registration) + messages.success(request, archive_status) + except RegistrationStuckError as exc: + messages.error(request, str(exc)) + + return redirect(self.get_success_url()) + + +class CollisionMode(Enum): + NONE: str = 'none' + SKIP: str = 'skip' + DELETE: str = 'delete' + + +class ForceArchiveRegistrationsView(NodeMixin, View): + """Allows an authorized user to force archive registration. 
""" - template_name = 'nodes/remove_registrations_modal.html' permission_required = ('osf.view_node', 'osf.change_node') def post(self, request, *args, **kwargs): - stuck_reg = self.get_object() - if Registration.find_failed_registrations().filter(id=stuck_reg.id).exists(): - stuck_reg.delete_registration_tree(save=True) - messages.success(request, 'The registration has been deleted') + # Prevents circular imports that cause admin app to hang at startup + from osf.management.commands.force_archive import verify, archive, DEFAULT_PERMISSIBLE_ADDONS + + registration = self.get_object() + force_archive_params = request.POST + + collision_mode = force_archive_params.get('collision_mode', CollisionMode.NONE.value) + delete_collision = CollisionMode.DELETE.value == collision_mode + skip_collision = CollisionMode.SKIP.value == collision_mode + + allow_unconfigured = force_archive_params.get('allow_unconfigured', False) + + addons = set(force_archive_params.getlist('addons', [])) + addons.update(DEFAULT_PERMISSIBLE_ADDONS) + + try: + verify(registration, permissible_addons=addons, raise_error=True) + except ValidationError as exc: + messages.error(request, str(exc)) + return redirect(self.get_success_url()) + + dry_mode = force_archive_params.get('dry_mode', False) + + if dry_mode: + messages.success(request, f"Registration {registration._id} can be archived.") else: - messages.error(request, 'This registration may not technically be stuck,' - ' if the problem persists get a developer to fix it.') + try: + archive( + registration, + permissible_addons=addons, + allow_unconfigured=allow_unconfigured, + skip_collision=skip_collision, + delete_collision=delete_collision, + ) + messages.success(request, 'Registration archive process has finished.') + except Exception as exc: + messages.error(request, f'This registration cannot be archived due to {exc.__class__.__name__}: {str(exc)}. 
' + f'If the problem persists get a developer to fix it.') return redirect(self.get_success_url()) diff --git a/admin/notifications/views.py b/admin/notifications/views.py index 7a3a13a8df8..415369b9a31 100644 --- a/admin/notifications/views.py +++ b/admin/notifications/views.py @@ -1,11 +1,11 @@ -from osf.models.notifications import NotificationSubscription +from osf.models.notification import NotificationSubscription from django.db.models import Count def delete_selected_notifications(selected_ids): NotificationSubscription.objects.filter(id__in=selected_ids).delete() def detect_duplicate_notifications(node_id=None): - query = NotificationSubscription.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) + query = NotificationSubscription.objects.annotate(count=Count('id')).filter(count__gt=1) if node_id: query = query.filter(node_id=node_id) diff --git a/admin/preprints/urls.py b/admin/preprints/urls.py index f0a439f9722..4ab9bd33939 100644 --- a/admin/preprints/urls.py +++ b/admin/preprints/urls.py @@ -13,8 +13,10 @@ re_path(r'^(?P\w+)/change_provider/$', views.PreprintProviderChangeView.as_view(), name='preprint-provider'), re_path(r'^(?P\w+)/machine_state/$', views.PreprintMachineStateView.as_view(), name='preprint-machine-state'), re_path(r'^(?P\w+)/reindex_share_preprint/$', views.PreprintReindexShare.as_view(), name='reindex-share-preprint'), + re_path(r'^(?P\w+)/reversion_preprint/$', views.PreprintReVersion.as_view(), name='re-version-preprint'), re_path(r'^(?P\w+)/remove_user/(?P[a-z0-9]+)/$', views.PreprintRemoveContributorView.as_view(), name='remove-user'), re_path(r'^(?P\w+)/make_private/$', views.PreprintMakePrivate.as_view(), name='make-private'), + re_path(r'^(?P\w+)/fix_editing/$', views.PreprintFixEditing.as_view(), name='fix-editing'), re_path(r'^(?P\w+)/make_public/$', views.PreprintMakePublic.as_view(), name='make-public'), re_path(r'^(?P\w+)/remove/$', views.PreprintDeleteView.as_view(), name='remove'), 
re_path(r'^(?P\w+)/restore/$', views.PreprintDeleteView.as_view(), name='restore'), diff --git a/admin/preprints/views.py b/admin/preprints/views.py index a936c27582e..ef7d1860e76 100644 --- a/admin/preprints/views.py +++ b/admin/preprints/views.py @@ -1,6 +1,7 @@ +from django.db import transaction from django.db.models import F from django.core.exceptions import PermissionDenied -from django.urls import NoReverseMatch +from django.http import HttpResponse, JsonResponse from django.contrib import messages from django.contrib.auth.mixins import PermissionRequiredMixin from django.shortcuts import redirect @@ -10,7 +11,7 @@ FormView, ) from django.utils import timezone -from django.urls import reverse_lazy +from django.urls import NoReverseMatch, reverse_lazy from admin.base.views import GuidView from admin.base.forms import GuidForm @@ -19,9 +20,13 @@ from api.share.utils import update_share from api.providers.workflows import Workflows +from api.preprints.serializers import PreprintSerializer from osf.exceptions import PreprintStateError +from rest_framework.exceptions import PermissionDenied as DrfPermissionDenied +from framework.exceptions import PermissionsError +from osf.management.commands.fix_preprints_has_data_links_and_why_no_data import process_wrong_why_not_data_preprints from osf.models import ( SpamStatus, Preprint, @@ -44,6 +49,7 @@ ) from osf.utils.workflows import DefaultStates from website import search +from website.files.utils import copy_files from website.preprints.tasks import on_preprint_updated @@ -55,8 +61,8 @@ def get_object(self): preprint.guid = preprint._id return preprint - def get_success_url(self): - return reverse_lazy('preprints:preprint', kwargs={'guid': self.kwargs['guid']}) + def get_success_url(self, guid=None): + return reverse_lazy('preprints:preprint', kwargs={'guid': guid or self.kwargs['guid']}) class PreprintView(PreprintMixin, GuidView): @@ -182,6 +188,55 @@ def post(self, request, *args, **kwargs): return 
redirect(self.get_success_url()) +class PreprintReVersion(PreprintMixin, View): + """Allows an authorized user to create new version 1 of a preprint based on earlier + primary file version(s). All operations are executed within an atomic transaction. + If any step fails, the entire transaction will be rolled back and no version will be changed. + """ + permission_required = 'osf.change_node' + + def post(self, request, *args, **kwargs): + preprint = self.get_object() + + file_versions = request.POST.getlist('file_versions') + if not file_versions: + return HttpResponse('At least one file version should be attached.', status=400) + + try: + with transaction.atomic(): + versions = preprint.get_preprint_versions() + for version in versions: + version.upgrade_version() + + new_preprint, data_to_update = Preprint.create_version( + create_from_guid=preprint._id, + assign_version_number=1, + auth=request, + ignore_permission=True, + ignore_existing_versions=True, + ) + data_to_update = data_to_update or dict() + + primary_file = copy_files(preprint.primary_file, target_node=new_preprint, identifier__in=file_versions) + if primary_file is None: + raise ValueError(f"Primary file {preprint.primary_file.id} doesn't have following versions: {file_versions}") # rollback changes + data_to_update['primary_file'] = primary_file + + # FIXME: currently it's not possible to ignore permission when update subjects + # via serializer, remove this logic if deprecated + subjects = data_to_update.pop('subjects', None) + if subjects: + new_preprint.set_subjects_from_relationships(subjects, auth=request, ignore_permission=True) + + PreprintSerializer(new_preprint, context={'request': request, 'ignore_permission': True}).update(new_preprint, data_to_update) + except ValueError as exc: + return HttpResponse(str(exc), status=400) + except (PermissionsError, DrfPermissionDenied) as exc: + return HttpResponse(f'Not enough permissions to perform this action : {str(exc)}', status=400) + + return 
JsonResponse({'redirect': self.get_success_url(new_preprint._id)}) + + class PreprintReindexElastic(PreprintMixin, View): """ Allows an authorized user to reindex a node in ElasticSearch. """ @@ -525,6 +580,21 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) +class PreprintFixEditing(PreprintMixin, View): + """ Allows an authorized user to manually fix why not data field. + """ + permission_required = 'osf.change_node' + + def post(self, request, *args, **kwargs): + preprint = self.get_object() + process_wrong_why_not_data_preprints( + version_guid=preprint._id, + dry_run=False, + executing_through_command=False, + ) + + return redirect(self.get_success_url()) + class PreprintMakePublic(PreprintMixin, View): """ Allows an authorized user to manually make a private preprint public. diff --git a/admin/static/js/preprints/preprints.js b/admin/static/js/preprints/preprints.js new file mode 100644 index 00000000000..21217725ba2 --- /dev/null +++ b/admin/static/js/preprints/preprints.js @@ -0,0 +1,18 @@ +$(document).ready(function() { + + $("#confirmReversion").on("submit", function (event) { + event.preventDefault(); + + $.ajax({ + url: window.templateVars.reVersionPreprint, + type: "post", + data: $("#re-version-preprint-form").serialize(), + }).success(function (response) { + if (response.redirect) { + window.location.href = response.redirect; + } + }).fail(function (jqXHR, textStatus, error) { + $("#version-validation").text(jqXHR.responseText); + }); + }); +}); diff --git a/admin/templates/nodes/node.html b/admin/templates/nodes/node.html index aa705243a69..c178709534f 100644 --- a/admin/templates/nodes/node.html +++ b/admin/templates/nodes/node.html @@ -17,7 +17,7 @@ View Logs {% include "nodes/remove_node.html" with node=node %} - {% include "nodes/restart_stuck_registration.html" with node=node %} + {% include "nodes/registration_force_archive.html" with node=node %} {% include "nodes/make_private.html" with node=node %} {% 
include "nodes/make_public.html" with node=node %} {% include "nodes/mark_spam.html" with node=node %} diff --git a/admin/templates/nodes/registration_force_archive.html b/admin/templates/nodes/registration_force_archive.html new file mode 100644 index 00000000000..7c87f1a837d --- /dev/null +++ b/admin/templates/nodes/registration_force_archive.html @@ -0,0 +1,79 @@ +{% if node.is_registration %} + + Check archive status + +{% if not node.archived %} + {% if node.is_stuck_registration %} + + Restart Stuck Registration + + + Remove Stuck Registration + + {% else %} + + Force Archive + + {% endif %} + + + + + + + +{% endif %} +{% endif %} diff --git a/admin/templates/nodes/registration_force_archive_form.html b/admin/templates/nodes/registration_force_archive_form.html new file mode 100644 index 00000000000..ab52d7f7c33 --- /dev/null +++ b/admin/templates/nodes/registration_force_archive_form.html @@ -0,0 +1,39 @@ +
+ {% csrf_token %} + +
\ No newline at end of file diff --git a/admin/templates/nodes/restart_stuck_registration.html b/admin/templates/nodes/restart_stuck_registration.html deleted file mode 100644 index c81bd3fb55f..00000000000 --- a/admin/templates/nodes/restart_stuck_registration.html +++ /dev/null @@ -1,51 +0,0 @@ -{% if node.is_stuck_registration %} - - Restart Registration - - - Remove Registration - - - -{% endif %} - diff --git a/admin/templates/preprints/assign_new_version.html b/admin/templates/preprints/assign_new_version.html new file mode 100644 index 00000000000..3ee5fcce6d5 --- /dev/null +++ b/admin/templates/preprints/assign_new_version.html @@ -0,0 +1,32 @@ +{% load node_extras %} + + Create new version 1 + + + diff --git a/admin/templates/preprints/fix_editing.html b/admin/templates/preprints/fix_editing.html new file mode 100644 index 00000000000..84c6e3cdd99 --- /dev/null +++ b/admin/templates/preprints/fix_editing.html @@ -0,0 +1,21 @@ +{% if perms.osf.change_node %} + + Fix why not data + + +{% endif %} diff --git a/admin/templates/preprints/preprint.html b/admin/templates/preprints/preprint.html index 2763d3d35f1..719304d716f 100644 --- a/admin/templates/preprints/preprint.html +++ b/admin/templates/preprints/preprint.html @@ -26,6 +26,8 @@ {% include "preprints/make_private.html" with preprint=preprint %} {% include "preprints/make_public.html" with preprint=preprint %} {% include "preprints/make_published.html" with preprint=preprint %} + {% include "preprints/fix_editing.html" with preprint=preprint %} + {% include "preprints/assign_new_version.html" with preprint=preprint %} @@ -122,3 +124,11 @@

Preprint: {{ preprint.title }} {% endblock content %} +{% block bottom_js %} + + +{% endblock %} diff --git a/admin_tests/meetings/__init__.py b/admin_tests/meetings/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin_tests/meetings/test_forms.py b/admin_tests/meetings/test_forms.py deleted file mode 100644 index 0417ffeeb8c..00000000000 --- a/admin_tests/meetings/test_forms.py +++ /dev/null @@ -1,80 +0,0 @@ -from tests.base import AdminTestCase -from osf_tests.factories import AuthUserFactory -from tests.test_conferences import ConferenceFactory - -from admin.meetings.forms import MeetingForm, MultiEmailField - -data = dict( - edit='False', - endpoint='short', - name='Much longer', - info_url='http://something.com', - logo_url='http://osf.io/eg634', - active='True', - admins='zzz@email.org', - public_projects='True', - poster='True', - talk='True', - submission1='poster', - submission2='talk', - submission1_plural='posters', - submission2_plural='talks', - meeting_title_type='Of course', - add_submission='No more', - mail_subject='Awesome', - mail_message_body='Nothings', - mail_attachment='Again', - homepage_link_text='Need to add to tests', -) - - -class TestMultiEmailField(AdminTestCase): - def test_to_python_nothing(self): - field = MultiEmailField() - res = field.to_python('') - assert res == [] - - def test_to_python_one(self): - field = MultiEmailField() - res = field.to_python('aaa@email.org') - assert res == ['aaa@email.org'] - - def test_to_python_more(self): - field = MultiEmailField() - res = field.to_python('aaa@email.org, bbb@email.org, ccc@email.org') - assert res == ['aaa@email.org', 'bbb@email.org', 'ccc@email.org'] - - -class TestMeetingForm(AdminTestCase): - def setUp(self): - super().setUp() - self.user = AuthUserFactory() - - def test_clean_admins_raise(self): - form = MeetingForm(data=data) - assert not form.is_valid() - assert 'admins' in form.errors - assert 'zzz@email.org' in form.errors['admins'][0] - 
assert 'does not have an OSF account' in form.errors['admins'][0] - - def test_clean_admins_okay(self): - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first()}) - form = MeetingForm(data=mod_data) - assert form.is_valid() - - def test_clean_endpoint_raise_not_exist(self): - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first(), 'edit': 'True'}) - form = MeetingForm(data=mod_data) - assert 'endpoint' in form.errors - assert 'Meeting not found with this endpoint to update' == form.errors['endpoint'][0] - - def test_clean_endpoint_raise_exists(self): - conf = ConferenceFactory() - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first(), - 'endpoint': conf.endpoint}) - form = MeetingForm(data=mod_data) - assert 'endpoint' in form.errors - assert 'A meeting with this endpoint exists already.' == form.errors['endpoint'][0] diff --git a/admin_tests/meetings/test_serializers.py b/admin_tests/meetings/test_serializers.py deleted file mode 100644 index 93fd7f6ac7c..00000000000 --- a/admin_tests/meetings/test_serializers.py +++ /dev/null @@ -1,23 +0,0 @@ -from tests.base import AdminTestCase -from tests.test_conferences import ConferenceFactory - -from admin.meetings.serializers import serialize_meeting - - -class TestsSerializeMeeting(AdminTestCase): - def setUp(self): - super().setUp() - self.conf = ConferenceFactory() - - def test_serialize(self): - res = serialize_meeting(self.conf) - assert isinstance(res, dict) - assert res['endpoint'] == self.conf.endpoint - assert res['name'] == self.conf.name - assert res['info_url'] == self.conf.info_url - assert res['logo_url'] == self.conf.logo_url - assert res['active'] == self.conf.active - assert res['public_projects'] == self.conf.public_projects - assert res['poster'] == self.conf.poster - assert res['talk'] == self.conf.talk - assert res['num_submissions'] == 
self.conf.valid_submissions.count() diff --git a/admin_tests/meetings/test_views.py b/admin_tests/meetings/test_views.py deleted file mode 100644 index bca7adbc14b..00000000000 --- a/admin_tests/meetings/test_views.py +++ /dev/null @@ -1,195 +0,0 @@ -import pytest -from django.test import RequestFactory -from django.http import Http404 -from django.urls import reverse -from django.contrib.auth.models import Permission -from django.core.exceptions import PermissionDenied - -from tests.base import AdminTestCase -from osf_tests.factories import AuthUserFactory -from tests.test_conferences import ConferenceFactory -from osf.models.conference import Conference, DEFAULT_FIELD_NAMES - -from admin_tests.utilities import setup_form_view -from admin_tests.meetings.test_forms import data -from admin.meetings.views import ( - MeetingListView, - MeetingCreateFormView, - MeetingFormView, - get_custom_fields, - get_admin_users, -) -from admin.meetings.forms import MeetingForm - - -class TestMeetingListView(AdminTestCase): - def setUp(self): - super().setUp() - Conference.objects.all().delete() - ConferenceFactory() - ConferenceFactory() - ConferenceFactory() - - def test_get_queryset(self): - view = MeetingListView() - assert len(view.get_queryset()) == 3 - - def test_no_user_permissions_raises_error(self): - user = AuthUserFactory() - request = RequestFactory().get(reverse('meetings:list')) - request.user = user - - with pytest.raises(PermissionDenied): - MeetingListView.as_view()(request) - - def test_correct_view_permissions(self): - user = AuthUserFactory() - - view_permission = Permission.objects.get(codename='view_conference') - user.user_permissions.add(view_permission) - user.save() - - request = RequestFactory().get(reverse('meetings:list')) - request.user = user - - response = MeetingListView.as_view()(request) - assert response.status_code == 200 - - -class TestMeetingFormView(AdminTestCase): - def setUp(self): - super().setUp() - self.conf = ConferenceFactory() - 
self.user = AuthUserFactory() - self.request = RequestFactory().post('/fake_path') - self.view = MeetingFormView - mod_data = dict(data) - mod_data.update({ - 'edit': 'True', - 'endpoint': self.conf.endpoint, - 'admins': self.user.emails.first().address, - 'location': 'Timbuktu, Mali', - 'start date': 'Dec 11 2014', - 'end_date': 'Jan 12 2013' - }) - self.form = MeetingForm(data=mod_data) - self.form.is_valid() - - self.url = reverse('meetings:detail', kwargs={'endpoint': self.conf.endpoint}) - - def test_dispatch_raise_404(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint='meh') - with pytest.raises(Http404): - view.dispatch(self.request, endpoint='meh') - - def test_get_context(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - res = view.get_context_data() - assert isinstance(res, dict) - assert 'endpoint' in res - assert res['endpoint'] == self.conf.endpoint - - def test_get_initial(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - res = view.get_initial() - assert isinstance(res, dict) - assert 'endpoint' in res - assert 'submission2_plural' in res - - def test_form_valid(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - view.form_valid(self.form) - self.conf.reload() - assert self.conf.admins.all()[0].emails.first().address == self.user.emails.first().address - assert self.conf.location == self.form.cleaned_data['location'] - assert self.conf.start_date == self.form.cleaned_data['start_date'] - - def test_no_user_permissions_raises_error(self): - request = RequestFactory().get(self.url) - request.user = self.user - - with pytest.raises(PermissionDenied): - self.view.as_view()(request, endpoint=self.conf.endpoint) - - def test_correct_view_permissions(self): - - view_permission = 
Permission.objects.get(codename='change_conference') - self.user.user_permissions.add(view_permission) - self.user.save() - - request = RequestFactory().get(self.url) - request.user = self.user - - response = self.view.as_view()(request, endpoint=self.conf.endpoint) - assert response.status_code == 200 - - -class TestMeetingCreateFormView(AdminTestCase): - def setUp(self): - super().setUp() - Conference.objects.all().delete() - self.user = AuthUserFactory() - self.request = RequestFactory().post('/fake_path') - self.view = MeetingCreateFormView - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.first().address}) - self.form = MeetingForm(data=mod_data) - self.form.is_valid() - - self.url = reverse('meetings:create') - - def test_get_initial(self): - self.view().get_initial() - assert not self.view().initial['edit'] - assert self.view.initial['submission1'] == DEFAULT_FIELD_NAMES['submission1'] - - def test_form_valid(self): - view = setup_form_view(self.view(), self.request, self.form) - view.form_valid(self.form) - assert Conference.objects.filter(endpoint=data['endpoint']).count() == 1 - - def test_no_user_permissions_raises_error(self): - request = RequestFactory().get(self.url) - request.user = self.user - - with pytest.raises(PermissionDenied): - self.view.as_view()(request) - - def test_correct_view_permissions(self): - change_permission = Permission.objects.get(codename='view_conference') - view_permission = Permission.objects.get(codename='change_conference') - self.user.user_permissions.add(view_permission) - self.user.user_permissions.add(change_permission) - self.user.save() - - request = RequestFactory().get(self.url) - request.user = self.user - - response = self.view.as_view()(request) - assert response.status_code == 200 - - -class TestMeetingMisc(AdminTestCase): - def test_get_custom_fields(self): - res1, res2 = get_custom_fields(data) - assert isinstance(res1, dict) - assert isinstance(res2, dict) - for key in res1.keys(): - 
assert 'field' not in key - - def test_get_admin_users(self): - user_1 = AuthUserFactory() - user_2 = AuthUserFactory() - user_3 = AuthUserFactory() - emails = [user_1.emails.first().address, user_2.emails.first().address, user_3.emails.first().address] - res = get_admin_users(emails) - assert user_1 in res - assert user_2 in res - assert user_3 in res diff --git a/admin_tests/nodes/test_views.py b/admin_tests/nodes/test_views.py index c80eeb27e47..9f978e75268 100644 --- a/admin_tests/nodes/test_views.py +++ b/admin_tests/nodes/test_views.py @@ -17,8 +17,9 @@ NodeKnownHamList, NodeConfirmHamView, AdminNodeLogView, - RestartStuckRegistrationsView, RemoveStuckRegistrationsView, + CheckArchiveStatusRegistrationsView, + ForceArchiveRegistrationsView, ApprovalBacklogListView, ConfirmApproveBacklogView ) @@ -375,28 +376,50 @@ def test_get_queryset(self): assert log_entry.params['title_new'] == 'New Title' -class TestRestartStuckRegistrationsView(AdminTestCase): +class TestCheckArchiveStatusRegistrationsView(AdminTestCase): + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.view = CheckArchiveStatusRegistrationsView + self.request = RequestFactory().post('/fake_path') + + def test_check_archive_status(self): + from django.contrib.messages.storage.fallback import FallbackStorage + + registration = RegistrationFactory(creator=self.user, archive=True) + view = setup_log_view(self.view(), self.request, guid=registration._id) + + # django.contrib.messages has a bug which effects unittests + # more info here -> https://code.djangoproject.com/ticket/17971 + setattr(self.request, 'session', 'session') + messages = FallbackStorage(self.request) + setattr(self.request, '_messages', messages) + + view.get(self.request) + + assert not registration.archived + assert f'Registration {registration._id} is not stuck in archiving' in [m.message for m in messages] + + +class TestForceArchiveRegistrationsView(AdminTestCase): def setUp(self): super().setUp() self.user 
= AuthUserFactory() self.registration = RegistrationFactory(creator=self.user, archive=True) self.registration.save() - self.view = RestartStuckRegistrationsView + self.view = ForceArchiveRegistrationsView self.request = RequestFactory().post('/fake_path') def test_get_object(self): - view = RestartStuckRegistrationsView() - view = setup_log_view(view, self.request, guid=self.registration._id) + view = setup_log_view(self.view(), self.request, guid=self.registration._id) assert self.registration == view.get_object() - def test_restart_stuck_registration(self): + def test_force_archive_registration(self): # Prevents circular import that prevents admin app from starting up from django.contrib.messages.storage.fallback import FallbackStorage - view = RestartStuckRegistrationsView() - view = setup_log_view(view, self.request, guid=self.registration._id) - assert self.registration.archive_job.status == 'INITIATED' + view = setup_log_view(self.view(), self.request, guid=self.registration._id) # django.contrib.messages has a bug which effects unittests # more info here -> https://code.djangoproject.com/ticket/17971 @@ -408,6 +431,24 @@ def test_restart_stuck_registration(self): assert self.registration.archive_job.status == 'SUCCESS' + def test_force_archive_registration_dry_mode(self): + # Prevents circular import that prevents admin app from starting up + from django.contrib.messages.storage.fallback import FallbackStorage + + request = RequestFactory().post('/fake_path', data={'dry_mode': 'true'}) + view = setup_log_view(self.view(), request, guid=self.registration._id) + assert self.registration.archive_job.status == 'INITIATED' + + # django.contrib.messages has a bug which effects unittests + # more info here -> https://code.djangoproject.com/ticket/17971 + setattr(request, 'session', 'session') + messages = FallbackStorage(request) + setattr(request, '_messages', messages) + + view.post(request) + + assert self.registration.archive_job.status == 'INITIATED' + class 
TestRemoveStuckRegistrationsView(AdminTestCase): def setUp(self): diff --git a/admin_tests/preprints/test_views.py b/admin_tests/preprints/test_views.py index 1fb9d68482d..06ec5fa8d04 100644 --- a/admin_tests/preprints/test_views.py +++ b/admin_tests/preprints/test_views.py @@ -797,3 +797,33 @@ def test_admin_user_can_publish_preprint(self, user, preprint, plain_view): preprint.reload() assert preprint.is_published + + +@pytest.mark.urls('admin.base.urls') +class TestPreprintReVersionView: + + @pytest.fixture() + def plain_view(self): + return views.PreprintReVersion + + def test_admin_user_can_add_new_version_one(self, user, preprint, plain_view): + # user isn't admin contributor in the preprint + assert preprint.contributors.filter(id=user.id).exists() is False + assert preprint.has_permission(user, ADMIN) is False + assert len(preprint.get_preprint_versions()) == 1 + + request = RequestFactory().post( + reverse('preprints:re-version-preprint', + kwargs={'guid': preprint._id}), + data={'file_versions': ['1']} + ) + request.user = user + + admin_group = Group.objects.get(name='osf_admin') + admin_group.permissions.add(Permission.objects.get(codename='change_node')) + user.groups.add(admin_group) + + plain_view.as_view()(request, guid=preprint._id) + preprint.refresh_from_db() + + assert len(preprint.get_preprint_versions()) == 2 diff --git a/admin_tests/users/test_views.py b/admin_tests/users/test_views.py index cd51459e134..1b878f00509 100644 --- a/admin_tests/users/test_views.py +++ b/admin_tests/users/test_views.py @@ -141,6 +141,16 @@ def test_correct_view_permissions(self): response = self.view.as_view()(request, guid=user._id) self.assertEqual(response.status_code, 302) + def test_user_with_deleted_node_is_deleted(self): + patch_messages(self.request) + + project = ProjectFactory(creator=self.user, is_deleted=True) + assert self.user.nodes.filter(id=project.id, is_deleted=True).count() + + self.view().post(self.request) + self.user.reload() + assert 
self.user.deleted + class TestDisableUser(AdminTestCase): def setUp(self): diff --git a/api/base/utils.py b/api/base/utils.py index 0e3690e6dbd..c08c8c73977 100644 --- a/api/base/utils.py +++ b/api/base/utils.py @@ -69,7 +69,9 @@ def get_user_auth(request): authenticated user attached to it. """ user = request.user - private_key = request.query_params.get('view_only', None) + private_key = None + if hasattr(request, 'query_params'): # allows django WSGIRequest to be used as well + private_key = request.query_params.get('view_only', None) if user.is_anonymous: auth = Auth(None, private_key=private_key) else: diff --git a/api/crossref/views.py b/api/crossref/views.py index 9fa3be72e7a..d93d5b43ef2 100644 --- a/api/crossref/views.py +++ b/api/crossref/views.py @@ -6,9 +6,9 @@ from rest_framework.views import APIView from api.crossref.permissions import RequestComesFromMailgun -from framework.auth.views import mails -from osf.models import Preprint +from osf.models import Preprint, NotificationType from website import settings +from website.preprints.tasks import mint_doi_on_crossref_fail logger = logging.getLogger(__name__) @@ -66,8 +66,7 @@ def post(self, request): elif record.get('status').lower() == 'failure': if 'Relation target DOI does not exist' in record.find('msg').text: logger.warning('Related publication DOI does not exist, sending metadata again without it...') - client = preprint.get_doi_client() - client.create_identifier(preprint, category='doi', include_relation=False) + mint_doi_on_crossref_fail.apply_async(kwargs={'preprint_id': preprint._id}) # This error occurs when a single preprint is being updated several times in a row with the same metadata [#PLAT-944] elif 'less or equal to previously submitted version' in record.find('msg').text and record_count == 2: break @@ -77,13 +76,15 @@ def post(self, request): if dois_processed != record_count or status != 'completed': if unexpected_errors: + email_error_text = request.POST['body-plain'] batch_id = 
crossref_email_content.find('batch_id').text - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.CROSSREF_ERROR, - batch_id=batch_id, - email_content=request.POST['body-plain'], + NotificationType.objects.get(name=NotificationType.Type.DESK_OSF_SUPPORT_EMAIL).emit( + user=type('staff', (), {'username': settings.OSF_SUPPORT_EMAIL}), + event_context={ + 'batch_id': batch_id, + 'email_content': request.POST['body-plain'], + }, ) - logger.error(f'Error submitting metadata for batch_id {batch_id} with CrossRef, email sent to help desk') + logger.error(f'Error submitting metadata for batch_id {batch_id} with CrossRef, email sent to help desk: {email_error_text}') return HttpResponse('Mail received', status=200) diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py index a5588c2b034..5fa7e35fde0 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -13,17 +13,14 @@ from api.base.authentication import drf from api.base import exceptions, settings -from api.waffle.utils import flag_is_active from framework import sentry from framework.auth import get_or_create_institutional_user -from osf import features from osf.exceptions import InstitutionAffiliationStateError -from osf.models import Institution +from osf.models import Institution, NotificationType from osf.models.institution import SsoFilterCriteriaAction -from website.mails import send_mail, WELCOME_OSF4I, DUPLICATE_ACCOUNTS_OSF4I, ADD_SSO_EMAIL_OSF4I from website.settings import OSF_SUPPORT_EMAIL, DOMAIN from website.util.metrics import institution_source_tag @@ -334,26 +331,30 @@ def authenticate(self, request): user.save() # Send confirmation email for all three: created, confirmed and claimed - send_mail( - to_addr=user.username, - mail=WELCOME_OSF4I, + NotificationType.objects.get( + name=NotificationType.Type.USER_WELCOME_OSF4I, + ).emit( user=user, - domain=DOMAIN, - osf_support_email=OSF_SUPPORT_EMAIL, - 
storage_flag_is_active=flag_is_active(request, features.STORAGE_I18N), + event_context={ + 'email_to_add': email_to_add, + 'domain': DOMAIN, + 'osf_support_email': OSF_SUPPORT_EMAIL, + }, ) # Add the email to the user's account if it is identified by the eppn if email_to_add: assert not is_created and email_to_add == sso_email user.emails.create(address=email_to_add) - send_mail( - to_addr=user.username, - mail=ADD_SSO_EMAIL_OSF4I, + NotificationType.objects.get( + name=NotificationType.Type.USER_ADD_SSO_EMAIL_OSF4I, + ).emit( user=user, - email_to_add=email_to_add, - domain=DOMAIN, - osf_support_email=OSF_SUPPORT_EMAIL, + event_context={ + 'email_to_add': email_to_add, + 'domain': DOMAIN, + 'osf_support_email': OSF_SUPPORT_EMAIL, + }, ) # Inform the user that a potential duplicate account is found @@ -364,13 +365,15 @@ def authenticate(self, request): duplicate_user.remove_sso_identity_from_affiliation(institution) if secondary_institution: duplicate_user.remove_sso_identity_from_affiliation(secondary_institution) - send_mail( - to_addr=user.username, - mail=DUPLICATE_ACCOUNTS_OSF4I, + NotificationType.objects.get( + name=NotificationType.Type.USER_DUPLICATE_ACCOUNTS_OSF4I, + ).emit( user=user, - duplicate_user=duplicate_user, - domain=DOMAIN, - osf_support_email=OSF_SUPPORT_EMAIL, + event_context={ + 'duplicate_user': duplicate_user.id, + 'domain': DOMAIN, + 'osf_support_email': OSF_SUPPORT_EMAIL, + }, ) # Affiliate the user to the primary institution if not previously affiliated diff --git a/api/nodes/views.py b/api/nodes/views.py index 7bc3ad929da..d7eccb116e2 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -158,11 +158,11 @@ File, Folder, CedarMetadataRecord, - Preprint, + Preprint, NotificationType, ) from addons.osfstorage.models import Region from osf.utils.permissions import ADMIN, WRITE_NODE -from website import mails, settings +from website import settings # This is used to rethrow v1 exceptions as v2 HTTP_CODE_MAP = { @@ -1051,10 +1051,36 @@ 
def perform_create(self, serializer): try: fork = serializer.save(node=node) except Exception as exc: - mails.send_mail(user.email, mails.FORK_FAILED, title=node.title, guid=node._id, can_change_preferences=False) + NotificationType.objects.get( + name=NotificationType.Type.FORK_FAILED, + ).emit( + user=user, + subscribed_object=node, + event_context={ + 'user': user.id, + 'node': node.id, + 'title': node.title, + 'guid': node._id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, + ) raise exc else: - mails.send_mail(user.email, mails.FORK_COMPLETED, title=node.title, guid=fork._id, can_change_preferences=False) + NotificationType.objects.get( + name=NotificationType.Type.NODE_FORK_COMPLETED, + ).emit( + user=user, + subscribed_object=fork, + event_context={ + 'user': user.id, + 'node': fork.id, + 'title': node.title, + 'guid': fork._id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, + ) class NodeLinkedByNodesList(JSONAPIBaseView, generics.ListAPIView, NodeMixin): diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index 2e802a438a0..9ade76215c6 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -39,6 +39,7 @@ from api.preprints.fields import DOIField from api.taxonomies.serializers import TaxonomizableSerializerMixin from framework.exceptions import PermissionsError, UnpublishedPendingPreprintVersionExists +from website import settings from website.project import signals as project_signals from osf.exceptions import NodeStateError, PreprintStateError from osf.models import ( @@ -46,10 +47,11 @@ Preprint, PreprintProvider, Node, - NodeLicense, + NodeLicense, NotificationType, ) from osf.utils import permissions as osf_permissions from osf.utils.workflows import DefaultStates +from website.project.views.node import serialize_preprints class PrimaryFileRelationshipField(RelationshipField): @@ -296,7 +298,8 @@ def update(self, preprint, 
validated_data): assert isinstance(preprint, Preprint), 'You must specify a valid preprint to be updated' auth = get_user_auth(self.context['request']) - if not preprint.has_permission(auth.user, osf_permissions.WRITE): + ignore_permission = self.context.get('ignore_permission', False) + if not ignore_permission and not preprint.has_permission(auth.user, osf_permissions.WRITE): raise exceptions.PermissionDenied(detail='User must have admin or write permissions to update a preprint.') for field in ['conflict_of_interest_statement', 'why_no_data', 'why_no_prereg']: @@ -344,76 +347,40 @@ def update(self, preprint, validated_data): detail='You cannot edit this field while your prereg links availability is set to false or is unanswered.', ) - def require_admin_permission(): - if not preprint.has_permission(auth.user, osf_permissions.ADMIN): - raise exceptions.PermissionDenied(detail='Must have admin permissions to update author assertion fields.') - - if 'has_coi' in validated_data: - require_admin_permission() - try: - preprint.update_has_coi(auth, validated_data['has_coi']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'conflict_of_interest_statement' in validated_data: - require_admin_permission() - try: - preprint.update_conflict_of_interest_statement(auth, validated_data['conflict_of_interest_statement']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'has_data_links' in validated_data: - require_admin_permission() - try: - preprint.update_has_data_links(auth, validated_data['has_data_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'why_no_data' in validated_data: - require_admin_permission() - try: - preprint.update_why_no_data(auth, validated_data['why_no_data']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'data_links' in validated_data: - require_admin_permission() - try: - 
preprint.update_data_links(auth, validated_data['data_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - else: - if updated_has_data_links == 'no' and preprint.data_links: - preprint.update_data_links(auth, []) - - if 'has_prereg_links' in validated_data: - require_admin_permission() - - try: - preprint.update_has_prereg_links(auth, validated_data['has_prereg_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'why_no_prereg' in validated_data: - require_admin_permission() - try: - preprint.update_why_no_prereg(auth, validated_data['why_no_prereg']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'prereg_links' in validated_data: - require_admin_permission() - try: - preprint.update_prereg_links(auth, validated_data['prereg_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'prereg_link_info' in validated_data: - require_admin_permission() - try: - preprint.update_prereg_link_info(auth, validated_data['prereg_link_info']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) + try: + if 'has_coi' in validated_data: + preprint.update_has_coi(auth, validated_data['has_coi'], ignore_permission=ignore_permission) + + if 'conflict_of_interest_statement' in validated_data: + preprint.update_conflict_of_interest_statement(auth, validated_data['conflict_of_interest_statement'], ignore_permission=ignore_permission) + + if 'has_data_links' in validated_data: + preprint.update_has_data_links(auth, validated_data['has_data_links'], ignore_permission=ignore_permission) + + if 'why_no_data' in validated_data: + preprint.update_why_no_data(auth, validated_data['why_no_data'], ignore_permission=ignore_permission) + + if 'has_prereg_links' in validated_data: + preprint.update_has_prereg_links(auth, validated_data['has_prereg_links'], ignore_permission=ignore_permission) + 
+ if 'why_no_prereg' in validated_data: + preprint.update_why_no_prereg(auth, validated_data['why_no_prereg'], ignore_permission=ignore_permission) + + if 'prereg_links' in validated_data: + preprint.update_prereg_links(auth, validated_data['prereg_links'], ignore_permission=ignore_permission) + + if 'prereg_link_info' in validated_data: + preprint.update_prereg_link_info(auth, validated_data['prereg_link_info'], ignore_permission=ignore_permission) + + if 'data_links' in validated_data: + preprint.update_data_links(auth, validated_data['data_links'], ignore_permission=ignore_permission) + else: + if updated_has_data_links == 'no' and preprint.data_links: + preprint.update_data_links(auth, [], ignore_permission=ignore_permission) + except PreprintStateError as e: + raise exceptions.ValidationError(detail=str(e)) + except PermissionsError: + raise exceptions.PermissionDenied(detail='Must have admin permissions to update author assertion fields.') published = validated_data.pop('is_published', None) if published and preprint.provider.is_reviewed: @@ -434,7 +401,7 @@ def require_admin_permission(): primary_file = validated_data.pop('primary_file', None) if primary_file: - self.set_field(preprint.set_primary_file, primary_file, auth) + self.set_field(preprint.set_primary_file, primary_file, auth, ignore_permission=ignore_permission) old_tags = set(preprint.tags.values_list('name', flat=True)) if 'tags' in validated_data: @@ -451,7 +418,7 @@ def require_admin_permission(): if 'node' in validated_data: node = validated_data.pop('node', None) - self.set_field(preprint.set_supplemental_node, node, auth) + self.set_field(preprint.set_supplemental_node, node, auth, ignore_node_permissions=ignore_permission, ignore_permission=ignore_permission) if 'subjects' in validated_data: subjects = validated_data.pop('subjects', None) @@ -459,18 +426,18 @@ def require_admin_permission(): if 'title' in validated_data: title = validated_data['title'] - self.set_field(preprint.set_title, 
title, auth) + self.set_field(preprint.set_title, title, auth, ignore_permission=ignore_permission) if 'description' in validated_data: description = validated_data['description'] - self.set_field(preprint.set_description, description, auth) + self.set_field(preprint.set_description, description, auth, ignore_permission=ignore_permission) if 'article_doi' in validated_data: preprint.article_doi = validated_data['article_doi'] if 'license_type' in validated_data or 'license' in validated_data: license_details = get_license_details(preprint, validated_data) - self.set_field(preprint.set_preprint_license, license_details, auth) + self.set_field(preprint.set_preprint_license, license_details, auth, ignore_permission=ignore_permission) if 'original_publication_date' in validated_data: preprint.original_publication_date = validated_data['original_publication_date'] or None @@ -483,9 +450,9 @@ def require_admin_permission(): raise exceptions.ValidationError( detail='A valid primary_file must be set before publishing a preprint.', ) - self.set_field(preprint.set_published, published, auth) + self.set_field(preprint.set_published, published, auth, ignore_permission=ignore_permission) recently_published = published - preprint.set_privacy('public', log=False, save=True) + preprint.set_privacy('public', log=False, save=True, ignore_permission=ignore_permission) try: preprint.full_clean() @@ -503,12 +470,31 @@ def require_admin_permission(): auth=auth, email_template='preprint', ) + if node.provider.is_default: + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + else: + notification_type = NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT + + NotificationType.objects.get(name=notification_type).emit( + user=author, + event_context={ + 'user': author.id, + 'node': node.id, + 'referrer_name': auth.user.fullname if auth else '', + 'is_initiator': getattr(auth, 'user', False) == author.id, + 'all_global_subscriptions_none': False, + 
'branded_service': getattr(author, 'id', None), + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + 'published_preprints': serialize_preprints(node, user=None), + }, + ) return preprint - def set_field(self, func, val, auth, save=False): + def set_field(self, func, val, auth, **kwargs): try: - func(val, auth) + func(val, auth, **kwargs) except PermissionsError as e: raise exceptions.PermissionDenied(detail=str(e)) except (ValueError, ValidationError, NodeStateError) as e: @@ -566,6 +552,13 @@ def create(self, validated_data): raise Conflict(detail='Failed to create a new preprint version due to unpublished pending version exists.') if not preprint: raise NotFound(detail='Failed to create a new preprint version due to source preprint not found.') + for contributor in preprint.contributor_set.filter(user__is_registered=False): + contributor.user.add_unclaimed_record( + claim_origin=preprint, + referrer=auth.user, + email=contributor.user.email, + given_name=contributor.user.fullname, + ) if data_to_update: return self.update(preprint, data_to_update) return preprint diff --git a/api/providers/serializers.py b/api/providers/serializers.py index ef89388e281..a5feaf013d2 100644 --- a/api/providers/serializers.py +++ b/api/providers/serializers.py @@ -10,11 +10,10 @@ from api.preprints.serializers import PreprintProviderRelationshipField from api.providers.workflows import Workflows from api.base.metrics import MetricsSerializerMixin -from osf.models import CitationStyle +from osf.models import CitationStyle, NotificationType from osf.models.user import Email, OSFUser from osf.models.validators import validate_email from osf.utils.permissions import REVIEW_GROUPS, ADMIN -from website import mails from website.settings import DOMAIN @@ -313,12 +312,11 @@ def create(self, validated_data): address = validated_data.pop('email', '') provider = self.context['provider'] context = { - 'referrer': auth.user, + 'referrer': auth.user.id, } if 
user_id and address: raise ValidationError('Cannot specify both "id" and "email".') - user = None if user_id: user = OSFUser.load(user_id) elif address: @@ -344,15 +342,15 @@ def create(self, validated_data): if not user: raise ValidationError('Unable to find specified user.') - context['user'] = user - context['provider'] = provider + context['user'] = user.id + context['provider'] = provider.id if bool(get_perms(user, provider)): raise ValidationError('Specified user is already a moderator.') if 'claim_url' in context: - template = mails.CONFIRM_EMAIL_MODERATION(provider) + notification_type = NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION else: - template = mails.MODERATOR_ADDED(provider) + notification_type = NotificationType.Type.PROVIDER_MODERATOR_ADDED perm_group = validated_data.pop('permission_group', '') if perm_group not in REVIEW_GROUPS: @@ -364,11 +362,13 @@ def create(self, validated_data): provider.add_to_group(user, perm_group) setattr(user, 'permission_group', perm_group) # Allows reserialization - mails.send_mail( - user.username, - template, - **context, + NotificationType.objects.get( + name=notification_type.value, + ).emit( + user=user, + event_context=context, ) + return user def update(self, instance, validated_data): diff --git a/api/providers/tasks.py b/api/providers/tasks.py index b8fb8e06233..b0a39c9c337 100644 --- a/api/providers/tasks.py +++ b/api/providers/tasks.py @@ -26,7 +26,7 @@ RegistrationBulkUploadRow, RegistrationProvider, RegistrationSchema, - Subject, + Subject, NotificationType, ) from osf.models.licenses import NodeLicense from osf.models.registration_bulk_upload_job import JobState @@ -34,7 +34,7 @@ from osf.registrations.utils import get_registration_provider_submissions_url from osf.utils.permissions import ADMIN -from website import mails, settings +from website import settings logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -136,13 +136,16 @@ def 
prepare_for_registration_bulk_creation(payload_hash, initiator_id, provider_ # Cancel the preparation task if duplicates are found in the CSV and/or in DB if draft_error_list: upload.delete() - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES, - fullname=initiator.fullname, - count=initial_row_count, - draft_errors=draft_error_list, - osf_support_email=settings.OSF_SUPPORT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES, + ).emit( + user=initiator, + event_context={ + 'fullname': initiator.fullname, + 'count': initial_row_count, + 'draft_errors': draft_error_list, + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + }, ) return @@ -636,88 +639,78 @@ def bulk_upload_finish_job(upload, row_count, success_count, draft_errors, appro approval_errors.sort() if not dry_run: upload.save() + notification_type = None + event_context = { + 'initiator_fullname': initiator.fullname, + 'auto_approval': auto_approval, + 'count': row_count, + 'pending_submissions_url': get_registration_provider_submissions_url(provider), + 'draft_errors': draft_errors, + 'approval_errors': approval_errors, + 'successes': success_count, + 'failures': len(draft_errors), + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + } + if upload.state == JobState.DONE_FULL: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_ALL, - fullname=initiator.fullname, - auto_approval=auto_approval, - count=row_count, - pending_submissions_url=get_registration_provider_submissions_url(provider), - ) + notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL elif upload.state == JobState.DONE_PARTIAL: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL, - fullname=initiator.fullname, - auto_approval=auto_approval, - total=row_count, - successes=success_count, - 
draft_errors=draft_errors, - approval_errors=approval_errors, - failures=len(draft_errors), - pending_submissions_url=get_registration_provider_submissions_url(provider), - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) + notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL elif upload.state == JobState.DONE_ERROR: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_FAILURE_ALL, - fullname=initiator.fullname, - count=row_count, - draft_errors=draft_errors, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) + notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL else: - message = f'Failed to send registration bulk upload outcome email due to invalid ' \ - f'upload state: [upload={upload.id}, state={upload.state.name}]' - logger.error(message) - sentry.log_message(message) + logger.error(f'Unexpected job state for upload [{upload.id}]: {upload.state.name}') return + + NotificationType.objects.get( + name=notification_type, + ).emit( + user=initiator, + event_context=event_context, + ) + upload.email_sent = timezone.now() upload.save() - logger.info(f'Email sent to bulk upload initiator [{initiator._id}]') + logger.info(f'Notification sent to bulk upload initiator [{initiator._id}]') def handle_internal_error(initiator=None, provider=None, message=None, dry_run=True): - """Log errors that happened due to unexpected bug and send emails the uploader (if available) - about failures. Product owner (if available) is informed as well with more details. Emails are - not sent during dry run. 
- """ - + """Log errors due to unexpected bugs and send notifications instead of direct emails.""" if not message: message = 'Registration bulk upload failure' logger.error(message) sentry.log_message(message) - if not dry_run: - if initiator: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE, - fullname=initiator.fullname, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) - inform_product_of_errors(initiator=initiator, provider=provider, message=message) - + if not dry_run and initiator: + NotificationType.objects.get( + name=NotificationType.Type.DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE, + ).emit( + user=initiator, + event_context={ + 'initiator_fullname': initiator.fullname, + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + 'message': message, + }, + ) + inform_product_of_errors(initiator=initiator, provider=provider, message=message) def inform_product_of_errors(initiator=None, provider=None, message=None): - """Inform product owner of internal errors. 
- """ - + """Inform product owner of internal errors via notifications.""" + email = settings.PRODUCT_OWNER_EMAIL_ADDRESS.get('Registration') if not email: logger.warning('Missing email for OSF Registration product owner.') return - if not message: - message = 'Bulk upload preparation failure' - user = f'{initiator._id}, {initiator.fullname}, {initiator.username}' if initiator else 'UNIDENTIFIED' + user_info = f'{initiator._id}, {initiator.fullname}, {initiator.username}' if initiator else 'UNIDENTIFIED' provider_name = provider.name if provider else 'UNIDENTIFIED' - mails.send_mail( - to_addr=email, - mail=mails.REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER, - message=message, - user=user, - provider_name=provider_name, + + NotificationType.objects.get( + name=NotificationType.Type.DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER, + ).emit( + user=type('mockuser', (), {'username': email}), + event_context={ + 'user': user_info, + 'provider_name': provider_name, + 'message': message, + }, ) diff --git a/api/registrations/urls.py b/api/registrations/urls.py index 66e5175b05b..232be823bb9 100644 --- a/api/registrations/urls.py +++ b/api/registrations/urls.py @@ -13,6 +13,7 @@ re_path(r'^(?P\w+)/$', views.RegistrationDetail.as_view(), name=views.RegistrationDetail.view_name), re_path(r'^(?P\w+)/bibliographic_contributors/$', views.RegistrationBibliographicContributorsList.as_view(), name=views.RegistrationBibliographicContributorsList.view_name), re_path(r'^(?P\w+)/cedar_metadata_records/$', views.RegistrationCedarMetadataRecordsList.as_view(), name=views.RegistrationCedarMetadataRecordsList.view_name), + re_path(r'^(?P\w+)/callbacks/$', views.RegistrationCallbackView.as_view(), name=views.RegistrationCallbackView.view_name), re_path(r'^(?P\w+)/children/$', views.RegistrationChildrenList.as_view(), name=views.RegistrationChildrenList.view_name), re_path(r'^(?P\w+)/comments/$', views.RegistrationCommentsList.as_view(), name=views.RegistrationCommentsList.view_name), 
re_path(r'^(?P\w+)/contributors/$', views.RegistrationContributorsList.as_view(), name=views.RegistrationContributorsList.view_name), diff --git a/api/registrations/views.py b/api/registrations/views.py index 8254ea69edf..a8d10d0602b 100644 --- a/api/registrations/views.py +++ b/api/registrations/views.py @@ -1,7 +1,13 @@ -from rest_framework import generics, mixins, permissions as drf_permissions +from rest_framework import generics, mixins, permissions as drf_permissions, status from rest_framework.exceptions import ValidationError, NotFound, PermissionDenied +from rest_framework.response import Response +from framework.exceptions import HTTPError from framework.auth.oauth_scopes import CoreScopes +from addons.base.views import DOWNLOAD_ACTIONS +from website.archiver import signals, ARCHIVER_NETWORK_ERROR, ARCHIVER_SUCCESS, ARCHIVER_FAILURE +from website.project import signals as project_signals + from osf.models import Registration, OSFUser, RegistrationProvider, OutcomeArtifact, CedarMetadataRecord from osf.utils.permissions import WRITE_NODE from osf.utils.workflows import ApprovalStates @@ -28,6 +34,7 @@ JSONAPIMultipleRelationshipsParser, JSONAPIRelationshipParserForRegularJSON, JSONAPIMultipleRelationshipsParserForRegularJSON, + HMACSignedParser, ) from api.base.utils import ( get_user_auth, @@ -1040,3 +1047,47 @@ def get_default_queryset(self): def get_queryset(self): return self.get_queryset_from_request() + + +class RegistrationCallbackView(JSONAPIBaseView, generics.UpdateAPIView, RegistrationMixin): + permission_classes = [drf_permissions.AllowAny] + + view_category = 'registrations' + view_name = 'registration-callbacks' + + parser_classes = [HMACSignedParser] + + def update(self, request, *args, **kwargs): + registration = self.get_node() + + try: + payload = request.data + if payload.get('action', None) in DOWNLOAD_ACTIONS: + return Response({'status': 'success'}, status=status.HTTP_200_OK) + errors = payload.get('errors') + src_provider = 
payload['source']['provider'] + if errors: + registration.archive_job.update_target( + src_provider, + ARCHIVER_FAILURE, + errors=errors, + ) + else: + # Dataverse requires two separate targets, one + # for draft files and one for published files + if src_provider == 'dataverse': + src_provider += '-' + (payload['destination']['name'].split(' ')[-1].lstrip('(').rstrip(')').strip()) + registration.archive_job.update_target( + src_provider, + ARCHIVER_SUCCESS, + ) + project_signals.archive_callback.send(registration) + return Response(status=status.HTTP_200_OK) + except HTTPError as e: + registration.archive_status = ARCHIVER_NETWORK_ERROR + registration.save() + signals.archive_fail.send( + registration, + errors=[str(e)], + ) + return Response(status=status.HTTP_200_OK) diff --git a/api/requests/serializers.py b/api/requests/serializers.py index 08a574e38ce..b452380d893 100644 --- a/api/requests/serializers.py +++ b/api/requests/serializers.py @@ -15,11 +15,11 @@ PreprintRequest, Institution, OSFUser, + NotificationType, ) from osf.utils.workflows import DefaultStates, RequestTypes, NodeRequestTypes from osf.utils import permissions as osf_permissions from website import language, settings -from website.mails import send_mail, NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST from rest_framework.exceptions import PermissionDenied, ValidationError @@ -188,18 +188,20 @@ def make_node_institutional_access_request(self, node, validated_data) -> NodeRe comment = validated_data.get('comment', '').strip() or language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT - send_mail( - to_addr=recipient.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + NotificationType.objects.get(name=NotificationType.Type.NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST.value).emit( user=recipient, - sender=sender, - bcc_addr=[sender.username] if validated_data['bcc_sender'] else None, - reply_to=sender.username if validated_data['reply_to'] else None, - recipient=recipient, - comment=comment, - 
institution=institution, - osf_url=settings.DOMAIN, - node=node_request.target, + subscribed_object=node_request.target, + event_context={ + 'user': recipient.id, + 'sender': sender.id, + 'recipient': recipient.id, + 'comment': comment, + 'institution': institution.id, + 'osf_url': settings.DOMAIN, + 'node': node_request.target.id, + 'bcc_sender': validated_data['bcc_sender'], + 'reply_to': sender.username if validated_data['reply_to'] else None, + }, ) return node_request diff --git a/api/share/utils.py b/api/share/utils.py index 1178adfa85d..5083220ce7f 100644 --- a/api/share/utils.py +++ b/api/share/utils.py @@ -2,14 +2,9 @@ SHARE/Trove accepts metadata records as "indexcards" in turtle format: https://www.w3.org/TR/turtle/ """ -from functools import partial from http import HTTPStatus import logging -import random -from urllib.parse import urljoin -import uuid -from celery.exceptions import Retry from django.apps import apps import requests @@ -17,7 +12,6 @@ from framework.celery_tasks.handlers import enqueue_task from framework.encryption import ensure_bytes from framework.sentry import log_exception -from osf import models as osf_db from osf.metadata.osf_gathering import ( OsfmapPartition, pls_get_magic_metadata_basket, @@ -33,10 +27,6 @@ def shtrove_ingest_url(): return f'{settings.SHARE_URL}trove/ingest' -def sharev2_push_url(): - return f'{settings.SHARE_URL}api/v2/normalizeddata/' - - def is_qa_resource(resource): """ QA puts tags and special titles on their project to stop them from appearing in the search results. 
This function @@ -70,8 +60,6 @@ def _enqueue_update_share(osfresource): logger.warning(f'update_share skipping resource that has no guids: {osfresource}') return enqueue_task(task__update_share.s(_osfguid_value)) - if isinstance(osfresource, (osf_db.AbstractNode, osf_db.Preprint)): - enqueue_task(async_update_resource_share.s(_osfguid_value)) @celery_app.task( @@ -212,398 +200,3 @@ def _next_osfmap_partition(partition: OsfmapPartition) -> OsfmapPartition | None return OsfmapPartition.MONTHLY_SUPPLEMENT case _: return None - - -### -# BEGIN soon-to-be-deleted (🤞) legacy sharev2 push -# (until dust has settled on iri-centric (rdf-based) search) -"""Utilities for pushing metadata to SHARE - -SHARE uses a specific dialect of JSON-LD that could/should have been -an internal implementation detail, but for historical reasons OSF must -be aware of it in order to push metadata updates to SHARE -- hopefully, -that awareness is contained entirely within this file. - -WARNING: In this context, "graph node" does NOT have anything to do with -OSF's `Node` model, but instead refers to a "node object" within a JSON-LD -graph, as defined at https://www.w3.org/TR/json-ld11/#dfn-node-object - -Each graph node must contain '@id' and '@type', plus other key/value pairs -according to the "SHARE schema": -https://github.com/CenterForOpenScience/SHARE/blob/develop/share/schema/schema-spec.yaml - -In this case, '@id' will always be a "blank" identifier, which begins with '_:' -and is used only to define relationships between nodes in the graph -- nodes -may reference each other with @id/@type pairs -- -e.g. 
{'@id': '...', '@type': '...'} - -Example serialization: The following SHARE-style JSON-LD document represents a -preprint with one "creator" and one identifier -- the graph contains nodes for -the preprint, person, and identifier, plus another node representing the -"creator" relationship between the preprint and person: -``` -{ - 'central_node_id': '_:foo', - '@graph': [ - { - '@id': '_:foo', - '@type': 'preprint', - 'title': 'This is a preprint!', - }, - { - '@id': '_:bar', - '@type': 'workidentifier', - 'uri': 'https://osf.io/foobar/', - 'creative_work': {'@id': '_:foo', '@type': 'preprint'} - }, - { - '@id': '_:baz', - '@type': 'person', - 'name': 'Magpie Jones' - }, - { - '@id': '_:qux', - '@type': 'creator', - 'creative_work': {'@id': '_:foo', '@type': 'preprint'}, - 'agent': {'@id': '_:baz', '@type': 'person'} - } - ] -} -``` -""" - - -class GraphNode: - """Utility class for building a JSON-LD graph suitable for pushing to SHARE - - WARNING: In this context, "graph node" does NOT have anything to do with - OSF's `Node` model, but instead refers to a "node object" within a JSON-LD - graph, as defined at https://www.w3.org/TR/json-ld11/#dfn-node-object - """ - - @staticmethod - def serialize_graph(central_graph_node, all_graph_nodes): - """Serialize the mess of GraphNodes to a JSON-friendly dictionary - :param central_graph_node: the GraphNode for the preprint/node/registration - this graph is most "about" - :param all_graph_nodes: list of GraphNodes to include -- will also recursively - look for and include GraphNodes contained in attrs - """ - to_visit = [central_graph_node, *all_graph_nodes] # make a copy of the list - visited = set() - while to_visit: - n = to_visit.pop(0) - if n not in visited: - visited.add(n) - to_visit.extend(n.get_related()) - - return { - 'central_node_id': central_graph_node.id, - '@graph': [node.serialize() for node in visited], - } - - @property - def ref(self): - return {'@id': self.id, '@type': self.type} - - def __init__(self, 
type_, **attrs): - self.id = f'_:{uuid.uuid4()}' - self.type = type_.lower() - self.attrs = attrs - - def get_related(self): - for value in self.attrs.values(): - if isinstance(value, GraphNode): - yield value - elif isinstance(value, list): - yield from value - - def serialize(self): - ser = {} - for key, value in self.attrs.items(): - if isinstance(value, GraphNode): - ser[key] = value.ref - elif isinstance(value, list) or value in (None, '', {}): - continue - else: - ser[key] = value - - return dict(self.ref, **ser) - - -def format_user(user): - person = GraphNode( - 'person', **{ - 'name': user.fullname, - 'suffix': user.suffix, - 'given_name': user.given_name, - 'family_name': user.family_name, - 'additional_name': user.middle_names, - }, - ) - - person.attrs['identifiers'] = [GraphNode('agentidentifier', agent=person, uri=user.absolute_url)] - - if user.external_identity.get('ORCID') and list(user.external_identity['ORCID'].values())[0] == 'VERIFIED': - person.attrs['identifiers'].append(GraphNode('agentidentifier', agent=person, uri=list(user.external_identity['ORCID'].keys())[0])) - - person.attrs['related_agents'] = [GraphNode('isaffiliatedwith', subject=person, related=GraphNode('institution', name=institution.name)) for institution in user.get_affiliated_institutions()] - - return person - - -def format_bibliographic_contributor(work_node, user, index): - return GraphNode( - 'creator', - agent=format_user(user), - order_cited=index, - creative_work=work_node, - cited_as=user.fullname, - ) - - -def format_subject(subject, context=None): - if context is None: - context = {} - if subject is None: - return None - if subject.id in context: - return context[subject.id] - context[subject.id] = GraphNode( - 'subject', - name=subject.text, - uri=subject.absolute_api_v2_url, - ) - context[subject.id].attrs['parent'] = format_subject(subject.parent, context) - context[subject.id].attrs['central_synonym'] = format_subject(subject.bepress_subject, context) - return 
context[subject.id] - - -def send_share_json(resource, data): - """POST metadata to SHARE, using the provider for the given resource. - """ - if getattr(resource, 'provider') and resource.provider.access_token: - access_token = resource.provider.access_token - else: - access_token = settings.SHARE_API_TOKEN - - return requests.post( - sharev2_push_url(), - json=data, - headers={ - 'Authorization': f'Bearer {access_token}', - 'Content-Type': 'application/vnd.api+json', - }, - ) - - -def serialize_share_data(resource, old_subjects=None): - """Build a request payload to send Node/Preprint/Registration metadata to SHARE. - :param resource: either a Node, Preprint or Registration - :param old_subjects: - - :return: JSON-serializable dictionary of the resource's metadata, good for POSTing to SHARE - """ - from osf.models import ( - Node, - DraftNode, - Preprint, - Registration, - ) - suid = None - if isinstance(resource, Preprint): - # old_subjects is only used for preprints and should be removed as soon as SHARE - # is fully switched over to the non-mergy pipeline (see ENG-2098) - serializer = partial(serialize_preprint, old_subjects=old_subjects) - suid = resource.get_guid()._id - elif isinstance(resource, Node): - serializer = serialize_osf_node - elif isinstance(resource, Registration): - serializer = serialize_registration - elif isinstance(resource, DraftNode): - return {} - else: - raise NotImplementedError() - - return { - 'data': { - 'type': 'NormalizedData', - 'attributes': { - 'tasks': [], - 'raw': None, - 'suid': resource._id if not suid else suid, - 'data': serializer(resource), - }, - }, - } - - -def serialize_preprint(preprint, old_subjects=None): - if old_subjects is None: - old_subjects = [] - from osf.models import Subject - old_subjects = [Subject.objects.get(id=s) for s in old_subjects] - preprint_graph = GraphNode( - preprint.provider.share_publish_type, **{ - 'title': preprint.title, - 'description': preprint.description or '', - 'is_deleted': ( - 
(not preprint.verified_publishable and not preprint.is_retracted) - or preprint.is_spam - or preprint.is_deleted - or is_qa_resource(preprint) - ), - 'date_updated': preprint.modified.isoformat(), - 'date_published': preprint.date_published.isoformat() if preprint.date_published else None, - }, - ) - to_visit = [ - preprint_graph, - GraphNode('workidentifier', creative_work=preprint_graph, uri=urljoin(settings.DOMAIN, preprint._id + '/')), - ] - - doi = preprint.get_identifier_value('doi') - if doi: - to_visit.append(GraphNode('workidentifier', creative_work=preprint_graph, uri=f'{settings.DOI_URL_PREFIX}{doi}')) - - if preprint.provider.domain_redirect_enabled: - to_visit.append(GraphNode('workidentifier', creative_work=preprint_graph, uri=preprint.absolute_url)) - - if preprint.article_doi: - # Article DOI refers to a clone of this preprint on another system and therefore does not qualify as an identifier for this preprint - related_work = GraphNode('creativework') - to_visit.append(GraphNode('workrelation', subject=preprint_graph, related=related_work)) - to_visit.append(GraphNode('workidentifier', creative_work=related_work, uri=f'{settings.DOI_URL_PREFIX}{preprint.article_doi}')) - - preprint_graph.attrs['tags'] = [ - GraphNode('throughtags', creative_work=preprint_graph, tag=GraphNode('tag', name=tag)) - for tag in preprint.tags.values_list('name', flat=True) if tag - ] - - current_subjects = [ - GraphNode('throughsubjects', creative_work=preprint_graph, is_deleted=False, subject=format_subject(s)) - for s in preprint.subjects.all() - ] - deleted_subjects = [ - GraphNode('throughsubjects', creative_work=preprint_graph, is_deleted=True, subject=format_subject(s)) - for s in old_subjects if not preprint.subjects.filter(id=s.id).exists() - ] - preprint_graph.attrs['subjects'] = current_subjects + deleted_subjects - - to_visit.extend(format_bibliographic_contributor(preprint_graph, user, i) for i, user in enumerate(preprint.visible_contributors)) - - return 
GraphNode.serialize_graph(preprint_graph, to_visit) - -def format_node_lineage(child_osf_node, child_graph_node): - parent_osf_node = child_osf_node.parent_node - if not parent_osf_node: - return [] - parent_graph_node = GraphNode('registration', title=parent_osf_node.title) - return [ - parent_graph_node, - GraphNode('workidentifier', creative_work=parent_graph_node, uri=urljoin(settings.DOMAIN, parent_osf_node.url)), - GraphNode('ispartof', subject=child_graph_node, related=parent_graph_node), - *format_node_lineage(parent_osf_node, parent_graph_node), - ] - -def serialize_registration(registration): - return serialize_osf_node( - registration, - additional_attrs={ - 'date_published': registration.registered_date.isoformat() if registration.registered_date else None, - 'registration_type': registration.registered_schema.first().name if registration.registered_schema.exists() else None, - 'justification': registration.retraction.justification if registration.retraction else None, - 'withdrawn': registration.is_retracted, - 'extra': {'osf_related_resource_types': _get_osf_related_resource_types(registration)}, - }, - ) - -def _get_osf_related_resource_types(registration): - from osf.models import OutcomeArtifact - from osf.utils.outcomes import ArtifactTypes - artifacts = OutcomeArtifact.objects.for_registration(registration).filter(finalized=True, deleted__isnull=True) - return { - artifact_type.name.lower(): artifacts.filter(artifact_type=artifact_type).exists() - for artifact_type in ArtifactTypes.public_types() - } - -def serialize_osf_node(osf_node, additional_attrs=None): - if osf_node.provider: - share_publish_type = osf_node.provider.share_publish_type - else: - share_publish_type = 'project' - - graph_node = GraphNode( - share_publish_type, **{ - 'title': osf_node.title, - 'description': osf_node.description or '', - 'is_deleted': ( - not osf_node.is_public - or osf_node.is_deleted - or osf_node.is_spam - or is_qa_resource(osf_node) - ), - 
**(additional_attrs or {}), - }, - ) - - to_visit = [ - graph_node, - GraphNode('workidentifier', creative_work=graph_node, uri=urljoin(settings.DOMAIN, osf_node.url)), - ] - - doi = osf_node.get_identifier_value('doi') - if doi: - to_visit.append(GraphNode('workidentifier', creative_work=graph_node, uri=f'{settings.DOI_URL_PREFIX}{doi}')) - - graph_node.attrs['tags'] = [ - GraphNode('throughtags', creative_work=graph_node, tag=GraphNode('tag', name=tag._id)) - for tag in osf_node.tags.all() - ] - - graph_node.attrs['subjects'] = [ - GraphNode('throughsubjects', creative_work=graph_node, subject=format_subject(s)) - for s in osf_node.subjects.all() - ] - - to_visit.extend(format_bibliographic_contributor(graph_node, user, i) for i, user in enumerate(osf_node.visible_contributors)) - to_visit.extend(GraphNode('AgentWorkRelation', creative_work=graph_node, agent=GraphNode('institution', name=institution.name)) for institution in osf_node.affiliated_institutions.all()) - - to_visit.extend(format_node_lineage(osf_node, graph_node)) - - return GraphNode.serialize_graph(graph_node, to_visit) - - -@celery_app.task(bind=True, max_retries=4, acks_late=True) -def async_update_resource_share(self, guid, old_subjects=None): - """ - This function updates share takes Preprints, Projects and Registrations. 
- :param self: - :param guid: - :return: - """ - AbstractNode = apps.get_model('osf.AbstractNode') - resource = AbstractNode.load(guid) - if not resource: - Preprint = apps.get_model('osf.Preprint') - resource = Preprint.load(guid) - - data = serialize_share_data(resource, old_subjects) - resp = send_share_json(resource, data) - try: - resp.raise_for_status() - except Exception as e: - if self.request.retries == self.max_retries: - log_exception(e) - elif resp.status_code >= 500: - try: - self.retry( - exc=e, - countdown=(random.random() + 1) * min(60 + settings.CELERY_RETRY_BACKOFF_BASE ** self.request.retries, 60 * 10), - ) - except Retry as e: # Retry is only raise after > 5 retries - log_exception(e) - else: - log_exception(e) - - return resp diff --git a/api/subscriptions/fields.py b/api/subscriptions/fields.py new file mode 100644 index 00000000000..c26ffaf5d4e --- /dev/null +++ b/api/subscriptions/fields.py @@ -0,0 +1,12 @@ +from rest_framework import serializers as ser +from osf.models import NotificationSubscription + +class FrequencyField(ser.ChoiceField): + def __init__(self, **kwargs): + super().__init__(choices=['none', 'instantly', 'daily', 'weekly', 'monthly'], **kwargs) + + def to_representation(self, obj: NotificationSubscription): + return obj.message_frequency + + def to_internal_value(self, freq): + return super().to_internal_value(freq) diff --git a/api/subscriptions/permissions.py b/api/subscriptions/permissions.py index 19dc7bcbd58..53e97445643 100644 --- a/api/subscriptions/permissions.py +++ b/api/subscriptions/permissions.py @@ -1,13 +1,7 @@ from rest_framework import permissions - -from osf.models.notifications import NotificationSubscription - +from osf.models import NotificationSubscription class IsSubscriptionOwner(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - assert isinstance(obj, NotificationSubscription), f'obj must be a NotificationSubscription; got {obj}' - user_id = request.user.id - 
return obj.none.filter(id=user_id).exists() \ - or obj.email_transactional.filter(id=user_id).exists() \ - or obj.email_digest.filter(id=user_id).exists() + assert isinstance(obj, NotificationSubscription), f'obj must be NotificationSubscription; got {obj}' + return obj.user == request.user diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index da7aadbb1a4..57dab5931ef 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -1,35 +1,12 @@ from rest_framework import serializers as ser -from rest_framework.exceptions import ValidationError +from api.base.serializers import JSONAPISerializer, LinksField +from website.util import api_v2_url +from .fields import FrequencyField from api.nodes.serializers import RegistrationProviderRelationshipField from api.collections_providers.fields import CollectionProviderRelationshipField from api.preprints.serializers import PreprintProviderRelationshipField -from website.util import api_v2_url - - -from api.base.serializers import JSONAPISerializer, LinksField - -NOTIFICATION_TYPES = { - 'none': 'none', - 'instant': 'email_transactional', - 'daily': 'email_digest', -} -class FrequencyField(ser.Field): - def to_representation(self, obj): - user_id = self.context['request'].user.id - if obj.email_transactional.filter(id=user_id).exists(): - return 'instant' - if obj.email_digest.filter(id=user_id).exists(): - return 'daily' - return 'none' - - def to_internal_value(self, frequency): - notification_type = NOTIFICATION_TYPES.get(frequency) - if notification_type: - return {'notification_type': notification_type} - raise ValidationError(f'Invalid frequency "{frequency}"') - class SubscriptionSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'id', @@ -47,7 +24,7 @@ class Meta: type_ = 'subscription' def get_absolute_url(self, obj): - return obj.absolute_api_v2_url + return api_v2_url(f'subscriptions/{obj.pk}') def update(self, instance, validated_data): user = 
self.context['request'].user @@ -55,46 +32,44 @@ def update(self, instance, validated_data): instance.add_user_to_subscription(user, notification_type, save=True) return instance - class RegistrationSubscriptionSerializer(SubscriptionSerializer): provider = RegistrationProviderRelationshipField( related_view='providers:registration-providers:registration-provider-detail', - related_view_kwargs={'provider_id': ''}, + related_view_kwargs={'provider_id': ''}, read_only=False, required=False, ) def get_absolute_url(self, obj): - return api_v2_url(f'registration_subscriptions/{obj._id}') + return api_v2_url(f'registration_subscriptions/{obj.pk}') class Meta: type_ = 'registration-subscription' - class CollectionSubscriptionSerializer(SubscriptionSerializer): provider = CollectionProviderRelationshipField( related_view='providers:collection-providers:collection-provider-detail', - related_view_kwargs={'provider_id': ''}, + related_view_kwargs={'provider_id': ''}, read_only=False, required=False, ) def get_absolute_url(self, obj): - return api_v2_url(f'collection_subscriptions/{obj._id}') + return api_v2_url(f'collection_subscriptions/{obj.pk}') class Meta: type_ = 'collection-subscription' - class PreprintSubscriptionSerializer(SubscriptionSerializer): provider = PreprintProviderRelationshipField( related_view='providers:preprint-providers:preprint-provider-detail', - related_view_kwargs={'provider_id': ''}, + related_view_kwargs={'provider_id': ''}, read_only=False, + required=False, ) def get_absolute_url(self, obj): - return api_v2_url(f'preprints_subscriptions/{obj._id}') + return api_v2_url(f'preprints_subscriptions/{obj.pk}') class Meta: type_ = 'preprint-subscription' diff --git a/api/subscriptions/views.py b/api/subscriptions/views.py index c1d7e833b49..ac736612e74 100644 --- a/api/subscriptions/views.py +++ b/api/subscriptions/views.py @@ -1,28 +1,22 @@ -from rest_framework import generics -from rest_framework import permissions as drf_permissions +from 
api.base.filters import ListFilterMixin +from api.subscriptions.serializers import SubscriptionSerializer +from osf.models.notification import NotificationSubscription +from django.contrib.contenttypes.models import ContentType +from rest_framework import generics, permissions as drf_permissions from rest_framework.exceptions import NotFound -from django.core.exceptions import ObjectDoesNotExist -from django.db.models import Q +from django.shortcuts import get_object_or_404 + +from osf.models import AbstractProvider, CollectionProvider, PreprintProvider, RegistrationProvider -from framework.auth.oauth_scopes import CoreScopes from api.base.views import JSONAPIBaseView -from api.base.filters import ListFilterMixin from api.base import permissions as base_permissions +from api.subscriptions.permissions import IsSubscriptionOwner from api.subscriptions.serializers import ( - SubscriptionSerializer, CollectionSubscriptionSerializer, PreprintSubscriptionSerializer, RegistrationSubscriptionSerializer, ) -from api.subscriptions.permissions import IsSubscriptionOwner -from osf.models import ( - NotificationSubscription, - CollectionProvider, - PreprintProvider, - RegistrationProvider, - AbstractProvider, -) - +from framework.auth.oauth_scopes import CoreScopes class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): view_name = 'notification-subscription-list' @@ -37,32 +31,28 @@ class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): required_read_scopes = [CoreScopes.SUBSCRIPTIONS_READ] required_write_scopes = [CoreScopes.NULL] - def get_default_queryset(self): - user = self.request.user - return NotificationSubscription.objects.filter( - Q(none=user) | - Q(email_digest=user) | - Q( - email_transactional=user, - ), - ).distinct() - def get_queryset(self): - return self.get_queryset_from_request() - + return NotificationSubscription.objects.filter(user=self.request.user) class 
AbstractProviderSubscriptionList(SubscriptionList): - def get_default_queryset(self): - user = self.request.user - return NotificationSubscription.objects.filter( - provider___id=self.kwargs['provider_id'], - provider__type=self.provider_class._typedmodels_type, - ).filter( - Q(none=user) | - Q(email_digest=user) | - Q(email_transactional=user), - ).distinct() + permission_classes = ( + drf_permissions.IsAuthenticated, + base_permissions.TokenHasScope, + ) + required_read_scopes = [CoreScopes.SUBSCRIPTIONS_READ] + provider_class = None + serializer_class = None + + def get_queryset(self): + assert issubclass(self.provider_class, AbstractProvider), 'Must set provider_class to an AbstractProvider subclass' + provider_id = self.kwargs.get('provider_id') + provider = get_object_or_404(self.provider_class, _id=provider_id) + return NotificationSubscription.objects.filter( + user=self.request.user, + content_type=ContentType.objects.get_for_model(self.provider_class), + object_id=provider.id, + ) class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView): view_name = 'notification-subscription-detail' @@ -78,13 +68,12 @@ class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView): required_write_scopes = [CoreScopes.SUBSCRIPTIONS_WRITE] def get_object(self): - subscription_id = self.kwargs['subscription_id'] try: - obj = NotificationSubscription.objects.get(_id=subscription_id) - except ObjectDoesNotExist: + sub = NotificationSubscription.objects.get(pk=self.kwargs['pk']) + except NotificationSubscription.DoesNotExist: raise NotFound - self.check_object_permissions(self.request, obj) - return obj + self.check_object_permissions(self.request, sub) + return sub class AbstractProviderSubscriptionDetail(SubscriptionDetail): @@ -105,26 +94,26 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def get_object(self): - subscription_id = self.kwargs['subscription_id'] - if self.kwargs.get('provider_id'): - provider = 
self.provider_class.objects.get(_id=self.kwargs.get('provider_id')) - try: - obj = NotificationSubscription.objects.get( - _id=subscription_id, - provider_id=provider.id, - ) - except ObjectDoesNotExist: - raise NotFound - else: - try: - obj = NotificationSubscription.objects.get( - _id=subscription_id, - provider__type=self.provider_class._typedmodels_type, - ) - except ObjectDoesNotExist: - raise NotFound - self.check_object_permissions(self.request, obj) - return obj + assert issubclass(self.provider_class, AbstractProvider), 'Must set provider_class to an AbstractProvider subclass' + + subscription_id = self.kwargs.get('pk') + provider_id = self.kwargs.get('provider_id') + + # Get provider + provider = get_object_or_404(self.provider_class, _id=provider_id) + content_type = ContentType.objects.get_for_model(self.provider_class) + + try: + sub = NotificationSubscription.objects.get( + pk=subscription_id, + content_type=content_type, + object_id=provider.id, + ) + except NotificationSubscription.DoesNotExist: + raise NotFound + + self.check_object_permissions(self.request, sub) + return sub class CollectionProviderSubscriptionDetail(AbstractProviderSubscriptionDetail): diff --git a/api/users/views.py b/api/users/views.py index 4da2f5102d2..4a55e1f9e01 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -100,9 +100,9 @@ OSFGroup, OSFUser, Email, - Tag, + Tag, NotificationType, ) -from website import mails, settings, language +from website import settings, language from website.project.views.contributor import send_claim_email, send_claim_registered_email from website.util.metrics import CampaignClaimedTags, CampaignSourceTags from framework.auth import exceptions @@ -681,11 +681,14 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) user = self.get_user() - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.REQUEST_EXPORT, + 
NotificationType.objects.get( + name=NotificationType.Type.USER_REQUEST_EXPORT, + ).emit( user=user, - can_change_preferences=False, + event_context={ + 'can_change_preferences': False, + }, + ) user.email_last_sent = timezone.now() user.save() @@ -865,7 +868,6 @@ def get(self, request, *args, **kwargs): raise ValidationError('Request must include email in query params.') institutional = bool(request.query_params.get('institutional', None)) - mail_template = mails.FORGOT_PASSWORD if not institutional else mails.FORGOT_PASSWORD_INSTITUTION status_message = language.RESET_PASSWORD_SUCCESS_STATUS_MESSAGE.format(email=email) kind = 'success' @@ -885,12 +887,21 @@ def get(self, request, *args, **kwargs): user_obj.email_last_sent = timezone.now() user_obj.save() reset_link = f'{settings.RESET_PASSWORD_URL}{user_obj._id}/{user_obj.verification_key_v2['token']}/' - mails.send_mail( - to_addr=email, - mail=mail_template, - reset_link=reset_link, - can_change_preferences=False, + if institutional: + notification_type = NotificationType.Type.USER_FORGOT_PASSWORD_INSTITUTION + else: + notification_type = NotificationType.Type.USER_FORGOT_PASSWORD + + NotificationType.objects.get( + name=notification_type.value, + ).emit( + user=user_obj, + event_context={ + 'reset_link': reset_link, + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + }, ) + return Response(status=status.HTTP_200_OK, data={'message': status_message, 'kind': kind, 'institutional': institutional}) @method_decorator(csrf_protect) @@ -1273,12 +1284,16 @@ def post(self, request, *args, **kwargs): if external_status == 'CREATE': service_url += '&{}'.format(urlencode({'new': 'true'})) elif external_status == 'LINK': - mails.send_mail( + NotificationType.objects.get( + name=NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS, + ).emit( user=user, - to_addr=user.username, - mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS, - external_id_provider=provider, - can_change_preferences=False, + event_context={ + 'provider': 
provider, + 'provider_id': provider_id, + 'service_url': service_url, + 'can_change_preferences': False, + }, ) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) diff --git a/api_tests/base/test_utils.py b/api_tests/base/test_utils.py index 51ff6611da0..32429fdf0f7 100644 --- a/api_tests/base/test_utils.py +++ b/api_tests/base/test_utils.py @@ -6,8 +6,11 @@ from rest_framework import fields from rest_framework.exceptions import ValidationError -from api.base import utils as api_utils +from api.base import utils as api_utils +from osf.models.base import coerce_guid, Guid, GuidMixin, OptionalGuidMixin, VersionedGuidMixin, InvalidGuid +from osf_tests.factories import ProjectFactory, PreprintFactory +from tests.test_websitefiles import TestFile from framework.status import push_status_message SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @@ -100,3 +103,67 @@ def test_push_status_message_unexpected_error(self, mock_get_session): 'Unexpected Exception from push_status_message when called ' 'from the v2 API with type "error"' ) + + +@pytest.mark.django_db +class TestCoerceGuid: + + def test_guid_instance(self): + project = ProjectFactory() + assert isinstance(project.guids.first(), Guid) + assert coerce_guid(project.guids.first()) == project.guids.first() + + def test_versioned_guid_instance(self): + preprint = PreprintFactory() + assert isinstance(preprint, VersionedGuidMixin) + assert coerce_guid(preprint) == preprint.versioned_guids.first().guid + + def test_guid_mixin_instance(self): + project = ProjectFactory() + assert isinstance(project, GuidMixin) + assert coerce_guid(project._id) == project.guids.first() + + def test_str_guid_instance(self): + project = ProjectFactory() + str_guid = str(project._id) + guid = coerce_guid(str_guid) + assert isinstance(guid, Guid) + assert guid == project.guids.first() + + def test_incorrect_str_guid_instance(self): + incorrect_guid = '12345' + with 
pytest.raises(InvalidGuid, match='guid does not exist'): + assert coerce_guid(incorrect_guid) + + def test_optional_guid_instance(self): + node = ProjectFactory() + test_file = TestFile( + _path='anid', + name='name', + target=node, + provider='test', + materialized_path='/long/path/to/name', + ) + test_file.save() + test_file.get_guid(create=True) + assert isinstance(test_file, OptionalGuidMixin) + assert coerce_guid(test_file) == test_file.guids.first() + + def test_incorrect_optional_guid_instance(self): + node = ProjectFactory() + test_file = TestFile( + _path='anid', + name='name', + target=node, + provider='test', + materialized_path='/long/path/to/name', + ) + test_file.save() + assert isinstance(test_file, OptionalGuidMixin) + with pytest.raises(InvalidGuid, match='guid does not exist'): + assert coerce_guid(test_file) + + def test_invalid_guid(self): + incorrect_guid = 12345 + with pytest.raises(InvalidGuid, match='cannot coerce'): + assert coerce_guid(incorrect_guid) diff --git a/api_tests/base/test_views.py b/api_tests/base/test_views.py index 2db3a4b65b2..b09df8d753c 100644 --- a/api_tests/base/test_views.py +++ b/api_tests/base/test_views.py @@ -18,6 +18,7 @@ MetricsOpenapiView, ) from api.users.views import ClaimUser, ResetPassword, ExternalLoginConfirmEmailView, ExternalLogin +from api.registrations.views import RegistrationCallbackView from api.wb.views import MoveFileMetadataView, CopyFileMetadataView from rest_framework.permissions import IsAuthenticatedOrReadOnly, IsAuthenticated from api.base.permissions import TokenHasScope @@ -63,6 +64,7 @@ def setUp(self): ResetPassword, ExternalLoginConfirmEmailView, ExternalLogin, + RegistrationCallbackView, ] def test_root_returns_200(self): diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index 1126af09ad3..138a0eb21ff 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ 
b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -7,7 +7,7 @@ from api.base.settings.defaults import API_BASE from osf.migrations import ensure_invisible_and_inactive_schema -from osf.models import DraftRegistration, NodeLicense, RegistrationProvider, RegistrationSchema +from osf.models import DraftRegistration, NodeLicense, RegistrationProvider, RegistrationSchema, NotificationType from osf_tests.factories import ( RegistrationFactory, CollectionFactory, @@ -17,6 +17,7 @@ DraftRegistrationFactory, ) from osf.utils.permissions import READ, WRITE, ADMIN +from tests.utils import capture_notifications from website import mails, settings @@ -433,22 +434,19 @@ def test_admin_can_create_draft( assert draft.has_permission(user, ADMIN) is True def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): - # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor - with mock.patch.object(mails, 'send_mail') as mock_send_mail: + + with capture_notifications() as notifications: resp = app.post_json_api( f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth ) - assert mock_send_mail.called + assert len(notifications) == 1 - # Python 3.6 does not support mock.call_args.args/kwargs - # Instead, mock.call_args[0] is positional args, mock.call_args[1] is kwargs - # (note, this is compatible with later versions) - mock_send_kwargs = mock_send_mail.call_args[1] - assert mock_send_kwargs['mail'] == mails.CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + mock_send_kwargs = notifications[0]['kwargs'] + assert mock_send_kwargs['mail'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION assert mock_send_kwargs['user'] == user - assert mock_send_kwargs['node'] == DraftRegistration.load(resp.json['data']['id']) + assert mock_send_kwargs['event_context']['draft_registration'] == resp.json['data']['id'] def test_create_draft_with_provider( self, 
app, user, url_draft_registrations, non_default_provider, payload_with_non_default_provider diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index 107cd5ac054..0fb10c239a3 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -7,7 +6,7 @@ CollectionProviderFactory, ) from osf.utils import permissions -from osf_tests.utils import _ensure_subscriptions +from tests.utils import capture_notifications @pytest.fixture() @@ -19,7 +18,6 @@ def url(provider): def provider(): provider = CollectionProviderFactory() provider.update_group_permissions() - _ensure_subscriptions(provider) return provider @@ -92,75 +90,80 @@ def test_GET_admin_with_filter(self, app, url, nonmoderator, moderator, admin, p @pytest.mark.django_db class TestPOSTCollectionsModeratorList: - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_unauthorized(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_POST_unauthorized(self, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post(url, payload, expect_errors=True) + with capture_notifications() as notifications: + res = app.post(url, payload, expect_errors=True) assert res.status_code == 401 - assert mock_mail.call_count == 0 + assert not notifications - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_POST_forbidden(self, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post(url, 
payload, auth=nonmoderator.auth, expect_errors=True) - assert res.status_code == 403 + with capture_notifications() as notifications: + res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) + assert res.status_code == 403 - res = app.post(url, payload, auth=moderator.auth, expect_errors=True) - assert res.status_code == 403 + res = app.post(url, payload, auth=moderator.auth, expect_errors=True) + assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert not notifications - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_existing_user(self, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') - - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_mail.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): + assert len(notifications) == 1 + + def test_POST_admin_failure_existing_moderator(self, app, url, moderator, admin, provider): payload = make_payload(user_id=moderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, 
admin, provider): + assert not notifications + + def test_POST_admin_failure_unreg_moderator(self, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Jalen Hurts', 'email': '1eagles@allbatman.org'} # test_user_with_no_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert not notifications - # test_user_with_moderator_admin_permissions + def test_user_with_moderator_admin_permissions(self, app, url, moderator, nonmoderator, admin, provider): + unreg_user = {'full_name': 'Slay', 'email': 'bigplayslay@allbatman.org'} payload = make_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - assert mock_mail.call_args[0][0] == unreg_user['email'] + assert len(notifications) == 1 + email = notifications[0]['kwargs']['user'].email + assert email == unreg_user['email'] - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_failure_invalid_group(self, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='citizen') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 - 
@mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + assert not notifications + + def test_POST_admin_success_email(self, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_mail.call_count == 1 + assert len(notifications) == 1 def test_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Flecher Cox' diff --git a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py index fbcfd32a99b..edcf4543989 100644 --- a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py +++ b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py @@ -1,12 +1,13 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import ( AuthUserFactory, PreprintProviderFactory, ) from osf.utils import permissions +from tests.utils import capture_notifications class ProviderModeratorListTestClass: @@ -68,70 +69,87 @@ def test_list_get_admin_with_filter(self, app, url, nonmoderator, moderator, adm assert res.json['data'][0]['id'] == admin._id assert res.json['data'][0]['attributes']['permission_group'] == permissions.ADMIN - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_unauthorized(self, mock_mail, app, url, 
nonmoderator, moderator, provider): + def test_list_post_unauthorized(self, app, url, nonmoderator, moderator, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post(url, payload, expect_errors=True) - assert res.status_code == 401 + with capture_notifications() as notifications: + res = app.post(url, payload, expect_errors=True) - res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) - assert res.status_code == 403 + assert res.status_code == 401 - res = app.post(url, payload, auth=moderator.auth, expect_errors=True) - assert res.status_code == 403 + res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) + assert res.status_code == 403 + + res = app.post(url, payload, auth=moderator.auth, expect_errors=True) + assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert not notifications - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_success_existing_user(self, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) - assert res.status_code == 201 - assert res.json['data']['id'] == nonmoderator._id - assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_mail.call_count == 1 + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) + assert res.status_code == 201 + assert res.json['data']['id'] == nonmoderator._id + assert res.json['data']['attributes']['permission_group'] == 'moderator' + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - @mock.patch('framework.auth.views.mails.send_mail') - def 
test_list_post_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): + def test_list_post_admin_failure_existing_moderator(self, app, url, moderator, admin, provider): payload = self.create_payload(user_id=moderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert not notifications - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): + def test_user_with_non_moderator_admin_permissions(self, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Son Goku', 'email': 'goku@dragonball.org'} - # test_user_with_no_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) - assert res.status_code == 403 - assert mock_mail.call_count == 0 + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) - # test_user_with_moderator_admin_permissions - payload = self.create_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=admin.auth) + assert res.status_code == 403 + assert not notifications + + def test_user_with_moderator_admin_permissions(self, app, url, moderator, nonmoderator, admin, provider): + unreg_user = {'full_name': 'Jason Kelece', 'email': 'burds@eagles.org'} + payload = self.create_payload( + permission_group='moderator', + **unreg_user + ) + + with capture_notifications() as notifications: + res = app.post_json_api( + url, + payload, + auth=admin.auth + ) assert res.status_code == 
201 - assert mock_mail.call_count == 1 - assert mock_mail.call_args[0][0] == unreg_user['email'] + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION + assert notifications[0]['kwargs']['user'].username == unreg_user['email'] - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_failure_invalid_group(self, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='citizen') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + assert not notifications + + def test_list_post_admin_success_email(self, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) + + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_mail.call_count == 1 + + assert len(notifications) == 1 def test_list_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Alice Alisdottir' diff --git a/api_tests/registrations/views/test_regisatration_callbacks.py 
b/api_tests/registrations/views/test_regisatration_callbacks.py new file mode 100644 index 00000000000..35d65d013b6 --- /dev/null +++ b/api_tests/registrations/views/test_regisatration_callbacks.py @@ -0,0 +1,82 @@ +import copy +import time +import pytest + +from api.base.settings.defaults import API_BASE +from osf_tests.factories import RegistrationFactory +from framework.auth import signing + + +@pytest.mark.django_db +class TestRegistrationCallbacks: + + @pytest.fixture() + def registration(self): + registration = RegistrationFactory() + return registration + + @pytest.fixture() + def url(self, registration): + return f'/{API_BASE}registrations/{registration._id}/callbacks/' + + @pytest.fixture() + def payload(self): + return { + 'action': 'copy', + 'destination': { + 'name': 'Archive of OSF Storage', + }, + 'errors': None, + 'source': { + 'provider': 'osfstorage', + }, + 'time': time.time() + 1000 + } + + def sign_payload(self, payload): + message, signature = signing.default_signer.sign_payload(payload) + return { + 'payload': message, + 'signature': signature, + } + + def test_registration_callback(self, app, payload, url): + data = self.sign_payload(payload) + res = app.put_json(url, data) + assert res.status_code == 200 + + def test_signature_expired(self, app, payload, url): + payload['time'] = time.time() - 100 + data = self.sign_payload(payload) + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Signature has expired' + + def test_bad_signature(self, app, payload, url): + data = self.sign_payload(payload) + data['signature'] = '1234' + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 401 + assert res.json['errors'][0]['detail'] == 'Authentication credentials were not provided.' 
+ + def test_invalid_payload(self, app, payload, url): + payload1 = copy.deepcopy(payload) + del payload1['time'] + data = self.sign_payload(payload1) + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Invalid Payload' + + payload2 = copy.deepcopy(payload) + data = self.sign_payload(payload2) + del data['signature'] + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Invalid Payload' + + payload3 = copy.deepcopy(payload) + data = self.sign_payload(payload3) + del data['payload'] + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Invalid Payload' diff --git a/api_tests/requests/mixins.py b/api_tests/requests/mixins.py index 4b281b0862d..39742364ca9 100644 --- a/api_tests/requests/mixins.py +++ b/api_tests/requests/mixins.py @@ -1,5 +1,6 @@ import pytest +from osf.models import NotificationType from osf.utils.workflows import DefaultStates, RequestTypes from osf_tests.factories import ( AuthUserFactory, @@ -37,7 +38,7 @@ def project(self, admin, write_contrib): proj.add_contributor( contributor=write_contrib, permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS, - send_email='access_request', + notification_type=NotificationType.Type.NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST, save=True ) return proj diff --git a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index ca2a2c477e4..a29f3fc2d54 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -6,7 +6,6 @@ from osf_tests.factories import NodeFactory, InstitutionFactory, AuthUserFactory from osf.utils.workflows import DefaultStates, NodeRequestTypes -from website import language from website.mails import 
NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST from framework.auth import Auth @@ -255,9 +254,7 @@ def test_email_send_institutional_request_specific_email( } ) - @mock.patch('api.requests.serializers.send_mail') - def test_email_not_sent_without_recipient(self, mock_mail, app, project, institutional_admin, url, - create_payload, institution): + def test_email_not_sent_without_recipient(self, app, project, institutional_admin, url, create_payload, institution): """ Test that an email is not sent when no recipient is listed when an institutional access request is made, but the request is still made anyway without email. @@ -267,10 +264,10 @@ def test_email_not_sent_without_recipient(self, mock_mail, app, project, institu assert res.status_code == 201 # Check that an email is sent - assert not mock_mail.called + assert False, 'redo test' + # assert not mock_mail.called - @mock.patch('api.requests.serializers.send_mail') - def test_email_not_sent_outside_institution(self, mock_mail, app, project, institutional_admin, url, + def test_email_not_sent_outside_institution(self, app, project, institutional_admin, url, create_payload, user_without_affiliation, institution): """ Test that you are prevented from requesting a user with the correct institutional affiliation. @@ -281,12 +278,11 @@ def test_email_not_sent_outside_institution(self, mock_mail, app, project, insti assert f'User {user_without_affiliation._id} is not affiliated with the institution.' 
in res.json['errors'][0]['detail'] # Check that an email is sent - assert not mock_mail.called + # assert not mock_mail.called + assert False, 'redo test' - @mock.patch('api.requests.serializers.send_mail') def test_email_sent_on_creation( self, - mock_mail, app, project, institutional_admin, @@ -301,28 +297,27 @@ def test_email_sent_on_creation( res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert False, 'redo test' + # assert mock_mail.call_count == 1 + # + # mock_mail.assert_called_with( + # to_addr=user_with_affiliation.username, + # mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + # user=user_with_affiliation, + # bcc_addr=None, + # reply_to=None, + # **{ + # 'sender': institutional_admin, + # 'recipient': user_with_affiliation, + # 'comment': create_payload['data']['attributes']['comment'], + # 'institution': institution, + # 'osf_url': mock.ANY, + # 'node': project, + # } + # ) - @mock.patch('api.requests.serializers.send_mail') def test_bcc_institutional_admin( self, - mock_mail, app, project, institutional_admin, @@ -338,29 +333,28 @@ def test_bcc_institutional_admin( res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 + assert False, 'redo tests' + # + # assert mock_mail.call_count == 1 + # + # mock_mail.assert_called_with( + # to_addr=user_with_affiliation.username, + # mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + # user=user_with_affiliation, + # bcc_addr=[institutional_admin.username], + # 
reply_to=None, + # **{ + # 'sender': institutional_admin, + # 'recipient': user_with_affiliation, + # 'comment': create_payload['data']['attributes']['comment'], + # 'institution': institution, + # 'osf_url': mock.ANY, + # 'node': project, + # } + # ) - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=[institutional_admin.username], - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) - - @mock.patch('api.requests.serializers.send_mail') def test_reply_to_institutional_admin( self, - mock_mail, app, project, institutional_admin, @@ -376,24 +370,25 @@ def test_reply_to_institutional_admin( res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=institutional_admin.username, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert False, 'redo tests' + # + # assert mock_mail.call_count == 1 + # + # mock_mail.assert_called_with( + # to_addr=user_with_affiliation.username, + # mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + # user=user_with_affiliation, + # bcc_addr=None, + # reply_to=institutional_admin.username, + # **{ + # 'sender': institutional_admin, + # 'recipient': user_with_affiliation, + # 'comment': create_payload['data']['attributes']['comment'], + # 'institution': institution, + # 'osf_url': 
mock.ANY, + # 'node': project, + # } + # ) def test_access_requests_disabled_raises_permission_denied( self, app, node_with_disabled_access_requests, user_with_affiliation, institutional_admin, create_payload @@ -410,10 +405,8 @@ def test_access_requests_disabled_raises_permission_denied( assert res.status_code == 403 assert f"{node_with_disabled_access_requests._id} does not have Access Requests enabled" in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.send_mail') def test_placeholder_text_when_comment_is_empty( self, - mock_mail, app, project, institutional_admin, @@ -430,21 +423,22 @@ def test_placeholder_text_when_comment_is_empty( res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT, - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert False, 'redo tests' + # mock_mail.assert_called_with( + # to_addr=user_with_affiliation.username, + # mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + # user=user_with_affiliation, + # bcc_addr=None, + # reply_to=None, + # **{ + # 'sender': institutional_admin, + # 'recipient': user_with_affiliation, + # 'comment': language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT, + # 'institution': institution, + # 'osf_url': mock.ANY, + # 'node': project, + # } + # ) def test_requester_can_resubmit(self, app, project, institutional_admin, url, create_payload): """ diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index 732cbdd83b0..b2e75acd92d 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ 
b/api_tests/requests/views/test_request_actions_create.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -190,8 +189,7 @@ def test_rejects_fail_with_requests_disabled(self, app, admin, url, node_request assert initial_state == node_request.machine_state assert node_request.creator not in node_request.target.contributors - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): + def test_email_sent_on_approve(self, app, admin, url, node_request): initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='accept') @@ -200,10 +198,10 @@ def test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator in node_request.target.contributors - assert mock_mail.call_count == 1 + assert False, 'redo test' + # assert mock_mail.call_count == 1 - @mock.patch('website.mails.mails.send_mail') - def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): + def test_email_sent_on_reject(self, app, admin, url, node_request): initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='reject') @@ -212,10 +210,10 @@ def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator not in node_request.target.contributors - assert mock_mail.call_count == 1 + assert False, 'redo test' + # assert mock_mail.call_count == 1 - @mock.patch('website.mails.mails.send_mail') - def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_request): + def 
test_email_not_sent_on_reject(self, app, requester, url, node_request): initial_state = node_request.machine_state initial_comment = node_request.comment payload = self.create_payload(node_request._id, trigger='edit_comment', comment='ASDFG') @@ -224,7 +222,8 @@ def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_req node_request.reload() assert initial_state == node_request.machine_state assert initial_comment != node_request.comment - assert mock_mail.call_count == 0 + # assert mock_mail.call_count == 0 + assert False, 'redo test' def test_set_permissions_on_approve(self, app, admin, url, node_request): assert node_request.creator not in node_request.target.contributors @@ -384,8 +383,7 @@ def test_write_contrib_and_noncontrib_cannot_edit_comment(self, app, write_contr assert initial_state == request.machine_state assert initial_comment == request.comment - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_sent_on_approve(self, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -397,11 +395,11 @@ def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request assert initial_state != request.machine_state assert request.target.is_retracted # There are two preprints withdrawn and each preprint have 2 contributors. So 4 emails are sent in total. 
- assert mock_mail.call_count == 4 + assert False, 'redo test' + # assert mock_mail.call_count == 4 @pytest.mark.skip('TODO: IN-331 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_sent_on_reject(self, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -411,11 +409,9 @@ def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, request.reload() assert initial_state != request.machine_state assert not request.target.is_retracted - assert mock_mail.call_count == 2 @pytest.mark.skip('TODO: IN-284/331 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_not_sent_on_edit_comment(self, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -425,7 +421,6 @@ def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pr request.reload() assert initial_state != request.machine_state assert not request.target.is_retracted - assert mock_mail.call_count == 0 def test_auto_approve(self, app, auto_withdrawable_pre_mod_preprint, auto_approved_pre_request): assert auto_withdrawable_pre_mod_preprint.is_retracted diff --git a/api_tests/share/_utils.py b/api_tests/share/_utils.py index 2d974b75ccf..4fde322fccc 100644 --- a/api_tests/share/_utils.py +++ b/api_tests/share/_utils.py @@ -11,7 +11,7 @@ postcommit_queue, ) from website import settings as website_settings -from api.share.utils import shtrove_ingest_url, sharev2_push_url +from api.share.utils import shtrove_ingest_url from osf.metadata.osf_gathering import 
OsfmapPartition @@ -28,8 +28,6 @@ def mock_share_responses(): _ingest_url = shtrove_ingest_url() _rsps.add(responses.POST, _ingest_url, status=200) _rsps.add(responses.DELETE, _ingest_url, status=200) - # for legacy sharev2 support: - _rsps.add(responses.POST, sharev2_push_url(), status=200) yield _rsps @@ -44,7 +42,6 @@ def mock_update_share(): def expect_ingest_request(mock_share_responses, osfguid, *, token=None, delete=False, count=1, error_response=False): mock_share_responses._calls.reset() yield - _legacy_count_per_item = 1 _trove_main_count_per_item = 1 _trove_supplementary_count_per_item = ( 0 @@ -52,8 +49,7 @@ def expect_ingest_request(mock_share_responses, osfguid, *, token=None, delete=F else (len(OsfmapPartition) - 1) ) _total_count = count * ( - _legacy_count_per_item - + _trove_main_count_per_item + _trove_main_count_per_item + _trove_supplementary_count_per_item ) assert len(mock_share_responses.calls) == _total_count, ( @@ -61,24 +57,18 @@ def expect_ingest_request(mock_share_responses, osfguid, *, token=None, delete=F ) _trove_ingest_calls = [] _trove_supp_ingest_calls = [] - _legacy_push_calls = [] for _call in mock_share_responses.calls: if _call.request.url.startswith(shtrove_ingest_url()): if 'is_supplementary' in _call.request.url: _trove_supp_ingest_calls.append(_call) else: _trove_ingest_calls.append(_call) - else: - _legacy_push_calls.append(_call) assert len(_trove_ingest_calls) == count assert len(_trove_supp_ingest_calls) == count * _trove_supplementary_count_per_item - assert len(_legacy_push_calls) == count for _call in _trove_ingest_calls: assert_ingest_request(_call.request, osfguid, token=token, delete=delete) for _call in _trove_supp_ingest_calls: assert_ingest_request(_call.request, osfguid, token=token, delete=delete, supp=True) - for _call in _legacy_push_calls: - assert _call.request.url.startswith(sharev2_push_url()) def assert_ingest_request(request, expected_osfguid, *, token=None, delete=False, supp=False): diff --git 
a/api_tests/share/test_share_node.py b/api_tests/share/test_share_node.py index 089611f2512..791e7d0099a 100644 --- a/api_tests/share/test_share_node.py +++ b/api_tests/share/test_share_node.py @@ -20,7 +20,7 @@ from website.project.tasks import on_node_updated from framework.auth.core import Auth -from api.share.utils import shtrove_ingest_url, sharev2_push_url +from api.share.utils import shtrove_ingest_url from ._utils import expect_ingest_request @@ -189,8 +189,6 @@ def test_call_async_update_on_500_retry(self, mock_share_responses, node, user): """This is meant to simulate a temporary outage, so the retry mechanism should kick in and complete it.""" mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=500) mock_share_responses.add(responses.POST, shtrove_ingest_url(), status=200) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=500) - mock_share_responses.add(responses.POST, sharev2_push_url(), status=200) with expect_ingest_request(mock_share_responses, node._id, count=2): on_node_updated(node._id, user._id, False, {'is_public'}) @@ -198,13 +196,11 @@ def test_call_async_update_on_500_retry(self, mock_share_responses, node, user): def test_call_async_update_on_500_failure(self, mock_share_responses, node, user): """This is meant to simulate a total outage, so the retry mechanism should try X number of times and quit.""" mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=500) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=500) with expect_ingest_request(mock_share_responses, node._id, count=5): # tries five times on_node_updated(node._id, user._id, False, {'is_public'}) @mark.skip('Synchronous retries not supported if celery >=5.0') def test_no_call_async_update_on_400_failure(self, mock_share_responses, node, user): mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=400) - mock_share_responses.replace(responses.POST, sharev2_push_url(), 
status=400) with expect_ingest_request(mock_share_responses, node._id): on_node_updated(node._id, user._id, False, {'is_public'}) diff --git a/api_tests/share/test_share_preprint.py b/api_tests/share/test_share_preprint.py index 4ab47963bc8..cf0c8a3d92d 100644 --- a/api_tests/share/test_share_preprint.py +++ b/api_tests/share/test_share_preprint.py @@ -4,7 +4,7 @@ import pytest import responses -from api.share.utils import shtrove_ingest_url, sharev2_push_url +from api.share.utils import shtrove_ingest_url from framework.auth.core import Auth from osf.models.spam import SpamStatus from osf.utils.permissions import READ, WRITE, ADMIN @@ -124,14 +124,12 @@ def test_preprint_contributor_changes_updates_preprints_share(self, mock_share_r @pytest.mark.skip('Synchronous retries not supported if celery >=5.0') def test_call_async_update_on_500_failure(self, mock_share_responses, preprint, auth): mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=500) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=500) preprint.set_published(True, auth=auth, save=True) with expect_preprint_ingest_request(mock_share_responses, preprint, count=5): preprint.update_search() def test_no_call_async_update_on_400_failure(self, mock_share_responses, preprint, auth): mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=400) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=400) preprint.set_published(True, auth=auth, save=True) with expect_preprint_ingest_request(mock_share_responses, preprint, count=1, error_response=True): preprint.update_search() diff --git a/api_tests/subscriptions/views/test_subscriptions_detail.py b/api_tests/subscriptions/views/test_subscriptions_detail.py index 2a8741fc173..7af3cb7d784 100644 --- a/api_tests/subscriptions/views/test_subscriptions_detail.py +++ b/api_tests/subscriptions/views/test_subscriptions_detail.py @@ -1,7 +1,7 @@ import pytest from api.base.settings.defaults 
import API_BASE -from osf_tests.factories import AuthUserFactory, NotificationSubscriptionFactory +from osf_tests.factories import AuthUserFactory @pytest.mark.django_db @@ -15,12 +15,6 @@ def user(self): def user_no_auth(self): return AuthUserFactory() - @pytest.fixture() - def global_user_notification(self, user): - notification = NotificationSubscriptionFactory(_id=f'{user._id}_global', user=user, event_name='global') - notification.add_user_to_subscription(user, 'email_transactional') - return notification - @pytest.fixture() def url(self, global_user_notification): return f'/{API_BASE}subscriptions/{global_user_notification._id}/' diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index cda043314b1..eeaa8a6d6e2 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -1,7 +1,12 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, PreprintProviderFactory, ProjectFactory, NotificationSubscriptionFactory +from osf_tests.factories import ( + AuthUserFactory, + PreprintProviderFactory, + ProjectFactory, +) +from osf.models.notification import NotificationType @pytest.mark.django_db @@ -23,9 +28,10 @@ def node(self, user): @pytest.fixture() def global_user_notification(self, user): - notification = NotificationSubscriptionFactory(_id=f'{user._id}_global', user=user, event_name='global') - notification.add_user_to_subscription(user, 'email_transactional') - return notification + notification_type = NotificationType.objects.get('...') + frequency = 'instant' + user.add_to_subscription(notification_type, frequency) + return notification_type @pytest.fixture() def url(self, user, node): diff --git a/conftest.py b/conftest.py index 6f870093ed4..b545836af44 100644 --- a/conftest.py +++ b/conftest.py @@ -31,7 +31,6 @@ 'framework.auth.core', 'website.app', 
'website.archiver.tasks', - 'website.mails', 'website.notifications.listeners', 'website.search.elastic_search', 'website.search_migration.migrate', diff --git a/docker-compose.yml b/docker-compose.yml index ce4e3ea0618..14ed365e611 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -86,6 +86,8 @@ services: ports: - 5432:5432 environment: + POSTGRES_DB: osf + POSTGRES_HOST_AUTH_METHOD: trust POSTGRES_INITDB_SQL: | SELECT 'CREATE DATABASE gravyvalet' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'gravyvalet')\gexec volumes: @@ -534,13 +536,15 @@ services: command: - /bin/bash - -c - - pip install -r requirements/dev-requirements.txt && - (python3 -m compileall /usr/local/lib/python3.12 || true) && - rm -Rf /python3.12/* && - cp -Rf -p /usr/local/lib/python3.12 / + - python -m venv /tmp/venv + && /tmp/venv/bin/pip install poetry==1.8.3 && + /tmp/venv/bin/poetry config virtualenvs.create false --local && + /tmp/venv/bin/poetry install --no-root --with dev --without release --compile --sync && + rm -rf /python3.13/* && + cp -Rf -p /usr/local/lib/python3.13 / restart: 'no' volumes: - - gv_requirements_vol:/python3.12 + - gv_requirements_vol:/python3.13 gv: image: quay.io/centerforopenscience/gravyvalet:develop @@ -556,7 +560,39 @@ services: environment: DJANGO_SETTINGS_MODULE: app.settings volumes: - - gv_requirements_vol:/usr/local/lib/python3.12 + - gv_requirements_vol:/usr/local/lib/python3.13 + stdin_open: true + + gv_worker: + image: quay.io/centerforopenscience/gravyvalet:develop + command: python -m celery --app app worker --uid daemon -l info + restart: unless-stopped + depends_on: + - postgres + - api + - rabbitmq + env_file: + - .docker-compose.gv.env + environment: + DJANGO_SETTINGS_MODULE: app.settings + volumes: + - gv_requirements_vol:/usr/local/lib/python3.13 + stdin_open: true + + gv_beat: + image: quay.io/centerforopenscience/gravyvalet:develop + command: python -m celery --app app beat --uid daemon -l info + restart: unless-stopped 
+ depends_on: + - postgres + - api + - rabbitmq + env_file: + - .docker-compose.gv.env + environment: + DJANGO_SETTINGS_MODULE: app.settings + volumes: + - gv_requirements_vol:/usr/local/lib/python3.13 stdin_open: true pigeon: diff --git a/framework/auth/campaigns.py b/framework/auth/campaigns.py index a47b3cf637b..dceeec08841 100644 --- a/framework/auth/campaigns.py +++ b/framework/auth/campaigns.py @@ -4,7 +4,7 @@ from django.utils import timezone from website import mails, settings -from osf.models import PreprintProvider +from osf.models import PreprintProvider, NotificationType from website.settings import DOMAIN, CAMPAIGN_REFRESH_THRESHOLD from website.util.metrics import OsfSourceTags, OsfClaimedTags, CampaignSourceTags, CampaignClaimedTags, provider_source_tag from framework.utils import throttle_period_expired @@ -44,12 +44,10 @@ def get_campaigns(): preprint_providers = PreprintProvider.objects.all() for provider in preprint_providers: if provider._id == 'osf': - template = 'osf' name = 'OSF' url_path = 'preprints/' external_url = None else: - template = 'branded' name = provider.name url_path = f'preprints/{provider._id}' external_url = provider.domain @@ -60,7 +58,7 @@ def get_campaigns(): 'system_tag': system_tag, 'redirect_url': furl(DOMAIN).add(path=url_path).url, 'external_url': external_url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_PREPRINTS(template, name), + 'confirmation_email_template': NotificationType.Type.PROVIDER_CONFIRM_EMAIL_PREPRINTS, 'login_type': 'proxy', 'provider': name, 'logo': provider._id if name != 'OSF' else settings.OSF_PREPRINTS_LOGO, diff --git a/framework/auth/views.py b/framework/auth/views.py index e83f47b5db2..0938bb58510 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -26,9 +26,9 @@ from framework.sessions.utils import remove_sessions_for_user from framework.sessions import get_session from framework.utils import throttle_period_expired -from osf.models import OSFUser +from osf.models 
import OSFUser, NotificationType from osf.utils.sanitize import strip_html -from website import settings, mails, language +from website import settings, language from website.util import web_url_for from osf.exceptions import ValidationValueError, BlockedEmailError from osf.models.provider import PreprintProvider @@ -207,19 +207,23 @@ def redirect_unsupported_institution(auth): def forgot_password_post(): """Dispatches to ``_forgot_password_post`` passing non-institutional user mail template and reset action.""" - return _forgot_password_post(mail_template=mails.FORGOT_PASSWORD, - reset_route='reset_password_get') + return _forgot_password_post( + notificaton_type=NotificationType.Type.USER_FORGOT_PASSWORD, + reset_route='reset_password_get' + ) def forgot_password_institution_post(): """Dispatches to `_forgot_password_post` passing institutional user mail template, reset action, and setting the ``institutional`` flag.""" - return _forgot_password_post(mail_template=mails.FORGOT_PASSWORD_INSTITUTION, - reset_route='reset_password_institution_get', - institutional=True) + return _forgot_password_post( + notificaton_type=NotificationType.Type.USER_FORGOT_PASSWORD_INSTITUTION, + reset_route='reset_password_institution_get', + institutional=True + ) -def _forgot_password_post(mail_template, reset_route, institutional=False): +def _forgot_password_post(notificaton_type, reset_route, institutional=False): """ View for user to submit forgot password form (standard or institutional). Validates submitted form and sends reset-password link via email if valid. 
If user has submitted another password @@ -272,11 +276,15 @@ def _forgot_password_post(mail_template, reset_route, institutional=False): token=user_obj.verification_key_v2['token'] ) ) - mails.send_mail( - to_addr=email, - mail=mail_template, - reset_link=reset_link, - can_change_preferences=False, + NotificationType.objects.get( + name=notificaton_type, + ).emit( + user=user_obj, + event_context={ + 'reset_link': reset_link, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) # institutional forgot password page displays the message as main text, not as an alert @@ -653,12 +661,16 @@ def external_login_confirm_email_get(auth, uid, token): if external_status == 'CREATE': service_url += '&{}'.format(urlencode({'new': 'true'})) elif external_status == 'LINK': - mails.send_mail( + NotificationType.objects.get( + name=NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS, + ).emit( user=user, - to_addr=user.username, - mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS, - external_id_provider=provider, - can_change_preferences=False, + subscribed_object=user, # or whatever the correct related object is + event_context={ + 'external_id_provider': getattr(provider, 'id', None), + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) # Send to celery the following async task to affiliate the user with eligible institutions if verified @@ -811,14 +823,14 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte :return: :raises: KeyError if user does not have a confirmation token for the given email. 
""" - confirmation_url = user.get_confirmation_url( - email, - external=True, - force=True, - renew=renew, - external_id_provider=external_id_provider, - destination=destination - ) + # confirmation_url = user.get_confirmation_url( + # email, + # external=True, + # force=True, + # renew=renew, + # external_id_provider=external_id_provider, + # destination=destination + # ) try: merge_target = OSFUser.objects.get(emails__address=email) @@ -826,45 +838,34 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte merge_target = None campaign = campaigns.campaign_for_user(user) - branded_preprints_provider = None - logo = None # Choose the appropriate email template to use and add existing_user flag if a merge or adding an email. if external_id_provider and external_id: # First time login through external identity provider, link or create an OSF account confirmation if user.external_identity[external_id_provider][external_id] == 'CREATE': - mail_template = mails.EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE + notificaton_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE elif user.external_identity[external_id_provider][external_id] == 'LINK': - mail_template = mails.EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK + notificaton_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK elif merge_target: # Merge account confirmation - mail_template = mails.CONFIRM_MERGE - confirmation_url = f'{confirmation_url}?logout=1' + notificaton_type = NotificationType.Type.USER_CONFIRM_MERGE elif user.is_active: # Add email confirmation - mail_template = mails.CONFIRM_EMAIL - confirmation_url = f'{confirmation_url}?logout=1' + notificaton_type = NotificationType.Type.USER_CONFIRM_EMAIL elif campaign: # Account creation confirmation: from campaign - mail_template = campaigns.email_template_for_campaign(campaign) - if campaigns.is_proxy_login(campaign) and campaigns.get_service_provider(campaign) != 'OSF': - branded_preprints_provider = 
campaigns.get_service_provider(campaign) - logo = campaigns.get_campaign_logo(campaign) + notificaton_type = campaigns.email_template_for_campaign(campaign) else: # Account creation confirmation: from OSF - mail_template = mails.INITIAL_CONFIRM_EMAIL + notificaton_type = NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL - mails.send_mail( - email, - mail_template, + NotificationType.objects.get( + name=notificaton_type.value, + ).emit( user=user, - confirmation_url=confirmation_url, - email=email, - merge_target=merge_target, - external_id_provider=external_id_provider, - branded_preprints_provider=branded_preprints_provider, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - logo=logo if logo else settings.OSF_LOGO + event_context={ + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def send_confirm_email_async(user, email, renew=False, external_id_provider=None, external_id=None, destination=None): diff --git a/framework/email/tasks.py b/framework/email/tasks.py index cf43395222e..b55c0ec5e75 100644 --- a/framework/email/tasks.py +++ b/framework/email/tasks.py @@ -62,8 +62,6 @@ def send_email( :return: True if successful """ - if not settings.USE_EMAIL: - return if settings.SENDGRID_API_KEY: return _send_with_sendgrid( from_addr=from_addr, diff --git a/framework/forms/utils.py b/framework/forms/utils.py index 420d70bcaf0..973ed310481 100644 --- a/framework/forms/utils.py +++ b/framework/forms/utils.py @@ -9,34 +9,6 @@ def sanitize(s, **kwargs): return sanitize_html(s, **kwargs) -def process_data(data, func): - if isinstance(data, dict): - return { - key: process_data(value, func) - for key, value in data.items() - } - elif isinstance(data, list): - return [ - process_data(item, func) - for item in data - ] - return func(data) - - -def process_payload(data): - return process_data( - data, - lambda value: quote(value.encode('utf-8') if value else '', safe=' ') - ) - - -def 
unprocess_payload(data): - return process_data( - data, - lambda value: unquote(value.encode('utf-8') if value else '') - ) - - def jsonify(form): """Cast WTForm to JSON object. diff --git a/osf/apps.py b/osf/apps.py index acc2dad7150..9bdc637d382 100644 --- a/osf/apps.py +++ b/osf/apps.py @@ -13,7 +13,8 @@ update_permission_groups, update_storage_regions, update_waffle_flags, - update_default_providers + update_default_providers, + update_notification_types ) logger = logging.getLogger(__file__) @@ -68,3 +69,7 @@ def ready(self): update_storage_regions, dispatch_uid='osf.apps.update_storage_regions' ) + post_migrate.connect( + update_notification_types, + dispatch_uid='osf.apps.update_notification_types' + ) diff --git a/osf/exceptions.py b/osf/exceptions.py index 82e8ab5f505..30130a587d1 100644 --- a/osf/exceptions.py +++ b/osf/exceptions.py @@ -292,3 +292,18 @@ class MetadataSerializationError(OSFError): class InvalidCookieOrSessionError(OSFError): """Raised when cookie is invalid or session key is not found.""" pass + + +class RegistrationStuckError(OSFError): + """Raised if Registration stuck during archive.""" + pass + + +class RegistrationStuckRecoverableException(RegistrationStuckError): + """Raised if registration stuck but recoverable.""" + pass + + +class RegistrationStuckBrokenException(RegistrationStuckError): + """Raised if registration stuck and not recoverable.""" + pass diff --git a/osf/management/commands/add_notification_subscription.py b/osf/management/commands/add_notification_subscription.py deleted file mode 100644 index 7d9a404f37a..00000000000 --- a/osf/management/commands/add_notification_subscription.py +++ /dev/null @@ -1,77 +0,0 @@ -# This is a management command, rather than a migration script, for two primary reasons: -# 1. It makes no changes to database structure (e.g. AlterField), only database content. -# 2. It takes a long time to run and the site doesn't need to be down that long. 
- -import logging - -import django -django.setup() - -from django.core.management.base import BaseCommand -from django.db import transaction - -from website.notifications.utils import to_subscription_key - -from scripts import utils as script_utils - -logger = logging.getLogger(__name__) - - -def add_reviews_notification_setting(notification_type, state=None): - if state: - OSFUser = state.get_model('osf', 'OSFUser') - NotificationSubscription = state.get_model('osf', 'NotificationSubscription') - else: - from osf.models import OSFUser, NotificationSubscription - - active_users = OSFUser.objects.filter(date_confirmed__isnull=False).exclude(date_disabled__isnull=False).exclude(is_active=False).order_by('id') - total_active_users = active_users.count() - - logger.info(f'About to add a global_reviews setting for {total_active_users} users.') - - total_created = 0 - for user in active_users.iterator(): - user_subscription_id = to_subscription_key(user._id, notification_type) - - subscription = NotificationSubscription.load(user_subscription_id) - if not subscription: - logger.info(f'No {notification_type} subscription found for user {user._id}. Subscribing...') - subscription = NotificationSubscription(_id=user_subscription_id, owner=user, event_name=notification_type) - subscription.save() # Need to save in order to access m2m fields - subscription.add_user_to_subscription(user, 'email_transactional') - else: - logger.info(f'User {user._id} already has a {notification_type} subscription') - total_created += 1 - - logger.info(f'Added subscriptions for {total_created}/{total_active_users} users') - - -class Command(BaseCommand): - """ - Add subscription to all active users for given notification type. 
- """ - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - ) - - parser.add_argument( - '--notification', - type=str, - required=True, - help='Notification type to subscribe users to', - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - state = options.get('state', None) - if not dry_run: - script_utils.add_file_logger(logger, __file__) - with transaction.atomic(): - add_reviews_notification_setting(notification_type=options['notification'], state=state) - if dry_run: - raise RuntimeError('Dry run, transaction rolled back.') diff --git a/osf/management/commands/deactivate_requested_accounts.py b/osf/management/commands/deactivate_requested_accounts.py index 9a3ddcf5356..88919f6ffac 100644 --- a/osf/management/commands/deactivate_requested_accounts.py +++ b/osf/management/commands/deactivate_requested_accounts.py @@ -1,13 +1,11 @@ import logging -from website import mails from django.utils import timezone from framework.celery_tasks import app as celery_app from website.app import setup_django setup_django() -from osf.models import OSFUser -from website.settings import OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL +from osf.models import OSFUser, NotificationType from django.core.management.base import BaseCommand logger = logging.getLogger(__name__) @@ -21,23 +19,26 @@ def deactivate_requested_accounts(dry_run=True): if user.has_resources: logger.info(f'OSF support is being emailed about deactivating the account of user {user._id}.') if not dry_run: - mails.send_mail( - to_addr=OSF_SUPPORT_EMAIL, - mail=mails.REQUEST_DEACTIVATION, + NotificationType.objects.get( + name=NotificationType.Type.DESK_REQUEST_DEACTIVATION, + ).emit( user=user, - can_change_preferences=False, + event_context={ + 'can_change_preferences': False, + } ) else: logger.info(f'Disabling user {user._id}.') if not dry_run: 
user.deactivate_account() user.is_registered = False - mails.send_mail( - to_addr=user.username, - mail=mails.REQUEST_DEACTIVATION_COMPLETE, + NotificationType.objects.get( + name=NotificationType.Type.USER_REQUEST_DEACTIVATION_COMPLETE + ).emit( user=user, - contact_email=OSF_CONTACT_EMAIL, - can_change_preferences=False, + event_context={ + 'can_change_preferences': False, + } ) user.contacted_deactivation = True diff --git a/osf/management/commands/find_spammy_files.py b/osf/management/commands/find_spammy_files.py index 33d25366ea1..6d94ce4e185 100644 --- a/osf/management/commands/find_spammy_files.py +++ b/osf/management/commands/find_spammy_files.py @@ -8,7 +8,7 @@ from addons.osfstorage.models import OsfStorageFile from framework.celery_tasks import app -from website import mails +from osf.models import NotificationType logger = logging.getLogger(__name__) @@ -52,14 +52,17 @@ def find_spammy_files(sniff_r=None, n=None, t=None, to_addrs=None): if ct: if to_addrs: for addr in to_addrs: - mails.send_mail( - mail=mails.SPAM_FILES_DETECTED, - to_addr=addr, - ct=ct, - sniff_r=sniff, - attachment_name=filename, - attachment_content=output.getvalue(), - can_change_preferences=False, + NotificationType.objects.get( + name=NotificationType.Type.SPAM_FILES_DETECTED + ).emit( + user=user, + event_context={ + 'ct': ct, + 'sniff_n': sniff, + 'attachment_name': filename, + 'attachment_content': output.getvalue(), + 'can_change_preferences': False + } ) else: with open(filepath, 'w') as writeFile: diff --git a/osf/management/commands/fix_preprints_has_data_links_and_why_no_data.py b/osf/management/commands/fix_preprints_has_data_links_and_why_no_data.py new file mode 100644 index 00000000000..84997c1fbe9 --- /dev/null +++ b/osf/management/commands/fix_preprints_has_data_links_and_why_no_data.py @@ -0,0 +1,109 @@ +from django.core.management.base import BaseCommand +from django.db.models import Q +from osf.models import Preprint, Guid +import logging + +logger = 
logging.getLogger(__name__) + + +def process_wrong_why_not_data_preprints( + version_guid: str | None, + dry_run: bool, + executing_through_command: bool = True, + command_obj: BaseCommand = None +): + through_command_constrain = executing_through_command and command_obj + why_no_data_filters = Q(why_no_data__isnull=False) & ~Q(why_no_data='') + + if version_guid: + base_guid_str, version = Guid.split_guid(version_guid) + preprints = Preprint.objects.filter( + versioned_guids__guid___id=base_guid_str, + versioned_guids__version=version + ) + if not preprints: + no_preprint_message = f'No preprint found with version_guid: {version_guid}' + logger.error(no_preprint_message) + if through_command_constrain: + command_obj.stdout.write(command_obj.style.ERROR(no_preprint_message)) + return + if preprints[0].has_data_links != 'no' and not preprints[0].why_no_data: + correct_behavior_message = f'Correct behavior for {preprints[0]._id} has_data_links={preprints[0].has_data_links} why_no_data={preprints[0].why_no_data}' + if through_command_constrain: + command_obj.stdout.write(correct_behavior_message) + return + + else: + preprints = Preprint.objects.filter( + ~Q(has_data_links='no') & why_no_data_filters + ) + + total = preprints.count() + logger.info(f'Found {total} preprints to process') + if through_command_constrain: + command_obj.stdout.write(f'Found {total} preprints to process') + + processed = 0 + errors = 0 + + for preprint in preprints: + try: + logger.info(f'Processing preprint {preprint._id}') + if through_command_constrain: + command_obj.stdout.write(f'Processing preprint {preprint._id} ({processed + 1}/{total})') + + if not dry_run: + preprint.why_no_data = '' + preprint.save() + logger.info(f'Updated preprint {preprint._id}') + else: + logger.info( + f'Would update preprint {preprint._id} (dry run), {preprint.has_data_links=}, {preprint.why_no_data=}' + ) + + processed += 1 + except Exception as e: + errors += 1 + logger.error(f'Error processing preprint 
{preprint._id}: {str(e)}') + if through_command_constrain: + command_obj.stdout.write(command_obj.style.ERROR(f'Error processing preprint {preprint._id}: {str(e)}')) + continue + + logger.info(f'Completed processing {processed} preprints with {errors} errors') + if through_command_constrain: + command_obj.stdout.write( + command_obj.style.SUCCESS( + f'Completed processing {processed} preprints with {errors} errors' + ) + ) + + +class Command(BaseCommand): + help = 'Fix preprints has_data_links and why_no_data' + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', + action='store_true', + help='Run without making changes', + ) + parser.add_argument( + '--guid', + type=str, + help='Version GUID to process (e.g. awgxb_v1, kupen_v4)', + ) + + def handle(self, *args, **options): + dry_run = options.get('dry_run', False) + version_guid = options.get('guid') + + if dry_run: + logger.info('Running in dry-run mode - no changes will be made') + self.stdout.write('Running in dry-run mode - no changes will be made') + + process_wrong_why_not_data_preprints( + version_guid=version_guid, + dry_run=dry_run, + executing_through_command=True, + command_obj=self + ) diff --git a/osf/management/commands/fix_unclaimed_records_for_preprint_versions.py b/osf/management/commands/fix_unclaimed_records_for_preprint_versions.py new file mode 100644 index 00000000000..17fca6a54df --- /dev/null +++ b/osf/management/commands/fix_unclaimed_records_for_preprint_versions.py @@ -0,0 +1,163 @@ +import logging + +from django.core.management.base import BaseCommand +from django.apps import apps +from django.db.models import Q + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = 'Update unclaimed records for preprint versions' + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', + action='store_true', + dest='dry_run', + help='Run the command without saving changes', + ) + + def handle(self, *args, **options): + dry_run = 
options.get('dry_run', False) + update_unclaimed_records_for_preprint_versions(dry_run=dry_run) + +def safe_sort_key(x, delimiter): + parts = x.split(delimiter) + if len(parts) > 1: + try: + return int(parts[1]) + except (ValueError, TypeError): + return 0 + return 0 + + +def update_unclaimed_records_for_preprint_versions(dry_run=False): + Preprint = apps.get_model('osf.Preprint') + Guid = apps.get_model('osf.Guid') + OSFUser = apps.get_model('osf.OSFUser') + GuidVersionsThrough = apps.get_model('osf.GuidVersionsThrough') + + preprint_filters = ( + Q(preprintcontributor__user__is_registered=False) | + Q(preprintcontributor__user__date_disabled__isnull=False) + ) + + mode = 'DRY RUN' if dry_run else 'UPDATING' + logger.info(f'Starting {mode} for unclaimed records for preprint versions') + + preprints_count = Preprint.objects.filter( + preprint_filters + ).distinct('versioned_guids__guid').count() + + logger.info(f'Found {preprints_count} preprints with unregistered contributors') + + processed_count = 0 + skipped_count = 0 + updated_count = 0 + + logger.info('-' * 50) + logger.info(f'{mode} MODE') + logger.info('-' * 50) + + for preprint in Preprint.objects.filter( + preprint_filters + ).prefetch_related('_contributors').distinct( + 'versioned_guids__guid' + ): + processed_count += 1 + try: + guid, version = Guid.split_guid(preprint._id) + logger.info(f'[{processed_count}/{preprints_count}] Processing preprint {preprint._id}') + + latest_version_through = GuidVersionsThrough.objects.filter(guid___id=guid).last() + if not latest_version_through: + logger.error(f'No version found for guid {guid}, skipping') + skipped_count += 1 + continue + + latest_version_number = latest_version_through.version + unregistered_contributors = preprint.contributor_set.filter(user__is_registered=False) + logger.info(f'Found {unregistered_contributors.count()} unregistered contributors for preprint {preprint._id}') + delimiter = Preprint.GUID_VERSION_DELIMITER + for contributor in 
unregistered_contributors: + try: + records_key_for_current_guid = [ + key for key in contributor.user.unclaimed_records.keys() if guid in key and delimiter in key + ] + if records_key_for_current_guid: + records_key_for_current_guid.sort( + key=lambda x: safe_sort_key(x, delimiter), + ) + record_info = contributor.user.unclaimed_records[records_key_for_current_guid[0]] + for current_version in range(1, int(latest_version_number) + 1): + preprint_id = f'{guid}{Preprint.GUID_VERSION_DELIMITER}{current_version}' + if preprint_id not in contributor.user.unclaimed_records.keys(): + if not dry_run: + try: + preprint_obj = Preprint.load(preprint_id) + referrer = OSFUser.load(record_info['referrer_id']) + + if not preprint_obj: + logger.error(f'Could not load preprint {preprint_id}, skipping') + continue + + if not referrer: + logger.error(f'Could not load referrer {record_info["referrer_id"]}, skipping') + continue + + logger.info(f'Adding unclaimed record for {preprint_id} for user {contributor.user._id}') + contributor.user.unclaimed_records[preprint_id] = contributor.user.add_unclaimed_record( + claim_origin=preprint_obj, + referrer=referrer, + given_name=record_info.get('name', None), + email=record_info.get('email', None), + skip_referrer_permissions=True + ) + contributor.user.save() + updated_count += 1 + logger.info(f'Successfully saved unclaimed record for {preprint_id}') + except Exception as e: + logger.error(f'Error adding unclaimed record for {preprint_id}: {str(e)}') + else: + logger.info(f'[DRY RUN] Would add unclaimed record for {preprint_id} for user {contributor.user._id}') + updated_count += 1 + else: + try: + all_versions = [guid.referent for guid in GuidVersionsThrough.objects.filter(guid___id=guid)] + logger.info(f'Found {len(all_versions)} versions for preprint with guid {guid}') + + for current_preprint in all_versions: + preprint_id = current_preprint._id + if preprint_id not in contributor.user.unclaimed_records.keys(): + if not dry_run: + try: 
+ logger.info(f'Adding unclaimed record for {preprint_id} for user {contributor.user._id}') + contributor.user.unclaimed_records[preprint_id] = contributor.user.add_unclaimed_record( + claim_origin=current_preprint, + referrer=current_preprint.creator, + given_name=contributor.user.fullname, + email=contributor.user.username, + skip_referrer_permissions=True + ) + contributor.user.save() + updated_count += 1 + logger.info(f'Successfully saved unclaimed record for {preprint_id}') + except Exception as e: + logger.error(f'Error adding unclaimed record for {preprint_id}: {str(e)}') + else: + logger.info(f'[DRY RUN] Would add unclaimed record for {preprint_id} for user {contributor.user._id}') + updated_count += 1 + except Exception as e: + logger.error(f'Error processing versions for guid {guid}: {str(e)}') + except Exception as e: + logger.error(f'Error processing contributor {contributor.id}: {str(e)}') + + except Exception as e: + logger.error(f'Unexpected error processing preprint {preprint.id}: {str(e)}') + skipped_count += 1 + + if dry_run: + logger.info(f'Processed: {processed_count}, Would update: {updated_count}, Skipped: {skipped_count}') + else: + logger.info(f'Processed: {processed_count}, Updated: {updated_count}, Skipped: {skipped_count}') diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py index d58b3641deb..3a40ea4d5f8 100644 --- a/osf/management/commands/force_archive.py +++ b/osf/management/commands/force_archive.py @@ -22,10 +22,12 @@ import json import logging import requests +import contextlib import django django.setup() from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError from django.core.management.base import BaseCommand from django.db.models import Q from django.db.utils import IntegrityError @@ -35,6 +37,7 @@ from framework import sentry from framework.exceptions import HTTPError from osf.models import Node, NodeLog, Registration, 
BaseFileNode +from osf.exceptions import RegistrationStuckRecoverableException, RegistrationStuckBrokenException from api.base.utils import waterbutler_api_url_for from scripts import utils as script_utils from website.archiver import ARCHIVER_SUCCESS @@ -43,11 +46,6 @@ logger = logging.getLogger(__name__) -# Control globals -DELETE_COLLISIONS = False -SKIP_COLLISIONS = False -ALLOW_UNCONFIGURED = False - # Logging globals CHECKED_OKAY = [] CHECKED_STUCK_RECOVERABLE = [] @@ -57,7 +55,7 @@ SKIPPED = [] # Ignorable NodeLogs -LOG_WHITELIST = { +LOG_WHITELIST = ( 'affiliated_institution_added', 'category_updated', 'comment_added', @@ -109,35 +107,34 @@ 'node_access_requests_disabled', 'view_only_link_added', 'view_only_link_removed', -} +) # Require action, but recoverable from -LOG_GREYLIST = { +LOG_GREYLIST = ( 'addon_file_moved', 'addon_file_renamed', 'osf_storage_file_added', 'osf_storage_file_removed', 'osf_storage_file_updated', 'osf_storage_folder_created' -} -VERIFY_PROVIDER = { +) +VERIFY_PROVIDER = ( 'addon_file_moved', 'addon_file_renamed' -} +) # Permissible in certain circumstances after communication with user -PERMISSIBLE_BLACKLIST = { +PERMISSIBLE_BLACKLIST = ( 'dropbox_folder_selected', 'dropbox_node_authorized', 'dropbox_node_deauthorized', 'addon_removed', 'addon_added' -} +) -# Extendable with command line input -PERMISSIBLE_ADDONS = { - 'osfstorage' -} +DEFAULT_PERMISSIBLE_ADDONS = ( + 'osfstorage', +) def complete_archive_target(reg, addon_short_name): # Cache registration files count @@ -149,16 +146,16 @@ def complete_archive_target(reg, addon_short_name): target.save() archive_job._post_update_target() -def perform_wb_copy(reg, node_settings): +def perform_wb_copy(reg, node_settings, delete_collisions=False, skip_collisions=False): src, dst, user = reg.archive_job.info() if dst.files.filter(name=node_settings.archive_folder_name.replace('/', '-')).exists(): - if not DELETE_COLLISIONS and not SKIP_COLLISIONS: + if not delete_collisions and not 
skip_collisions: raise Exception('Archive folder for {} already exists. Investigate manually and rerun with either --delete-collisions or --skip-collisions') - if DELETE_COLLISIONS: + if delete_collisions: archive_folder = dst.files.exclude(type='osf.trashedfolder').get(name=node_settings.archive_folder_name.replace('/', '-')) logger.info(f'Removing {archive_folder}') archive_folder.delete() - if SKIP_COLLISIONS: + if skip_collisions: complete_archive_target(reg, node_settings.short_name) return cookie = user.get_or_create_cookie().decode() @@ -283,9 +280,9 @@ def get_logs_to_revert(reg): Q(node=reg.registered_from) | (Q(params__source__nid=reg.registered_from._id) | Q(params__destination__nid=reg.registered_from._id))).order_by('-date') -def revert_log_actions(file_tree, reg, obj_cache): +def revert_log_actions(file_tree, reg, obj_cache, permissible_addons): logs_to_revert = get_logs_to_revert(reg) - if len(PERMISSIBLE_ADDONS) > 1: + if len(permissible_addons) > 1: logs_to_revert = logs_to_revert.exclude(action__in=PERMISSIBLE_BLACKLIST) for log in list(logs_to_revert): try: @@ -327,7 +324,7 @@ def revert_log_actions(file_tree, reg, obj_cache): obj_cache.add(file_obj._id) return file_tree -def build_file_tree(reg, node_settings): +def build_file_tree(reg, node_settings, *args, **kwargs): n = reg.registered_from obj_cache = set(n.files.values_list('_id', flat=True)) @@ -344,45 +341,47 @@ def _recurse(file_obj, node): return serialized current_tree = _recurse(node_settings.get_root(), n) - return revert_log_actions(current_tree, reg, obj_cache) + return revert_log_actions(current_tree, reg, obj_cache, *args, **kwargs) -def archive(registration): +def archive(registration, *args, permissible_addons=DEFAULT_PERMISSIBLE_ADDONS, allow_unconfigured=False, **kwargs): for reg in registration.node_and_primary_descendants(): reg.registered_from.creator.get_or_create_cookie() # Allow WB requests if reg.archive_job.status == ARCHIVER_SUCCESS: continue logs_to_revert = 
reg.registered_from.logs.filter(date__gt=reg.registered_date).exclude(action__in=LOG_WHITELIST) - if len(PERMISSIBLE_ADDONS) == 1: + if len(permissible_addons) == 1: assert not logs_to_revert.exclude(action__in=LOG_GREYLIST).exists(), f'{registration._id}: {reg.registered_from._id} had unexpected unacceptable logs' else: assert not logs_to_revert.exclude(action__in=LOG_GREYLIST).exclude(action__in=PERMISSIBLE_BLACKLIST).exists(), f'{registration._id}: {reg.registered_from._id} had unexpected unacceptable logs' logger.info(f'Preparing to archive {reg._id}') - for short_name in PERMISSIBLE_ADDONS: + for short_name in permissible_addons: node_settings = reg.registered_from.get_addon(short_name) if not hasattr(node_settings, '_get_file_tree'): # Excludes invalid or None-type continue if not node_settings.configured: - if not ALLOW_UNCONFIGURED: + if not allow_unconfigured: raise Exception(f'{reg._id}: {short_name} on {reg.registered_from._id} is not configured. If this is permissible, re-run with `--allow-unconfigured`.') continue if not reg.archive_job.get_target(short_name) or reg.archive_job.get_target(short_name).status == ARCHIVER_SUCCESS: continue if short_name == 'osfstorage': - file_tree = build_file_tree(reg, node_settings) + file_tree = build_file_tree(reg, node_settings, permissible_addons=permissible_addons) manually_archive(file_tree, reg, node_settings) complete_archive_target(reg, short_name) else: assert reg.archiving, f'{reg._id}: Must be `archiving` for WB to copy' - perform_wb_copy(reg, node_settings) + perform_wb_copy(reg, node_settings, *args, **kwargs) -def archive_registrations(): +def archive_registrations(*args, **kwargs): for reg in deepcopy(VERIFIED): - archive(reg) + archive(reg, *args, *kwargs) ARCHIVED.append(reg) VERIFIED.remove(reg) -def verify(registration): +def verify(registration, permissible_addons=DEFAULT_PERMISSIBLE_ADDONS, raise_error=False): + maybe_suppress_error = contextlib.suppress(ValidationError) if not raise_error else 
contextlib.nullcontext(enter_result=False) + for reg in registration.node_and_primary_descendants(): logger.info(f'Verifying {reg._id}') if reg.archive_job.status == ARCHIVER_SUCCESS: @@ -390,26 +389,41 @@ def verify(registration): nonignorable_logs = get_logs_to_revert(reg) unacceptable_logs = nonignorable_logs.exclude(action__in=LOG_GREYLIST) if unacceptable_logs.exists(): - if len(PERMISSIBLE_ADDONS) == 1 or unacceptable_logs.exclude(action__in=PERMISSIBLE_BLACKLIST): - logger.error('{}: Original node {} has unacceptable logs: {}'.format( + if len(permissible_addons) == 1 or unacceptable_logs.exclude(action__in=PERMISSIBLE_BLACKLIST): + message = '{}: Original node {} has unacceptable logs: {}'.format( registration._id, reg.registered_from._id, list(unacceptable_logs.values_list('action', flat=True)) - )) + ) + logger.error(message) + + with maybe_suppress_error: + raise ValidationError(message) + return False if nonignorable_logs.filter(action__in=VERIFY_PROVIDER).exists(): for log in nonignorable_logs.filter(action__in=VERIFY_PROVIDER): for key in ['source', 'destination']: if key in log.params: if log.params[key]['provider'] != 'osfstorage': - logger.error('{}: {} Only OSFS moves and renames are permissible'.format( + message = '{}: {} Only OSFS moves and renames are permissible'.format( registration._id, log._id - )) + ) + logger.error(message) + + with maybe_suppress_error: + raise ValidationError(message) + return False addons = reg.registered_from.get_addon_names() - if set(addons) - set(PERMISSIBLE_ADDONS | {'wiki'}) != set(): - logger.error(f'{registration._id}: Original node {reg.registered_from._id} has addons: {addons}') + if set(addons) - set(permissible_addons | {'wiki'}) != set(): + message = f'{registration._id}: Original node {reg.registered_from._id} has addons: {addons}' + logger.error(message) + + with maybe_suppress_error: + raise ValidationError(message) + return False if nonignorable_logs.exists(): logger.info('{}: Original node {} has had 
revertable file operations'.format( @@ -423,23 +437,23 @@ def verify(registration): )) return True -def verify_registrations(registration_ids): +def verify_registrations(registration_ids, permissible_addons): for r_id in registration_ids: reg = Registration.load(r_id) if not reg: logger.warning(f'Registration {r_id} not found') else: - if verify(reg): + if verify(reg, permissible_addons=permissible_addons): VERIFIED.append(reg) else: SKIPPED.append(reg) def check(reg): + """Check registration status. Raise exception if registration stuck.""" logger.info(f'Checking {reg._id}') if reg.is_deleted: - logger.info(f'Registration {reg._id} is deleted.') - CHECKED_OKAY.append(reg) - return + return f'Registration {reg._id} is deleted.' + expired_if_before = timezone.now() - ARCHIVE_TIMEOUT_TIMEDELTA archive_job = reg.archive_job root_job = reg.root.archive_job @@ -452,14 +466,11 @@ def check(reg): if still_archiving and root_job.datetime_initiated < expired_if_before: logger.warning(f'Registration {reg._id} is stuck in archiving') if verify(reg): - logger.info(f'Registration {reg._id} verified recoverable') - CHECKED_STUCK_RECOVERABLE.append(reg) + raise RegistrationStuckRecoverableException(f'Registration {reg._id} is stuck and verified recoverable') else: - logger.info(f'Registration {reg._id} verified broken') - CHECKED_STUCK_BROKEN.append(reg) - else: - logger.info(f'Registration {reg._id} is not stuck in archiving') - CHECKED_OKAY.append(reg) + raise RegistrationStuckBrokenException(f'Registration {reg._id} is stuck and verified broken') + + return f'Registration {reg._id} is not stuck in archiving' def check_registrations(registration_ids): for r_id in registration_ids: @@ -467,7 +478,16 @@ def check_registrations(registration_ids): if not reg: logger.warning(f'Registration {r_id} not found') else: - check(reg) + try: + status = check(reg) + logger.info(status) + CHECKED_OKAY.append(reg) + except RegistrationStuckRecoverableException as exc: + logger.info(str(exc)) + 
CHECKED_STUCK_RECOVERABLE.append(reg) + except RegistrationStuckBrokenException as exc: + logger.info(str(exc)) + CHECKED_STUCK_BROKEN.append(reg) def log_results(dry_run): if CHECKED_OKAY: @@ -527,29 +547,31 @@ def add_arguments(self, parser): parser.add_argument('--guids', type=str, nargs='+', help='GUIDs of registrations to archive') def handle(self, *args, **options): - global DELETE_COLLISIONS - global SKIP_COLLISIONS - global ALLOW_UNCONFIGURED - DELETE_COLLISIONS = options.get('delete_collisions') - SKIP_COLLISIONS = options.get('skip_collisions') - ALLOW_UNCONFIGURED = options.get('allow_unconfigured') - if DELETE_COLLISIONS and SKIP_COLLISIONS: + delete_collisions = options.get('delete_collisions') + skip_collisions = options.get('skip_collisions') + allow_unconfigured = options.get('allow_unconfigured') + if delete_collisions and skip_collisions: raise Exception('Cannot specify both delete_collisions and skip_collisions') dry_run = options.get('dry_run') if not dry_run: script_utils.add_file_logger(logger, __file__) - addons = options.get('addons', []) - if addons: - PERMISSIBLE_ADDONS.update(set(addons)) + addons = options.get('addons') or set() + addons.update(DEFAULT_PERMISSIBLE_ADDONS) + registration_ids = options.get('guids', []) if options.get('check', False): check_registrations(registration_ids) else: - verify_registrations(registration_ids) + verify_registrations(registration_ids, permissible_addons=addons) if not dry_run: - archive_registrations() + archive_registrations( + permissible_addons=addons, + delete_collisions=delete_collisions, + skip_collisions=skip_collisions, + allow_unconfigured=allow_unconfigured, + ) log_results(dry_run) diff --git a/osf/management/commands/migrate_notifications.py b/osf/management/commands/migrate_notifications.py new file mode 100644 index 00000000000..f911d93b796 --- /dev/null +++ b/osf/management/commands/migrate_notifications.py @@ -0,0 +1,55 @@ +import logging +from django.contrib.contenttypes.models 
import ContentType +from osf.models.notification import NotificationType, NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy +from django.core.management.base import BaseCommand +from django.db import transaction + +logger = logging.getLogger(__name__) + +FREQ_MAP = { + 'none': 'none', + 'email_digest': 'weekly', + 'email_transactional': 'instantly', +} + +def migrate_legacy_notification_subscriptions(): + """ + Migrate legacy NotificationSubscription data to new notifications app. + """ + logger.info('Beginning legacy notification subscription migration...') + + PROVIDER_BASED_LEGACY_NOTIFICATION_TYPES = [f'{provider}_comment_replies' for provider in NotificationSubscriptionLegacy.objects.all().values_list('provider', flat=True) if provider] + + for legacy in NotificationSubscriptionLegacy.objects.all(): + event_name = legacy.event_name + if event_name in PROVIDER_BASED_LEGACY_NOTIFICATION_TYPES: + subscribed_object = legacy.provider + event_name = event_name.replace(f'{legacy.provider.id}_', '') + elif subscribed_object := legacy.node: + pass + elif subscribed_object := legacy.user: + pass + else: + raise NotImplementedError(f'Invalid Notification id {event_name}') + content_type = ContentType.objects.get_for_model(subscribed_object.__class__) + subscription, _ = NotificationSubscription.objects.update_or_create( + notification_type=NotificationType.objects.get(name=event_name), + user=legacy.user, + content_type=content_type, + object_id=subscribed_object.id, + defaults={ + 'user_id': legacy.user.id, + 'message_frequency': 'weekly' if legacy.email_digest.exists() else 'none' 'instantly' if legacy.email_transactional.exists() else 'none', + 'content_type': content_type, + 'object_id': subscribed_object.id, + } + ) + logger.info(f'Created NotificationType "{event_name}" with content_type {content_type}') + +class Command(BaseCommand): + help = 'Migrate legacy NotificationSubscriptionLegacy objects to new Notification app 
models.' + + def handle(self, *args, **options): + with transaction.atomic(): + migrate_legacy_notification_subscriptions() diff --git a/osf/management/commands/populate_collection_provider_notification_subscriptions.py b/osf/management/commands/populate_collection_provider_notification_subscriptions.py deleted file mode 100644 index 5713b08061b..00000000000 --- a/osf/management/commands/populate_collection_provider_notification_subscriptions.py +++ /dev/null @@ -1,40 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from osf.models import NotificationSubscription, CollectionProvider - -logger = logging.getLogger(__file__) - - -def populate_collection_provider_notification_subscriptions(): - for provider in CollectionProvider.objects.all(): - provider_admins = provider.get_group('admin').user_set.all() - provider_moderators = provider.get_group('moderator').user_set.all() - - for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscription.objects.get_or_create( - _id=f'{provider._id}_{subscription}', - event_name=subscription, - provider=provider - ) - - if created: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object has been created') - else: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object exists') - - for user in provider_admins | provider_moderators: - # add user to subscription list but set their notification to none by default - instance.add_user_to_subscription(user, 'email_transactional', save=True) - logger.info(f'User {user._id} is subscribed to {provider._id}_{subscription}') - - -class Command(BaseCommand): - help = """ - Creates NotificationSubscriptions for existing RegistrationProvider objects - and adds RegistrationProvider moderators/admins to subscriptions - """ - - # Management command handler - def handle(self, *args, **options): - populate_collection_provider_notification_subscriptions() diff --git 
a/osf/management/commands/populate_registration_provider_notification_subscriptions.py b/osf/management/commands/populate_registration_provider_notification_subscriptions.py deleted file mode 100644 index fe372fcbb80..00000000000 --- a/osf/management/commands/populate_registration_provider_notification_subscriptions.py +++ /dev/null @@ -1,45 +0,0 @@ -import logging - -from django.contrib.auth.models import Group -from django.core.management.base import BaseCommand -from osf.models import NotificationSubscription, RegistrationProvider - -logger = logging.getLogger(__file__) - - -def populate_registration_provider_notification_subscriptions(): - for provider in RegistrationProvider.objects.all(): - try: - provider_admins = provider.get_group('admin').user_set.all() - provider_moderators = provider.get_group('moderator').user_set.all() - except Group.DoesNotExist: - logger.warning(f'Unable to find groups for provider "{provider._id}", assuming there are no subscriptions to create.') - continue - - for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscription.objects.get_or_create( - _id=f'{provider._id}_{subscription}', - event_name=subscription, - provider=provider - ) - - if created: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object has been created') - else: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object exists') - - for user in provider_admins | provider_moderators: - # add user to subscription list but set their notification to none by default - instance.add_user_to_subscription(user, 'email_transactional', save=True) - logger.info(f'User {user._id} is subscribed to {provider._id}_{subscription}') - - -class Command(BaseCommand): - help = """ - Creates NotificationSubscriptions for existing RegistrationProvider objects - and adds RegistrationProvider moderators/admins to subscriptions - """ - - # Management command handler - def handle(self, *args, **options): - 
populate_registration_provider_notification_subscriptions() diff --git a/osf/management/commands/send_storage_exceeded_announcement.py b/osf/management/commands/send_storage_exceeded_announcement.py index 4cee3ec6573..8c4a687f3ce 100644 --- a/osf/management/commands/send_storage_exceeded_announcement.py +++ b/osf/management/commands/send_storage_exceeded_announcement.py @@ -2,10 +2,9 @@ import json from tqdm import tqdm -from website import mails from django.core.management.base import BaseCommand -from osf.models import Node, OSFUser +from osf.models import Node, OSFUser, NotificationType logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -40,13 +39,15 @@ def main(json_file, dry=False): if public_nodes or private_nodes: if not dry: try: - mails.send_mail( - to_addr=user.username, - mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT, + NotificationType.objects.get( + name=NotificationType.Type.USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT + ).emit( user=user, - public_nodes=public_nodes, - private_nodes=private_nodes, - can_change_preferences=False, + event_context={ + 'public_nodes': public_nodes, + 'private_nodes': private_nodes, + 'can_change_preferences': False, + } ) except Exception: errors.append(user._id) diff --git a/osf/management/commands/transfer_quickfiles_to_projects.py b/osf/management/commands/transfer_quickfiles_to_projects.py deleted file mode 100644 index c4f5a8450f1..00000000000 --- a/osf/management/commands/transfer_quickfiles_to_projects.py +++ /dev/null @@ -1,180 +0,0 @@ -import pytz -import logging -import datetime - -from django.db import transaction -from django.db.models import Exists, F, Func, OuterRef, Value -from django.core.management.base import BaseCommand -from tqdm import tqdm - -from osf.models import ( - OSFUser, - QuickFilesNode, - NodeLog, - AbstractNode, - Guid, -) -from osf.models.base import generate_guid -from osf.models.quickfiles import get_quickfiles_project_title -from osf.models.queued_mail import 
QueuedMail -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField - -from addons.osfstorage.models import OsfStorageFile -from website import mails, settings -from django.contrib.contenttypes.models import ContentType - -logger = logging.getLogger(__name__) -QUICKFILES_DESC = 'The Quick Files feature was discontinued and it’s files were migrated into this Project on March' \ - ' 11, 2022. The file URL’s will still resolve properly, and the Quick Files logs are available in' \ - ' the Project’s Recent Activity.' -QUICKFILES_DATE = datetime.datetime(2022, 3, 11, tzinfo=pytz.utc) - - -def remove_quickfiles(): - node_content_type = ContentType.objects.get_for_model(AbstractNode) - quick_file_annotation = Exists( - OsfStorageFile.objects.filter( - target_object_id=OuterRef('id'), - target_content_type=node_content_type - ) - ) - quick_files_nodes = QuickFilesNode.objects.annotate(has_files=quick_file_annotation).filter(has_files=True) - target_count = quick_files_nodes.count() - logger.info(f'Acquired {target_count} targets') - - _ = Guid.objects.filter( - id__in=quick_files_nodes.values_list('guids__id', flat=True) - ).delete() - logger.info(f'Deleted guids: {_}') - - # generate unique guids prior to record creation to avoid collisions, set object ensures all guids are unique - guids = set() - while len(guids) < target_count: - guids.add(generate_guid()) - guids = list(guids) - logger.info(f'Generated {len(guids)} Guids') - - guids = [ - Guid( - _id=_id, - object_id=node_id, - content_type=node_content_type, - ) for _id, node_id in zip(guids, quick_files_nodes.values_list('id', flat=True)) - ] - Guid.objects.bulk_create(guids) - logger.info(f'Created {len(guids)} Guids') - - node_logs = [] - queued_mail = [] - pbar = tqdm(total=target_count) - for node in quick_files_nodes: - node_logs.append(NodeLog( - node=node, - user=node.creator, - original_node=node, - params={'node': node._id}, - action=NodeLog.MIGRATED_QUICK_FILES - )) - 
queued_mail.append(QueuedMail( - user=node.creator, - to_addr=node.creator.email, - send_at=QUICKFILES_DATE, - email_type=mails.QUICKFILES_MIGRATED.tpl_prefix, - data=dict( - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - quickfiles_link=node.absolute_url - ) - )) - node.logs.update( - params=Func( - F('params'), - Value(['node']), - Value(node._id, DateTimeAwareJSONField()), - function='jsonb_set' - ) - ) - pbar.update(1) - pbar.close() - - logger.info('Updated logs') - NodeLog.objects.bulk_create(node_logs) - logger.info(f'Created {len(node_logs)} logs') - QueuedMail.objects.bulk_create(queued_mail) - logger.info(f'Created {len(queued_mail)} mails') - - quick_files_nodes.update(description=QUICKFILES_DESC, type='osf.node') - logger.info(f'Projectified {target_count} QuickFilesNodes') - - -def reverse_remove_quickfiles(): - quickfiles_nodes_with_files = AbstractNode.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES - ) - for node in quickfiles_nodes_with_files: - node.guids.all().delete() - node.save() - - quickfiles_nodes_with_files.update( - type='osf.quickfilesnode', - is_deleted=False, - deleted=None, - ) - - users_without_nodes = OSFUser.objects.exclude( - id__in=QuickFilesNode.objects.all().values_list( - 'creator__id', - flat=True - ) - ) - quickfiles_created = [] - for user in users_without_nodes: - quickfiles_created.append( - QuickFilesNode( - title=get_quickfiles_project_title(user), - creator=user - ) - ) - - QuickFilesNode.objects.bulk_create(quickfiles_created) - - for quickfiles in quickfiles_created: - quickfiles.add_addon('osfstorage', auth=None, log=False) - quickfiles.save() - - NodeLog.objects.filter(action=NodeLog.MIGRATED_QUICK_FILES).delete() - - logger.info(f'{len(QuickFilesNode.objects.all())} quickfiles were restored.') - - -class Command(BaseCommand): - """ - Puts all Quickfiles into projects or reverses the effect. 
- """ - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - required=False, - ) - parser.add_argument( - '--reverse', - type=bool, - help='is the reverse to be run?.', - required=False, - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - reverse = options.get('reverse', False) - with transaction.atomic(): - if reverse: - reverse_remove_quickfiles() - else: - remove_quickfiles() - if dry_run: - raise RuntimeError('Dry run complete, rolling back.') diff --git a/osf/migrations/0030_alter_notificationsubscription_options_and_more.py b/osf/migrations/0030_alter_notificationsubscription_options_and_more.py new file mode 100644 index 00000000000..ec044b08a07 --- /dev/null +++ b/osf/migrations/0030_alter_notificationsubscription_options_and_more.py @@ -0,0 +1,104 @@ +import osf +from django.db import migrations, models +from django.conf import settings +import django_extensions.db.fields +import django.db.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0029_remove_abstractnode_keenio_read_key'), + ] + + operations = [ + migrations.RunSQL( + """ + DO $$ + DECLARE + idx record; + BEGIN + FOR idx IN + SELECT indexname + FROM pg_indexes + WHERE tablename = 'osf_notificationsubscription' + LOOP + EXECUTE format( + 'ALTER INDEX %I RENAME TO %I', + idx.indexname, + replace(idx.indexname, 'osf_notificationsubscription', 'osf_notificationsubscription_legacy') + ); + END LOOP; + END$$; + """ + ), + migrations.AlterModelTable( + name='NotificationSubscription', + table='osf_notificationsubscription_legacy', + ), + + migrations.RenameModel( + old_name='NotificationSubscription', + new_name='NotificationSubscriptionLegacy', + ), + migrations.CreateModel( + name='NotificationType', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), + ('name', models.CharField(max_length=255, unique=True)), + ('notification_freq', models.CharField( + choices=[('none', 'None'), ('instantly', 'Instantly'), ('daily', 'Daily'), ('weekly', 'Weekly'), + ('monthly', 'Monthly')], default='instantly', max_length=32)), + ('template', models.TextField( + help_text='Template used to render the event_info. Supports Django template syntax.')), + ('object_content_type', models.ForeignKey(blank=True, + help_text='Content type for subscribed objects. Null means global event.', + null=True, on_delete=django.db.models.deletion.SET_NULL, + to='contenttypes.contenttype')), + ], + options={ + 'verbose_name': 'Notification Type', + 'verbose_name_plural': 'Notification Types', + }, + ), + migrations.CreateModel( + name='NotificationSubscription', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', + django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', + django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('message_frequency', models.CharField(max_length=32)), + ('object_id', models.CharField(blank=True, max_length=255, null=True)), + ('content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, + to='contenttypes.contenttype')), + ('notification_type', + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.notificationtype')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', + to=settings.AUTH_USER_MODEL)), + ], + options={ + 'verbose_name': 'Notification Subscription', + 'verbose_name_plural': 'Notification Subscriptions', + }, + bases=(models.Model, osf.models.base.QuerySetExplainMixin), + ), + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), + ('event_context', models.JSONField()), + ('sent', models.DateTimeField(blank=True, null=True)), + ('seen', models.DateTimeField(blank=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True)), + ('subscription', + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', + to='osf.notificationsubscription')), + ], + options={ + 'verbose_name': 'Notification', + 'verbose_name_plural': 'Notifications', + }, + ) + ] diff --git a/osf/migrations/0031_remove_osfuser_contributor_added_email_records.py b/osf/migrations/0031_remove_osfuser_contributor_added_email_records.py new file mode 100644 index 00000000000..080130da245 --- /dev/null +++ b/osf/migrations/0031_remove_osfuser_contributor_added_email_records.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.13 on 2025-04-22 14:24 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0030_alter_notificationsubscription_options_and_more'), + ] + + operations = [ + migrations.RemoveField( + model_name='osfuser', + name='contributor_added_email_records', + ), + ] diff --git a/osf/migrations/0032_delete_queuedmail.py b/osf/migrations/0032_delete_queuedmail.py new file mode 100644 index 00000000000..ec13c516cd0 --- /dev/null +++ b/osf/migrations/0032_delete_queuedmail.py @@ -0,0 +1,16 @@ +# Generated by Django 4.2.13 on 2025-04-24 14:02 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0031_remove_osfuser_contributor_added_email_records'), + ] + + operations = [ + migrations.DeleteModel( + name='QueuedMail', + ), + ] diff --git a/osf/migrations/__init__.py b/osf/migrations/__init__.py index 0d5292abcf1..a45fc5a1c6c 100644 --- a/osf/migrations/__init__.py +++ b/osf/migrations/__init__.py @@ -274,3 +274,55 @@ def ensure_invisible_and_inactive_schema(): v2_inactive_schema = v2_inactive_schema + ['Election Research 
Preacceptance Competition'] RegistrationSchema.objects.filter(name__in=v2_inactive_schema).update(visible=False) RegistrationSchema.objects.filter(name__in=v2_inactive_schema).update(active=False) + + +def update_notification_types(*args, **kwargs): + import yaml + from django.contrib.contenttypes.models import ContentType + from website import settings + from osf.models import NotificationType + + with open(settings.NOTIFICATION_TYPES_YAML) as stream: + notification_types = yaml.safe_load(stream) + for notification_type in notification_types['notification_types']: + notification_type.pop('__docs__') + object_content_type_model_name = notification_type.pop('object_content_type_model_name') + notification_freq = notification_type.pop('notification_freq_default') + + if object_content_type_model_name == 'desk': + content_type = None + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + elif object_content_type_model_name == 'preprint': + Preprint = apps.get_model('osf', 'Preprint') + content_type = ContentType.objects.get_for_model(Preprint) + elif object_content_type_model_name == 'collectionsubmission': + CollectionSubmission = apps.get_model('osf', 'CollectionSubmission') + content_type = ContentType.objects.get_for_model(CollectionSubmission) + elif object_content_type_model_name == 'abstractprovider': + AbstractProvider = apps.get_model('osf', 'abstractprovider') + content_type = ContentType.objects.get_for_model(AbstractProvider) + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + else: + try: + content_type = ContentType.objects.get( + app_label='osf', + model=object_content_type_model_name + ) + except ContentType.DoesNotExist: + raise ValueError(f'No content type for osf.{object_content_type_model_name}') + + with open(notification_type['template']) 
as stream: + template = stream.read() + + notification_types['template'] = template + notification_types['notification_freq'] = notification_freq + nt, _ = NotificationType.objects.update_or_create( + name=notification_type['name'], + defaults=notification_type, + ) + nt.object_content_type = content_type + nt.save() diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 275fd148b6c..fb4f0ec73fa 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -62,7 +62,8 @@ from .node_relation import NodeRelation from .nodelog import NodeLog from .notable_domain import NotableDomain, DomainReference -from .notifications import NotificationDigest, NotificationSubscription +from .notifications import NotificationDigest, NotificationSubscriptionLegacy +from .notification import Notification, NotificationType, NotificationSubscription from .oauth import ( ApiOAuth2Application, ApiOAuth2PersonalToken, @@ -82,7 +83,6 @@ RegistrationProvider, WhitelistedSHAREPreprintProvider, ) -from .queued_mail import QueuedMail from .quickfiles import QuickFilesNode from .registrations import ( DraftRegistration, diff --git a/osf/models/base.py b/osf/models/base.py index 4b51544dd15..d2c07a86d9e 100644 --- a/osf/models/base.py +++ b/osf/models/base.py @@ -49,6 +49,8 @@ def generate_object_id(): def coerce_guid(maybe_guid, create_if_needed=False): if isinstance(maybe_guid, Guid): return maybe_guid + if isinstance(maybe_guid, VersionedGuidMixin): + return maybe_guid.versioned_guids.first().guid if isinstance(maybe_guid, GuidMixin): return maybe_guid.guids.first() if isinstance(maybe_guid, OptionalGuidMixin): diff --git a/osf/models/collection_submission.py b/osf/models/collection_submission.py index 893533d85d1..448040f38a9 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -11,13 +11,12 @@ from website.util import api_v2_url from website.search.exceptions import SearchUnavailableError from osf.utils.workflows import 
CollectionSubmissionsTriggers, CollectionSubmissionStates -from website.filters import profile_image_url -from website import mails, settings +from website import settings from osf.utils.machines import CollectionSubmissionMachine +from osf.models.notification import NotificationType from django.db.models.signals import post_save from django.dispatch import receiver -from django.utils import timezone logger = logging.getLogger(__name__) @@ -102,72 +101,35 @@ def _notify_contributors_pending(self, event_data): assert str(e) == f'No unclaimed record for user {contributor._id} on node {self.guid.referent._id}' claim_url = None - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_SUBMITTED(self.creator, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_SUBMITTED, + ).emit( user=contributor, - submitter=user, - is_initator=self.creator == contributor, - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - is_registered_contrib=contributor.is_registered, - collection=self.collection, - claim_url=claim_url, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + subscribed_object=self, + event_context={ + 'user': contributor.id, + 'submitter': user.id, + 'is_initiator': self.creator == contributor, + 'is_admin': self.guid.referent.has_permission(contributor, ADMIN), + 'is_registered_contrib': contributor.is_registered, + 'collection': self.collection.id, + 'claim_url': claim_url, + 'node': self.guid.referent.id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def _notify_moderators_pending(self, event_data): - context = { - 'reviewable': self.guid.referent, - 'abstract_provider': self.collection.provider, - 'reviews_submission_url': f'{settings.DOMAIN}{self.guid.referent._id}?mode=moderator', - 'profile_image_url': profile_image_url( - settings.PROFILE_IMAGE_PROVIDER, - self.creator, - 
use_ssl=True, - size=settings.PROFILE_IMAGE_MEDIUM - ), - 'message': f'submitted "{self.guid.referent.title}".', - 'allow_submissions': True, - } - - from .notifications import NotificationSubscription - from website.notifications.emails import store_emails - - provider_subscription, created = NotificationSubscription.objects.get_or_create( - _id=f'{self.collection.provider._id}_new_pending_submissions', - provider=self.collection.provider - ) - email_transactors_ids = list( - provider_subscription.email_transactional.all().values_list( - 'guids___id', - flat=True - ) - ) - store_emails( - email_transactors_ids, - 'email_transactional', - 'new_pending_submissions', - self.creator, - self.guid.referent, - timezone.now(), - **context - ) - email_digester_ids = list( - provider_subscription.email_digest.all().values_list( - 'guids___id', - flat=True - ) - ) - store_emails( - email_digester_ids, - 'email_digest', - 'new_pending_submissions', - self.creator, - self.guid.referent, - timezone.now(), - **context + user = event_data.kwargs.get('user', None) + NotificationType.objects.get( + name=NotificationType.Type.NEW_PENDING_SUBMISSIONS, + ).emit( + user=user, + subscribed_object=self.guid.referent, + event_context={ + 'submitter': self.creator.id, + }, ) def _validate_accept(self, event_data): @@ -182,16 +144,20 @@ def _validate_accept(self, event_data): def _notify_accepted(self, event_data): if self.collection.provider: for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_ACCEPTED(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED, + ).emit( user=contributor, - submitter=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + 
subscribed_object=self, + event_context={ + 'user': contributor.id, + 'submitter': event_data.kwargs.get('user').id, + 'is_admin': self.guid.referent.has_permission(contributor, ADMIN), + 'collection': self.collection.id, + 'node': self.guid.referent.id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def _validate_reject(self, event_data): @@ -209,15 +175,19 @@ def _validate_reject(self, event_data): def _notify_moderated_rejected(self, event_data): for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REJECTED(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REJECTED, + ).emit( user=contributor, - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - rejection_justification=event_data.kwargs.get('comment'), - osf_contact_email=settings.OSF_CONTACT_EMAIL, + subscribed_object=self, + event_context={ + 'user': contributor.id, + 'is_admin': self.guid.referent.has_permission(contributor, ADMIN), + 'collection': self.collection.id, + 'node': self.guid.referent.id, + 'rejection_justification': event_data.kwargs.get('comment'), + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def _validate_remove(self, event_data): @@ -243,56 +213,60 @@ def _notify_removed(self, event_data): removed_due_to_privacy = event_data.kwargs.get('removed_due_to_privacy') is_moderator = user.has_perm('withdraw_submissions', self.collection.provider) is_admin = self.guid.referent.has_permission(user, ADMIN) + node = self.guid.referent + + event_context_base = { + 'remover': user.id, + 'collection_id': self.collection.id, + 'node_id': node.id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } + if removed_due_to_privacy and self.collection.provider: if self.is_moderated: for moderator in 
self.collection.moderators: - mails.send_mail( - to_addr=moderator.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_PRIVATE + ).emit( user=moderator, - remover=user, - is_admin=self.guid.referent.has_permission(moderator, ADMIN), - collection=self.collection, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(moderator, ADMIN), + }, ) - for contributor in self.guid.referent.contributors.all(): - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(self.collection, self.guid.referent), + for contributor in node.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_PRIVATE + ).emit( user=contributor, - remover=user, - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(contributor, ADMIN), + }, ) elif is_moderator and self.collection.provider: - for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_MODERATOR(self.collection, self.guid.referent), + for contributor in node.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_MODERATOR + ).emit( user=contributor, - rejection_justification=event_data.kwargs.get('comment'), - remover=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + 
event_context={ + **event_context_base, + 'is_admin': node.has_permission(contributor, ADMIN), + 'rejection_justification': event_data.kwargs.get('comment'), + }, ) elif is_admin and self.collection.provider: - for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(self.collection, self.guid.referent), + for contributor in node.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + ).emit( user=contributor, - remover=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(contributor, ADMIN), + }, ) def _validate_resubmit(self, event_data): @@ -322,15 +296,13 @@ def _notify_cancel(self, event_data): return for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_CANCEL(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_CANCEL + ).emit( user=contributor, - remover=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + 'is_admin': self.collection.has_permission(contributor, ADMIN), + }, ) def _make_public(self, event_data): diff --git a/osf/models/institution.py b/osf/models/institution.py index 5dce3c1df36..6da4941e48e 100644 --- a/osf/models/institution.py +++ b/osf/models/institution.py @@ -22,7 +22,6 @@ from .storage import InstitutionAssetFile from .validators import validate_email from osf.utils.fields import NonNaiveDateTimeField, LowercaseEmailField -from website import 
mails from website import settings as website_settings logger = logging.getLogger(__name__) @@ -218,15 +217,19 @@ def _send_deactivation_email(self): forgot_password = 'forgotpassword' if website_settings.DOMAIN.endswith('/') else '/forgotpassword' attempts = 0 success = 0 + from osf.models import NotificationType + for user in self.get_institution_users(): try: attempts += 1 - mails.send_mail( - to_addr=user.username, - mail=mails.INSTITUTION_DEACTIVATION, + NotificationType.objects.get( + name=NotificationType.Type.USER_INSTITUTION_DEACTIVATION + ).emit( user=user, - forgot_password_link=f'{website_settings.DOMAIN}{forgot_password}', - osf_support_email=website_settings.OSF_SUPPORT_EMAIL + event_context={ + 'forgot_password_link': f'{website_settings.DOMAIN}{forgot_password}', + 'osf_support_email': website_settings.OSF_SUPPORT_EMAIL, + } ) except Exception as e: logger.error(f'Failed to send institution deactivation email to user [{user._id}] at [{self._id}]') diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 9027a284f7c..0d52c0e5d10 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -26,6 +26,7 @@ InvalidTagError, BlockedEmailError, ) +from osf.models.notification import NotificationType from .node_relation import NodeRelation from .nodelog import NodeLog from .subject import Subject @@ -54,7 +55,7 @@ from osf.utils.requests import get_request_and_user_id from website.project import signals as project_signals -from website import settings, mails, language +from website import settings, language from website.project.licenses import set_license logger = logging.getLogger(__name__) @@ -308,12 +309,14 @@ def add_affiliated_institution(self, inst, user, log=True, ignore_user_affiliati self.update_search() if notify and getattr(self, 'type', False) == 'osf.node': for user, _ in self.get_admin_contributors_recursive(unique_users=True): - mails.send_mail( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - **{ - 'user': user, - 'node': 
self, + NotificationType.objects.get( + name=NotificationType.Type.NODE_AFFILIATION_CHANGED + ).emit( + user=user, + subscribed_object=self, + event_context={ + 'user': user.id, + 'node': self.id, }, ) if log: @@ -348,12 +351,14 @@ def remove_affiliated_institution(self, inst, user, save=False, log=True, notify if notify and getattr(self, 'type', False) == 'osf.node': for user, _ in self.get_admin_contributors_recursive(unique_users=True): - mails.send_mail( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - **{ - 'user': user, - 'node': self, + NotificationType.objects.get( + name=NotificationType.Type.NODE_AFFILIATION_CHANGED + ).emit( + user=user, + subscribed_object=self, + event_context={ + 'user': user.id, + 'node': self.id, }, ) @@ -1029,6 +1034,10 @@ class Meta: def is_reviewed(self): return self.reviews_workflow is not None + @property + def provider_notification_types(self): + return NotificationType.objects.filter(name__in=self.DEFAULT_SUBSCRIPTIONS) + def get_reviewable_state_counts(self): assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts' qs = getattr(self, self.REVIEWABLE_RELATION_NAME) @@ -1062,8 +1071,8 @@ def get_request_state_counts(self): def add_to_group(self, user, group): # Add default notification subscription - for subscription in self.DEFAULT_SUBSCRIPTIONS: - self.add_user_to_subscription(user, f'{self._id}_{subscription}') + for notification_type in self.provider_notification_types: + notification_type.add_user_to_subscription(user, provider=self) return self.get_group(group).user_set.add(user) @@ -1074,24 +1083,11 @@ def remove_from_group(self, user, group, unsubscribe=True): raise ValueError('Cannot remove last admin.') if unsubscribe: # remove notification subscription - for subscription in self.DEFAULT_SUBSCRIPTIONS: - self.remove_user_from_subscription(user, f'{self._id}_{subscription}') + for notification_type in self.provider_notification_types: + 
notification_type.remove_user_from_subscription(user) return _group.user_set.remove(user) - def add_user_to_subscription(self, user, subscription_id): - notification = self.notification_subscriptions.get(_id=subscription_id) - user_id = user.id - is_subscriber = notification.none.filter(id=user_id).exists() \ - or notification.email_digest.filter(id=user_id).exists() \ - or notification.email_transactional.filter(id=user_id).exists() - if not is_subscriber: - notification.add_user_to_subscription(user, 'email_transactional', save=True) - - def remove_user_from_subscription(self, user, subscription_id): - notification = self.notification_subscriptions.get(_id=subscription_id) - notification.remove_user_from_subscription(user, save=True) - class TaxonomizableMixin(models.Model): class Meta: @@ -1116,25 +1112,26 @@ def subjects_relationship_url(self): def subjects_url(self): return self.absolute_api_v2_url + 'subjects/' - def check_subject_perms(self, auth): + def check_subject_perms(self, auth, ignore_permission=False): AbstractNode = apps.get_model('osf.AbstractNode') Preprint = apps.get_model('osf.Preprint') CollectionSubmission = apps.get_model('osf.CollectionSubmission') DraftRegistration = apps.get_model('osf.DraftRegistration') - if isinstance(self, AbstractNode): - if not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can change subjects.') - elif isinstance(self, Preprint): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to change a preprint\'s subjects.') - elif isinstance(self, DraftRegistration): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have write permissions to change a draft registration\'s subjects.') - elif isinstance(self, CollectionSubmission): - if not self.guid.referent.has_permission(auth.user, ADMIN) and not auth.user.has_perms( - self.collection.groups[ADMIN], self.collection): - raise PermissionsError('Only admins 
can change subjects.') + if not ignore_permission: + if isinstance(self, AbstractNode): + if not self.has_permission(auth.user, ADMIN): + raise PermissionsError('Only admins can change subjects.') + elif isinstance(self, Preprint): + if not self.has_permission(auth.user, WRITE): + raise PermissionsError('Must have admin or write permissions to change a preprint\'s subjects.') + elif isinstance(self, DraftRegistration): + if not self.has_permission(auth.user, WRITE): + raise PermissionsError('Must have write permissions to change a draft registration\'s subjects.') + elif isinstance(self, CollectionSubmission): + if not self.guid.referent.has_permission(auth.user, ADMIN) and not auth.user.has_perms( + self.collection.groups[ADMIN], self.collection): + raise PermissionsError('Only admins can change subjects.') return def add_subjects_log(self, old_subjects, auth): @@ -1157,7 +1154,7 @@ def assert_subject_format(self, subj_list, expect_list, error_msg): if (expect_list and not is_list) or (not expect_list and is_list): raise ValidationValueError(f'Subjects are improperly formatted. {error_msg}') - def set_subjects(self, new_subjects, auth, add_log=True): + def set_subjects(self, new_subjects, auth, add_log=True, **kwargs): """ Helper for setting M2M subjects field from list of hierarchies received from UI. Only authorized admins may set subjects. 
@@ -1168,7 +1165,7 @@ def set_subjects(self, new_subjects, auth, add_log=True): :return: None """ if auth: - self.check_subject_perms(auth) + self.check_subject_perms(auth, **kwargs) self.assert_subject_format(new_subjects, expect_list=True, error_msg='Expecting list of lists.') old_subjects = list(self.subjects.values_list('id', flat=True)) @@ -1190,7 +1187,7 @@ def set_subjects(self, new_subjects, auth, add_log=True): if hasattr(self, 'update_search'): self.update_search() - def set_subjects_from_relationships(self, subjects_list, auth, add_log=True): + def set_subjects_from_relationships(self, subjects_list, auth, add_log=True, **kwargs): """ Helper for setting M2M subjects field from list of flattened subjects received from UI. Only authorized admins may set subjects. @@ -1200,7 +1197,7 @@ def set_subjects_from_relationships(self, subjects_list, auth, add_log=True): :return: None """ - self.check_subject_perms(auth) + self.check_subject_perms(auth, **kwargs) self.assert_subject_format(subjects_list, expect_list=True, error_msg='Expecting a list of subjects.') if subjects_list: self.assert_subject_format(subjects_list[0], expect_list=False, error_msg='Expecting a list of subjects.') @@ -1295,7 +1292,7 @@ def order_by_contributor_field(self): raise NotImplementedError() @property - def contributor_email_template(self): + def contributor_notification_type(self): # default contributor email template as a string raise NotImplementedError() @@ -1380,22 +1377,30 @@ def _get_admin_contributors_query(self, users, require_active=True): qs = qs.filter(user__is_active=True) return qs - def add_contributor(self, contributor, permissions=None, visible=True, - send_email=None, auth=None, log=True, save=False, make_curator=False): + def add_contributor( + self, + contributor, + permissions=None, + visible=True, + notification_type=False, + auth=None, + log=True, + save=False, + make_curator=False + ): """Add a contributor to the project. 
:param User contributor: The contributor to be added :param list permissions: Permissions to grant to the contributor. Array of all permissions if node, highest permission to grant, if contributor, as a string. :param bool visible: Contributor is visible in project dashboard - :param str send_email: Email preference for notifying added contributor + :param str notification_type: notification preference for notifying added contributor :param Auth auth: All the auth information including user, API key :param bool log: Add log to self :param bool save: Save after adding contributor :param bool make_curator indicates whether the user should be an institutional curator :returns: Whether contributor was added """ - send_email = send_email or self.contributor_email_template # If user is merged into another account, use master account contrib_to_add = contributor.merged_by if contributor.is_merged else contributor if contrib_to_add.is_disabled: @@ -1451,9 +1456,15 @@ def add_contributor(self, contributor, permissions=None, visible=True, self, contributor=contributor, auth=auth, - email_template=send_email, permissions=permissions ) + from website.project.views.contributor import notify_added_contributor + notify_added_contributor( + self, + contributor=contributor, + auth=auth, + notification_type=notification_type, + ) # enqueue on_node_updated/on_preprint_updated to update DOI metadata when a contributor is added if getattr(self, 'get_identifier_value', None) and self.get_identifier_value('doi'): @@ -1506,7 +1517,7 @@ def add_unregistered_contributor(self, fullname, email, auth, send_email=None, :raises: DuplicateEmailError if user with given email is already in the database. 
""" OSFUser = apps.get_model('osf.OSFUser') - send_email = send_email or self.contributor_email_template + send_email = send_email or self.contributor_notification_type if email: try: @@ -1542,18 +1553,31 @@ def add_unregistered_contributor(self, fullname, email, auth, send_email=None, raise e self.add_contributor( - contributor, permissions=permissions, auth=auth, - visible=visible, send_email=send_email, log=True, save=False + contributor, + permissions=permissions, + auth=auth, + visible=visible, + notification_type=send_email, + log=True, + save=False ) self._add_related_source_tags(contributor) self.save() return contributor - def add_contributor_registered_or_not(self, auth, user_id=None, - full_name=None, email=None, send_email=None, - permissions=None, bibliographic=True, index=None, save=False): + def add_contributor_registered_or_not( + self, + auth, + user_id=None, + full_name=None, + email=None, + notification_type=None, + permissions=None, + bibliographic=True, + index=None + ): OSFUser = apps.get_model('osf.OSFUser') - send_email = send_email or self.contributor_email_template + notification_type = notification_type or self.contributor_notification_type if user_id: contributor = OSFUser.load(user_id) @@ -1564,8 +1588,14 @@ def add_contributor_registered_or_not(self, auth, user_id=None, raise ValidationValueError(f'{contributor.fullname} is already a contributor.') if contributor.is_registered: - contributor = self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic, - permissions=permissions, send_email=send_email, save=True) + contributor = self.add_contributor( + contributor=contributor, + auth=auth, + visible=bibliographic, + permissions=permissions, + notification_type=notification_type, + save=True + ) else: if not full_name: raise ValueError( @@ -1573,9 +1603,14 @@ def add_contributor_registered_or_not(self, auth, user_id=None, .format(user_id, self._id) ) contributor = self.add_unregistered_contributor( - 
fullname=full_name, email=contributor.username, auth=auth, - send_email=send_email, permissions=permissions, - visible=bibliographic, existing_user=contributor, save=True + fullname=full_name, + email=contributor.username, + auth=auth, + send_email=notification_type, + permissions=permissions, + visible=bibliographic, + existing_user=contributor, + save=True ) else: @@ -1584,13 +1619,23 @@ def add_contributor_registered_or_not(self, auth, user_id=None, raise ValidationValueError(f'{contributor.fullname} is already a contributor.') if contributor and contributor.is_registered: - self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic, - send_email=send_email, permissions=permissions, save=True) + self.add_contributor( + contributor=contributor, + auth=auth, + visible=bibliographic, + notification_type=notification_type, + permissions=permissions, + save=True + ) else: contributor = self.add_unregistered_contributor( - fullname=full_name, email=email, auth=auth, - send_email=send_email, permissions=permissions, - visible=bibliographic, save=True + fullname=full_name, + email=email, + auth=auth, + send_email=notification_type, + permissions=permissions, + visible=bibliographic, + save=True ) auth.user.email_last_sent = timezone.now() @@ -2222,12 +2267,14 @@ def suspend_spam_user(self, user): if not user.is_disabled: user.deactivate_account() user.is_registered = False - mails.send_mail( - to_addr=user.username, - mail=mails.SPAM_USER_BANNED, + NotificationType.objects.get( + name=NotificationType.Type.USER_SPAM_BANNED + ).emit( user=user, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, + event_context={ + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + 'can_change_settings': False, + } ) user.save() diff --git a/osf/models/node.py b/osf/models/node.py index 187f5f3123f..456db377eb5 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -35,6 +35,8 @@ from framework.sentry import log_exception from 
osf.exceptions import (InvalidTagError, NodeStateError, TagNotFoundError) +from osf.models.notification import NotificationType + from .contributor import Contributor from .collection_submission import CollectionSubmission @@ -612,6 +614,10 @@ def institutions_url(self): def institutions_relationship_url(self): return self.absolute_api_v2_url + 'relationships/institutions/' + @property + def callbacks_url(self): + return self.absolute_api_v2_url + 'callbacks/' + # For Comment API compatibility @property def target_type(self): @@ -661,6 +667,9 @@ def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs): def api_url_for(self, view_name, _absolute=False, *args, **kwargs): return api_url_for(view_name, pid=self._primary_key, _absolute=_absolute, *args, **kwargs) + def api_v2_url_for(self, path_str, params=None, **kwargs): + return api_url_for(path_str, params=params, **kwargs) + @property def project_or_component(self): # The distinction is drawn based on whether something has a parent node, rather than by category @@ -982,8 +991,8 @@ def contributors_and_group_members(self): return self.get_users_with_perm(READ) @property - def contributor_email_template(self): - return 'default' + def contributor_notification_type(self): + return NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT @property def registrations_all(self): @@ -1385,9 +1394,20 @@ def subscribe_contributors_to_node(self): """ for user in self.contributors.filter(is_registered=True): perm = self.contributor_set.get(user=user).permission - project_signals.contributor_added.send(self, - contributor=user, - auth=None, email_template='default', permissions=perm) + project_signals.contributor_added.send( + self, + contributor=user, + auth=None, + email_template='default', + permissions=perm + ) + from website.project.views.contributor import notify_added_contributor + notify_added_contributor( + self, + contributor=user, + auth=None, + email_template='default', + ) def register_node(self, 
schema, auth, draft_registration, parent=None, child_ids=None, provider=None): """Make a frozen copy of a node. @@ -1700,8 +1720,7 @@ def fork_node(self, auth, title=None, parent=None): forked.save() # Need to call this after save for the notifications to be created with the _primary_key - project_signals.contributor_added.send(forked, contributor=user, auth=auth, email_template='false') - + project_signals.contributor_added.send(forked, contributor=user, auth=auth) return forked def clone_logs(self, node, is_registration=False, page_size=100): @@ -1809,7 +1828,7 @@ def use_as_template(self, auth, changes=None, top_level=True, parent=None): new.save(suppress_log=True) # Need to call this after save for the notifications to be created with the _primary_key - project_signals.contributor_added.send(new, contributor=auth.user, auth=auth, email_template='false') + project_signals.contributor_added.send(new, contributor=auth.user, auth=auth) # Log the creation new.add_log( diff --git a/osf/models/notification.py b/osf/models/notification.py new file mode 100644 index 00000000000..5af88cff2fd --- /dev/null +++ b/osf/models/notification.py @@ -0,0 +1,355 @@ +import logging + +from django.db import models +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from django.template import Template, TemplateSyntaxError +from django.utils import timezone +from .base import BaseModel +from enum import Enum +from website import settings +from api.base import settings as api_settings +from osf import notifications + + +class FrequencyChoices(Enum): + NONE = 'none' + INSTANTLY = 'instantly' + DAILY = 'daily' + WEEKLY = 'weekly' + MONTHLY = 'monthly' + + @classmethod + def choices(cls): + return [(key.value, key.name.capitalize()) for key in cls] + + +class NotificationType(models.Model): + class Type(str, Enum): + # Desk notifications + DESK_REQUEST_EXPORT = 
'desk_request_export' + DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation' + DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email' + DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = 'desk_registration_bulk_upload_product_owner' + DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE = 'desk_user_registration_bulk_upload_unexpected_failure' + DESK_ARCHIVE_JOB_EXCEEDED = 'desk_archive_job_exceeded' + DESK_ARCHIVE_JOB_COPY_ERROR = 'desk_archive_job_copy_error' + DESK_ARCHIVE_JOB_FILE_NOT_FOUND = 'desk_archive_job_file_not_found' + DESK_ARCHIVE_JOB_UNCAUGHT_ERROR = 'desk_archive_job_uncaught_error' + + # User notifications + USER_PENDING_VERIFICATION = 'user_pending_verification' + USER_PENDING_VERIFICATION_REGISTERED = 'user_pending_verification_registered' + USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT = 'user_storage_cap_exceeded_announcement' + USER_SPAM_BANNED = 'user_spam_banned' + USER_REQUEST_DEACTIVATION_COMPLETE = 'user_request_deactivation_complete' + USER_PRIMARY_EMAIL_CHANGED = 'user_primary_email_changed' + USER_INSTITUTION_DEACTIVATION = 'user_institution_deactivation' + USER_FORGOT_PASSWORD = 'user_forgot_password' + USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' + USER_REQUEST_EXPORT = 'user_request_export' + USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' + USER_CONTRIBUTOR_ADDED_DEFAULT = 'user_contributor_added_default' + USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' + USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' + USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL = 'user_registration_bulk_upload_success_partial' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL = 'user_registration_bulk_upload_success_all' + USER_ADD_SSO_EMAIL_OSF4I = 'user_add_sso_email_osf4i' + USER_WELCOME_OSF4I = 'user_welcome_osf4i' + USER_ARCHIVE_JOB_EXCEEDED = 'user_archive_job_exceeded' + 
USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' + USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' + USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' + USER_COMMENT_REPLIES = 'user_comment_replies' + USER_COMMENTS = 'user_comments' + USER_FILE_UPDATED = 'user_file_updated' + USER_COMMENT_MENTIONS = 'user_mentions' + USER_REVIEWS = 'user_reviews' + USER_PASSWORD_RESET = 'user_password_reset' + USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_confirm_email_link' + USER_CONFIRM_MERGE = 'user_confirm_merge' + USER_CONFIRM_EMAIL = 'user_confirm_email' + USER_INITIAL_CONFIRM_EMAIL = 'user_initial_confirm_email' + USER_INVITE_DEFAULT = 'user_invite_default' + USER_PENDING_INVITE = 'user_pending_invite' + USER_FORWARD_INVITE = 'user_forward_invite' + USER_FORWARD_INVITE_REGISTERED = 'user_forward_invite_registered' + USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' + USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' + + # Node notifications + NODE_COMMENT = 'node_comments' + NODE_FILES_UPDATED = 'node_files_updated' + NODE_AFFILIATION_CHANGED = 'node_affiliation_changed' + NODE_REQUEST_ACCESS_SUBMITTED = 'node_access_request_submitted' + NODE_REQUEST_ACCESS_DENIED = 'node_request_access_denied' + NODE_FORK_COMPLETED = 'node_fork_completed' + NODE_FORK_FAILED = 'node_fork_failed' + NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' + NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' + NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' + NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' + NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' + NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' + 
NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' + NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' + NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' + NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' + + # Provider notifications + PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 'provider_reviews_submission_confirmation' + PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' + PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' + PROVIDER_REVIEWS_REJECT_CONFIRMATION = 'provider_reviews_reject_confirmation' + PROVIDER_REVIEWS_ACCEPT_CONFIRMATION = 'provider_reviews_accept_confirmation' + PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION = 'provider_reviews_resubmission_confirmation' + PROVIDER_REVIEWS_COMMENT_EDITED = 'provider_reviews_comment_edited' + PROVIDER_CONTRIBUTOR_ADDED_PREPRINT = 'provider_contributor_added_preprint' + PROVIDER_CONFIRM_EMAIL_MODERATION = 'provider_confirm_email_moderation' + PROVIDER_MODERATOR_ADDED = 'provider_moderator_added' + PROVIDER_CONFIRM_EMAIL_PREPRINTS = 'provider_confirm_email_preprints' + PROVIDER_USER_INVITE_PREPRINT = 'provider_user_invite_preprint' + + # Preprint notifications + PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' + PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 'preprint_request_withdrawal_declined' + PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' + + # Collections Submission notifications + NEW_PENDING_SUBMISSIONS = 'new_pending_submissions' + COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' + COLLECTION_SUBMISSION_REMOVED_MODERATOR = 'collection_submission_removed_moderator' + COLLECTION_SUBMISSION_REMOVED_PRIVATE = 'collection_submission_removed_private' + COLLECTION_SUBMISSION_SUBMITTED = 
'collection_submission_submitted' + COLLECTION_SUBMISSION_ACCEPTED = 'collection_submission_accepted' + COLLECTION_SUBMISSION_REJECTED = 'collection_submission_rejected' + COLLECTION_SUBMISSION_CANCEL = 'collection_submission_cancel' + + # Schema Response notifications + SCHEMA_RESPONSE_REJECTED = 'schema_response_rejected' + SCHEMA_RESPONSE_APPROVED = 'schema_response_approved' + SCHEMA_RESPONSE_SUBMITTED = 'schema_response_submitted' + SCHEMA_RESPONSE_INITIATED = 'schema_response_initiated' + + REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' + + @classmethod + def user_types(cls): + return [member for member in cls if member.name.startswith('USER_')] + + @classmethod + def node_types(cls): + return [member for member in cls if member.name.startswith('NODE_')] + + @classmethod + def preprint_types(cls): + return [member for member in cls if member.name.startswith('PREPRINT_')] + + @classmethod + def provider_types(cls): + return [member for member in cls if member.name.startswith('PROVIDER_')] + + @classmethod + def schema_response_types(cls): + return [member for member in cls if member.name.startswith('SCHEMA_RESPONSE_')] + + @classmethod + def desk_types(cls): + return [member for member in cls if member.name.startswith('DESK_')] + + name: str = models.CharField(max_length=255, unique=True) + notification_freq: str = models.CharField( + max_length=32, + choices=FrequencyChoices.choices(), + default=FrequencyChoices.INSTANTLY.value, + ) + + object_content_type = models.ForeignKey( + ContentType, + on_delete=models.SET_NULL, + null=True, + blank=True, + help_text='Content type for subscribed objects. Null means global event.' + ) + + template: str = models.TextField( + help_text='Template used to render the event_info. Supports Django template syntax.' 
+ ) + + def clean(self): + try: + Template(self.template) + except TemplateSyntaxError as exc: + raise ValidationError({'template': f'Invalid template: {exc}'}) + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. + """ + subscription, created = NotificationSubscription.objects.get_or_create( + notification_type=self, + user=user, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + defaults={'message_frequency': self.notification_freq}, + ) + if subscription.message_frequency == 'instantly': + Notification.objects.create( + subscription=subscription, + event_context=event_context + ).send() + + def add_user_to_subscription(self, user, *args, **kwargs): + """ + """ + provider = kwargs.pop('provider', None) + node = kwargs.pop('node', None) + data = {} + if subscribed_object := provider or node: + data = { + 'object_id': subscribed_object.id, + 'content_type_id': ContentType.objects.get_for_model(subscribed_object).id, + } + + notification, created = NotificationSubscription.objects.get_or_create( + user=user, + notification_type=self, + **data, + ) + return notification + + def remove_user_from_subscription(self, user): + """ + """ + notification, _ = NotificationSubscription.objects.update_or_create( + user=user, + notification_type=self, + defaults={'message_frequency': FrequencyChoices.NONE.value} + ) + + def __str__(self) -> str: + return self.name + + class Meta: + verbose_name = 'Notification Type' + verbose_name_plural = 'Notification Types' + + +class NotificationSubscription(BaseModel): + notification_type: 
NotificationType = models.ForeignKey( + NotificationType, + on_delete=models.CASCADE, + null=False + ) + user = models.ForeignKey('osf.OSFUser', on_delete=models.CASCADE, related_name='subscriptions') + message_frequency: str = models.CharField(max_length=32) + + content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE) + object_id = models.CharField(max_length=255, null=True, blank=True) + subscribed_object = GenericForeignKey('content_type', 'object_id') + + def clean(self): + ct = self.notification_type.object_content_type + + if ct: + if self.content_type != ct: + raise ValidationError('Subscribed object must match type\'s content_type.') + if not self.object_id: + raise ValidationError('Subscribed object ID is required.') + else: + if self.content_type or self.object_id: + raise ValidationError('Global subscriptions must not have an object.') + + if self.message_frequency not in self.notification_type.notification_freq: + raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') + + def __str__(self) -> str: + return f'{self.user} subscribes to {self.notification_type.name} ({self.message_frequency})' + + class Meta: + verbose_name = 'Notification Subscription' + verbose_name_plural = 'Notification Subscriptions' + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. 
+ """ + if self.message_frequency == 'instantly': + Notification.objects.create( + subscription=self, + event_context=event_context + ).send() + else: + Notification.objects.create( + subscription=self, + event_context=event_context + ) + +class Notification(models.Model): + subscription = models.ForeignKey( + NotificationSubscription, + on_delete=models.CASCADE, + related_name='notifications' + ) + event_context: dict = models.JSONField() + sent = models.DateTimeField(null=True, blank=True) + seen = models.DateTimeField(null=True, blank=True) + created = models.DateTimeField(auto_now_add=True) + + def send(self, protocol_type='email', recipient=None): + if not protocol_type == 'email': + raise NotImplementedError(f'Protocol type {protocol_type}. Email notifications are only implemented.') + + recipient_address = getattr(recipient, 'username', None) or self.subscription.user + + if protocol_type == 'email' and settings.DEV_MODE and settings.ENABLE_TEST_EMAIL: + notifications.send_email_over_smtp( + recipient_address, + self.subscription.notification_type, + self.event_context + ) + elif protocol_type == 'email' and settings.DEV_MODE: + if not api_settings.CI_ENV: + logging.info( + f"Attempting to send email in DEV_MODE with ENABLE_TEST_EMAIL false just logs:" + f"\nto={recipient_address}" + f"\ntype={self.subscription.notification_type.name}" + f"\ncontext={self.event_context}" + ) + elif protocol_type == 'email': + notifications.send_email_with_send_grid( + getattr(recipient, 'username', None) or self.subscription.user, + self.subscription.notification_type, + self.event_context + ) + else: + raise NotImplementedError(f'protocol `{protocol_type}` is not supported.') + + self.mark_sent() + + def mark_sent(self) -> None: + self.sent = timezone.now() + self.save(update_fields=['sent']) + + def mark_seen(self) -> None: + self.seen = timezone.now() + self.save(update_fields=['seen']) + + def __str__(self) -> str: + return f'Notification for {self.subscription.user} 
[{self.subscription.notification_type.name}]' + + class Meta: + verbose_name = 'Notification' + verbose_name_plural = 'Notifications' diff --git a/osf/models/notifications.py b/osf/models/notifications.py index 86be3424832..178cede19df 100644 --- a/osf/models/notifications.py +++ b/osf/models/notifications.py @@ -5,11 +5,10 @@ from .base import BaseModel, ObjectIDMixin from .validators import validate_subscription_type from osf.utils.fields import NonNaiveDateTimeField -from website.notifications.constants import NOTIFICATION_TYPES from website.util import api_v2_url -class NotificationSubscription(BaseModel): +class NotificationSubscriptionLegacy(BaseModel): primary_identifier_name = '_id' _id = models.CharField(max_length=100, db_index=True, unique=False) # pxyz_wiki_updated, uabc_comment_replies @@ -29,6 +28,7 @@ class NotificationSubscription(BaseModel): class Meta: # Both PreprintProvider and RegistrationProvider default instances use "osf" as their `_id` unique_together = ('_id', 'provider') + db_table = 'osf_notificationsubscription_legacy' @classmethod def load(cls, q): @@ -59,43 +59,6 @@ def absolute_api_v2_url(self): path = f'/subscriptions/{self._id}/' return api_v2_url(path) - def add_user_to_subscription(self, user, notification_type, save=True): - for nt in NOTIFICATION_TYPES: - if getattr(self, nt).filter(id=user.id).exists(): - if nt != notification_type: - getattr(self, nt).remove(user) - else: - if nt == notification_type: - getattr(self, nt).add(user) - - if notification_type != 'none' and isinstance(self.owner, Node) and self.owner.parent_node: - user_subs = self.owner.parent_node.child_node_subscriptions - if self.owner._id not in user_subs.setdefault(user._id, []): - user_subs[user._id].append(self.owner._id) - self.owner.parent_node.save() - - if save: - # Do not clean legacy objects - self.save(clean=False) - - def remove_user_from_subscription(self, user, save=True): - for notification_type in NOTIFICATION_TYPES: - try: - getattr(self, 
notification_type, []).remove(user) - except ValueError: - pass - - if isinstance(self.owner, Node) and self.owner.parent_node: - try: - self.owner.parent_node.child_node_subscriptions.get(user._id, []).remove(self.owner._id) - self.owner.parent_node.save() - except ValueError: - pass - - if save: - self.save() - - class NotificationDigest(ObjectIDMixin, BaseModel): user = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.CASCADE) provider = models.ForeignKey('AbstractProvider', null=True, blank=True, on_delete=models.CASCADE) diff --git a/osf/models/preprint.py b/osf/models/preprint.py index 162ab8b00a8..7efaeeecbf5 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -1,11 +1,12 @@ import functools +import inspect from urllib.parse import urljoin import logging import re from dirtyfields import DirtyFieldsMixin from django.db import models, IntegrityError -from django.db.models import Q +from django.db.models import Q, Max from django.utils import timezone from django.contrib.contenttypes.fields import GenericRelation from django.core.exceptions import ValidationError @@ -19,6 +20,7 @@ from framework.auth import Auth from framework.exceptions import PermissionsError, UnpublishedPendingPreprintVersionExists from framework.auth import oauth_scopes +from osf.models.notification import NotificationType from .subject import Subject from .tag import Tag @@ -33,14 +35,12 @@ from osf.utils import sanitize from osf.utils.permissions import ADMIN, WRITE from osf.utils.requests import get_request_and_user_id, string_type_request_headers -from website.notifications.emails import get_user_subscriptions -from website.notifications import utils from website.identifiers.clients import CrossRefClient, ECSArXivCrossRefClient from website.project.licenses import set_license from website.util import api_v2_url, api_url_for, web_url_for from website.util.metrics import provider_source_tag from website.citations.utils import datetime_to_csl -from 
website import settings, mails +from website import settings from website.preprints.tasks import update_or_enqueue_on_preprint_updated from .base import BaseModel, Guid, GuidVersionsThrough, GuidMixinQuerySet, VersionedGuidMixin @@ -146,6 +146,35 @@ class EverPublishedPreprintManager(PreprintManager): def get_queryset(self): return super().get_queryset().filter(date_published__isnull=False) + +def require_permission(permissions: list): + """ + Preprint-specific decorator for permission checks. + + This decorator adds an implicit `ignore_permission` argument to the decorated function, + allowing you to bypass the permission check when set to `True`. + + Usage example: + preprint.some_method(..., ignore_permission=True) # Skips permission check + """ + def decorator(func): + @functools.wraps(func) + def wrapper(self, *args, ignore_permission=False, **kwargs): + sig = inspect.signature(func) + bound_args = sig.bind_partial(self, *args, **kwargs) + bound_args.apply_defaults() + + auth = bound_args.arguments.get('auth', None) + + if not ignore_permission and auth is not None: + for permission in permissions: + if not self.has_permission(auth.user, permission): + raise PermissionsError(f'Must have following permissions to change a preprint: {permissions}') + return func(self, *args, ignore_permission=ignore_permission, **kwargs) + return wrapper + return decorator + + class Preprint(DirtyFieldsMixin, VersionedGuidMixin, IdentifierMixin, ReviewableMixin, BaseModel, TitleMixin, DescriptionMixin, Loggable, Taggable, ContributorMixin, GuardianMixin, SpamOverrideMixin, TaxonomizableMixin, AffiliatedInstitutionMixin): @@ -374,12 +403,14 @@ def check_unfinished_or_unpublished_version(self): return None, None @classmethod - def create_version(cls, create_from_guid, auth): + def create_version(cls, create_from_guid, auth, assign_version_number=None, ignore_permission=False, ignore_existing_versions=False): """Create a new version for a given preprint. 
`create_from_guid` can be any existing versions of the preprint but `create_version` always finds the latest version and creates a new version from it. In addition, this creates an "incomplete" new preprint version object using the model class and returns both the new object and the data to be updated. The API, more specifically `PreprintCreateVersionSerializer` must call `.update()` to "completely finish" the new preprint version object creation. + Optionally, you can assign a custom version number, as long as it doesn't conflict with existing versions. + The version must be an integer greater than 0. """ # Use `Guid.load()` instead of `VersionedGuid.load()` to retrieve the base guid obj, which always points to the @@ -389,26 +420,25 @@ def create_version(cls, create_from_guid, auth): if not latest_version: sentry.log_message(f'Preprint not found: [guid={guid_obj._id}, create_from_guid={create_from_guid}]') return None, None - if not latest_version.has_permission(auth.user, ADMIN): + if not ignore_permission and not latest_version.has_permission(auth.user, ADMIN): sentry.log_message(f'ADMIN permission for the latest version is required to create a new version: ' f'[user={auth.user._id}, guid={guid_obj._id}, latest_version={latest_version._id}]') raise PermissionsError - unfinished_version, unpublished_version = latest_version.check_unfinished_or_unpublished_version() - if unpublished_version: - logger.error('Failed to create a new version due to unpublished pending version already exists: ' - f'[version={unpublished_version.version}, ' - f'_id={unpublished_version._id}, ' - f'state={unpublished_version.machine_state}].') - raise UnpublishedPendingPreprintVersionExists - if unfinished_version: - logger.warning(f'Use existing initiated but unfinished version instead of creating a new one: ' - f'[version={unfinished_version.version}, ' - f'_id={unfinished_version._id}, ' - f'state={unfinished_version.machine_state}].') - return unfinished_version, None - - # Note: 
version number bumps from the last version number instead of the latest version number - last_version_number = guid_obj.versions.order_by('-version').first().version + if not ignore_existing_versions: + unfinished_version, unpublished_version = latest_version.check_unfinished_or_unpublished_version() + if unpublished_version: + message = ('Failed to create a new version due to unpublished pending version already exists: ' + f'[version={unpublished_version.version}, ' + f'_id={unpublished_version._id}, ' + f'state={unpublished_version.machine_state}].') + logger.error(message) + raise UnpublishedPendingPreprintVersionExists(message) + if unfinished_version: + logger.warning(f'Use existing initiated but unfinished version instead of creating a new one: ' + f'[version={unfinished_version.version}, ' + f'_id={unfinished_version._id}, ' + f'state={unfinished_version.machine_state}].') + return unfinished_version, None # Prepare the data to clone/update data_to_update = { @@ -438,13 +468,30 @@ def create_version(cls, create_from_guid, auth): description=latest_version.description, ) preprint.save(guid_ready=False) + + # Note: version number bumps from the last version number instead of the latest version number + # if assign_version_number is not specified + if assign_version_number: + if not isinstance(assign_version_number, int) or assign_version_number <= 0: + raise ValueError( + f"Unable to assign: {assign_version_number}. " + 'Version must be integer greater than 0.' 
+ ) + if GuidVersionsThrough.objects.filter(guid=guid_obj, version=assign_version_number).first(): + raise ValueError(f"Version {assign_version_number} for preprint {guid_obj} already exists.") + + version_number = assign_version_number + else: + last_version_number = guid_obj.versions.order_by('-version').first().version + version_number = last_version_number + 1 + # Create a new entry in the `GuidVersionsThrough` table to store version information, which must happen right # after the first `.save()` of the new preprint version object, which enables `preprint._id` to be computed. guid_version = GuidVersionsThrough( referent=preprint, object_id=guid_obj.object_id, content_type=guid_obj.content_type, - version=last_version_number + 1, + version=version_number, guid=guid_obj ) guid_version.save() @@ -463,22 +510,51 @@ def create_version(cls, create_from_guid, auth): sentry.log_exception(e) sentry.log_message(f'Contributor was not added to new preprint version due to error: ' f'[preprint={preprint._id}, user={contributor.user._id}]') + + # Add new version record for unregistered contributors + for contributor in preprint.contributor_set.filter(Q(user__is_registered=False) | Q(user__date_disabled__isnull=False)): + try: + contributor.user.add_unclaimed_record( + claim_origin=preprint, + referrer=auth.user, + email=contributor.user.email, + given_name=contributor.user.fullname, + ) + except ValidationError as e: + sentry.log_exception(e) + sentry.log_message(f'Unregistered contributor was not added to new preprint version due to error: ' + f'[preprint={preprint._id}, user={contributor.user._id}]') + # Add affiliated institutions for institution in latest_version.affiliated_institutions.all(): preprint.add_affiliated_institution(institution, auth.user, ignore_user_affiliation=True) - # Update Guid obj to point to the new version if there is no moderation - if not preprint.provider.reviews_workflow: + # Update Guid obj to point to the new version if there is no moderation 
and new version is bigger + if not preprint.provider.reviews_workflow and version_number > guid_obj.referent.version: guid_obj.referent = preprint guid_obj.object_id = preprint.pk guid_obj.content_type = ContentType.objects.get_for_model(preprint) guid_obj.save() if latest_version.node: - preprint.set_supplemental_node(latest_version.node, auth, save=False, ignore_node_permissions=True) + preprint.set_supplemental_node( + latest_version.node, + auth, + save=False, + ignore_node_permissions=True, + ignore_permission=ignore_permission + ) return preprint, data_to_update + def upgrade_version(self): + """Increase preprint version by one.""" + guid_version = GuidVersionsThrough.objects.get(object_id=self.id) + guid_version.version += 1 + guid_version.save() + + return self + @property def is_deleted(self): return bool(self.deleted) @@ -503,8 +579,8 @@ def osfstorage_region(self): return self.region @property - def contributor_email_template(self): - return 'preprint' + def contributor_notification_type(self): + return NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT @property def file_read_scope(self): @@ -692,9 +768,14 @@ def is_latest_version(self): def get_preprint_versions(self, include_rejected=True): guids = self.versioned_guids.first().guid.versions.all() - preprint_versions = Preprint.objects.filter(id__in=[vg.object_id for vg in guids]).order_by('-id') + preprint_versions = ( + Preprint.objects + .filter(id__in=[vg.object_id for vg in guids]) + .annotate(latest_version=Max('versioned_guids__version')) + .order_by('-latest_version') + ) if include_rejected is False: - preprint_versions = [p for p in preprint_versions if p.machine_state != 'rejected'] + preprint_versions = preprint_versions.exclude(machine_state=DefaultStates.REJECTED.value) return preprint_versions def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs): @@ -753,13 +834,11 @@ def add_subjects_log(self, old_subjects, auth): ) return - def set_primary_file(self, 
preprint_file, auth, save=False): + @require_permission([WRITE]) + def set_primary_file(self, preprint_file, auth, save=False, **kwargs): if not self.root_folder: raise PreprintStateError('Preprint needs a root folder.') - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to change a preprint\'s primary file.') - if preprint_file.target != self or preprint_file.provider != 'osfstorage': raise ValueError('This file is not a valid primary file for this preprint.') @@ -785,10 +864,8 @@ def set_primary_file(self, preprint_file, auth, save=False): self.save() update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['primary_file']) - def set_published(self, published, auth, save=False, ignore_permission=False): - if not ignore_permission and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can publish a preprint.') - + @require_permission([ADMIN]) + def set_published(self, published, auth, save=False, **kwargs): if self.is_published and not published: raise ValueError('Cannot unpublish preprint.') @@ -805,7 +882,7 @@ def set_published(self, published, auth, save=False, ignore_permission=False): raise ValueError('Preprint must have at least one subject to be published.') self.date_published = timezone.now() # For legacy preprints, not logging - self.set_privacy('public', log=False, save=False) + self.set_privacy('public', log=False, save=False, **kwargs) # In case this provider is ever set up to use a reviews workflow, put this preprint in a sensible state self.machine_state = ReviewStates.ACCEPTED.value @@ -827,8 +904,9 @@ def set_published(self, published, auth, save=False, ignore_permission=False): if save: self.save() - def set_preprint_license(self, license_detail, auth, save=False): - license_record, license_changed = set_license(self, license_detail, auth, node_type='preprint') + @require_permission([WRITE]) + def set_preprint_license(self, license_detail, 
auth, save=False, **kwargs): + license_record, license_changed = set_license(self, license_detail, auth, node_type='preprint', **kwargs) if license_changed: self.add_log( @@ -943,34 +1021,29 @@ def _add_creator_as_contributor(self): def _send_preprint_confirmation(self, auth): # Send creator confirmation email recipient = self.creator - event_type = utils.find_subscription_type('global_reviews') - user_subscriptions = get_user_subscriptions(recipient, event_type) + from osf.models import NotificationSubscription, NotificationType if self.provider._id == 'osf': logo = settings.OSF_PREPRINTS_LOGO else: logo = self.provider._id - context = { - 'domain': settings.DOMAIN, - 'reviewable': self, - 'workflow': self.provider.reviews_workflow, - 'provider_url': '{domain}preprints/{provider_id}'.format( - domain=self.provider.domain or settings.DOMAIN, - provider_id=self.provider._id if not self.provider.domain else '').strip('/'), - 'provider_contact_email': self.provider.email_contact or settings.OSF_CONTACT_EMAIL, - 'provider_support_email': self.provider.email_support or settings.OSF_SUPPORT_EMAIL, - 'no_future_emails': user_subscriptions['none'], - 'is_creator': True, - 'provider_name': 'OSF Preprints' if self.provider.name == 'Open Science Framework' else self.provider.name, - 'logo': logo, - 'document_type': self.provider.preprint_word - } - - mails.send_mail( - recipient.username, - mails.REVIEWS_SUBMISSION_CONFIRMATION, + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION, + ).emit( user=recipient, - **context + event_context={ + 'domain': settings.DOMAIN, + 'workflow': self.provider.reviews_workflow, + 'provider_url': f"{self.provider.domain or settings.DOMAIN}preprints/{(self.provider._id if not self.provider.domain else '').strip('/')}", + 'provider_contact_email': self.provider.email_contact or settings.OSF_CONTACT_EMAIL, + 'provider_support_email': self.provider.email_support or settings.OSF_SUPPORT_EMAIL, + 
'no_future_emails': NotificationSubscription.objects.filter(notification_type__name='reviews', user=recipient, message_frequency='none').exists(), + 'is_creator': True, + 'provider_name': 'OSF Preprints' if self.provider.name == 'Open Science Framework' else self.provider.name, + 'logo': logo, + 'document_type': self.provider.preprint_word + }, + subscribed_object=self.provider, ) # FOLLOWING BEHAVIOR NOT SPECIFIC TO PREPRINTS @@ -1027,10 +1100,8 @@ def remove_tag(self, tag, auth, save=True): update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['tags']) return True - def set_supplemental_node(self, node, auth, save=False, ignore_node_permissions=False): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('You must have write permissions to set a supplemental node.') - + @require_permission([WRITE]) + def set_supplemental_node(self, node, auth, save=False, ignore_node_permissions=False, **kwargs): if not node.has_permission(auth.user, WRITE) and not ignore_node_permissions: raise PermissionsError('You must have write permissions on the supplemental node to attach.') @@ -1052,10 +1123,8 @@ def set_supplemental_node(self, node, auth, save=False, ignore_node_permissions= if save: self.save() - def unset_supplemental_node(self, auth, save=False): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('You must have write permissions to set a supplemental node.') - + @require_permission([WRITE]) + def unset_supplemental_node(self, auth, save=False, **kwargs): current_node_id = self.node._id if self.node else None self.node = None @@ -1072,27 +1141,23 @@ def unset_supplemental_node(self, auth, save=False): if save: self.save() - def set_title(self, title, auth, save=False): + @require_permission([WRITE]) + def set_title(self, title, auth, save=False, **kwargs): """Set the title of this Preprint and log it. :param str title: The new title. :param auth: All the auth information including user, API key. 
""" - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to edit a preprint\'s title.') - return super().set_title(title, auth, save) - def set_description(self, description, auth, save=False): + @require_permission([WRITE]) + def set_description(self, description, auth, save=False, **kwargs): """Set the description and log the event. :param str description: The new description :param auth: All the auth informtion including user, API key. :param bool save: Save self after updating. """ - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to edit a preprint\'s title.') - return super().set_description(description, auth, save) def get_spam_fields(self, saved_fields=None): @@ -1100,7 +1165,8 @@ def get_spam_fields(self, saved_fields=None): return self.SPAM_CHECK_FIELDS return self.SPAM_CHECK_FIELDS.intersection(saved_fields) - def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons=False, force=False, should_hide=False): + @require_permission([WRITE]) + def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons=False, force=False, should_hide=False, **kwargs): """Set the permissions for this preprint - mainly for spam purposes. :param permissions: A string, either 'public' or 'private' @@ -1109,8 +1175,6 @@ def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons= :param bool meeting_creation: Whether this was created due to a meetings email. :param bool check_addons: Check and collect messages for addons? 
""" - if auth and not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to change privacy settings.') if permissions == 'public' and not self.is_public: if (self.is_spam or (settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE and self.is_spammy)) and not force: raise PreprintStateError( @@ -1176,7 +1240,8 @@ def set_contributor_order(self, contributor_ids): @classmethod def bulk_update_search(cls, preprints, index=None): for _preprint in preprints: - update_share(_preprint) + if _preprint.is_latest_version: + update_share(_preprint) from website import search try: serialize = functools.partial(search.search.update_preprint, index=index, bulk=True, async_update=False) @@ -1254,7 +1319,8 @@ def _add_related_source_tags(self, contributor): system_tag_to_add, created = Tag.all_tags.get_or_create(name=provider_source_tag(self.provider._id, 'preprint'), system=True) contributor.add_system_tag(system_tag_to_add) - def update_has_coi(self, auth: Auth, has_coi: bool, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_has_coi(self, auth: Auth, has_coi: bool, log: bool = True, save: bool = True, **kwargs): """ This method sets the field `has_coi` to indicate if there's a conflict interest statement for this preprint and logs that change. @@ -1286,7 +1352,8 @@ def update_has_coi(self, auth: Auth, has_coi: bool, log: bool = True, save: bool if save: self.save() - def update_conflict_of_interest_statement(self, auth: Auth, coi_statement: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_conflict_of_interest_statement(self, auth: Auth, coi_statement: str, log: bool = True, save: bool = True, **kwargs): """ This method sets the `conflict_of_interest_statement` field for this preprint and logs that change. 
@@ -1315,7 +1382,8 @@ def update_conflict_of_interest_statement(self, auth: Auth, coi_statement: str, if save: self.save() - def update_has_data_links(self, auth: Auth, has_data_links: bool, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_has_data_links(self, auth: Auth, has_data_links: bool, log: bool = True, save: bool = True, **kwargs): """ This method sets the `has_data_links` field that respresent the availability of links to supplementary data for this preprint and logs that change. @@ -1346,11 +1414,12 @@ def update_has_data_links(self, auth: Auth, has_data_links: bool, log: bool = Tr auth=auth ) if not has_data_links: - self.update_data_links(auth, data_links=[], log=False) + self.update_data_links(auth, data_links=[], log=False, **kwargs) if save: self.save() - def update_data_links(self, auth: Auth, data_links: list, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_data_links(self, auth: Auth, data_links: list, log: bool = True, save: bool = True, **kwargs): """ This method sets the field `data_links` which is a validated list of links to supplementary data for a preprint and logs that change. @@ -1382,7 +1451,8 @@ def update_data_links(self, auth: Auth, data_links: list, log: bool = True, save if save: self.save() - def update_why_no_data(self, auth: Auth, why_no_data: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_why_no_data(self, auth: Auth, why_no_data: str, log: bool = True, save: bool = True, **kwargs): """ This method sets the field `why_no_data` a string that represents a user provided explanation for the unavailability of supplementary data for their preprint. 
@@ -1414,7 +1484,8 @@ def update_why_no_data(self, auth: Auth, why_no_data: str, log: bool = True, sav if save: self.save() - def update_has_prereg_links(self, auth: Auth, has_prereg_links: bool, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_has_prereg_links(self, auth: Auth, has_prereg_links: bool, log: bool = True, save: bool = True, **kwargs): """ This method updates the `has_prereg_links` field, that indicates availability of links to prereg data and logs changes to it. @@ -1446,12 +1517,13 @@ def update_has_prereg_links(self, auth: Auth, has_prereg_links: bool, log: bool auth=auth ) if not has_prereg_links: - self.update_prereg_links(auth, prereg_links=[], log=False) - self.update_prereg_link_info(auth, prereg_link_info=None, log=False) + self.update_prereg_links(auth, prereg_links=[], log=False, **kwargs) + self.update_prereg_link_info(auth, prereg_link_info=None, log=False, **kwargs) if save: self.save() - def update_why_no_prereg(self, auth: Auth, why_no_prereg: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_why_no_prereg(self, auth: Auth, why_no_prereg: str, log: bool = True, save: bool = True, **kwargs): """ This method updates the field `why_no_prereg` that contains a user provided explanation of prereg data unavailability and logs changes to it. @@ -1483,7 +1555,8 @@ def update_why_no_prereg(self, auth: Auth, why_no_prereg: str, log: bool = True, if save: self.save() - def update_prereg_links(self, auth: Auth, prereg_links: list, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_prereg_links(self, auth: Auth, prereg_links: list, log: bool = True, save: bool = True, **kwargs): """ This method updates the field `prereg_links` that contains a list of validated URLS linking to prereg data and logs changes to it. 
@@ -1515,7 +1588,8 @@ def update_prereg_links(self, auth: Auth, prereg_links: list, log: bool = True, if save: self.save() - def update_prereg_link_info(self, auth: Auth, prereg_link_info: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_prereg_link_info(self, auth: Auth, prereg_link_info: str, log: bool = True, save: bool = True, **kwargs): """ This method updates the field `prereg_link_info` that contains a one of a finite number of choice strings in contained in the list in the static member `PREREG_LINK_INFO_CHOICES` that describe the nature of the preprint's diff --git a/osf/models/provider.py b/osf/models/provider.py index 2ee920a77e5..761a9e5de3f 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -19,7 +19,6 @@ from .brand import Brand from .citation import CitationStyle from .licenses import NodeLicense -from .notifications import NotificationSubscription from .storage import ProviderAssetFile from .subject import Subject from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField @@ -458,19 +457,6 @@ def create_provider_auth_groups(sender, instance, created, **kwargs): instance.update_group_permissions() -@receiver(post_save, sender=CollectionProvider) -@receiver(post_save, sender=PreprintProvider) -@receiver(post_save, sender=RegistrationProvider) -def create_provider_notification_subscriptions(sender, instance, created, **kwargs): - if created: - for subscription in instance.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( - _id=f'{instance._id}_{subscription}', - event_name=subscription, - provider=instance - ) - - @receiver(post_save, sender=CollectionProvider) def create_primary_collection_for_provider(sender, instance, created, **kwargs): if created: diff --git a/osf/models/queued_mail.py b/osf/models/queued_mail.py deleted file mode 100644 index 844465d5193..00000000000 --- a/osf/models/queued_mail.py +++ /dev/null @@ -1,162 +0,0 @@ -import waffle - -from 
django.db import models -from django.utils import timezone - -from osf.utils.fields import NonNaiveDateTimeField -from website.mails import Mail, send_mail -from website.mails import presends -from website import settings as osf_settings - -from osf import features -from .base import BaseModel, ObjectIDMixin -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField - - -class QueuedMail(ObjectIDMixin, BaseModel): - user = models.ForeignKey('OSFUser', db_index=True, null=True, on_delete=models.CASCADE) - to_addr = models.CharField(max_length=255) - send_at = NonNaiveDateTimeField(db_index=True, null=False) - - # string denoting the template, presend to be used. Has to be an index of queue_mail types - email_type = models.CharField(max_length=255, db_index=True, null=False) - - # dictionary with variables used to populate mako template and store information used in presends - # Example: - # self.data = { - # 'nid' : 'ShIpTo', - # 'fullname': 'Florence Welch', - #} - data = DateTimeAwareJSONField(default=dict, blank=True) - sent_at = NonNaiveDateTimeField(db_index=True, null=True, blank=True) - - def __repr__(self): - if self.sent_at is not None: - return '<QueuedMail {} of type {} sent to {} at {}>'.format( - self._id, self.email_type, self.to_addr, self.sent_at - ) - return '<QueuedMail {} of type {} to be sent to {} at {}>'.format( - self._id, self.email_type, self.to_addr, self.send_at - ) - - def send_mail(self): - """ - Grabs the data from this email, checks for user subscription to help mails, - - constructs the mail object and checks presend. Then attempts to send the email - through send_mail() - :return: boolean based on whether email was sent. 
- """ - mail_struct = queue_mail_types[self.email_type] - presend = mail_struct['presend'](self) - mail = Mail( - mail_struct['template'], - subject=mail_struct['subject'], - categories=mail_struct.get('categories', None) - ) - self.data['osf_url'] = osf_settings.DOMAIN - if presend and self.user.is_active and self.user.osf_mailing_lists.get(osf_settings.OSF_HELP_LIST): - send_mail(self.to_addr or self.user.username, mail, **(self.data or {})) - self.sent_at = timezone.now() - self.save() - return True - else: - self.__class__.delete(self) - return False - - def find_sent_of_same_type_and_user(self): - """ - Queries up for all emails of the same type as self, sent to the same user as self. - Does not look for queue-up emails. - :return: a list of those emails - """ - return self.__class__.objects.filter(email_type=self.email_type, user=self.user).exclude(sent_at=None) - - -def queue_mail(to_addr, mail, send_at, user, **context): - """ - Queue an email to be sent using send_mail after a specified amount - of time and if the presend returns True. The presend is attached to - the template under mail. - - :param to_addr: the address email is to be sent to - :param mail: the type of mail. Struct following template: - { 'presend': function(), - 'template': mako template name, - 'subject': mail subject } - :param send_at: datetime object of when to send mail - :param user: user object attached to mail - :param context: IMPORTANT kwargs to be attached to template. - Sending mail will fail if needed for template kwargs are - not parameters. - :return: the QueuedMail object created - """ - if waffle.switch_is_active(features.DISABLE_ENGAGEMENT_EMAILS) and mail.get('engagement', False): - return False - new_mail = QueuedMail( - user=user, - to_addr=to_addr, - send_at=send_at, - email_type=mail['template'], - data=context - ) - new_mail.save() - return new_mail - - -# Predefined email templates. 
Structure: -#EMAIL_TYPE = { -# 'template': the mako template used for email_type, -# 'subject': subject used for the actual email, -# 'categories': categories to attach to the email using Sendgrid's SMTPAPI. -# 'engagement': Whether this is an engagement email that can be disabled with the disable_engagement_emails waffle flag -# 'presend': predicate function that determines whether an email should be sent. May also -# modify mail.data. -#} - -NO_ADDON = { - 'template': 'no_addon', - 'subject': 'Link an add-on to your OSF project', - 'presend': presends.no_addon, - 'categories': ['engagement', 'engagement-no-addon'], - 'engagement': True -} - -NO_LOGIN = { - 'template': 'no_login', - 'subject': 'What you\'re missing on the OSF', - 'presend': presends.no_login, - 'categories': ['engagement', 'engagement-no-login'], - 'engagement': True -} - -NEW_PUBLIC_PROJECT = { - 'template': 'new_public_project', - 'subject': 'Now, public. Next, impact.', - 'presend': presends.new_public_project, - 'categories': ['engagement', 'engagement-new-public-project'], - 'engagement': True -} - - -WELCOME_OSF4M = { - 'template': 'welcome_osf4m', - 'subject': 'The benefits of sharing your presentation', - 'presend': presends.welcome_osf4m, - 'categories': ['engagement', 'engagement-welcome-osf4m'], - 'engagement': True -} - -NO_ADDON_TYPE = 'no_addon' -NO_LOGIN_TYPE = 'no_login' -NEW_PUBLIC_PROJECT_TYPE = 'new_public_project' -WELCOME_OSF4M_TYPE = 'welcome_osf4m' - - -# Used to keep relationship from stored string 'email_type' to the predefined queued_email objects. 
-queue_mail_types = { - NO_ADDON_TYPE: NO_ADDON, - NO_LOGIN_TYPE: NO_LOGIN, - NEW_PUBLIC_PROJECT_TYPE: NEW_PUBLIC_PROJECT, - WELCOME_OSF4M_TYPE: WELCOME_OSF4M -} diff --git a/osf/models/registrations.py b/osf/models/registrations.py index f7b017d9ddf..b874cc8cd12 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -46,9 +46,9 @@ from .nodelog import NodeLog from .provider import RegistrationProvider from .tag import Tag +from .notification import NotificationType from .validators import validate_title from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField -from osf.utils import notifications as notify from osf.utils.workflows import ( RegistrationModerationStates, RegistrationModerationTriggers, @@ -325,6 +325,11 @@ def archiving(self): job = self.archive_job return job and not job.done and not job.archive_tree_finished() + @property + def archived(self): + job = self.archive_job + return job and job.done and job.archive_tree_finished() + @property def is_moderated(self): if not self.provider: @@ -754,23 +759,19 @@ def _write_registration_action(self, from_state, to_state, initiated_by, comment action.save() RegistriesModerationMetrics.record_transitions(action) - moderation_notifications = { - RegistrationModerationTriggers.SUBMIT: notify.notify_submit, - RegistrationModerationTriggers.ACCEPT_SUBMISSION: notify.notify_accept_reject, - RegistrationModerationTriggers.REJECT_SUBMISSION: notify.notify_accept_reject, - RegistrationModerationTriggers.REQUEST_WITHDRAWAL: notify.notify_moderator_registration_requests_withdrawal, - RegistrationModerationTriggers.REJECT_WITHDRAWAL: notify.notify_reject_withdraw_request, - RegistrationModerationTriggers.ACCEPT_WITHDRAWAL: notify.notify_withdraw_registration, - RegistrationModerationTriggers.FORCE_WITHDRAW: notify.notify_withdraw_registration, - } + notification_type = NotificationType.objects.get(name=trigger.db_name) - notification = moderation_notifications.get(trigger) - if 
notification: - notification( - resource=self, - user=initiated_by, - action=action, - states=RegistrationModerationStates + for user, _ in self.get_admin_contributors_recursive(unique_users=True): + notification_type.emit( + user=user, + subscribed_object=self, + event_context={ + 'registration': self._id, + 'from_state': from_state.db_name, + 'to_state': to_state.db_name, + 'comment': comment, + 'action_id': action.id, + } ) def add_tag(self, tag, auth=None, save=True, log=True, system=False): @@ -1177,9 +1178,8 @@ def visible_contributors(self): ).order_by(self.order_by_contributor_field) @property - def contributor_email_template(self): - # Override for ContributorMixin - return 'draft_registration' + def contributor_notification_type(self): + return NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION @property def institutions_url(self): @@ -1302,6 +1302,13 @@ def create_from_node(cls, user, schema, node=None, data=None, provider=None): email_template='draft_registration', permissions=initiator_permissions ) + from website.project.views.contributor import notify_added_contributor + notify_added_contributor( + draft, + contributor=user, + auth=None, + email_template='draft_registration', + ) return draft diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index b2a6431383c..dd189bca2ef 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -8,7 +8,6 @@ from framework.auth import Auth from framework.exceptions import PermissionsError from website import settings as osf_settings -from website import mails from osf.exceptions import ( InvalidSanctionRejectionToken, InvalidSanctionApprovalToken, @@ -20,6 +19,7 @@ from osf.utils import tokens from osf.utils.machines import ApprovalsMachine from osf.utils.workflows import ApprovalStates, SanctionTypes +from osf.models.notification import NotificationType VIEW_PROJECT_URL_TEMPLATE = osf_settings.DOMAIN + '{node_id}/' @@ -397,19 +397,30 @@ def _rejection_url(self, user_id): def 
_rejection_url_context(self, user_id): return None - def _send_approval_request_email(self, user, template, context): - mails.send_mail(user.username, template, user=user, can_change_preferences=False, **context) + def _send_approval_request_email(self, user, notification_type, context): + NotificationType.objects.get( + name=notification_type + ).emit( + user=user, + event_context=context + ) def _email_template_context(self, user, node, is_authorizer=False): return {} def _notify_authorizer(self, authorizer, node): - context = self._email_template_context(authorizer, - node, - is_authorizer=True) + context = self._email_template_context( + authorizer, + node, + is_authorizer=True + ) if self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE: self._send_approval_request_email( - authorizer, self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE, context) + authorizer, + self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE, + context + ) else: raise NotImplementedError() @@ -468,8 +479,8 @@ class Embargo(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Embargo' SHORT_NAME = 'embargo' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -531,18 +542,21 @@ def _rejection_url_context(self, user_id): 'token': rejection_token, } - def _email_template_context(self, - user, - node, - is_authorizer=False, - urls=None): + def _email_template_context(self, user, node, is_authorizer=False, urls=None): context = super()._email_template_context( user, node, - is_authorizer=is_authorizer) + is_authorizer=is_authorizer + ) urls = urls or self.stashed_urls.get(user._id, {}) registration_link = 
urls.get('view', self._view_url(user._id, node)) approval_time_span = osf_settings.EMBARGO_PENDING_TIME.days * 24 + + if self.end_date: + end_date = self.end_date.strftime('%B %d, %Y') + else: + end_date = None + if is_authorizer: approval_link = urls.get('approve', '') disapproval_link = urls.get('reject', '') @@ -556,19 +570,19 @@ def _email_template_context(self, 'project_name': registration.title, 'disapproval_link': disapproval_link, 'registration_link': registration_link, - 'embargo_end_date': self.end_date, + 'embargo_end_date': end_date, 'approval_time_span': approval_time_span, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, }) else: context.update({ 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, - 'embargo_end_date': self.end_date, + 'embargo_end_date': end_date, 'approval_time_span': approval_time_span, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, }) return context @@ -648,8 +662,8 @@ class Retraction(EmailApprovableSanction): DISPLAY_NAME = 'Retraction' SHORT_NAME = 'retraction' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_RETRACTION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_RETRACTION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -710,7 +724,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): return { 'is_initiator': self.initiated_by == user, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, 'initiated_by': self.initiated_by.fullname, 'project_name': 
self.registrations.filter().values_list('title', flat=True).get(), 'registration_link': registration_link, @@ -723,7 +737,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, 'approval_time_span': approval_time_span, } @@ -768,8 +782,8 @@ class RegistrationApproval(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Approval' SHORT_NAME = 'registration_approval' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_REGISTRATION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -836,7 +850,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'is_initiator': self.initiated_by == user, 'initiated_by': self.initiated_by.fullname, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, 'registration_link': registration_link, 'approval_link': approval_link, 'disapproval_link': disapproval_link, @@ -848,7 +862,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, 'approval_time_span': approval_time_span, }) return context @@ -932,8 +946,8 @@ class EmbargoTerminationApproval(EmailApprovableSanction): DISPLAY_NAME = 'Embargo Termination Request' 
SHORT_NAME = 'embargo_termination_approval' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_TERMINATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_TERMINATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -986,6 +1000,12 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): urls = urls or self.stashed_urls.get(user._id, {}) registration_link = urls.get('view', self._view_url(user._id, node)) approval_time_span = osf_settings.EMBARGO_TERMINATION_PENDING_TIME.days * 24 + + if self.end_date: + end_date = self.end_date.strftime('%B %d, %Y') + else: + end_date = None + if is_authorizer: approval_link = urls.get('approve', '') disapproval_link = urls.get('reject', '') @@ -995,13 +1015,13 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): context.update({ 'is_initiator': self.initiated_by == user, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration().id, 'initiated_by': self.initiated_by.fullname, 'approval_link': approval_link, 'project_name': registration.title, 'disapproval_link': disapproval_link, 'registration_link': registration_link, - 'embargo_end_date': self.end_date, + 'embargo_end_date': end_date, 'approval_time_span': approval_time_span, }) else: @@ -1009,9 +1029,9 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'project_name': self.target_registration.title, 'registration_link': registration_link, - 'embargo_end_date': self.end_date, + 'embargo_end_date': end_date, 'is_moderated': self.is_moderated, - 'reviewable': 
self._get_registration(), + 'reviewable': self._get_registration().id, 'approval_time_span': approval_time_span, }) return context diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py index 4fa5289f2d4..13bc41544ef 100644 --- a/osf/models/schema_response.py +++ b/osf/models/schema_response.py @@ -9,26 +9,17 @@ from framework.exceptions import PermissionsError from osf.exceptions import PreviousSchemaResponseError, SchemaResponseStateError, SchemaResponseUpdateError +from osf.models import NotificationType from .base import BaseModel, ObjectIDMixin from .metaschema import RegistrationSchemaBlock from .schema_response_block import SchemaResponseBlock -from osf.utils import notifications from osf.utils.fields import NonNaiveDateTimeField from osf.utils.machines import ApprovalsMachine from osf.utils.workflows import ApprovalStates, SchemaResponseTriggers -from website.mails import mails from website.reviews.signals import reviews_email_submit_moderators_notifications from website.settings import DOMAIN - -EMAIL_TEMPLATES_PER_EVENT = { - 'create': mails.SCHEMA_RESPONSE_INITIATED, - 'submit': mails.SCHEMA_RESPONSE_SUBMITTED, - 'accept': mails.SCHEMA_RESPONSE_APPROVED, - 'reject': mails.SCHEMA_RESPONSE_REJECTED, -} - class SchemaResponse(ObjectIDMixin, BaseModel): '''Collects responses for a schema associated with a parent object. 
@@ -471,25 +462,30 @@ def _save_transition(self, event_data): ) def _notify_users(self, event, event_initiator): - '''Notify users of relevant state transitions.''' - # Notifications on the original response will be handled by the registration workflow + '''Notify users of relevant state transitions using NotificationType.emit.''' if not self.previous_response: return - # Generate the "reviews" email context and notify moderators if self.state is ApprovalStates.PENDING_MODERATION: - email_context = notifications.get_email_template_context(resource=self.parent) - email_context['revision_id'] = self._id - email_context['referrer'] = self.initiator + email_context = { + 'revision_id': self._id, + 'referrer': self.initiator, + } reviews_email_submit_moderators_notifications.send( timestamp=timezone.now(), context=email_context ) - template = EMAIL_TEMPLATES_PER_EVENT.get(event) - if not template: + notification_type_map = { + 'create': NotificationType.Type.SCHEMA_RESPONSE_INITIATED, + 'submit': NotificationType.Type.SCHEMA_RESPONSE_SUBMITTED, + 'accept': NotificationType.Type.SCHEMA_RESPONSE_APPROVED, + 'reject': NotificationType.Type.SCHEMA_RESPONSE_REJECTED, + } + notification_type = notification_type_map.get(event) + if not notification_type: return - email_context = { + context = { 'resource_type': self.parent.__class__.__name__.lower(), 'title': self.parent.title, 'parent_url': self.parent.absolute_url, @@ -500,11 +496,17 @@ def _notify_users(self, event, event_initiator): } for contributor, _ in self.parent.get_active_contributors_recursive(unique_users=True): - email_context['user'] = contributor - email_context['can_write'] = self.parent.has_permission(contributor, 'write') - email_context['is_approver'] = contributor in self.pending_approvers.all(), - email_context['is_initiator'] = contributor == event_initiator - mails.send_mail(to_addr=contributor.username, mail=template, **email_context) + NotificationType.objects.get( + name=notification_type.value, + 
).emit( + user=contributor, + event_context={ + **context, + 'can_write': self.parent.has_permission(contributor, 'write'), + 'is_approver': contributor in self.pending_approvers.all(), + 'is_initiator': contributor == event_initiator, + } + ) def _is_updated_response(response_block, new_response): diff --git a/osf/models/user.py b/osf/models/user.py index 42bf8d12929..cb1a00c9a95 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -59,11 +59,12 @@ from osf.utils.requests import check_select_for_update from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, MANAGER, MEMBER, MANAGE, ADMIN from website import settings as website_settings -from website import filters, mails +from website import filters from website.project import new_bookmark_collection from website.util.metrics import OsfSourceTags, unregistered_created_source_tag from importlib import import_module from osf.utils.requests import get_headers_from_request +from osf.models.notification import NotificationType SessionStore = import_module(settings.SESSION_ENGINE).SessionStore @@ -71,6 +72,7 @@ MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS = 1000 + def get_default_mailing_lists(): return {'Open Science Framework Help': True} @@ -228,15 +230,6 @@ class OSFUser(DirtyFieldsMixin, GuidMixin, BaseModel, AbstractBaseUser, Permissi # ... # } - # Time of last sent notification email to newly added contributors - # Format : { - # : { - # 'last_sent': time.time() - # } - # ... 
- # } - contributor_added_email_records = DateTimeAwareJSONField(default=dict, blank=True) - # Tracks last email sent where user was added to an OSF Group member_added_email_records = DateTimeAwareJSONField(default=dict, blank=True) # Tracks last email sent where an OSF Group was connected to a node @@ -1107,12 +1100,15 @@ def set_password(self, raw_password, notify=True): raise ChangePasswordError(['Password cannot be the same as your email address']) super().set_password(raw_password) if had_existing_password and notify: - mails.send_mail( - to_addr=self.username, - mail=mails.PASSWORD_RESET, + NotificationType.objects.get( + name=NotificationType.Type.USER_PASSWORD_RESET + ).emit( user=self, - can_change_preferences=False, - osf_contact_email=website_settings.OSF_CONTACT_EMAIL + event_context={ + 'user': self.id, + 'can_change_preferences': False, + 'osf_contact_email': website_settings.OSF_CONTACT_EMAIL + } ) remove_sessions_for_user(self) @@ -1406,6 +1402,8 @@ def register(self, username, password=None, accepted_terms_of_service=None): self.date_confirmed = timezone.now() if accepted_terms_of_service: self.accepted_terms_of_service = timezone.now() + + self.__subscribe_user_to_default_user_notifications() self.update_search() self.update_search_nodes() @@ -1414,6 +1412,27 @@ def register(self, username, password=None, accepted_terms_of_service=None): return self + def __subscribe_user_to_default_user_notifications(self): + NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationType = apps.get_model('osf.NotificationType') + from django.contrib.contenttypes.models import ContentType + + for notification_type in NotificationType.Type.user_types(): + print(notification_type.value) + content_type = ContentType.objects.get_for_model(self.__class__) + subscription, created = NotificationSubscription.objects.get_or_create( + notification_type=NotificationType.objects.get(name=notification_type.value), + user=self, + 
content_type=content_type, + object_id=self.id, + defaults={ + 'content_type': content_type, + 'message_frequency': 'instantly', + 'object_id': self.id + } + ) + subscription.save() + def confirm_email(self, token, merge=False): """Confirm the email address associated with the token""" email = self.get_unconfirmed_email_for_token(token) @@ -1664,35 +1683,37 @@ def n_projects_in_common(self, other_user): """Returns number of "shared projects" (projects that both users are contributors or group members for)""" return self._projects_in_common_query(other_user).count() - def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None): + def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None, skip_referrer_permissions=False): """Add a new project entry in the unclaimed records dictionary. :param object claim_origin: Object this unclaimed user was added to. currently `Node` or `Provider` or `Preprint` :param User referrer: User who referred this user. :param str given_name: The full name that the referrer gave for this user. :param str email: The given email address. + :param bool skip_referrer_permissions: The flag to check permissions for referrer. 
:returns: The added record """ from .provider import AbstractProvider from .osf_group import OSFGroup - if isinstance(claim_origin, AbstractProvider): - if not bool(get_perms(referrer, claim_origin)): - raise PermissionsError( - f'Referrer does not have permission to add a moderator to provider {claim_origin._id}' - ) - - elif isinstance(claim_origin, OSFGroup): - if not claim_origin.has_permission(referrer, MANAGE): - raise PermissionsError( - f'Referrer does not have permission to add a member to {claim_origin._id}' - ) - else: - if not claim_origin.has_permission(referrer, ADMIN): - raise PermissionsError( - f'Referrer does not have permission to add a contributor to {claim_origin._id}' - ) + if not skip_referrer_permissions: + if isinstance(claim_origin, AbstractProvider): + if not bool(get_perms(referrer, claim_origin)): + raise PermissionsError( + f'Referrer does not have permission to add a moderator to provider {claim_origin._id}' + ) + + elif isinstance(claim_origin, OSFGroup): + if not claim_origin.has_permission(referrer, MANAGE): + raise PermissionsError( + f'Referrer does not have permission to add a member to {claim_origin._id}' + ) + else: + if not claim_origin.has_permission(referrer, ADMIN): + raise PermissionsError( + f'Referrer does not have permission to add a contributor to {claim_origin._id}' + ) pid = str(claim_origin._id) referrer_id = str(referrer._id) @@ -1986,7 +2007,7 @@ def _validate_admin_status_for_gdpr_delete(self, resource): is_active=True ).exclude(id=self.id).exists() - if not alternate_admins: + if not resource.deleted and not alternate_admins: raise UserStateError( f'You cannot delete {resource.__class__.__name__} {resource._id} because it would be ' f'a {resource.__class__.__name__} with contributors, but with no admin.' 
diff --git a/osf/models/validators.py b/osf/models/validators.py index 87f00f826a6..310c6317319 100644 --- a/osf/models/validators.py +++ b/osf/models/validators.py @@ -8,8 +8,6 @@ from django.utils.deconstruct import deconstructible from rest_framework import exceptions -from website.notifications.constants import NOTIFICATION_TYPES - from osf.utils.registrations import FILE_VIEW_URL_REGEX from osf.utils.sanitize import strip_html from osf.exceptions import ValidationError, ValidationValueError, reraise_django_validation_errors, BlockedEmailError @@ -54,8 +52,9 @@ def string_required(value): def validate_subscription_type(value): - if value not in NOTIFICATION_TYPES: - raise ValidationValueError + from osf.models.notification import NotificationType + if value not in NotificationType.Type: + raise ValidationValueError(f'{value} is not a valid subscription type.') def validate_title(value, allow_blank=False): diff --git a/osf/notifications.py b/osf/notifications.py new file mode 100644 index 00000000000..d8cc1d6de5a --- /dev/null +++ b/osf/notifications.py @@ -0,0 +1,68 @@ +import logging +import smtplib +from email.mime.text import MIMEText +from sendgrid import SendGridAPIClient +from sendgrid.helpers.mail import Mail +from website import settings + +def send_email_over_smtp(to_addr, notification_type, context): + """Send an email notification using SMTP. This is typically not used in productions as other 3rd party mail services + are preferred. This is to be used for tests and on staging environments and special situations. + + Args: + to_addr (str): The recipient's email address. + notification_type (str): The subject of the notification. + context (dict): The email content context. 
+ """ + if not settings.MAIL_SERVER: + raise NotImplementedError('MAIL_SERVER is not set') + if not settings.MAIL_USERNAME and settings.MAIL_PASSWORD: + raise NotImplementedError('MAIL_USERNAME and MAIL_PASSWORD are required for STMP') + + msg = MIMEText( + notification_type.template.format(context), + 'html', + _charset='utf-8' + ) + msg['Subject'] = notification_type.email_subject_line_template.format(context=context) + + with smtplib.SMTP(settings.MAIL_SERVER) as server: + server.ehlo() + server.starttls() + server.ehlo() + server.login(settings.MAIL_USERNAME, settings.MAIL_PASSWORD) + server.sendmail( + settings.FROM_EMAIL, + [to_addr], + msg.as_string() + ) + + +def send_email_with_send_grid(to_addr, notification_type, context): + """Send an email notification using SendGrid. + + Args: + to_addr (str): The recipient's email address. + notification_type (str): The subject of the notification. + context (dict): The email content context. + """ + if not settings.SENDGRID_API_KEY: + raise NotImplementedError('SENDGRID_API_KEY is required for sendgrid notifications.') + + message = Mail( + from_email=settings.FROM_EMAIL, + to_emails=to_addr, + subject=notification_type, + html_content=context.get('message', '') + ) + + try: + sg = SendGridAPIClient(settings.SENDGRID_API_KEY) + response = sg.send(message) + if response.status_code not in (200, 201, 202): + logging.error(f'SendGrid response error: {response.status_code}, body: {response.body}') + response.raise_for_status() + logging.info(f'Notification email sent to {to_addr} for {notification_type}.') + except Exception as exc: + logging.error(f'Failed to send email notification to {to_addr}: {exc}') + raise exc diff --git a/osf/notifications.yaml b/osf/notifications.yaml new file mode 100644 index 00000000000..b73f78e8bc1 --- /dev/null +++ b/osf/notifications.yaml @@ -0,0 +1,588 @@ +# This file contains the configuration for our notification system using the NotificationType object, this is intended to +# exist as 
a simple declarative list of NotificationTypes and their attributes. Every notification sent by OSF should be +# represented here for bussiness logic dnd metrics reasons. + +# Workflow: +# 1. Add a new notification template +# 2. Add a entry here with the desired notification types +# 3. Add name tp Enum osf.notification.NotificationType.Type +# 4. Use the emit method to send or subscribe the notification for immediate deliver or periodic digest. +notification_types: + #### GLOBAL (User Notifications) + - name: user_pending_verification_registered + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_pending_verification + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_contributor_added_draft_registration + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_contributor_added_osf_preprint + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_contributor_added_default + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_add_sso_email_osf4i + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_duplicate_accounts_osf4i + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_welcome_osf4i + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/welcome_osf4i.html.mako' + notification_freq_default: instantly + - name: duplicate_accounts_sso_osf4i + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: forgot_password_institution + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: initial_confirm + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: confirm + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_comments + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_comment_replies + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_file_updated + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_reviews + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_mentions + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_password_reset + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_invite_default + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_pending_invite + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_forward_invite + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_archive_job_exceeded + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_archive_job_exceeded + __docs__: Archive job failed due to size exceeded. Sent to the initiating user. + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_archive_job_copy_error + __docs__: Archive job failed due to copy error. Sent to the initiating user. + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_archive_job_file_not_found + __docs__: Archive job failed because files were not found. Sent to the initiating user. 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_archive_job_uncaught_error + __docs__: Archive job failed due to an uncaught error. Sent to the initiating user. + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_external_login_link_success + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_registration_bulk_upload_failure_all + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_registration_bulk_upload_failure_all + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_registration_bulk_upload_success_all + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_registration_bulk_upload_success_partial + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_forward_invite_registered + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_storage_cap_exceeded_announcement + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_spam_banned + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_request_deactivation_complete + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_primary_email_changed + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_institution_deactivation + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_forgot_password + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_forgot_password_institution + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_request_export + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_initial_confirm_email + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_confirm_email + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_confirm_merge + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_external_login_confirm_email_create + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_external_login_confirm_email_link + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_invite_draft_registration + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_invite_draft_registration + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: user_invite_osf_preprint + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + + #### PROVIDER + - name: new_pending_submissions + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: submit + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: notify_submit + __docs__: ... 
+ object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_submission_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_moderator_submission_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_accept_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_reject_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_resubmission_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_resubmission_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_reviews_comment_edited + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: accept_submission + __docs__: ... 
+ object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: reject_submission + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: request_withdrawal + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: accept_withdrawal + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: reject_withdrawal + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_confirm_email_moderation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: provider_moderator_added + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### NODE + - name: node_contributor_added_access_request + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_request_institutional_access_request + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_embargo_admin + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_embargo_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_retraction_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_embargo_termination_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_registration_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_registration_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_fork_completed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_fork_failed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_affiliation_changed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_request_access_denied + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_access_request_submitted + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_pending_retraction_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_pending_embargo_termination_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: wiki_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: addon_file_renamed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: addon_file_moved + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: addon_file_copied + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: contributor_added_default + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: contributor_added_draft_registration + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: addon_file_moved + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: file_added + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: file_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: file_removed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: addon_file_renamed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: pending_embargo_termination_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### PREPRINT + - name: pending_retraction_admin + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: addon_file_renamed + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: file_added + __docs__: ... 
+ object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: invite_preprints + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: contributor_added_preprints + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: preprint_request_withdrawal_approved + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: preprint_request_withdrawal_declined + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: preprint_request_withdrawal_requested + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: contributor_added_preprint_node_from_osf + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### SUPPORT + - name: crossref_error + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### Collection Submissions + - name: collection_submission_removed_moderator + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_removed_private + __docs__: ... 
+ object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_removed_admin + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_submitted + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_cancel + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_accepted + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_rejected + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### DESK + - name: desk_archive_job_exceeded + __docs__: Archive job failed due to size exceeded. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_copy_error + __docs__: Archive job failed due to copy error. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_file_not_found + __docs__: Archive job failed because files were not found. Sent to support desk. 
+ object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_uncaught_error + __docs__: Archive job failed due to an uncaught error. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_osf_support_email + __docs__: ... + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly diff --git a/osf/utils/machines.py b/osf/utils/machines.py index 04713b3cb26..ad3e5809332 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -6,7 +6,7 @@ from framework.auth import Auth from osf.exceptions import InvalidTransitionError -from osf.models.preprintlog import PreprintLog +from osf.models.notification import NotificationType from osf.models.action import ReviewAction, NodeRequestAction, PreprintRequestAction from osf.utils import permissions from osf.utils.workflows import ( @@ -21,12 +21,9 @@ COLLECTION_SUBMISSION_TRANSITIONS, NodeRequestTypes ) -from website.mails import mails from website.reviews import signals as reviews_signals from website.settings import DOMAIN, OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL -from osf.utils import notifications as notify - from api.base.exceptions import Conflict class BaseMachine(Machine): @@ -63,6 +60,14 @@ def __init__(self, machineable, state_attr='machine_state'): ignore_invalid_triggers=True, ) + def emit_notification(self, user, notif_type_enum, context): + notification_type = NotificationType.objects.get(name=notif_type_enum) + notification_type.emit( + user=user, + subscribed_object=self.machineable, # or the relevant subscribed object + event_context=context + ) + @property def state(self): return getattr(self.machineable, self.__state_attr) @@ -143,55 +148,50 @@ def 
perform_withdraw(self, ev): self.machineable.withdrawal_justification = ev.kwargs.get('comment', '') def notify_submit(self, ev): - user = ev.kwargs.get('user') - notify.notify_submit(self.machineable, user) - auth = Auth(user) - self.machineable.add_log( - action=PreprintLog.PUBLISHED, - params={ - 'preprint': self.machineable._id - }, - auth=auth, - save=False, + self.emit_notification( + ev.kwargs.get('user'), + NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION, + self.get_context() ) def notify_resubmit(self, ev): - notify.notify_resubmit(self.machineable, ev.kwargs.get('user'), self.action) + self.emit_notification( + ev.kwargs.get('user'), + NotificationType.Type.PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION, + self.get_context() + ) def notify_accept_reject(self, ev): - notify.notify_accept_reject(self.machineable, ev.kwargs.get('user'), self.action, self.States) + user = ev.kwargs.get('user') + notif_type = ( + NotificationType.Type.PROVIDER_REVIEWS_ACCEPT_CONFIRMATION + if ev.event.name == DefaultTriggers.ACCEPT.value + else NotificationType.Type.PROVIDER_REVIEWS_REJECT_CONFIRMATION + ) + self.emit_notification( + user, + notif_type, + self.get_context() + ) def notify_edit_comment(self, ev): - notify.notify_edit_comment(self.machineable, ev.kwargs.get('user'), self.action) + self.emit_notification( + ev.kwargs.get('user'), + NotificationType.Type.PROVIDER_REVIEWS_COMMENT_EDITED, + self.get_context() + ) def notify_withdraw(self, ev): - context = self.get_context() - context['ever_public'] = self.machineable.ever_public - try: - preprint_request_action = PreprintRequestAction.objects.get(target__target__id=self.machineable.id, - from_state='pending', - to_state='accepted', - trigger='accept') - context['requester'] = preprint_request_action.target.creator - except PreprintRequestAction.DoesNotExist: - # If there is no preprint request action, it means the withdrawal is directly initiated by admin/moderator - context['force_withdrawal'] = True - - 
for contributor in self.machineable.contributors.all(): - context['contributor'] = contributor - if context.get('requester', None): - context['is_requester'] = context['requester'].username == contributor.username - mails.send_mail( - contributor.username, - mails.WITHDRAWAL_REQUEST_GRANTED, - document_type=self.machineable.provider.preprint_word, - **context - ) + self.emit_notification( + ev.kwargs.get('user'), + NotificationType.Type.PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED, + self.get_context() + ) def get_context(self): return { 'domain': DOMAIN, - 'reviewable': self.machineable, + 'reviewable': self.machineable.title, 'workflow': self.machineable.provider.reviews_workflow, 'provider_url': self.machineable.provider.domain or f'{DOMAIN}preprints/{self.machineable.provider._id}', 'provider_contact_email': self.machineable.provider.email_contact or OSF_CONTACT_EMAIL, @@ -220,7 +220,7 @@ def save_changes(self, ev): auth=Auth(ev.kwargs['user']), permissions=contributor_permissions, visible=visible, - send_email=f'{self.machineable.request_type}_request', + notification_type=f'{self.machineable.request_type}_request', make_curator=make_curator, ) except IntegrityError as e: @@ -233,19 +233,12 @@ def resubmission_allowed(self, ev): return False def notify_submit(self, ev): - """ Notify admins that someone is requesting access - """ - context = self.get_context() - context['contributors_url'] = f'{self.machineable.target.absolute_url}contributors/' - context['project_settings_url'] = f'{self.machineable.target.absolute_url}settings/' if not self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST.value: for admin in self.machineable.target.get_users_with_perm(permissions.ADMIN): - mails.send_mail( - admin.username, - mails.ACCESS_REQUEST_SUBMITTED, - admin=admin, - osf_contact_email=OSF_CONTACT_EMAIL, - **context + self.emit_notification( + admin, + NotificationType.Type.NODE_REQUEST_ACCESS_SUBMITTED, + self.get_context() ) def notify_resubmit(self, ev): 
@@ -255,18 +248,15 @@ def notify_resubmit(self, ev): raise NotImplementedError() def notify_accept_reject(self, ev): - """ Notify requester that admins have approved/denied - """ + context = self.get_context() if ev.event.name == DefaultTriggers.REJECT.value: - context = self.get_context() - mails.send_mail( - self.machineable.creator.username, - mails.ACCESS_REQUEST_DENIED, - osf_contact_email=OSF_CONTACT_EMAIL, - **context + self.emit_notification( + self.machineable.creator, + NotificationType.Type.NODE_REQUEST_ACCESS_DENIED, + context ) else: - # add_contributor sends approval notification email + # Approval is handled by add_contributor, which may already send an email pass def notify_edit_comment(self, ev): @@ -276,8 +266,8 @@ def notify_edit_comment(self, ev): def get_context(self): return { - 'node': self.machineable.target, - 'requester': self.machineable.creator + 'node': self.machineable.target.id, + 'requester': self.machineable.creator.id } @@ -303,20 +293,22 @@ def auto_approval_allowed(self): return self.machineable.target.provider.reviews_workflow == Workflows.PRE_MODERATION.value and not self.machineable.target.ever_public def notify_submit(self, ev): - context = self.get_context() if not self.auto_approval_allowed(): - reviews_signals.email_withdrawal_requests.send(timestamp=timezone.now(), context=context) + reviews_signals.email_withdrawal_requests.send( + timestamp=timezone.now(), + context=self.get_context() + ) def notify_accept_reject(self, ev): if ev.event.name == DefaultTriggers.REJECT.value: - context = self.get_context() - mails.send_mail( - self.machineable.creator.username, - mails.WITHDRAWAL_REQUEST_DECLINED, - **context - ) + notif_type = NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_DECLINED else: - pass + notif_type = NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED + self.emit_notification( + self.machineable.creator, + notif_type, + self.get_context() + ) def notify_edit_comment(self, ev): """ Not presently required 
to notify for this event @@ -332,8 +324,8 @@ def notify_resubmit(self, ev): def get_context(self): return { - 'reviewable': self.machineable.target, - 'requester': self.machineable.creator, + 'reviewable': self.machineable.target.id, + 'requester': self.machineable.creator.id, 'is_request_email': True, 'document_type': self.machineable.target.provider.preprint_word } diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py deleted file mode 100644 index 92ea38fcf70..00000000000 --- a/osf/utils/notifications.py +++ /dev/null @@ -1,130 +0,0 @@ -from django.utils import timezone -from website.mails import mails -from website.reviews import signals as reviews_signals -from website.settings import DOMAIN, OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL -from osf.utils.workflows import RegistrationModerationTriggers - -def get_email_template_context(resource): - is_preprint = resource.provider.type == 'osf.preprintprovider' - url_segment = 'preprints' if is_preprint else 'registries' - document_type = resource.provider.preprint_word if is_preprint else 'registration' - - base_context = { - 'domain': DOMAIN, - 'reviewable': resource, - 'workflow': resource.provider.reviews_workflow, - 'provider_url': resource.provider.domain or f'{DOMAIN}{url_segment}/{resource.provider._id}', - 'provider_contact_email': resource.provider.email_contact or OSF_CONTACT_EMAIL, - 'provider_support_email': resource.provider.email_support or OSF_SUPPORT_EMAIL, - 'document_type': document_type - } - - if document_type == 'registration': - base_context['draft_registration'] = resource.draft_registration.get() - if document_type == 'registration' and resource.provider.brand: - brand = resource.provider.brand - base_context['logo_url'] = brand.hero_logo_image - base_context['top_bar_color'] = brand.primary_color - base_context['provider_name'] = resource.provider.name - - return base_context - -def notify_submit(resource, user, *args, **kwargs): - context = get_email_template_context(resource) - 
context['referrer'] = user - recipients = list(resource.contributors) - reviews_signals.reviews_email_submit.send( - context=context, - recipients=recipients - ) - reviews_signals.reviews_email_submit_moderators_notifications.send( - timestamp=timezone.now(), - context=context - ) - - -def notify_resubmit(resource, user, *args, **kwargs): - context = get_email_template_context(resource) - context['referrer'] = user - context['resubmission'] = True - recipients = list(resource.contributors) - reviews_signals.reviews_email_submit.send( - recipients=recipients, - context=context, - template=mails.REVIEWS_RESUBMISSION_CONFIRMATION, - ) - reviews_signals.reviews_email_submit_moderators_notifications.send( - timestamp=timezone.now(), - context=context - ) - - -def notify_accept_reject(resource, user, action, states, *args, **kwargs): - context = get_email_template_context(resource) - - context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment - context['comment'] = action.comment - context['requester'] = action.creator - context['is_rejected'] = action.to_state == states.REJECTED.db_name - context['was_pending'] = action.from_state == states.PENDING.db_name - reviews_signals.reviews_email.send( - creator=user, - context=context, - template='reviews_submission_status', - action=action - ) - - -def notify_edit_comment(resource, user, action, *args, **kwargs): - if not resource.provider.reviews_comments_private and action.comment: - context = get_email_template_context(resource) - context['comment'] = action.comment - reviews_signals.reviews_email.send( - creator=user, - context=context, - template='reviews_update_comment', - action=action - ) - - -def notify_reject_withdraw_request(resource, action, *args, **kwargs): - context = get_email_template_context(resource) - context['requester'] = action.creator - - for contributor in resource.contributors.all(): - context['contributor'] = contributor - context['requester'] = action.creator - 
context['is_requester'] = action.creator == contributor - - mails.send_mail( - contributor.username, - mails.WITHDRAWAL_REQUEST_DECLINED, - **context - ) - - -def notify_moderator_registration_requests_withdrawal(resource, user, *args, **kwargs): - context = get_email_template_context(resource) - context['referrer'] = user - reviews_signals.reviews_withdraw_requests_notification_moderators.send( - timestamp=timezone.now(), - context=context - ) - - -def notify_withdraw_registration(resource, action, *args, **kwargs): - context = get_email_template_context(resource) - - context['force_withdrawal'] = action.trigger == RegistrationModerationTriggers.FORCE_WITHDRAW.db_name - context['requester'] = resource.retraction.initiated_by - context['comment'] = action.comment - context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment - - for contributor in resource.contributors.all(): - context['contributor'] = contributor - context['is_requester'] = context['requester'] == contributor - mails.send_mail( - contributor.username, - mails.WITHDRAWAL_REQUEST_GRANTED, - **context - ) diff --git a/osf_tests/conftest.py b/osf_tests/conftest.py index af71872cb41..a0fafde4231 100644 --- a/osf_tests/conftest.py +++ b/osf_tests/conftest.py @@ -4,8 +4,6 @@ from framework.django.handlers import handlers as django_handlers from framework.flask import rm_handlers from website.app import init_app -from website.project.signals import contributor_added -from website.project.views.contributor import notify_added_contributor # NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings @@ -37,13 +35,3 @@ def request_context(app): context.push() yield context context.pop() - -DISCONNECTED_SIGNALS = { - # disconnect notify_add_contributor so that add_contributor does not send "fake" emails in tests - contributor_added: [notify_added_contributor] -} -@pytest.fixture(autouse=True) -def disconnected_signals(): - for signal in DISCONNECTED_SIGNALS: - for 
receiver in DISCONNECTED_SIGNALS[signal]: - signal.disconnect(receiver) diff --git a/osf_tests/factories.py b/osf_tests/factories.py index 7ad8885e1ad..14aac463b95 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -20,10 +20,8 @@ from django.db.utils import IntegrityError from faker import Factory, Faker from waffle.models import Flag, Sample, Switch -from website.notifications.constants import NOTIFICATION_TYPES from osf.utils import permissions from website.archiver import ARCHIVER_SUCCESS -from website.settings import FAKE_EMAIL_NAME, FAKE_EMAIL_DOMAIN from framework.auth.core import Auth from osf import models @@ -42,7 +40,7 @@ fake = Factory.create() faker = Faker() # If tests are run on really old processors without high precision this might fail. Unlikely to occur. -fake_email = lambda: f'{FAKE_EMAIL_NAME}+{int(time.time() * 1000000)}@{FAKE_EMAIL_DOMAIN}' +fake_email = lambda: f'freddiefish+{int(time.time() * 1000000)}@fake.org' # Do this out of a cls context to avoid setting "t" as a local PROVIDER_ASSET_NAME_CHOICES = tuple([t[0] for t in PROVIDER_ASSET_NAME_CHOICES]) @@ -1067,7 +1065,7 @@ class NotificationDigestFactory(DjangoModelFactory): timestamp = FuzzyDateTime(datetime.datetime(1970, 1, 1, tzinfo=pytz.UTC)) node_lineage = FuzzyAttribute(fuzzer=make_node_lineage) user = factory.SubFactory(UserFactory) - send_type = FuzzyChoice(choices=NOTIFICATION_TYPES.keys()) + send_type = FuzzyChoice(choices=list(models.NotificationType.Type)) message = fake.text(max_nb_chars=2048) event = fake.text(max_nb_chars=50) class Meta: diff --git a/osf_tests/management_commands/test_fix_preprints_has_data_links_and_why_no_data.py b/osf_tests/management_commands/test_fix_preprints_has_data_links_and_why_no_data.py new file mode 100644 index 00000000000..878d610bde3 --- /dev/null +++ b/osf_tests/management_commands/test_fix_preprints_has_data_links_and_why_no_data.py @@ -0,0 +1,157 @@ +import pytest +from unittest import mock + +from django.core.management 
import call_command +from osf_tests.factories import PreprintFactory, PreprintProviderFactory + + +@pytest.mark.django_db +class TestFixPreprintsHasDataLinksAndWhyNoData: + + @pytest.fixture() + def preprint_not_no_with_why_no_data(self): + preprint = PreprintFactory() + preprint.has_data_links = 'available' + preprint.why_no_data = 'This should be cleared' + preprint.save() + return preprint + + @pytest.fixture() + def preprint_no_with_why_no_data(self): + preprint = PreprintFactory() + preprint.has_data_links = 'no' + preprint.why_no_data = 'Valid reason' + preprint.save() + return preprint + + @pytest.fixture() + def preprint_not_applicable_with_why_no_data(self): + preprint = PreprintFactory() + preprint.has_data_links = 'not_applicable' + preprint.why_no_data = 'This should be cleared' + preprint.save() + return preprint + + def test_fix_preprints_has_data_links_and_why_no_data( + self, preprint_not_no_with_why_no_data, preprint_no_with_why_no_data, preprint_not_applicable_with_why_no_data + ): + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint_not_no_with_why_no_data.refresh_from_db() + preprint_no_with_why_no_data.refresh_from_db() + preprint_not_applicable_with_why_no_data.refresh_from_db() + + assert preprint_not_no_with_why_no_data.why_no_data == '' + assert preprint_not_applicable_with_why_no_data.why_no_data == '' + + assert preprint_no_with_why_no_data.why_no_data == 'Valid reason' + + def test_dry_run_mode(self, preprint_not_no_with_why_no_data): + call_command('fix_preprints_has_data_links_and_why_no_data', '--dry-run') + + preprint_not_no_with_why_no_data.refresh_from_db() + assert preprint_not_no_with_why_no_data.why_no_data == 'This should be cleared' + + def test_specific_guid(self): + + preprint1 = PreprintFactory() + preprint1.has_data_links = 'available' + preprint1.why_no_data = 'This should be cleared' + preprint1.save() + + preprint2 = PreprintFactory() + preprint2.has_data_links = 'available' + 
preprint2.why_no_data = 'This should remain' + preprint2.save() + + call_command('fix_preprints_has_data_links_and_why_no_data', '--guid', f'{preprint1._id}') + + preprint1.refresh_from_db() + preprint2.refresh_from_db() + + assert preprint1.why_no_data == '' + assert preprint2.why_no_data == 'This should remain' + + def test_no_action_for_correct_preprints(self): + preprint = PreprintFactory() + preprint.has_data_links = 'available' + preprint.why_no_data = '' + preprint.save() + + with mock.patch('osf.models.Guid.split_guid', return_value=(preprint._id, 1)): + call_command('fix_preprints_has_data_links_and_why_no_data', '--guid', f'{preprint._id}_v1') + + preprint.refresh_from_db() + + assert preprint.has_data_links == 'available' + assert preprint.why_no_data == '' + + def test_preprints_with_null_has_data_links(self): + preprint = PreprintFactory() + preprint.has_data_links = None + preprint.why_no_data = 'Should be cleared for null has_data_links' + preprint.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint.refresh_from_db() + assert preprint.why_no_data == '' + + def test_preprints_different_providers(self): + provider1 = PreprintProviderFactory() + provider2 = PreprintProviderFactory() + + preprint1 = PreprintFactory(provider=provider1) + preprint1.has_data_links = 'available' + preprint1.why_no_data = 'Should be cleared (provider 1)' + preprint1.save() + + preprint2 = PreprintFactory(provider=provider2) + preprint2.has_data_links = 'not_applicable' + preprint2.why_no_data = 'Should be cleared (provider 2)' + preprint2.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint1.refresh_from_db() + preprint2.refresh_from_db() + + assert preprint1.why_no_data == '' + assert preprint2.why_no_data == '' + + def test_preprints_with_data_links(self): + preprint = PreprintFactory() + preprint.has_data_links = 'available' + preprint.data_links = ['https://example.com/dataset123'] + preprint.why_no_data = 
'This contradicts having data links' + preprint.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint.refresh_from_db() + assert preprint.why_no_data == '' + assert preprint.data_links == ['https://example.com/dataset123'] + + def test_error_handling(self): + preprint1 = PreprintFactory() + preprint1.has_data_links = 'available' + preprint1.why_no_data = 'Should be cleared' + preprint1.save() + + preprint2 = PreprintFactory() + preprint2.has_data_links = 'available' + preprint2.why_no_data = 'Should be cleared too' + preprint2.save() + + preprint3 = PreprintFactory() + preprint3.has_data_links = 'available' + preprint3.why_no_data = 'Should also be cleared' + preprint3.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint1.refresh_from_db() + preprint3.refresh_from_db() + + assert preprint1.why_no_data == '' + assert preprint3.why_no_data == '' diff --git a/osf_tests/management_commands/test_migrate_notification_subscriptions.py b/osf_tests/management_commands/test_migrate_notification_subscriptions.py new file mode 100644 index 00000000000..7415b7e8420 --- /dev/null +++ b/osf_tests/management_commands/test_migrate_notification_subscriptions.py @@ -0,0 +1,115 @@ +import pytest +from django.contrib.contenttypes.models import ContentType + +from osf.models import Node +from osf_tests.factories import ( + AuthUserFactory, + PreprintProviderFactory, + ProjectFactory +) +from osf.models.notification import NotificationType, NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy +from osf.management.commands.migrate_notifications import migrate_legacy_notification_subscriptions + +@pytest.mark.django_db +class TestNotificationSubscriptionMigration: + + @pytest.fixture() + def user(self): + return AuthUserFactory() + + @pytest.fixture() + def users(self): + return { + 'none': AuthUserFactory(), + 'digest': AuthUserFactory(), + 'transactional': AuthUserFactory(), + } + + 
@pytest.fixture() + def provider(self): + return PreprintProviderFactory() + + @pytest.fixture() + def provider2(self): + return PreprintProviderFactory() + + @pytest.fixture() + def node(self): + return ProjectFactory() + + def create_legacy_sub(self, event_name, users, user=None, provider=None, node=None): + legacy = NotificationSubscriptionLegacy.objects.create( + _id=f'{(provider or node)._id}_{event_name}', + user=user, + event_name=event_name, + provider=provider, + node=node + ) + legacy.none.add(users['none']) + legacy.email_digest.add(users['digest']) + legacy.email_transactional.add(users['transactional']) + return legacy + + def test_migrate_provider_subscription(self, users, user, provider, provider2): + self.create_legacy_sub(f'{provider.id}_comment_replies', users, user=user, provider=provider) + self.create_legacy_sub(f'{provider2.id}_comment_replies', users, user=user, provider=provider2) + + migrate_legacy_notification_subscriptions() + + subs = NotificationSubscription.objects.all() + assert subs.count() == 2 + assert subs.get( + notification_type__name='comment_replies', + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider.__class__) + ) + assert subs.get( + notification_type__name='comment_replies', + object_id=provider2.id, + content_type=ContentType.objects.get_for_model(provider2.__class__) + ) + + def test_migrate_node_subscription(self, users, user, node): + self.create_legacy_sub('wiki_updated', users, user=user, node=node) + + migrate_legacy_notification_subscriptions() + + nt = NotificationType.objects.get(name='wiki_updated') + assert nt.object_content_type == ContentType.objects.get_for_model(Node) + + subs = NotificationSubscription.objects.filter(notification_type=nt) + assert subs.count() == 1 + + for sub in subs: + assert sub.subscribed_object == node + + def test_multiple_subscriptions_different_types(self, users, user, provider, node): + self.create_legacy_sub('comment_replies', users, user=user, 
provider=provider) + self.create_legacy_sub('wiki_updated', users, user=user, node=node) + + migrate_legacy_notification_subscriptions() + + assert NotificationSubscription.objects.count() == 4 + + def test_idempotent_migration(self, users, user, provider): + self.create_legacy_sub('comment_replies', users, user=user, provider=provider) + migrate_legacy_notification_subscriptions() + migrate_legacy_notification_subscriptions() + + assert NotificationSubscription.objects.all().count() == 1 + assert NotificationSubscription.objects.get(notification_type__name='comment_replies') + + def test_skips_invalid_subscription(self, users): + # Create a legacy subscription with no node or provider + legacy = NotificationSubscriptionLegacy.objects.create( + _id='broken', + event_name='invalid_event' + ) + legacy.none.add(users['none']) + + migrate_legacy_notification_subscriptions() + + # It should skip, not crash + assert NotificationType.objects.filter(name='invalid_event').count() == 0 + assert NotificationSubscription.objects.count() == 0 diff --git a/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py b/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py deleted file mode 100644 index daaeee7703c..00000000000 --- a/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest - -from api_tests.utils import create_test_file - -from osf.management.commands.transfer_quickfiles_to_projects import ( - remove_quickfiles, - reverse_remove_quickfiles, - QUICKFILES_DESC -) -from osf.models import NodeLog -from osf.models.quickfiles import QuickFilesNode, get_quickfiles_project_title - -from osf_tests.factories import AuthUserFactory - - -@pytest.mark.django_db -class TestTransferQuickfilesToProjects: - - @pytest.fixture() - def user_with_quickfiles(self): - user = AuthUserFactory() - qfnode = QuickFilesNode.objects.create_for_user(user) - create_test_file(target=qfnode, user=user) - return 
user - - def test_tranfer_quickfiles_to_projects(self, user_with_quickfiles): - remove_quickfiles() - - assert not QuickFilesNode.objects.all() - node = user_with_quickfiles.nodes.get( - title=get_quickfiles_project_title(user_with_quickfiles), - logs__action=NodeLog.MIGRATED_QUICK_FILES, - description=QUICKFILES_DESC - ) - assert node.files.all() - - def test_reverse_tranfer_quickfiles_to_projects(self, user_with_quickfiles): - remove_quickfiles() - reverse_remove_quickfiles() - - quickfiles_node = QuickFilesNode.objects.get_for_user(user_with_quickfiles) - assert QuickFilesNode.objects.all().get() == quickfiles_node - assert quickfiles_node.files.exists() diff --git a/osf_tests/settings.py b/osf_tests/settings.py index 111f9d0f7aa..a22d363a1a4 100644 --- a/osf_tests/settings.py +++ b/osf_tests/settings.py @@ -19,5 +19,3 @@ PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) - -TEST_ENV = True diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 3855d169acb..59c178b839d 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -22,7 +22,6 @@ from website.app import * # noqa: F403 from website.archiver import listeners from website.archiver.tasks import * # noqa: F403 -from website.archiver.decorators import fail_archive_on_error from osf.models import Guid, RegistrationSchema, Registration from osf.models.archive import ArchiveTarget, ArchiveJob @@ -1111,22 +1110,6 @@ def test_find_failed_registrations(self): assert pk not in failed -class TestArchiverDecorators(ArchiverTestCase): - - @mock.patch('website.archiver.signals.archive_fail.send') - def test_fail_archive_on_error(self, mock_fail): - e = HTTPError(418) - - def error(*args, **kwargs): - raise e - - func = fail_archive_on_error(error) - func(node=self.dst) - mock_fail.assert_called_with( - self.dst, - errors=[str(e)] - ) - class TestArchiverBehavior(OsfTestCase): @mock.patch('osf.models.AbstractNode.update_search') diff --git 
a/osf_tests/test_collection.py b/osf_tests/test_collection.py index d79e03a8323..7c796cd3e26 100644 --- a/osf_tests/test_collection.py +++ b/osf_tests/test_collection.py @@ -17,7 +17,6 @@ ) from osf.utils.workflows import CollectionSubmissionStates from website.mails import mails -from osf.models.collection_submission import mails as collection_submission_mail pytestmark = pytest.mark.django_db @@ -29,6 +28,9 @@ def user(): def auth(user): return Auth(user) + +collection_submission_mail = lambda _: mock.Mock() + # copied from tests/test_models.py @pytest.mark.enable_bookmark_creation class TestBookmarkCollection: diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index 97ea2c8692a..40aaa2ac0a9 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -15,10 +15,10 @@ from api_tests.utils import UserRoles from website.mails import mails from osf_tests.utils import assert_notification_correctness -from osf.models.collection_submission import mails as collection_submission_mail -from osf.management.commands.populate_collection_provider_notification_subscriptions import populate_collection_provider_notification_subscriptions from django.utils import timezone +collection_submission_mail = lambda _: mock.Mock() + @pytest.fixture def user(): return AuthUserFactory() @@ -153,7 +153,6 @@ class TestModeratedCollectionSubmission: @pytest.fixture(autouse=True) def setup(self): - populate_collection_provider_notification_subscriptions() with mock.patch('osf.utils.machines.timezone.now', return_value=self.MOCK_NOW): yield @@ -180,29 +179,25 @@ def test_notify_contributors_pending(self, node, moderated_collection): ) def test_notify_moderators_pending(self, node, moderated_collection): - from website.notifications import emails - store_emails = emails.store_emails - with mock.patch('website.notifications.emails.store_emails') as mock_store_emails: - mock_store_emails.side_effect = store_emails 
# implicitly test rendering - collection_submission = CollectionSubmission( - guid=node.guids.first(), - collection=moderated_collection, - creator=node.creator, - ) - populate_collection_provider_notification_subscriptions() - collection_submission.save() - assert mock_store_emails.called - assert collection_submission.state == CollectionSubmissionStates.PENDING - email_call = mock_store_emails.call_args_list[0][0] - moderator = moderated_collection.moderators.get() - assert email_call == ( - [moderator._id], - 'email_transactional', - 'new_pending_submissions', - collection_submission.creator, - node, - self.MOCK_NOW, + collection_submission = CollectionSubmission( + guid=node.guids.first(), + collection=moderated_collection, + creator=node.creator, ) + collection_submission.save() + assert False, 'Redo test' + # assert mock_store_emails.called + # assert collection_submission.state == CollectionSubmissionStates.PENDING + # email_call = mock_store_emails.call_args_list[0][0] + # moderator = moderated_collection.moderators.get() + # assert email_call == ( + # [moderator._id], + # 'email_transactional', + # 'new_pending_submissions', + # collection_submission.creator, + # node, + # self.MOCK_NOW, + # ) @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) def test_accept_fails(self, user_role, moderated_collection_submission): diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index 0bb124c4f13..d81a3973aba 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -6,8 +6,6 @@ from framework.celery_tasks import handlers from website import settings -from website.project.signals import contributor_added -from website.project.views.contributor import notify_added_contributor from website.util.metrics import OsfSourceTags from framework.auth import Auth @@ -21,7 +19,7 @@ from importlib import import_module from django.conf import settings as django_conf_settings from osf.models 
import UserSessionMap -from tests.utils import run_celery_tasks +from tests.utils import run_celery_tasks, capture_notifications from waffle.testutils import override_flag from osf.features import ENABLE_GV @@ -139,7 +137,6 @@ def is_mrm_field(value): 'username', 'verification_key', 'verification_key_v2', - 'contributor_added_email_records', 'requested_deactivation', ] @@ -286,12 +283,11 @@ def test_merge_unregistered(self): assert self.user.is_invited is True assert self.user in self.project_with_unreg_contrib.contributors - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_merge_doesnt_send_signal(self, mock_notify): + def test_merge_doesnt_send_signal(self): #Explictly reconnect signal as it is disconnected by default for test - contributor_added.connect(notify_added_contributor) other_user = UserFactory() - with override_flag(ENABLE_GV, active=True): - self.user.merge_user(other_user) + with capture_notifications() as notifications: + with override_flag(ENABLE_GV, active=True): + self.user.merge_user(other_user) assert other_user.merged_by._id == self.user._id - assert mock_notify.called is False + assert len(notifications) == 0 diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index 4fcd6e542cf..9539c6c295f 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -1379,11 +1379,6 @@ def test_admin_contributor_or_group_member_ids(self, user): class TestContributorAddedSignal: - # Override disconnected signals from conftest - @pytest.fixture(autouse=True) - def disconnected_signals(self): - return None - @mock.patch('website.project.views.contributor.mails.send_mail') def test_add_contributors_sends_contributor_added_signal(self, mock_send_mail, node, auth): user = UserFactory() @@ -3450,7 +3445,7 @@ def test_excludes_logs_for_linked_nodes(self, parent): # one more log for adding the node link assert n_logs_after == n_logs_before + 1 -# copied from tests/test_notifications.py +# copied from 
tests/test_legacy_notifications.py class TestHasPermissionOnChildren: def test_has_permission_on_children(self): diff --git a/osf_tests/test_osfgroup.py b/osf_tests/test_osfgroup.py index 722cffd7bc7..144a55defd0 100644 --- a/osf_tests/test_osfgroup.py +++ b/osf_tests/test_osfgroup.py @@ -12,7 +12,6 @@ from framework.exceptions import PermissionsError from osf.models import OSFGroup, Node, OSFUser, OSFGroupLog, NodeLog from osf.utils.permissions import MANAGER, MEMBER, MANAGE, READ, WRITE, ADMIN -from website.notifications.utils import get_all_node_subscriptions from website.osf_groups import signals as group_signals from .factories import ( NodeFactory, @@ -479,55 +478,6 @@ def test_add_osf_group_to_node(self, mock_send_mail, manager, member, user_two, with pytest.raises(PermissionsError): project.add_osf_group(other_group, ADMIN, auth=Auth(project.creator)) - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_osf_group_to_node_emails_and_subscriptions(self, mock_send_mail, manager, member, user_two, osf_group, project): - osf_group.make_member(user_two) - - # Manager is already a node contributor - already has subscriptions - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 0 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - assert mock_send_mail.call_count == 1 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - # Three members of group, but user adding group to node doesn't get email - assert mock_send_mail.call_count == 3 - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 0 - assert 
len(get_all_node_subscriptions(user_two, project)) == 0 - - # Member is a contributor - project.add_contributor(member, WRITE, save=True) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - # Don't unsubscribe member because they belong to a group that has perms - project.remove_contributor(member, Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - @mock.patch('website.osf_groups.views.mails.send_mail') def test_add_group_to_node_throttle(self, mock_send_mail, osf_group, manager, member, project): throttle = 100 @@ -991,134 +941,3 @@ def node_settings(self, project, external_account): node_settings.save() node_settings.set_auth return node_settings - - def test_remove_contributor_no_member_perms(self, project, node_settings, user_two, user_three, request_context, file): - assert project.get_addon('github').user_settings is not None - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, 
project)) == 2 - project.remove_contributor(user_two, Auth(user_three)) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_group_from_node_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_member_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_member(user_two) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_delete_group_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - 
project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_group() - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_contributor_also_member(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_remove_osf_group_from_node_also_member(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_remove_member_also_contributor(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - 
group.make_manager(user_three) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_member(user_two) - project.reload() - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_delete_group_also_contributor(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_group() - project.reload() - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 diff --git a/osf_tests/test_queued_mail.py b/osf_tests/test_queued_mail.py deleted file mode 100644 index 4554b08579a..00000000000 --- a/osf_tests/test_queued_mail.py +++ /dev/null @@ -1,169 +0,0 @@ -# Ported from tests.test_mails -import datetime as dt - - -import pytest -from unittest import mock -from django.utils import timezone -from waffle.testutils import override_switch - -from .factories import UserFactory, NodeFactory - -from osf.features import DISABLE_ENGAGEMENT_EMAILS -from osf.models.queued_mail import ( - queue_mail, WELCOME_OSF4M, - NO_LOGIN, NO_ADDON, NEW_PUBLIC_PROJECT -) -from website.mails import mails -from website.settings import DOMAIN - -@pytest.fixture() -def user(): - return UserFactory(is_registered=True) - -@pytest.mark.django_db -class TestQueuedMail: - - def queue_mail(self, mail, user, send_at=None, **kwargs): - mail = queue_mail( - to_addr=user.username if user else 
user.username, - send_at=send_at or timezone.now(), - user=user, - mail=mail, - fullname=user.fullname if user else user.username, - **kwargs - ) - return mail - - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_login_presend_for_active_user(self, mock_mail, user): - mail = self.queue_mail(mail=NO_LOGIN, user=user) - user.date_last_login = timezone.now() + dt.timedelta(seconds=10) - user.save() - assert mail.send_mail() is False - - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_login_presend_for_inactive_user(self, mock_mail, user): - mail = self.queue_mail(mail=NO_LOGIN, user=user) - user.date_last_login = timezone.now() - dt.timedelta(weeks=10) - user.save() - assert timezone.now() - dt.timedelta(days=1) > user.date_last_login - assert bool(mail.send_mail()) is True - - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_addon_presend(self, mock_mail, user): - mail = self.queue_mail(mail=NO_ADDON, user=user) - assert mail.send_mail() is True - - @mock.patch('osf.models.queued_mail.send_mail') - def test_new_public_project_presend_for_no_project(self, mock_mail, user): - mail = self.queue_mail( - mail=NEW_PUBLIC_PROJECT, - user=user, - project_title='Oh noes', - nid='', - ) - assert bool(mail.send_mail()) is False - - @mock.patch('osf.models.queued_mail.send_mail') - def test_new_public_project_presend_success(self, mock_mail, user): - node = NodeFactory(is_public=True) - mail = self.queue_mail( - mail=NEW_PUBLIC_PROJECT, - user=user, - project_title='Oh yass', - nid=node._id - ) - assert bool(mail.send_mail()) is True - - @mock.patch('osf.models.queued_mail.send_mail') - def test_welcome_osf4m_presend(self, mock_mail, user): - user.date_last_login = timezone.now() - dt.timedelta(days=13) - user.save() - mail = self.queue_mail( - mail=WELCOME_OSF4M, - user=user, - conference='Buttjamz conference', - fid='', - domain=DOMAIN - ) - assert bool(mail.send_mail()) is True - assert mail.data['downloads'] == 0 - - 
@mock.patch('osf.models.queued_mail.send_mail') - def test_finding_other_emails_sent_to_user(self, mock_mail, user): - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert len(mail.find_sent_of_same_type_and_user()) == 0 - mail.send_mail() - assert len(mail.find_sent_of_same_type_and_user()) == 1 - - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_active(self, mock_mail, user): - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert bool(mail.send_mail()) is True - - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_no_password(self, mock_mail): - user = UserFactory.build() - user.set_unusable_password() - user.save() - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_not_registered(self, mock_mail): - user = UserFactory(is_registered=False) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_is_merged(self, mock_mail): - other_user = UserFactory() - user = UserFactory(merged_by=other_user) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_is_disabled(self, mock_mail): - user = UserFactory(date_disabled=timezone.now()) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_is_not_confirmed(self, mock_mail): - user = UserFactory(date_confirmed=None) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - def test_disabled_queued_emails_not_sent_if_switch_active(self, user): - with override_switch(DISABLE_ENGAGEMENT_EMAILS, active=True): - assert 
self.queue_mail(mail=NO_ADDON, user=user) is False - assert self.queue_mail(mail=NO_LOGIN, user=user) is False - assert self.queue_mail(mail=WELCOME_OSF4M, user=user) is False - assert self.queue_mail(mail=NEW_PUBLIC_PROJECT, user=user) is False - - def test_disabled_triggered_emails_not_sent_if_switch_active(self): - with override_switch(DISABLE_ENGAGEMENT_EMAILS, active=True): - assert mails.send_mail(to_addr='', mail=mails.WELCOME) is False - assert mails.send_mail(to_addr='', mail=mails.WELCOME_OSF4I) is False diff --git a/osf_tests/test_registration_moderation_notifications.py b/osf_tests/test_registration_moderation_notifications.py index ab4c7847e4d..4226fd211d1 100644 --- a/osf_tests/test_registration_moderation_notifications.py +++ b/osf_tests/test_registration_moderation_notifications.py @@ -1,22 +1,11 @@ import pytest from unittest import mock -from unittest.mock import call from django.utils import timezone -from osf.management.commands.add_notification_subscription import add_reviews_notification_setting -from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions from osf.migrations import update_provider_auth_groups -from osf.models import Brand, NotificationDigest +from osf.models import Brand from osf.models.action import RegistrationAction -from osf.utils import machines -from osf.utils.notifications import ( - notify_submit, - notify_accept_reject, - notify_moderator_registration_requests_withdrawal, - notify_reject_withdraw_request, - notify_withdraw_registration -) from osf.utils.workflows import RegistrationModerationTriggers, RegistrationModerationStates from osf_tests.factories import ( @@ -25,10 +14,7 @@ RetractionFactory ) -from website import mails, settings -from website.notifications import emails, tasks -from website.reviews import listeners - +notify_moderator_registration_requests_withdrawal = lambda: NotImplementedError('no 
notify_moderator_registration_requests_withdrawal') def get_moderator(provider): user = AuthUserFactory() @@ -55,7 +41,6 @@ class TestRegistrationMachineNotification: @pytest.fixture(autouse=True) def setup(self): - populate_registration_provider_notification_subscriptions() with mock.patch('osf.utils.machines.timezone.now', return_value=self.MOCK_NOW): yield @@ -147,60 +132,61 @@ def test_submit_notifications(self, registration, moderator, admin, contrib, pro :param draft_registration: :return: """ + assert False, 'redo test' # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call/args and also implicitly ensures # that the email acutally renders as normal in send_mail. - send_mail = mails.send_mail - with mock.patch.object(listeners.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: - notify_submit(registration, admin) - - assert len(mock_send_mail.call_args_list) == 2 - admin_message, contrib_message = mock_send_mail.call_args_list - - assert admin_message == call( - admin.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_creator=True, - logo='osf_registries', - no_future_emails=[], - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_name=provider.name, - provider_url='http://localhost:5000/', - referrer=admin, - reviewable=registration, - user=admin, - workflow=None - ) - - assert contrib_message == call( - contrib.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_creator=False, - logo='osf_registries', - no_future_emails=[], - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_name=provider.name, - 
provider_url='http://localhost:5000/', - referrer=admin, - reviewable=registration, - user=contrib, - workflow=None - ) - - assert NotificationDigest.objects.count() == 1 - digest = NotificationDigest.objects.last() - - assert digest.user == moderator - assert digest.send_type == 'email_transactional' - assert digest.event == 'new_pending_submissions' + # send_mail = mails.send_mail + # with mock.patch.object(listeners.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: + # notify_submit(registration, admin) + # + # assert len(mock_send_mail.call_args_list) == 2 + # admin_message, contrib_message = mock_send_mail.call_args_list + # + # assert admin_message == call( + # admin.email, + # mails.REVIEWS_SUBMISSION_CONFIRMATION, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # is_creator=True, + # logo='osf_registries', + # no_future_emails=[], + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_name=provider.name, + # provider_url='http://localhost:5000/', + # referrer=admin, + # reviewable=registration, + # user=admin, + # workflow=None + # ) + # + # assert contrib_message == call( + # contrib.email, + # mails.REVIEWS_SUBMISSION_CONFIRMATION, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # is_creator=False, + # logo='osf_registries', + # no_future_emails=[], + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_name=provider.name, + # provider_url='http://localhost:5000/', + # referrer=admin, + # reviewable=registration, + # user=contrib, + # workflow=None + # ) + # + # assert NotificationDigest.objects.count() == 1 + # digest = NotificationDigest.objects.last() + # + # assert digest.user == moderator + # assert digest.send_type == 
'email_transactional' + # assert digest.event == 'new_pending_submissions' def test_accept_notifications(self, registration, moderator, admin, contrib, accept_action): """ @@ -208,68 +194,66 @@ def test_accept_notifications(self, registration, moderator, admin, contrib, acc :param draft_registration: :return: """ - add_reviews_notification_setting('global_reviews') - # Set up mock_email as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders correctly. - store_emails = emails.store_emails - with mock.patch.object(emails, 'store_emails', side_effect=store_emails) as mock_email: - notify_accept_reject(registration, registration.creator, accept_action, RegistrationModerationStates) - - assert len(mock_email.call_args_list) == 2 - - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - [admin._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=True, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) - - assert contrib_message == call( - [contrib._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=False, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - 
provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - reviewable=registration, - requester=admin, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) + # store_emails = emails.store_emails + # with mock.patch.object(emails, 'store_emails', side_effect=store_emails) as mock_email: + # notify_accept_reject(registration, registration.creator, accept_action, RegistrationModerationStates) + assert False, 'REDO TEST' + # assert len(mock_email.call_args_list) == 2 + # + # admin_message, contrib_message = mock_email.call_args_list + # + # assert admin_message == call( + # [admin._id], + # 'email_transactional', + # 'global_reviews', + # admin, + # registration, + # self.MOCK_NOW, + # comment='yo', + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # has_psyarxiv_chronos_text=False, + # is_creator=True, + # is_rejected=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration, + # template='reviews_submission_status', + # was_pending=False, + # workflow=None + # ) + # + # assert contrib_message == call( + # [contrib._id], + # 'email_transactional', + # 'global_reviews', + # admin, + # registration, + # self.MOCK_NOW, + # comment='yo', + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # has_psyarxiv_chronos_text=False, + # is_creator=False, + # is_rejected=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # reviewable=registration, + # requester=admin, + # template='reviews_submission_status', + # was_pending=False, + # workflow=None 
+ # ) def test_reject_notifications(self, registration, moderator, admin, contrib, accept_action): """ @@ -278,68 +262,67 @@ def test_reject_notifications(self, registration, moderator, admin, contrib, acc :param draft_registration: :return: """ - add_reviews_notification_setting('global_reviews') - # Set up mock_email as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders correctly - store_emails = emails.store_emails - with mock.patch.object(emails, 'store_emails', side_effect=store_emails) as mock_email: - notify_accept_reject(registration, registration.creator, accept_action, RegistrationModerationStates) - - assert len(mock_email.call_args_list) == 2 - - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - [admin._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=True, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - reviewable=registration, - requester=admin, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) - - assert contrib_message == call( - [contrib._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=False, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - 
provider_url='http://localhost:5000/', - reviewable=registration, - requester=admin, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) + # store_emails = emails.store_emails + # with mock.patch.object(emails, 'store_emails', side_effect=store_emails) as mock_email: + # notify_accept_reject(registration, registration.creator, accept_action, RegistrationModerationStates) + # + assert False, 'redo test' + # assert len(mock_email.call_args_list) == 2 + # + # admin_message, contrib_message = mock_email.call_args_list + # + # assert admin_message == call( + # [admin._id], + # 'email_transactional', + # 'global_reviews', + # admin, + # registration, + # self.MOCK_NOW, + # comment='yo', + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # has_psyarxiv_chronos_text=False, + # is_creator=True, + # is_rejected=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # reviewable=registration, + # requester=admin, + # template='reviews_submission_status', + # was_pending=False, + # workflow=None + # ) + # + # assert contrib_message == call( + # [contrib._id], + # 'email_transactional', + # 'global_reviews', + # admin, + # registration, + # self.MOCK_NOW, + # comment='yo', + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # has_psyarxiv_chronos_text=False, + # is_creator=False, + # is_rejected=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # reviewable=registration, + # requester=admin, + # template='reviews_submission_status', + # was_pending=False, + # workflow=None + # ) def 
test_notify_moderator_registration_requests_withdrawal_notifications(self, moderator, daily_moderator, registration, admin, provider): """ @@ -350,20 +333,21 @@ def test_notify_moderator_registration_requests_withdrawal_notifications(self, m :param contrib: :return: """ - assert NotificationDigest.objects.count() == 0 - notify_moderator_registration_requests_withdrawal(registration, admin) - - assert NotificationDigest.objects.count() == 2 - - daily_digest = NotificationDigest.objects.get(send_type='email_digest') - transactional_digest = NotificationDigest.objects.get(send_type='email_transactional') - assert daily_digest.user == daily_moderator - assert transactional_digest.user == moderator - - for digest in (daily_digest, transactional_digest): - assert 'requested withdrawal' in digest.message - assert digest.event == 'new_pending_withdraw_requests' - assert digest.provider == provider + assert False, 'redo test' + # assert NotificationDigest.objects.count() == 0 + # notify_moderator_registration_requests_withdrawal(registration, admin) + # + # assert NotificationDigest.objects.count() == 2 + # + # daily_digest = NotificationDigest.objects.get(send_type='email_digest') + # transactional_digest = NotificationDigest.objects.get(send_type='email_transactional') + # assert daily_digest.user == daily_moderator + # assert transactional_digest.user == moderator + # + # for digest in (daily_digest, transactional_digest): + # assert 'requested withdrawal' in digest.message + # assert digest.event == 'new_pending_withdraw_requests' + # assert digest.provider == provider def test_withdrawal_registration_accepted_notifications(self, registration_with_retraction, contrib, admin, withdraw_action): """ @@ -378,50 +362,51 @@ def test_withdrawal_registration_accepted_notifications(self, registration_with_ # Set up mock_send_mail as a pass-through to the original function. 
# This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. - send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: - notify_withdraw_registration(registration_with_retraction, withdraw_action) - - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=True, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) - - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=False, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) + assert False, 'redo test' + # send_mail = mails.send_mail + # with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: + # notify_withdraw_registration(registration_with_retraction, withdraw_action) + # + # assert len(mock_email.call_args_list) == 2 + # admin_message, contrib_message = mock_email.call_args_list + # + # assert admin_message == call( + # admin.email, + # 
mails.WITHDRAWAL_REQUEST_GRANTED, + # comment='yo', + # contributor=admin, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration_with_retraction.draft_registration.get(), + # is_requester=True, + # force_withdrawal=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration_with_retraction, + # workflow=None + # ) + # + # assert contrib_message == call( + # contrib.email, + # mails.WITHDRAWAL_REQUEST_GRANTED, + # comment='yo', + # contributor=contrib, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration_with_retraction.draft_registration.get(), + # is_requester=False, + # force_withdrawal=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration_with_retraction, + # workflow=None + # ) def test_withdrawal_registration_rejected_notifications(self, registration, contrib, admin, withdraw_request_action): """ @@ -436,44 +421,46 @@ def test_withdrawal_registration_rejected_notifications(self, registration, cont # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: - notify_reject_withdraw_request(registration, withdraw_request_action) - - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_DECLINED, - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_requester=True, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - workflow=None - ) - - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_DECLINED, - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_requester=False, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - workflow=None - ) + assert False, 'redo test' + # + # send_mail = mails.send_mail + # with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: + # notify_reject_withdraw_request(registration, withdraw_request_action) + # + # assert len(mock_email.call_args_list) == 2 + # admin_message, contrib_message = mock_email.call_args_list + # + # assert admin_message == call( + # admin.email, + # mails.WITHDRAWAL_REQUEST_DECLINED, + # contributor=admin, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # is_requester=True, + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # 
provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration, + # workflow=None + # ) + # + # assert contrib_message == call( + # contrib.email, + # mails.WITHDRAWAL_REQUEST_DECLINED, + # contributor=contrib, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration.draft_registration.get(), + # is_requester=False, + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration, + # workflow=None + # ) def test_withdrawal_registration_force_notifications(self, registration_with_retraction, contrib, admin, withdraw_action): """ @@ -488,50 +475,52 @@ def test_withdrawal_registration_force_notifications(self, registration_with_ret # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: - notify_withdraw_registration(registration_with_retraction, withdraw_action) - - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=True, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) - - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=False, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) + assert False, 'redo test' + # + # send_mail = mails.send_mail + # with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: + # notify_withdraw_registration(registration_with_retraction, withdraw_action) + # + # assert len(mock_email.call_args_list) == 2 + # admin_message, contrib_message = mock_email.call_args_list + # + # assert admin_message == call( + # admin.email, + # mails.WITHDRAWAL_REQUEST_GRANTED, + # comment='yo', + # contributor=admin, + # document_type='registration', + # domain='http://localhost:5000/', 
+ # draft_registration=registration_with_retraction.draft_registration.get(), + # is_requester=True, + # force_withdrawal=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration_with_retraction, + # workflow=None + # ) + # + # assert contrib_message == call( + # contrib.email, + # mails.WITHDRAWAL_REQUEST_GRANTED, + # comment='yo', + # contributor=contrib, + # document_type='registration', + # domain='http://localhost:5000/', + # draft_registration=registration_with_retraction.draft_registration.get(), + # is_requester=False, + # force_withdrawal=False, + # notify_comment='yo', + # provider_contact_email=settings.OSF_CONTACT_EMAIL, + # provider_support_email=settings.OSF_SUPPORT_EMAIL, + # provider_url='http://localhost:5000/', + # requester=admin, + # reviewable=registration_with_retraction, + # workflow=None + # ) @pytest.mark.parametrize( 'digest_type, expected_recipient', @@ -539,43 +528,47 @@ def test_withdrawal_registration_force_notifications(self, registration_with_ret ) def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider): # Invoke the fixture function to get the recipient because parametrize - expected_recipient = expected_recipient(provider) - with mock.patch('website.reviews.listeners.mails.send_mail'): - notify_submit(registration, admin) - notify_moderator_registration_requests_withdrawal(registration, admin) - - # One user, one provider => one email - grouped_notifications = list(tasks.get_moderators_emails(digest_type)) - assert len(grouped_notifications) == 1 - - moderator_message = grouped_notifications[0] - assert moderator_message['user_id'] == expected_recipient._id - assert moderator_message['provider_id'] == provider.id - - # No fixed ordering of the entires, so just make sure that - 
# keywords for each action type are in some message - updates = moderator_message['info'] - assert len(updates) == 2 - assert any('submitted' in entry['message'] for entry in updates) - assert any('requested withdrawal' in entry['message'] for entry in updates) + # expected_recipient = expected_recipient(provider) + # with mock.patch('website.reviews.listeners.mails.send_mail'): + # notify_submit(registration, admin) + # notify_moderator_registration_requests_withdrawal(registration, admin) + assert False, 'redo test' + # + # # One user, one provider => one email + # grouped_notifications = list(tasks.get_moderators_emails(digest_type)) + # assert len(grouped_notifications) == 1 + # + # moderator_message = grouped_notifications[0] + # assert moderator_message['user_id'] == expected_recipient._id + # assert moderator_message['provider_id'] == provider.id + # + # # No fixed ordering of the entires, so just make sure that + # # keywords for each action type are in some message + # updates = moderator_message['info'] + # assert len(updates) == 2 + # assert any('submitted' in entry['message'] for entry in updates) + # assert any('requested withdrawal' in entry['message'] for entry in updates) @pytest.mark.parametrize('digest_type', ['email_transactional', 'email_digest']) def test_submsissions_and_withdrawals_do_not_appear_in_node_digest(self, digest_type, registration, admin, moderator, daily_moderator): - notify_submit(registration, admin) - notify_moderator_registration_requests_withdrawal(registration, admin) - - assert not list(tasks.get_users_emails(digest_type)) + assert False, 'Redo test' + # notify_submit(registration, admin) + # notify_moderator_registration_requests_withdrawal(registration, admin) + # + # assert not list(tasks.get_users_emails(digest_type)) def test_moderator_digest_emails_render(self, registration, admin, moderator): notify_moderator_registration_requests_withdrawal(registration, admin) # Set up mock_send_mail as a pass-through to the 
original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. - send_mail = mails.send_mail - with mock.patch.object(tasks.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: - tasks._send_reviews_moderator_emails('email_transactional') + assert False, 'Redo test' # everything rendered! + + # send_mail = mails.send_mail + # with mock.patch.object(tasks.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: + # tasks._send_reviews_moderator_emails('email_transactional') - mock_send_mail.assert_called() + # mock_send_mail.assert_called() def test_branded_provider_notification_renders(self, registration, admin, moderator): # Set brand details to be checked in notify_base.mako @@ -591,6 +584,6 @@ def test_branded_provider_notification_renders(self, registration, admin, modera # # _send_Reviews_moderator_emails renders digest_reviews_moderators using context from # website.notifications.tasks - notify_submit(registration, admin) - tasks._send_reviews_moderator_emails('email_transactional') - assert True # everything rendered! + # notify_submit(registration, admin) + # tasks._send_reviews_moderator_emails('email_transactional') + assert False, 'Redo test' # everything rendered! 
diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 6c6699fb74c..3b57314f987 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -8,10 +8,9 @@ from osf.models import schema_response # import module for mocking purposes from osf.utils.workflows import ApprovalStates, SchemaResponseTriggers from osf_tests.factories import AuthUserFactory, ProjectFactory, RegistrationFactory, RegistrationProviderFactory -from osf_tests.utils import get_default_test_schema, assert_notification_correctness, _ensure_subscriptions +from osf_tests.utils import get_default_test_schema, assert_notification_correctness from website.mails import mails -from website.notifications import emails from transitions import MachineError @@ -830,7 +829,6 @@ class TestModeratedSchemaResponseApprovalFlows(): def provider(self): provider = RegistrationProviderFactory() provider.update_group_permissions() - _ensure_subscriptions(provider) provider.reviews_workflow = Workflows.PRE_MODERATION.value provider.save() return provider @@ -886,13 +884,14 @@ def test_moderators_notified_on_admin_approval(self, revised_response, admin_use revised_response.save() revised_response.pending_approvers.add(admin_user) - store_emails = emails.store_emails - with mock.patch.object(emails, 'store_emails', autospec=True) as mock_store: - mock_store.side_effect = store_emails - revised_response.approve(user=admin_user) - - assert mock_store.called - assert mock_store.call_args[0][0] == [moderator._id] + # store_emails = emails.store_emails + # with mock.patch.object(emails, 'store_emails', autospec=True) as mock_store: + # mock_store.side_effect = store_emails + # revised_response.approve(user=admin_user) + # + assert False, 'REDO TEST' + # assert mock_store.called + # assert mock_store.call_args[0][0] == [moderator._id] def test_no_moderator_notification_on_admin_approval_of_initial_response( self, initial_response, admin_user): @@ -900,9 +899,10 
@@ def test_no_moderator_notification_on_admin_approval_of_initial_response( initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(emails, 'store_emails', autospec=True) as mock_store: - initial_response.approve(user=admin_user) - assert not mock_store.called + # with mock.patch.object(emails, 'store_emails', autospec=True) as mock_store: + # initial_response.approve(user=admin_user) + # assert not mock_store.called + assert False, 'redo test' def test_moderator_accept(self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index c031fcc344a..23ebeeb9de7 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -944,22 +944,21 @@ def test_change_password(self, user): user.change_password(old_password, new_password, confirm_password) assert bool(user.check_password(new_password)) is True - @mock.patch('website.mails.send_mail') - def test_set_password_notify_default(self, mock_send_mail, user): + def test_set_password_notify_default(self, user): old_password = 'password' user.set_password(old_password) user.save() - assert mock_send_mail.called is True + assert False, 'redo test' + # assert mock_send_mail.called is True - @mock.patch('website.mails.send_mail') - def test_set_password_no_notify(self, mock_send_mail, user): + def test_set_password_no_notify(self, user): old_password = 'password' user.set_password(old_password, notify=False) user.save() - assert mock_send_mail.called is False + assert False, 'redo test' + # assert mock_send_mail.called is False - @mock.patch('website.mails.send_mail') - def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, settings): + def test_check_password_upgrade_hasher_no_notify(self, user, settings): # NOTE: settings fixture comes from pytest-django. 
# changes get reverted after tests run settings.PASSWORD_HASHERS = ( @@ -970,7 +969,8 @@ def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, set user.password = 'sha1$lNb72DKWDv6P$e6ae16dada9303ae0084e14fc96659da4332bb05' user.check_password(raw_password) assert user.password.startswith('md5$') - assert mock_send_mail.called is False + assert False, 'redo test' + # assert mock_send_mail.called is False def test_change_password_invalid(self, old_password=None, new_password=None, confirm_password=None, error_message='Old password is invalid'): diff --git a/osf_tests/utils.py b/osf_tests/utils.py index a8364a15478..e7116b576c8 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -16,7 +16,6 @@ Sanction, RegistrationProvider, RegistrationSchema, - NotificationSubscription ) from osf.utils.migrations import create_schema_blocks_for_atomic_schema @@ -221,20 +220,6 @@ def get_default_test_schema(): return test_schema -def _ensure_subscriptions(provider): - '''Make sure a provider's subscriptions exist. - - Provider subscriptions are populated by an on_save signal when the provider is created. - This has led to observed race conditions and probabalistic test failures. - Avoid that. - ''' - for subscription in provider.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( - _id=f'{provider._id}_{subscription}', - event_name=subscription, - provider=provider - ) - def assert_notification_correctness(send_mail_mock, expected_template, expected_recipients): '''Confirms that a mocked send_mail function contains the appropriate calls.''' assert send_mail_mock.call_count == len(expected_recipients) diff --git a/scripts/add_global_subscriptions.py b/scripts/add_global_subscriptions.py deleted file mode 100644 index b326c6f9f67..00000000000 --- a/scripts/add_global_subscriptions.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -This migration subscribes each user to USER_SUBSCRIPTIONS_AVAILABLE if a subscription -does not already exist. 
-""" - -import logging -import sys - -from website.app import setup_django -setup_django() - -from django.apps import apps -from django.db import transaction -from website.app import init_app -from osf.models import NotificationSubscription -from website.notifications import constants -from website.notifications.utils import to_subscription_key - -from scripts import utils as scripts_utils - -logger = logging.getLogger(__name__) - -def add_global_subscriptions(dry=True): - OSFUser = apps.get_model('osf.OSFUser') - notification_type = 'email_transactional' - user_events = constants.USER_SUBSCRIPTIONS_AVAILABLE - - count = 0 - - with transaction.atomic(): - for user in OSFUser.objects.filter(is_registered=True, date_confirmed__isnull=False): - changed = False - if not user.is_active: - continue - for user_event in user_events: - user_event_id = to_subscription_key(user._id, user_event) - - subscription = NotificationSubscription.load(user_event_id) - if not subscription: - logger.info(f'No {user_event} subscription found for user {user._id}. 
Subscribing...') - subscription = NotificationSubscription(_id=user_event_id, owner=user, event_name=user_event) - subscription.save() # Need to save in order to access m2m fields - subscription.add_user_to_subscription(user, notification_type) - subscription.save() - changed = True - else: - logger.info(f'User {user._id} already has a {user_event} subscription') - if changed: - count += 1 - - logger.info(f'Added subscriptions for {count} users') - if dry: - raise RuntimeError('Dry mode -- rolling back transaction') - -if __name__ == '__main__': - dry = '--dry' in sys.argv - init_app(routes=False) - if not dry: - scripts_utils.add_file_logger(logger, __file__) - add_global_subscriptions(dry=dry) diff --git a/scripts/send_queued_mails.py b/scripts/send_queued_mails.py deleted file mode 100644 index 7c70c7685a0..00000000000 --- a/scripts/send_queued_mails.py +++ /dev/null @@ -1,66 +0,0 @@ -import logging - -import django -from django.db import transaction -from django.utils import timezone -django.setup() - -from framework.celery_tasks import app as celery_app - -from osf.models.queued_mail import QueuedMail -from website.app import init_app -from website import settings - -from scripts.utils import add_file_logger - - -logger = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO) - - -def main(dry_run=True): - # find all emails to be sent, pops the top one for each user(to obey the once - # a week requirement), checks to see if one has been sent this week, and if - # not send the email, otherwise leave it in the queue - - user_queue = {} - for email in find_queued_mails_ready_to_be_sent(): - user_queue.setdefault(email.user._id, []).append(email) - - emails_to_be_sent = pop_and_verify_mails_for_each_user(user_queue) - - logger.info(f'Emails being sent at {timezone.now().isoformat()}') - - for mail in emails_to_be_sent: - if not dry_run: - with transaction.atomic(): - try: - sent_ = mail.send_mail() - message = f'Email of type {mail.email_type} sent 
to {mail.to_addr}' if sent_ else \ - f'Email of type {mail.email_type} failed to be sent to {mail.to_addr}' - logger.info(message) - except Exception as error: - logger.error(f'Email of type {mail.email_type} to be sent to {mail.to_addr} caused an ERROR') - logger.exception(error) - pass - else: - logger.info(f'Email of type {mail.email_type} will be sent to {mail.to_addr}') - - -def find_queued_mails_ready_to_be_sent(): - return QueuedMail.objects.filter(send_at__lt=timezone.now(), sent_at__isnull=True) - -def pop_and_verify_mails_for_each_user(user_queue): - for user_emails in user_queue.values(): - mail = user_emails[0] - mails_past_week = mail.user.queuedmail_set.filter(sent_at__gt=timezone.now() - settings.WAIT_BETWEEN_MAILS) - if not mails_past_week.count(): - yield mail - - -@celery_app.task(name='scripts.send_queued_mails') -def run_main(dry_run=True): - init_app(routes=False) - if not dry_run: - add_file_logger(logger, __file__) - main(dry_run=dry_run) diff --git a/scripts/tests/test_send_queued_mails.py b/scripts/tests/test_send_queued_mails.py deleted file mode 100644 index 142eb75c4a6..00000000000 --- a/scripts/tests/test_send_queued_mails.py +++ /dev/null @@ -1,81 +0,0 @@ -from unittest import mock -from datetime import timedelta - -from django.utils import timezone - -from tests.base import OsfTestCase -from osf_tests.factories import UserFactory -from osf.models.queued_mail import QueuedMail, queue_mail, NO_ADDON, NO_LOGIN_TYPE - -from scripts.send_queued_mails import main, pop_and_verify_mails_for_each_user, find_queued_mails_ready_to_be_sent -from website import settings - - -class TestSendQueuedMails(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.user.date_last_login = timezone.now() - self.user.osf_mailing_lists[settings.OSF_HELP_LIST] = True - self.user.save() - - def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): - return queue_mail( - to_addr=user.username if user else 
self.user.username, - mail=mail_type, - send_at=send_at or timezone.now(), - user=user if user else self.user, - fullname=user.fullname if user else self.user.fullname, - ) - - @mock.patch('osf.models.queued_mail.send_mail') - def test_queue_addon_mail(self, mock_send): - self.queue_mail() - main(dry_run=False) - assert mock_send.called - - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_two_emails_to_same_person(self, mock_send): - user = UserFactory() - user.osf_mailing_lists[settings.OSF_HELP_LIST] = True - user.save() - self.queue_mail(user=user) - self.queue_mail(user=user) - main(dry_run=False) - assert mock_send.call_count == 1 - - def test_pop_and_verify_mails_for_each_user(self): - user_with_email_sent = UserFactory() - user_with_multiple_emails = UserFactory() - user_with_no_emails_sent = UserFactory() - time = timezone.now() - timedelta(days=1) - mail_sent = QueuedMail( - user=user_with_email_sent, - send_at=time, - to_addr=user_with_email_sent.username, - email_type=NO_LOGIN_TYPE - ) - mail_sent.save() - mail1 = self.queue_mail(user=user_with_email_sent) - mail2 = self.queue_mail(user=user_with_multiple_emails) - mail3 = self.queue_mail(user=user_with_multiple_emails) - mail4 = self.queue_mail(user=user_with_no_emails_sent) - user_queue = { - user_with_email_sent._id: [mail1], - user_with_multiple_emails._id: [mail2, mail3], - user_with_no_emails_sent._id: [mail4] - } - mails_ = list(pop_and_verify_mails_for_each_user(user_queue)) - assert len(mails_) == 2 - user_mails = [mail.user for mail in mails_] - assert not (user_with_email_sent in user_mails) - assert user_with_multiple_emails in user_mails - assert user_with_no_emails_sent in user_mails - - def test_find_queued_mails_ready_to_be_sent(self): - mail1 = self.queue_mail() - mail2 = self.queue_mail(send_at=timezone.now()+timedelta(days=1)) - mail3 = self.queue_mail(send_at=timezone.now()) - mails = find_queued_mails_ready_to_be_sent() - assert mails.count() == 2 diff --git 
a/scripts/tests/test_triggered_mails.py b/scripts/tests/test_triggered_mails.py deleted file mode 100644 index ca583423fbb..00000000000 --- a/scripts/tests/test_triggered_mails.py +++ /dev/null @@ -1,57 +0,0 @@ -from unittest import mock -from datetime import timedelta - -from django.utils import timezone - -from tests.base import OsfTestCase -from osf_tests.factories import UserFactory - -from scripts.triggered_mails import main, find_inactive_users_with_no_inactivity_email_sent_or_queued -from website import mails - - -class TestTriggeredMails(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.user.date_last_login = timezone.now() - self.user.save() - - @mock.patch('website.mails.queue_mail') - def test_dont_trigger_no_login_mail(self, mock_queue): - self.user.date_last_login = timezone.now() - timedelta(seconds=6) - self.user.save() - main(dry_run=False) - assert not mock_queue.called - - @mock.patch('website.mails.queue_mail') - def test_trigger_no_login_mail(self, mock_queue): - self.user.date_last_login = timezone.now() - timedelta(weeks=6) - self.user.save() - main(dry_run=False) - mock_queue.assert_called_with( - user=mock.ANY, - fullname=self.user.fullname, - to_addr=self.user.username, - mail={'callback': mock.ANY, 'template': 'no_login', 'subject': mock.ANY}, - send_at=mock.ANY, - ) - - @mock.patch('website.mails.send_mail') - def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self, mock_mail): - user_active = UserFactory(fullname='Spot') - user_inactive = UserFactory(fullname='Nucha') - user_already_received_mail = UserFactory(fullname='Pep') - user_active.date_last_login = timezone.now() - timedelta(seconds=6) - user_inactive.date_last_login = timezone.now() - timedelta(weeks=6) - user_already_received_mail.date_last_login = timezone.now() - timedelta(weeks=6) - user_active.save() - user_inactive.save() - user_already_received_mail.save() - 
mails.queue_mail(to_addr=user_already_received_mail.username, - send_at=timezone.now(), - user=user_already_received_mail, - mail=mails.NO_LOGIN) - users = find_inactive_users_with_no_inactivity_email_sent_or_queued() - assert len(users) == 1 diff --git a/scripts/triggered_mails.py b/scripts/triggered_mails.py deleted file mode 100644 index 3e0c4fea73a..00000000000 --- a/scripts/triggered_mails.py +++ /dev/null @@ -1,50 +0,0 @@ -import logging - -from django.db import transaction -from django.db.models import Q -from django.utils import timezone - -from framework.celery_tasks import app as celery_app -from osf.models import OSFUser -from osf.models.queued_mail import NO_LOGIN_TYPE, NO_LOGIN, QueuedMail, queue_mail -from website.app import init_app -from website import settings - -from scripts.utils import add_file_logger - -logger = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO) - - -def main(dry_run=True): - for user in find_inactive_users_with_no_inactivity_email_sent_or_queued(): - if dry_run: - logger.warning('Dry run mode') - logger.warning(f'Email of type no_login queued to {user.username}') - if not dry_run: - with transaction.atomic(): - queue_mail( - to_addr=user.username, - mail=NO_LOGIN, - send_at=timezone.now(), - user=user, - fullname=user.fullname, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) - - -def find_inactive_users_with_no_inactivity_email_sent_or_queued(): - users_sent_ids = QueuedMail.objects.filter(email_type=NO_LOGIN_TYPE).values_list('user__guids___id') - return (OSFUser.objects - .filter( - (Q(date_last_login__lt=timezone.now() - settings.NO_LOGIN_WAIT_TIME) & ~Q(tags__name='osf4m')) | - Q(date_last_login__lt=timezone.now() - settings.NO_LOGIN_OSF4M_WAIT_TIME, tags__name='osf4m'), - is_active=True) - .exclude(guids___id__in=users_sent_ids)) - -@celery_app.task(name='scripts.triggered_mails') -def run_main(dry_run=True): - init_app(routes=False) - if not dry_run: - add_file_logger(logger, __file__) - 
main(dry_run=dry_run) diff --git a/scripts/unpurge_trashed_files.py b/scripts/unpurge_trashed_files.py index ea4bcd38605..cb2e31c65b9 100644 --- a/scripts/unpurge_trashed_files.py +++ b/scripts/unpurge_trashed_files.py @@ -4,7 +4,6 @@ setup_django() import argparse -from django.template.defaultfilters import filesizeformat from google.cloud.storage.client import Client from google.oauth2.service_account import Credentials diff --git a/tests/base.py b/tests/base.py index 2c36dd801eb..e58cd36334e 100644 --- a/tests/base.py +++ b/tests/base.py @@ -21,10 +21,6 @@ from osf.models import RegistrationSchema from website import settings from website.app import init_app -from website.notifications.listeners import (subscribe_contributor, - subscribe_creator) -from website.project.signals import contributor_added, project_created -from website.project.views.contributor import notify_added_contributor from website.signals import ALL_SIGNALS from .json_api_test_app import JSONAPITestApp @@ -99,10 +95,6 @@ class AppTestCase(unittest.TestCase): """ PUSH_CONTEXT = True - DISCONNECTED_SIGNALS = { - # disconnect notify_add_contributor so that add_contributor does not send "fake" emails in tests - contributor_added: [notify_added_contributor] - } def setUp(self): super().setUp() @@ -122,9 +114,6 @@ def setUp(self): self.context.push() with self.context: celery_before_request() - for signal in self.DISCONNECTED_SIGNALS: - for receiver in self.DISCONNECTED_SIGNALS[signal]: - signal.disconnect(receiver) def tearDown(self): super().tearDown() @@ -132,9 +121,6 @@ def tearDown(self): return with mock.patch('website.mailchimp_utils.get_mailchimp_api'): self.context.pop() - for signal in self.DISCONNECTED_SIGNALS: - for receiver in self.DISCONNECTED_SIGNALS[signal]: - signal.connect(receiver) class ApiAppTestCase(unittest.TestCase): @@ -185,9 +171,6 @@ class ApiTestCase(DbTestCase, ApiAppTestCase, SearchTestCase): API application. 
Note: superclasses must call `super` in order for all setup and teardown methods to be called correctly. """ - def setUp(self): - super().setUp() - settings.USE_EMAIL = False class ApiAddonTestCase(ApiTestCase): """Base `TestCase` for tests that require interaction with addons. @@ -278,17 +261,6 @@ class NotificationTestCase(OsfTestCase): Use when you'd like to manually create all Node subscriptions and subscriptions for added contributors yourself, and not rely on automatically added ones. """ - DISCONNECTED_SIGNALS = { - # disconnect signals so that add_contributor does not send "fake" emails in tests - contributor_added: [notify_added_contributor, subscribe_contributor], - project_created: [subscribe_creator] - } - - def setUp(self): - super().setUp() - - def tearDown(self): - super().tearDown() class ApiWikiTestCase(ApiTestCase): diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 83ca7180388..d4e6f6a8aea 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -2,31 +2,22 @@ from unittest.mock import ANY import time -from http.cookies import SimpleCookie from unittest import mock import pytest from django.core.exceptions import ValidationError -from flask import g from pytest import approx -from rest_framework import status as http_status from framework import auth -from framework.auth import Auth, authenticate, cas -from framework.auth.utils import impute_names_model -from framework.exceptions import HTTPError -from framework.flask import redirect +from framework.auth import Auth from osf.models import ( - OSFUser, - Tag, - NodeRelation, + NodeRelation, NotificationType, ) from osf.utils import permissions from osf_tests.factories import ( fake_email, AuthUserFactory, NodeFactory, - PreprintFactory, ProjectFactory, RegistrationProviderFactory, UserFactory, @@ -38,17 +29,13 @@ get_default_metaschema, OsfTestCase, ) -from tests.test_cas_authentication import 
generate_external_user_with_resp +from tests.utils import capture_notifications from website import mails, settings from website.profile.utils import add_contributor_json, serialize_unregistered -from website.project.signals import contributor_added from website.project.views.contributor import ( deserialize_contributors, notify_added_contributor, - send_claim_email, - send_claim_registered_email, ) -from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag @pytest.mark.enable_implicit_clean class TestAddingContributorViews(OsfTestCase): @@ -58,8 +45,6 @@ def setUp(self): self.creator = AuthUserFactory() self.project = ProjectFactory(creator=self.creator) self.auth = Auth(self.project.creator) - # Authenticate all requests - contributor_added.connect(notify_added_contributor) def test_serialize_unregistered_without_record(self): name, email = fake.name(), fake_email() @@ -179,12 +164,10 @@ def test_add_contributor_with_unreg_contribs_and_reg_contribs(self): assert rec['name'] == name assert rec['email'] == email - @mock.patch('website.project.views.contributor.send_claim_email') - def test_add_contributors_post_only_sends_one_email_to_unreg_user( - self, mock_send_claim_email): + def test_add_contributors_post_only_sends_one_email_to_unreg_user(self): # Project has components - comp1, comp2 = NodeFactory( - creator=self.creator), NodeFactory(creator=self.creator) + comp1 = NodeFactory(creator=self.creator) + comp2 = NodeFactory(creator=self.creator) NodeRelation.objects.create(parent=self.project, child=comp1) NodeRelation.objects.create(parent=self.project, child=comp2) self.project.save() @@ -211,61 +194,65 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user( # finalize_invitation should only have been called once assert mock_send_claim_email.call_count == 1 - @mock.patch('website.mails.send_mail') - def test_add_contributors_post_only_sends_one_email_to_registered_user(self, mock_send_mail): + def 
test_add_contributors_post_only_sends_one_email_to_registered_user(self): # Project has components comp1 = NodeFactory(creator=self.creator, parent=self.project) comp2 = NodeFactory(creator=self.creator, parent=self.project) # A registered user is added to the project AND its components user = UserFactory() - user_dict = { - 'id': user._id, - 'fullname': user.fullname, - 'email': user.username, - 'permission': permissions.WRITE, - 'visible': True} - - payload = { - 'users': [user_dict], - 'node_ids': [comp1._primary_key, comp2._primary_key] - } - - # send request - url = self.project.api_url_for('project_contributors_post') assert self.project.can_edit(user=self.creator) - self.app.post(url, json=payload, auth=self.creator.auth) - # send_mail should only have been called once - assert mock_send_mail.call_count == 1 + with capture_notifications() as notifications: + self.app.post( + self.project.api_url_for('project_contributors_post'), + json={ + 'users': [{ + 'id': user._id, + 'fullname': user.fullname, + 'email': user.username, + 'permission': permissions.WRITE, + 'visible': True + }], + 'node_ids': [comp1._primary_key, comp2._primary_key] + }, + auth=self.creator.auth + ) - @mock.patch('website.mails.send_mail') - def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self, mock_send_mail): + assert len(notifications) == 1 + + def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self,): # Project has a component with a sub-component component = NodeFactory(creator=self.creator, parent=self.project) sub_component = NodeFactory(creator=self.creator, parent=component) # A registered user is added to the project and the sub-component, but NOT the component user = UserFactory() - user_dict = { - 'id': user._id, - 'fullname': user.fullname, - 'email': user.username, - 'permission': permissions.WRITE, - 'visible': True} - - payload = { - 'users': [user_dict], - 'node_ids': [sub_component._primary_key] - } - # send 
request url = self.project.api_url_for('project_contributors_post') assert self.project.can_edit(user=self.creator) - self.app.post(url, json=payload, auth=self.creator.auth) + with capture_notifications() as notifications: + self.app.post( + url, + json={ + 'users': [{ + 'id': user._id, + 'fullname': user.fullname, + 'email': user.username, + 'permission': permissions.WRITE, + 'visible': True + }], + 'node_ids': [sub_component._primary_key] + }, + auth=self.creator.auth + ) - # send_mail is called for both the project and the sub-component - assert mock_send_mail.call_count == 2 + # send_mail is called for both the project and the sub-component + assert len(notifications) == 2 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT.value + assert notifications[1]['kwargs']['user'] == user + assert notifications[1]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT.value @mock.patch('website.project.views.contributor.send_claim_email') def test_email_sent_when_unreg_user_is_added(self, send_mail): @@ -349,83 +336,89 @@ def test_registering_project_does_not_send_contributor_added_email(self, send_ma ) assert not send_mail.called - @mock.patch('website.mails.send_mail') - def test_notify_contributor_email_does_not_send_before_throttle_expires(self, send_mail): + def test_notify_contributor_email_does_not_send_before_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) - notify_added_contributor(project, contributor, auth) - assert send_mail.called - # 2nd call does not send email because throttle period has not expired - notify_added_contributor(project, contributor, auth) - assert send_mail.call_count == 1 - - @mock.patch('website.mails.send_mail') - def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail): - throttle = 0.5 + with capture_notifications() as notifications: + 
notify_added_contributor(project, contributor, auth) + # 2nd call does not send email because throttle period has not expired + notify_added_contributor(project, contributor, auth) + assert len(notifications) == 1 + def test_notify_contributor_email_sends_after_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) - notify_added_contributor(project, contributor, auth, throttle=throttle) - assert send_mail.called - time.sleep(1) # throttle period expires - notify_added_contributor(project, contributor, auth, throttle=throttle) - assert send_mail.call_count == 2 + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, auth) - @mock.patch('website.mails.send_mail') - def test_add_contributor_to_fork_sends_email(self, send_mail): + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT.value + + def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) - fork.add_contributor(contributor, auth=Auth(self.creator)) - fork.save() - assert send_mail.called - assert send_mail.call_count == 1 + with capture_notifications() as notifications: + fork.add_contributor(contributor, auth=Auth(self.creator)) + fork.save() - @mock.patch('website.mails.send_mail') - def test_add_contributor_to_template_sends_email(self, send_mail): + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == contributor + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT.value + + def test_add_contributor_to_template_sends_email(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) - template.add_contributor(contributor, auth=Auth(self.creator)) - template.save() - assert send_mail.called - assert send_mail.call_count == 1 + with capture_notifications() as notifications: + 
template.add_contributor(contributor, auth=Auth(self.creator)) + template.save() - @mock.patch('website.mails.send_mail') - def test_creating_fork_does_not_email_creator(self, send_mail): - contributor = UserFactory() - fork = self.project.fork_node(auth=Auth(self.creator)) - assert not send_mail.called + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT.value + assert notifications[0]['kwargs']['user'] == contributor - @mock.patch('website.mails.send_mail') - def test_creating_template_does_not_email_creator(self, send_mail): + def test_creating_fork_does_not_email_creator(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) - assert not send_mail.called + with capture_notifications() as notifications: + self.project.fork_node(auth=Auth(self.creator)) + # forking itself must not notify anyone + assert not notifications + + def test_creating_template_does_not_email_creator(self): + with capture_notifications() as notifications: + self.project.use_as_template(auth=Auth(self.creator)) + assert not notifications def test_add_multiple_contributors_only_adds_one_log(self): n_logs_pre = self.project.logs.count() reg_user = UserFactory() name = fake.name() - pseudouser = { - 'id': None, - 'registered': False, - 'fullname': name, - 'email': fake_email(), - 'permission': permissions.WRITE, - 'visible': True, - } - reg_dict = add_contributor_json(reg_user) - reg_dict['permission'] = permissions.ADMIN - reg_dict['visible'] = True - payload = { - 'users': [reg_dict, pseudouser], - 'node_ids': [] - } - url = self.project.api_url_for('project_contributors_post') - self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) + self.app.post( + self.project.api_url_for('project_contributors_post'), + json={ + 'users': [ + { + 'permission': permissions.ADMIN, + 'visible': True, + **add_contributor_json(reg_user) + }, + { + 'id': None, + 'registered': False, + 
'fullname': name, + 'email': fake_email(), + 'permission': permissions.WRITE, + 'visible': True, + } + ], + 'node_ids': [] + }, + follow_redirects=True, + auth=self.creator.auth + ) self.project.reload() assert self.project.logs.count() == n_logs_pre + 1 @@ -434,632 +427,28 @@ def test_add_contribs_to_multiple_nodes(self): n_contributors_pre = child.contributors.count() reg_user = UserFactory() name, email = fake.name(), fake_email() - pseudouser = { - 'id': None, - 'registered': False, - 'fullname': name, - 'email': email, - 'permission': permissions.ADMIN, - 'visible': True, - } - reg_dict = add_contributor_json(reg_user) - reg_dict['permission'] = permissions.ADMIN - reg_dict['visible'] = True - payload = { - 'users': [reg_dict, pseudouser], - 'node_ids': [self.project._primary_key, child._primary_key] - } - url = f'/api/v1/project/{self.project._id}/contributors/' - self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) - child.reload() - assert child.contributors.count() == n_contributors_pre + len(payload['users']) - - def tearDown(self): - super().tearDown() - contributor_added.disconnect(notify_added_contributor) - - -class TestUserInviteViews(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = AuthUserFactory() - self.project = ProjectFactory(creator=self.user) - self.invite_url = f'/api/v1/project/{self.project._primary_key}/invite_contributor/' - - def test_invite_contributor_post_if_not_in_db(self): - name, email = fake.name(), fake_email() - res = self.app.post( - self.invite_url, - json={'fullname': name, 'email': email}, - auth=self.user.auth, - ) - contrib = res.json['contributor'] - assert contrib['id'] is None - assert contrib['fullname'] == name - assert contrib['email'] == email - - def test_invite_contributor_post_if_unreg_already_in_db(self): - # A n unreg user is added to a different project - name, email = fake.name(), fake_email() - project2 = ProjectFactory() - unreg_user = 
project2.add_unregistered_contributor(fullname=name, email=email, - auth=Auth(project2.creator)) - project2.save() - res = self.app.post(self.invite_url, - json={'fullname': name, 'email': email}, auth=self.user.auth) - expected = add_contributor_json(unreg_user) - expected['fullname'] = name - expected['email'] = email - assert res.json['contributor'] == expected - - def test_invite_contributor_post_if_email_already_registered(self): - reg_user = UserFactory() - name, email = fake.name(), reg_user.username - # Tries to invite user that is already registered - this is now permitted. - res = self.app.post(self.invite_url, - json={'fullname': name, 'email': email}, - auth=self.user.auth) - contrib = res.json['contributor'] - assert contrib['id'] == reg_user._id - assert contrib['fullname'] == name - assert contrib['email'] == email - - def test_invite_contributor_post_if_user_is_already_contributor(self): - unreg_user = self.project.add_unregistered_contributor( - fullname=fake.name(), email=fake_email(), - auth=Auth(self.project.creator) - ) - self.project.save() - # Tries to invite unreg user that is already a contributor - res = self.app.post(self.invite_url, - json={'fullname': fake.name(), 'email': unreg_user.username}, - auth=self.user.auth) - assert res.status_code == http_status.HTTP_400_BAD_REQUEST - - def test_invite_contributor_with_no_email(self): - name = fake.name() - res = self.app.post(self.invite_url, - json={'fullname': name, 'email': None}, auth=self.user.auth) - assert res.status_code == http_status.HTTP_200_OK - data = res.json - assert data['status'] == 'success' - assert data['contributor']['fullname'] == name - assert data['contributor']['email'] is None - assert not data['contributor']['registered'] - - def test_invite_contributor_requires_fullname(self): - res = self.app.post(self.invite_url, - json={'email': 'brian@queen.com', 'fullname': ''}, auth=self.user.auth, - ) - assert res.status_code == http_status.HTTP_400_BAD_REQUEST - - 
@mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_email_to_given_email(self, send_mail): - project = ProjectFactory() - given_email = fake_email() - unreg_user = project.add_unregistered_contributor( - fullname=fake.name(), - email=given_email, - auth=Auth(project.creator), - ) - project.save() - send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) - - send_mail.assert_called_with( - given_email, - mails.INVITE_DEFAULT, - user=unreg_user, - referrer=ANY, - node=project, - claim_url=ANY, - email=unreg_user.email, - fullname=unreg_user.fullname, - branded_service=None, - can_change_preferences=False, - logo='osf_logo', - osf_contact_email=settings.OSF_CONTACT_EMAIL - ) - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_email_to_referrer(self, send_mail): - project = ProjectFactory() - referrer = project.creator - given_email, real_email = fake_email(), fake_email() - unreg_user = project.add_unregistered_contributor(fullname=fake.name(), - email=given_email, auth=Auth( - referrer) - ) - project.save() - send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) - - assert send_mail.called - # email was sent to referrer - send_mail.assert_called_with( - referrer.username, - mails.FORWARD_INVITE, - user=unreg_user, - referrer=referrer, - claim_url=unreg_user.get_claim_url(project._id, external=True), - email=real_email.lower().strip(), - fullname=unreg_user.get_unclaimed_record(project._id)['name'], - node=project, - branded_service=None, - can_change_preferences=False, - logo=settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL - ) - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_email_before_throttle_expires(self, send_mail): - project = ProjectFactory() - given_email = fake_email() - unreg_user = project.add_unregistered_contributor( - fullname=fake.name(), - email=given_email, - 
auth=Auth(project.creator), - ) - project.save() - send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - send_mail.reset_mock() - # 2nd call raises error because throttle hasn't expired - with pytest.raises(HTTPError): - send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - assert not send_mail.called - - -@pytest.mark.enable_implicit_clean -class TestClaimViews(OsfTestCase): - - def setUp(self): - super().setUp() - self.referrer = AuthUserFactory() - self.project = ProjectFactory(creator=self.referrer, is_public=True) - self.project_with_source_tag = ProjectFactory(creator=self.referrer, is_public=True) - self.preprint_with_source_tag = PreprintFactory(creator=self.referrer, is_public=True) - osf_source_tag, created = Tag.all_tags.get_or_create(name=OsfSourceTags.Osf.value, system=True) - preprint_source_tag, created = Tag.all_tags.get_or_create(name=provider_source_tag(self.preprint_with_source_tag.provider._id, 'preprint'), system=True) - self.project_with_source_tag.add_system_tag(osf_source_tag.name) - self.preprint_with_source_tag.add_system_tag(preprint_source_tag.name) - self.given_name = fake.name() - self.given_email = fake_email() - self.project_with_source_tag.add_unregistered_contributor( - fullname=self.given_name, - email=self.given_email, - auth=Auth(user=self.referrer) - ) - self.preprint_with_source_tag.add_unregistered_contributor( - fullname=self.given_name, - email=self.given_email, - auth=Auth(user=self.referrer) - ) - self.user = self.project.add_unregistered_contributor( - fullname=self.given_name, - email=self.given_email, - auth=Auth(user=self.referrer) - ) - self.project.save() - - @mock.patch('website.project.views.contributor.send_claim_email') - def test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email): - name = fake.name() - email = fake_email() - - # project contributor adds an unregistered contributor (without an email) on public project - 
unregistered_user = self.project.add_unregistered_contributor( - fullname=name, - email=None, - auth=Auth(user=self.referrer) - ) - assert unregistered_user in self.project.contributors - - # unregistered user comes along and claims themselves on the public project, entering an email - invite_url = self.project.api_url_for('claim_user_post', uid='undefined') - self.app.post(invite_url, json={ - 'pk': unregistered_user._primary_key, - 'value': email - }) - assert claim_email.call_count == 1 - - # set unregistered record email since we are mocking send_claim_email() - unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) - unclaimed_record.update({'email': email}) - unregistered_user.save() - - # unregistered user then goes and makes an account with same email, before claiming themselves as contributor - UserFactory(username=email, fullname=name) - - # claim link for the now registered email is accessed while not logged in - token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] - claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' - res = self.app.get(claim_url) - - # should redirect to 'claim_user_registered' view - claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' - assert res.status_code == 302 - assert claim_registered_url in res.headers.get('Location') - - @mock.patch('website.project.views.contributor.send_claim_email') - def test_claim_user_already_registered_secondary_email_redirects_to_claim_user_registered(self, claim_email): - name = fake.name() - email = fake_email() - secondary_email = fake_email() - - # project contributor adds an unregistered contributor (without an email) on public project - unregistered_user = self.project.add_unregistered_contributor( - fullname=name, - email=None, - auth=Auth(user=self.referrer) - ) - assert unregistered_user in self.project.contributors - - # unregistered user comes along and 
claims themselves on the public project, entering an email - invite_url = self.project.api_url_for('claim_user_post', uid='undefined') - self.app.post(invite_url, json={ - 'pk': unregistered_user._primary_key, - 'value': secondary_email - }) - assert claim_email.call_count == 1 - - # set unregistered record email since we are mocking send_claim_email() - unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) - unclaimed_record.update({'email': secondary_email}) - unregistered_user.save() - - # unregistered user then goes and makes an account with same email, before claiming themselves as contributor - registered_user = UserFactory(username=email, fullname=name) - registered_user.emails.create(address=secondary_email) - registered_user.save() - - # claim link for the now registered email is accessed while not logged in - token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] - claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' - res = self.app.get(claim_url) - - # should redirect to 'claim_user_registered' view - claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' - assert res.status_code == 302 - assert claim_registered_url in res.headers.get('Location') - - def test_claim_user_invited_with_no_email_posts_to_claim_form(self): - given_name = fake.name() - invited_user = self.project.add_unregistered_contributor( - fullname=given_name, - email=None, - auth=Auth(user=self.referrer) - ) - self.project.save() - - url = invited_user.get_claim_url(self.project._primary_key) - res = self.app.post(url, data={ - 'password': 'bohemianrhap', - 'password2': 'bohemianrhap' - }) - assert res.status_code == 400 - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_claim_user_post_with_registered_user_id(self, send_mail): - # registered user who is attempting to claim the unclaimed contributor - reg_user = UserFactory() - 
payload = { - # pk of unreg user record - 'pk': self.user._primary_key, - 'claimerId': reg_user._primary_key - } - url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' - res = self.app.post(url, json=payload) - - # mail was sent - assert send_mail.call_count == 2 - # ... to the correct address - referrer_call = send_mail.call_args_list[0] - claimer_call = send_mail.call_args_list[1] - args, _ = referrer_call - assert args[0] == self.referrer.username - args, _ = claimer_call - assert args[0] == reg_user.username - - # view returns the correct JSON - assert res.json == { - 'status': 'success', - 'email': reg_user.username, - 'fullname': self.given_name, - } - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_registered_email(self, mock_send_mail): - reg_user = UserFactory() - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project - ) - assert mock_send_mail.call_count == 2 - first_call_args = mock_send_mail.call_args_list[0][0] - assert first_call_args[0] == self.referrer.username - second_call_args = mock_send_mail.call_args_list[1][0] - assert second_call_args[0] == reg_user.username - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mail): - reg_user = UserFactory() - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project, - ) - mock_send_mail.reset_mock() - # second call raises error because it was called before throttle period - with pytest.raises(HTTPError): - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project, - ) - assert not mock_send_mail.called - - @mock.patch('website.project.views.contributor.send_claim_registered_email') - def test_claim_user_post_with_email_already_registered_sends_correct_email( - self, send_claim_registered_email): - reg_user = 
UserFactory() - payload = { - 'value': reg_user.username, - 'pk': self.user._primary_key - } - url = self.project.api_url_for('claim_user_post', uid=self.user._id) - self.app.post(url, json=payload) - assert send_claim_registered_email.called - - def test_user_with_removed_unclaimed_url_claiming(self): - """ Tests that when an unclaimed user is removed from a project, the - unregistered user object does not retain the token. - """ - self.project.remove_contributor(self.user, Auth(user=self.referrer)) - - assert self.project._primary_key not in self.user.unclaimed_records.keys() - - def test_user_with_claim_url_cannot_claim_twice(self): - """ Tests that when an unclaimed user is replaced on a project with a - claimed user, the unregistered user object does not retain the token. - """ - reg_user = AuthUserFactory() - - self.project.replace_contributor(self.user, reg_user) - - assert self.project._primary_key not in self.user.unclaimed_records.keys() - - def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self): - reg_user = AuthUserFactory() - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.get(url, auth=reg_user.auth) - assert res.status_code == 302 - res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) - token = self.user.get_unclaimed_record(self.project._primary_key)['token'] - expected = self.project.web_url_for( - 'claim_user_registered', - uid=self.user._id, - token=token, - ) - assert res.request.path == expected - - @mock.patch('framework.auth.cas.make_response_from_ticket') - def test_claim_user_when_user_is_registered_with_orcid(self, mock_response_from_ticket): - # TODO: check in qa url encoding - token = self.user.get_unclaimed_record(self.project._primary_key)['token'] - url = f'/user/{self.user._id}/{self.project._id}/claim/verify/{token}/' - # logged out user gets redirected to cas login - res1 = self.app.get(url) - assert res1.status_code == 302 - res = 
self.app.resolve_redirect(self.app.get(url)) - service_url = f'http://localhost{url}' - expected = cas.get_logout_url(service_url=cas.get_login_url(service_url=service_url)) - assert res1.location == expected - - # user logged in with orcid automatically becomes a contributor - orcid_user, validated_credentials, cas_resp = generate_external_user_with_resp(url) - mock_response_from_ticket.return_value = authenticate( - orcid_user, - redirect(url) - ) - orcid_user.set_unusable_password() - orcid_user.save() - - # The request to OSF with CAS service ticket must not have cookie and/or auth. - service_ticket = fake.md5() - url_with_service_ticket = f'{url}?ticket={service_ticket}' - res = self.app.get(url_with_service_ticket) - # The response of this request is expected to be a 302 with `Location`. - # And the redirect URL must equal to the originial service URL - assert res.status_code == 302 - redirect_url = res.headers['Location'] - assert redirect_url == url - # The response of this request is expected have the `Set-Cookie` header with OSF cookie. - # And the cookie must belong to the ORCiD user. 
- raw_set_cookie = res.headers['Set-Cookie'] - assert raw_set_cookie - simple_cookie = SimpleCookie() - simple_cookie.load(raw_set_cookie) - cookie_dict = {key: value.value for key, value in simple_cookie.items()} - osf_cookie = cookie_dict.get(settings.COOKIE_NAME, None) - assert osf_cookie is not None - user = OSFUser.from_cookie(osf_cookie) - assert user._id == orcid_user._id - # The ORCiD user must be different from the unregistered user created when the contributor was added - assert user._id != self.user._id - - # Must clear the Flask g context manual and set the OSF cookie to context - g.current_session = None - self.app.set_cookie(settings.COOKIE_NAME, osf_cookie) - res = self.app.resolve_redirect(res) - assert res.status_code == 302 - assert self.project.is_contributor(orcid_user) - assert self.project.url in res.headers.get('Location') - - def test_get_valid_form(self): - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.get(url, follow_redirects=True) - assert res.status_code == 200 - - def test_invalid_claim_form_raise_400(self): - uid = self.user._primary_key - pid = self.project._primary_key - url = f'/user/{uid}/{pid}/claim/?token=badtoken' - res = self.app.get(url, follow_redirects=True) - assert res.status_code == 400 - - @mock.patch('osf.models.OSFUser.update_search_nodes') - def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.post(url, data={ - 'username': self.user.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - - assert res.status_code == 302 - location = res.headers.get('Location') - assert 'login?service=' in location - assert 'username' in location - assert 'verification_key' in location - assert self.project._primary_key in location - - self.user.reload() - assert self.user.is_registered - assert self.user.is_active - assert self.project._primary_key not in self.user.unclaimed_records - - 
@mock.patch('osf.models.OSFUser.update_search_nodes') - def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes): - # user has multiple unclaimed records - p2 = ProjectFactory(creator=self.referrer) - self.user.add_unclaimed_record(p2, referrer=self.referrer, - given_name=fake.name()) - self.user.save() - assert len(self.user.unclaimed_records.keys()) > 1 # sanity check - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.post(url, data={ - 'username': self.given_email, - 'password': 'bohemianrhap', - 'password2': 'bohemianrhap' - }) - self.user.reload() - assert self.user.unclaimed_records == {} - - @mock.patch('osf.models.OSFUser.update_search_nodes') - def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes): - # User is created with a full name - original_name = fake.name() - unreg = UnregUserFactory(fullname=original_name) - # User invited with a different name - different_name = fake.name() - new_user = self.project.add_unregistered_contributor( - email=unreg.username, - fullname=different_name, - auth=Auth(self.project.creator), - ) - self.project.save() - # Goes to claim url - claim_url = new_user.get_claim_url(self.project._id) - self.app.post(claim_url, data={ - 'username': unreg.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - unreg.reload() - # Full name was set correctly - assert unreg.fullname == different_name - # CSL names were set correctly - parsed_name = impute_names_model(different_name) - assert unreg.given_name == parsed_name['given_name'] - assert unreg.family_name == parsed_name['family_name'] - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_claim_user_post_returns_fullname(self, send_mail): - url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' - res = self.app.post( - url, - auth=self.referrer.auth, + self.app.post( + 
f'/api/v1/project/{self.project._id}/contributors/', json={ - 'value': self.given_email, - 'pk': self.user._primary_key + 'users': [ + { + 'permission': permissions.ADMIN, + 'visible': True, + **add_contributor_json(reg_user) + }, + { + 'id': None, + 'registered': False, + 'fullname': name, + 'email': email, + 'permission': permissions.ADMIN, + 'visible': True, + } + ], + 'node_ids': [self.project._primary_key, child._primary_key] }, + follow_redirects=True, + auth=self.creator.auth ) - assert res.json['fullname'] == self.given_name - assert send_mail.called - - send_mail.assert_called_with( - self.given_email, - mails.INVITE_DEFAULT, - user=self.user, - referrer=self.referrer, - node=ANY, - claim_url=ANY, - email=self.user.email, - fullname=self.user.fullname, - branded_service=None, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - can_change_preferences=False, - logo='osf_logo' - ) - - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_claim_user_post_if_email_is_different_from_given_email(self, send_mail): - email = fake_email() # email that is different from the one the referrer gave - url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' - self.app.post(url, json={'value': email, 'pk': self.user._primary_key} ) - assert send_mail.called - assert send_mail.call_count == 2 - call_to_invited = send_mail.mock_calls[0] - call_to_invited.assert_called_with(to_addr=email) - call_to_referrer = send_mail.mock_calls[1] - call_to_referrer.assert_called_with(to_addr=self.given_email) - - def test_claim_url_with_bad_token_returns_400(self): - url = self.project.web_url_for( - 'claim_user_registered', - uid=self.user._id, - token='badtoken', - ) - res = self.app.get(url, auth=self.referrer.auth) - assert res.status_code == 400 - - def test_cannot_claim_user_with_user_who_is_already_contributor(self): - # user who is already a contirbutor to the project - contrib = AuthUserFactory() - 
self.project.add_contributor(contrib, auth=Auth(self.project.creator)) - self.project.save() - # Claiming user goes to claim url, but contrib is already logged in - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.get( - url, - auth=contrib.auth, follow_redirects=True) - # Response is a 400 - assert res.status_code == 400 - - def test_claim_user_with_project_id_adds_corresponding_claimed_tag_to_user(self): - assert OsfClaimedTags.Osf.value not in self.user.system_tags - url = self.user.get_claim_url(self.project_with_source_tag._primary_key) - res = self.app.post(url, data={ - 'username': self.user.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - - assert res.status_code == 302 - self.user.reload() - assert OsfClaimedTags.Osf.value in self.user.system_tags - - def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self): - assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') not in self.user.system_tags - url = self.user.get_claim_url(self.preprint_with_source_tag._primary_key) - res = self.app.post(url, data={ - 'username': self.user.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - - assert res.status_code == 302 - self.user.reload() - assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags - + child.reload() + assert child.contributors.count() == n_contributors_pre + 2 # 2 users in payload diff --git a/tests/test_addons.py b/tests/test_addons.py index f6fda06a024..a3c294c0af8 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -1,28 +1,25 @@ import datetime import time import functools -import logging from importlib import import_module -from unittest.mock import Mock +from unittest import mock from furl import furl import itsdangerous import jwe import jwt -from unittest import mock import pytest from django.utils import timezone from framework.auth import cas, signing 
from framework.auth.core import Auth from framework.exceptions import HTTPError from framework.sessions import get_session -from tests.base import OsfTestCase, get_default_metaschema +from tests.base import OsfTestCase from api_tests.utils import create_test_file from osf_tests.factories import ( AuthUserFactory, ProjectFactory, RegistrationFactory, - DraftRegistrationFactory, ) from website import settings from addons.base import views @@ -32,7 +29,7 @@ from addons.osfstorage.models import OsfStorageFileNode, OsfStorageFolder, OsfStorageFile from addons.osfstorage.tests.factories import FileVersionFactory from osf import features -from osf.models import files as file_models +from osf.models import files as file_models, NotificationType from osf.models.files import BaseFileNode, TrashedFileNode from osf.utils.permissions import WRITE, READ from website.project import new_private_link @@ -44,8 +41,6 @@ from api.caching.utils import storage_usage_cache from dateutil.parser import parse as parse_date from framework import sentry -from api.base.settings.defaults import API_BASE -from tests.json_api_test_app import JSONAPITestApp from website.settings import EXTERNAL_EMBER_APPS from waffle.testutils import override_flag from django.conf import settings as django_conf_settings @@ -422,7 +417,8 @@ def test_add_log_bad_action(self): self.node.reload() assert self.node.logs.count() == nlogs - def test_action_file_rename(self): + @mock.patch('osf.models.notification.send_notification') + def test_action_file_rename(self, mocK_notification): url = self.node.api_url_for('create_waterbutler_log') payload = self.build_payload( action='rename', @@ -453,6 +449,11 @@ def test_action_file_rename(self): self.node.reload() assert self.node.logs.latest().action == 'github_addon_file_renamed' + mocK_notification.assert_called_once_with( + to=self.user.username, + type=NotificationType.objects.get(name='addon_file_renamed'), + context={'path': 'foo'} + ) def 
test_action_file_rename_storage(self): url = self.node.api_url_for('create_waterbutler_log') @@ -1635,7 +1636,7 @@ def test_missing_modified_date_in_file_history(self, mock_archive): @with_sentry @mock.patch('framework.sentry.isolation_scope') @mock.patch('framework.sentry.capture_message') - def test_update_logs_to_sentry_when_called_with_disordered_metadata(self, mock_capture: Mock, mock_set_context: Mock): + def test_update_logs_to_sentry_when_called_with_disordered_metadata(self, mock_capture: mock, mock_set_context: mock): file_node = self.get_test_file() file_node.history.append({'modified': parse_date( '2017-08-22T13:54:32.100900', diff --git a/tests/test_auth.py b/tests/test_auth.py index b59c1c065ab..65d55876810 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -24,8 +24,9 @@ from framework.auth import Auth from framework.auth.decorators import must_be_logged_in from framework.sessions import get_session -from osf.models import OSFUser +from osf.models import OSFUser, NotificationType from osf.utils import permissions +from tests.utils import capture_notifications from website import mails from website import settings from website.project.decorators import ( @@ -163,22 +164,24 @@ def test_successful_external_first_login_without_attributes(self, mock_service_v cas.make_response_from_ticket(ticket, service_url) assert user == mock_external_first_login_authenticate.call_args[0][0] - @mock.patch('framework.auth.views.mails.send_mail') - def test_password_change_sends_email(self, mock_mail): + def test_password_change_sends_email(self): user = UserFactory() - user.set_password('killerqueen') + + with capture_notifications() as notifications: + user.set_password('killerqueen') user.save() - assert len(mock_mail.call_args_list) == 1 - empty, kwargs = mock_mail.call_args - kwargs['user'].reload() + assert len(notifications) == 1 + kwargs = notifications[0]['kwargs'] + notifications_type = notifications[0]['type'] - assert empty == () + assert 
notifications_type == NotificationType.Type.USER_PASSWORD_RESET assert kwargs == { 'user': user, - 'mail': mails.PASSWORD_RESET, - 'to_addr': user.username, - 'can_change_preferences': False, - 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + 'event_context': { + 'user': user.id, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } } @mock.patch('framework.auth.utils.requests.post') @@ -211,32 +214,46 @@ def test_validate_recaptcha_empty_response(self, req_post): # ensure None short circuits execution (no call to google) assert not validate_recaptcha(None) - @mock.patch('framework.auth.views.mails.send_mail') - def test_sign_up_twice_sends_two_confirmation_emails_only(self, mock_mail): + def test_sign_up_twice_sends_two_confirmation_emails_only(self): # Regression test for https://openscience.atlassian.net/browse/OSF-7060 - url = api_url_for('register_user') - sign_up_data = { - 'fullName': 'Julius Caesar', - 'email1': 'caesar@romanempire.com', - 'email2': 'caesar@romanempire.com', - 'password': 'brutusisajerk' + with capture_notifications() as notifications: + self.app.post( + api_url_for('register_user'), + json={ + 'fullName': 'Julius Caesar', + 'email1': 'caesar@romanempire.com', + 'email2': 'caesar@romanempire.com', + 'password': 'brutusisajerk' + } + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL + kwargs = notifications[0]['kwargs'] + assert kwargs['user'].username == 'caesar@romanempire.com' + assert kwargs['event_context'] == { + 'can_change_preferences': False, + 'osf_contact_email': 'fake-contact@osf.io' } - self.app.post(url, json=sign_up_data) - assert len(mock_mail.call_args_list) == 1 - args, kwargs = mock_mail.call_args - assert args == ( - 'caesar@romanempire.com', - mails.INITIAL_CONFIRM_EMAIL, - ) + with capture_notifications() as notifications: + self.app.post( + api_url_for('register_user'), + json={ + 'fullName': 'Julius Caesar', + 
'email1': 'caesar@romanempire.com', + 'email2': 'caesar@romanempire.com', + 'password': 'brutusisajerk' + } + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL + kwargs = notifications[0]['kwargs'] + assert kwargs['user'].username == 'caesar@romanempire.com' + assert kwargs['event_context'] == { + 'can_change_preferences': False, + 'osf_contact_email': 'fake-contact@osf.io' + } - self.app.post(url, json=sign_up_data) - assert len(mock_mail.call_args_list) == 2 - args, kwargs = mock_mail.call_args - assert args == ( - 'caesar@romanempire.com', - mails.INITIAL_CONFIRM_EMAIL, - ) class TestAuthObject(OsfTestCase): diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 7ab356ddba8..f16b36158be 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -1,18 +1,16 @@ #!/usr/bin/env python3 """Views tests for the OSF.""" -from unittest.mock import MagicMock, ANY +from unittest.mock import ANY import datetime as dt from unittest import mock from urllib.parse import quote_plus -from framework.auth import core import pytest from django.core.exceptions import ValidationError from django.utils import timezone from flask import request from rest_framework import status as http_status -from tests.utils import run_celery_tasks from framework import auth from framework.auth import Auth, cas @@ -25,7 +23,7 @@ ) from framework.auth.exceptions import InvalidTokenError from framework.auth.views import login_and_register_handler -from osf.models import OSFUser, NotableDomain +from osf.models import OSFUser, NotableDomain, NotificationType from osf_tests.factories import ( fake_email, AuthUserFactory, @@ -38,6 +36,7 @@ fake, OsfTestCase, ) +from tests.utils import capture_notifications from website import mails, settings from website.util import api_url_for, web_url_for @@ -50,8 +49,7 @@ def setUp(self): self.user = AuthUserFactory() self.auth = self.user.auth - 
@mock.patch('framework.auth.views.mails.send_mail') - def test_register_ok(self, _): + def test_register_ok(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -68,8 +66,7 @@ def test_register_ok(self, _): assert user.accepted_terms_of_service is None # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2902 - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_email_case_insensitive(self, _): + def test_register_email_case_insensitive(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -84,8 +81,7 @@ def test_register_email_case_insensitive(self, _): user = OSFUser.objects.get(username=email) assert user.fullname == name - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_email_with_accepted_tos(self, _): + def test_register_email_with_accepted_tos(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -101,8 +97,7 @@ def test_register_email_with_accepted_tos(self, _): user = OSFUser.objects.get(username=email) assert user.accepted_terms_of_service - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_email_without_accepted_tos(self, _): + def test_register_email_without_accepted_tos(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -118,8 +113,7 @@ def test_register_email_without_accepted_tos(self, _): user = OSFUser.objects.get(username=email) assert user.accepted_terms_of_service is None - @mock.patch('framework.auth.views.send_confirm_email_async') - def test_register_scrubs_username(self, _): + def test_register_scrubs_username(self): url = api_url_for('register_user') name = "Eunice O' \"Cornwallis\"" email, password = fake_email(), 'underpressure' @@ -195,8 +189,7 @@ def 
test_register_blocked_email_domain(self): assert users.count() == 0 @mock.patch('framework.auth.views.validate_recaptcha', return_value=True) - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_good_captcha(self, _, validate_recaptcha): + def test_register_good_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' captcha = 'some valid captcha' @@ -217,8 +210,7 @@ def test_register_good_captcha(self, _, validate_recaptcha): assert user.fullname == name @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_missing_captcha(self, _, validate_recaptcha): + def test_register_missing_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'): @@ -236,8 +228,7 @@ def test_register_missing_captcha(self, _, validate_recaptcha): assert resp.status_code == http_status.HTTP_400_BAD_REQUEST @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_bad_captcha(self, _, validate_recaptcha): + def test_register_bad_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'): @@ -300,52 +291,38 @@ def test_register_after_being_invited_as_unreg_contributor(self, mock_update_sea assert new_user.check_password(password) assert new_user.fullname == real_name - @mock.patch('framework.auth.views.send_confirm_email') - def test_register_sends_user_registered_signal(self, mock_send_confirm_email): - url = api_url_for('register_user') + def test_register_sends_user_registered_signal(self): name, email, password = 
fake.name(), fake_email(), 'underpressure' - with capture_signals() as mock_signals: - self.app.post( - url, - json={ - 'fullName': name, - 'email1': email, - 'email2': email, - 'password': password, - } - ) + with capture_notifications() as notifications: + with capture_signals() as mock_signals: + self.app.post( + api_url_for('register_user'), + json={ + 'fullName': name, + 'email1': email, + 'email2': email, + 'password': password, + } + ) assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} - assert mock_send_confirm_email.called + assert len(notifications) == 1 - @mock.patch('framework.auth.views.mails.send_mail') - def test_resend_confirmation(self, send_mail: MagicMock): + def test_resend_confirmation(self): email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} - self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert send_mail.called - send_mail.assert_called_with( - email, - mails.CONFIRM_EMAIL, - user=self.user, - confirmation_url=ANY, - email='test@mail.com', - merge_target=None, - external_id_provider=None, - branded_preprints_provider=None, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - logo='osf_logo' - ) + with capture_notifications() as notifications: + self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL self.user.reload() assert token != self.user.get_confirmation_token(email) with pytest.raises(InvalidTokenError): self.user.get_unconfirmed_email_for_token(token) - @mock.patch('framework.auth.views.mails.send_mail') - def test_click_confirmation_email(self, send_mail): + def test_click_confirmation_email(self): # TODO: check in qa url encoding 
email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) @@ -509,14 +486,22 @@ def test_resend_confirmation_not_work_for_confirmed_email(self): assert res.status_code == 400 assert res.json['message_long'] == 'Cannnot resend confirmation for confirmed emails' - @mock.patch('framework.auth.views.mails.send_mail') - def test_resend_confirmation_does_not_send_before_throttle_expires(self, send_mail): + def test_resend_confirmation_does_not_send_before_throttle_expires(self): email = 'test@mail.com' self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} - self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert send_mail.called + with capture_notifications() as notifications: + self.app.put( + url, + json={ + 'id': self.user._id, + 'email': header + }, + auth=self.user.auth + ) + + assert len(notifications) == 1 # 2nd call does not send email because throttle period has not expired res = self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) assert res.status_code == 400 @@ -837,107 +822,3 @@ def test_logout_with_no_parameter(self): resp = self.app.get(logout_url, auth=None) assert resp.status_code == http_status.HTTP_302_FOUND assert cas.get_logout_url(self.goodbye_url) == resp.headers['Location'] - - -class TestResetPassword(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = AuthUserFactory() - self.another_user = AuthUserFactory() - self.osf_key_v2 = core.generate_verification_key(verification_type='password') - self.user.verification_key_v2 = self.osf_key_v2 - self.user.verification_key = None - self.user.save() - self.get_url = web_url_for( - 'reset_password_get', - uid=self.user._id, - token=self.osf_key_v2['token'] - ) - self.get_url_invalid_key = web_url_for( - 'reset_password_get', - uid=self.user._id, - token=core.generate_verification_key() - ) - self.get_url_invalid_user = web_url_for( - 
'reset_password_get', - uid=self.another_user._id, - token=self.osf_key_v2['token'] - ) - - # successfully load reset password page - def test_reset_password_view_returns_200(self): - res = self.app.get(self.get_url) - assert res.status_code == 200 - - # raise http 400 error - def test_reset_password_view_raises_400(self): - res = self.app.get(self.get_url_invalid_key) - assert res.status_code == 400 - - res = self.app.get(self.get_url_invalid_user) - assert res.status_code == 400 - - self.user.verification_key_v2['expires'] = timezone.now() - self.user.save() - res = self.app.get(self.get_url) - assert res.status_code == 400 - - # successfully reset password - @pytest.mark.enable_enqueue_task - @mock.patch('framework.auth.cas.CasClient.service_validate') - def test_can_reset_password_if_form_success(self, mock_service_validate): - # TODO: check in qa url encoding - # load reset password page and submit email - res = self.app.get(self.get_url) - form = res.get_form('resetPasswordForm') - form['password'] = 'newpassword' - form['password2'] = 'newpassword' - res = form.submit(self.app) - - # check request URL is /resetpassword with username and new verification_key_v2 token - request_url_path = res.request.path - assert 'resetpassword' in request_url_path - assert self.user._id in request_url_path - assert self.user.verification_key_v2['token'] in request_url_path - - # check verification_key_v2 for OSF is destroyed and verification_key for CAS is in place - self.user.reload() - assert self.user.verification_key_v2 == {} - assert not self.user.verification_key is None - - # check redirection to CAS login with username and the new verification_key(CAS) - assert res.status_code == 302 - location = res.headers.get('Location') - assert 'login?service=' in location - assert f'username={quote_plus(self.user.username)}' in location - assert f'verification_key={self.user.verification_key}' in location - - # check if password was updated - self.user.reload() - assert 
self.user.check_password('newpassword') - - # check if verification_key is destroyed after service validation - mock_service_validate.return_value = cas.CasResponse( - authenticated=True, - user=self.user._id, - attributes={'accessToken': fake.md5()} - ) - ticket = fake.md5() - service_url = 'http://accounts.osf.io/?ticket=' + ticket - with run_celery_tasks(): - cas.make_response_from_ticket(ticket, service_url) - self.user.reload() - assert self.user.verification_key is None - - # log users out before they land on reset password page - def test_reset_password_logs_out_user(self): - # visit reset password link while another user is logged in - res = self.app.get(self.get_url, auth=self.another_user.auth) - # check redirection to CAS logout - assert res.status_code == 302 - location = res.headers.get('Location') - assert 'reauth' not in location - assert 'logout?service=' in location - assert 'resetpassword' in location - diff --git a/tests/test_conferences.py b/tests/test_conferences.py deleted file mode 100644 index 2f431df55bd..00000000000 --- a/tests/test_conferences.py +++ /dev/null @@ -1,798 +0,0 @@ -from unittest import mock - -import hmac -import hashlib -from io import BytesIO - -import pytest -from django.db import IntegrityError -from furl import furl - -from framework.auth import get_or_create_user -from framework.auth.core import Auth - -from osf.models import OSFUser, AbstractNode -from addons.wiki.models import WikiVersion -from osf.exceptions import BlockedEmailError -from website import settings -from website.conferences import views -from website.conferences import utils, message -from website.util import api_url_for, web_url_for - -from tests.base import OsfTestCase, fake -from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory - - -def assert_absolute(url): - parsed_domain = furl(settings.DOMAIN) - parsed_url = furl(url) - assert parsed_domain.host == parsed_url.host - - -def assert_equal_urls(first, second): - parsed_first 
= furl(first) - parsed_first.port = None - parsed_second = furl(second) - parsed_second.port = None - assert parsed_first == parsed_second - - -def create_fake_conference_nodes(n, conference): - nodes = [] - for i in range(n): - node = ProjectFactory(is_public=True) - conference.submissions.add(node) - node.save() - nodes.append(node) - return nodes - - -def create_fake_conference_nodes_bad_data(conference, n, bad_n, endpoint): - nodes = [] - for i in range(n): - node = ProjectFactory(is_public=True) - conference.submissions.add(node) - # inject bad data - if i < bad_n: - # Delete only contributor - node.contributor_set.filter(user=node.contributors.first()).delete() - node.save() - nodes.append(node) - return nodes - - -class TestConferenceUtils(OsfTestCase): - - def test_get_or_create_user_exists(self): - user = UserFactory() - fetched, created = get_or_create_user(user.fullname, user.username, is_spam=True) - assert not created - assert user._id == fetched._id - assert 'is_spam' not in fetched.system_tags - - def test_get_or_create_user_not_exists(self): - fullname = 'Roger Taylor' - username = 'roger@queen.com' - fetched, created = get_or_create_user(fullname, username, is_spam=False) - fetched.save() # in order to access m2m fields, e.g. tags - assert created - assert fetched.fullname == fullname - assert fetched.username == username - assert 'is_spam' not in fetched.system_tags - - def test_get_or_create_user_is_spam(self): - fullname = 'John Deacon' - username = 'deacon@queen.com' - fetched, created = get_or_create_user(fullname, username, is_spam=True) - fetched.save() # in order to access m2m fields, e.g. 
tags - assert created - assert fetched.fullname == fullname - assert fetched.username == username - assert 'is_spam' in fetched.system_tags - - def test_get_or_create_user_with_blocked_domain(self): - fullname = 'Kanye West' - username = 'kanye@mailinator.com' - with pytest.raises(BlockedEmailError) as e: - get_or_create_user(fullname, username, is_spam=True) - assert str(e.value) == 'Invalid Email' - - -class ContextTestCase(OsfTestCase): - MAILGUN_API_KEY = 'mailkimp' - - @classmethod - def setUpClass(cls): - super().setUpClass() - settings.MAILGUN_API_KEY, cls._MAILGUN_API_KEY = cls.MAILGUN_API_KEY, settings.MAILGUN_API_KEY - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - settings.MAILGUN_API_KEY = cls._MAILGUN_API_KEY - - def make_context(self, method='POST', **kwargs): - data = { - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - } - data.update(kwargs.pop('data', {})) - data = { - key: value - for key, value in data.items() - if value is not None - } - return self.app.application.test_request_context(method=method, data=data, **kwargs) - - -class TestProvisionNode(ContextTestCase): - - def setUp(self): - super().setUp() - self.node = ProjectFactory() - self.user = self.node.creator - self.conference = ConferenceFactory() - self.body = 'dragon on my back' - self.content = b'dragon attack' - self.attachment = BytesIO(self.content) - self.recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - self.conference.endpoint, - ) - - def make_context(self, **kwargs): - data = { - 'attachment-count': '1', - 'attachment-1': (self.attachment, 'attachment-1'), - 'X-Mailgun-Sscore': 0, - 'recipient': self.recipient, - 'stripped-text': self.body, - } - data.update(kwargs.pop('data', {})) - return super().make_context(data=data, **kwargs) - - def 
test_provision(self): - with self.make_context(): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert self.conference.endpoint in self.node.system_tags - assert self.node in self.conference.submissions.all() - assert 'spam' not in self.node.system_tags - - def test_provision_private(self): - self.conference.public_projects = False - self.conference.save() - with self.make_context(): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert not self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert 'spam' not in self.node.system_tags - - def test_provision_spam(self): - with self.make_context(data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert not self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert 'spam' in self.node.system_tags - - @mock.patch('website.conferences.utils.waterbutler_api_url_for') - @mock.patch('website.conferences.utils.requests.put') - def test_upload(self, mock_put, mock_get_url): - mock_get_url.return_value = 'http://queen.com/' - file_name = 'hammer-to-fall' - self.attachment.filename = file_name - self.attachment.content_type = 'application/json' - utils.upload_attachment(self.user, self.node, self.attachment) - mock_get_url.assert_called_with( - self.node._id, - 'osfstorage', - _internal=True, - base_url=self.node.osfstorage_region.waterbutler_url, - cookie=self.user.get_or_create_cookie().decode(), - name=file_name - ) - mock_put.assert_called_with( - mock_get_url.return_value, - data=self.content, - 
cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, - ) - - @mock.patch('website.conferences.utils.waterbutler_api_url_for') - @mock.patch('website.conferences.utils.requests.put') - def test_upload_no_file_name(self, mock_put, mock_get_url): - mock_get_url.return_value = 'http://queen.com/' - self.attachment.filename = '' - self.attachment.content_type = 'application/json' - utils.upload_attachment(self.user, self.node, self.attachment) - mock_get_url.assert_called_with( - self.node._id, - 'osfstorage', - _internal=True, - base_url=self.node.osfstorage_region.waterbutler_url, - cookie=self.user.get_or_create_cookie().decode(), - name=settings.MISSING_FILE_NAME, - ) - mock_put.assert_called_with( - mock_get_url.return_value, - data=self.content, - cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, - ) - - @mock.patch('website.conferences.utils.upload_attachments') - def test_add_poster_by_email(self, mock_upload_attachments): - conference = ConferenceFactory() - - with self.make_context(data={'from': 'bdawk@sb52champs.com', 'subject': 'It\'s PARTY TIME!'}): - msg = message.ConferenceMessage() - views.add_poster_by_email(conference, msg) - - user = OSFUser.objects.get(username='bdawk@sb52champs.com') - assert user.email == 'bdawk@sb52champs.com' - assert user.fullname == user._id # user's shouldn't be able to use email as fullname, so we use the guid. 
- - -class TestMessage(ContextTestCase): - PUSH_CONTEXT = False - - def test_verify_signature_valid(self): - with self.make_context(): - msg = message.ConferenceMessage() - msg.verify_signature() - - def test_verify_signature_invalid(self): - with self.make_context(data={'signature': 'fake'}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.verify_signature() - - def test_is_spam_false_missing_headers(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1}, - ) - with ctx: - msg = message.ConferenceMessage() - assert not msg.is_spam - - def test_is_spam_false_all_headers(self): - ctx = self.make_context( - method='POST', - data={ - 'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1, - 'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0], - 'X-Mailgun-Spf': message.SPF_PASS_VALUES[0], - }, - ) - with ctx: - msg = message.ConferenceMessage() - assert not msg.is_spam - - def test_is_spam_true_sscore(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_is_spam_true_dkim(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0][::-1]}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_is_spam_true_spf(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Spf': message.SPF_PASS_VALUES[0][::-1]}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_subject(self): - ctx = self.make_context( - method='POST', - data={'subject': 'RE: Hip Hopera'}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.subject == 'Hip Hopera' - - def test_recipient(self): - address = 'test-conference@osf.io' - ctx = self.make_context( - method='POST', - 
data={'recipient': address}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.recipient == address - - def test_text(self): - text = 'welcome to my nuclear family' - ctx = self.make_context( - method='POST', - data={'stripped-text': text}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.text == text - - def test_sender_name(self): - names = [ - (' Fred', 'Fred'), - ('Me‰¨ü', 'Me‰¨ü'), - ('fred@queen.com', 'fred@queen.com'), - ('Fred ', 'Fred'), - ('"Fred" ', 'Fred'), - ] - for name in names: - with self.make_context(data={'from': name[0]}): - msg = message.ConferenceMessage() - assert msg.sender_name == name[1] - - def test_sender_email(self): - emails = [ - ('fred@queen.com', 'fred@queen.com'), - ('FRED@queen.com', 'fred@queen.com') - ] - for email in emails: - with self.make_context(data={'from': email[0]}): - msg = message.ConferenceMessage() - assert msg.sender_email == email[1] - - def test_route_invalid_pattern(self): - with self.make_context(data={'recipient': 'spam@osf.io'}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_route_invalid_test(self): - recipient = '{}conf-talk@osf.io'.format('' if settings.DEV_MODE else 'stage-') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_route_valid_alternate(self): - conf = ConferenceFactory(endpoint='chocolate', active=True) - conf.name = 'Chocolate Conference' - conf.field_names['submission2'] = 'data' - conf.save() - recipient = '{}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - assert msg.conference_name == 'chocolate' - assert msg.conference_category == 'data' - 
conf.__class__.delete(conf) - - def test_route_valid_b(self): - recipient = '{}conf-poster@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - assert msg.conference_name == 'conf' - assert msg.conference_category == 'poster' - - def test_alternate_route_invalid(self): - recipient = '{}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_attachments_count_zero(self): - with self.make_context(data={'attachment-count': '0'}): - msg = message.ConferenceMessage() - assert msg.attachments == [] - - def test_attachments_count_one(self): - content = b'slightly mad' - sio = BytesIO(content) - ctx = self.make_context( - method='POST', - data={ - 'attachment-count': 1, - 'attachment-1': (sio, 'attachment-1'), - }, - ) - with ctx: - msg = message.ConferenceMessage() - assert len(msg.attachments) == 1 - assert msg.attachments[0].read() == content - - -class TestConferenceEmailViews(OsfTestCase): - - def test_redirect_to_meetings_url(self): - url = '/presentations/' - res = self.app.get(url) - assert res.status_code == 302 - res = self.app.get(url, follow_redirects=True) - assert res.request.path == '/meetings/' - - def test_conference_submissions(self): - AbstractNode.objects.all().delete() - conference1 = ConferenceFactory() - conference2 = ConferenceFactory() - # Create conference nodes - create_fake_conference_nodes( - 3, - conference1, - ) - create_fake_conference_nodes( - 2, - conference2, - ) - - url = api_url_for('conference_submissions') - res = self.app.get(url) - assert res.json['success'] - - def test_conference_plain_returns_200(self): - conference = ConferenceFactory() - url = 
web_url_for('conference_results__plain', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - - def test_conference_data(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - # Regression for OSF-8864 to confirm bad project data does not make whole conference break - def test_conference_bad_data(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - n_conference_nodes_bad = 1 - create_fake_conference_nodes_bad_data( - conference, - n_conference_nodes, - n_conference_nodes_bad, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - n_conference_nodes_bad - - def test_conference_data_url_upper(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint.upper()) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - def test_conference_data_tag_upper(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 
200 - assert len(res.json) == n_conference_nodes - - def test_conference_results(self): - conference = ConferenceFactory() - - url = web_url_for('conference_results', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - - def test_confererence_results_endpoint_is_case_insensitive(self): - ConferenceFactory(endpoint='StudySwap') - url = web_url_for('conference_results', meeting='studyswap') - res = self.app.get(url) - assert res.status_code == 200 - - -class TestConferenceModel(OsfTestCase): - - def test_endpoint_is_required(self): - with pytest.raises(IntegrityError): - ConferenceFactory(endpoint=None, name=fake.company()).save() - - def test_name_is_required(self): - with pytest.raises(IntegrityError): - ConferenceFactory(endpoint='spsp2014', name=None).save() - - def test_default_field_names(self): - conf = ConferenceFactory(endpoint='cookie', name='Cookies Conference') - conf.save() - assert conf.field_names['submission1'] == 'poster' - assert conf.field_names['mail_subject'] == 'Presentation title' - - def test_conference_valid_submissions(self): - conf = ConferenceFactory(endpoint='Hamburgers', name='Hamburger conference') - conf.save() - - # 3 good nodes added - create_fake_conference_nodes(3, conf) - - # Deleted node added - deleted_node = ProjectFactory(is_public=True) - deleted_node.is_deleted = True - deleted_node.save() - conf.submissions.add(deleted_node) - - # Private node added - private_node = ProjectFactory(is_public=False) - conf.submissions.add(private_node) - - assert conf.submissions.count() == 5 - assert conf.valid_submissions.count() == 3 - - -class TestConferenceIntegration(ContextTestCase): - - @mock.patch('website.conferences.views.send_mail') - @mock.patch('website.conferences.utils.upload_attachments') - def test_integration(self, mock_upload, mock_send_mail): - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - conference = ConferenceFactory() - body = 'dragon on my back' 
- content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert mock_upload.called - users = OSFUser.objects.filter(username=username) - assert users.count() == 1 - nodes = AbstractNode.objects.filter(title=title) - assert nodes.count() == 1 - node = nodes[0] - assert WikiVersion.objects.get_for_node(node, 'home').content == body - assert mock_send_mail.called - call_args, call_kwargs = mock_send_mail.call_args - assert_absolute(call_kwargs['conf_view_url']) - assert_absolute(call_kwargs['set_password_url']) - assert_absolute(call_kwargs['profile_url']) - assert_absolute(call_kwargs['file_url']) - assert_absolute(call_kwargs['node_url']) - - @mock.patch('website.conferences.views.send_mail') - def test_integration_inactive(self, mock_send_mail): - conference = ConferenceFactory(active=False) - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - body = 'dragon on my back' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - res = self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} 
<{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - }, - ) - assert res.status_code == 406 - call_args, call_kwargs = mock_send_mail.call_args - assert call_args == (username, views.CONFERENCE_INACTIVE) - assert call_kwargs['fullname'] == fullname - assert_equal_urls( - call_kwargs['presentations_url'], - web_url_for('conference_view', _absolute=True), - ) - - @mock.patch('website.conferences.views.send_mail') - @mock.patch('website.conferences.utils.upload_attachments') - def test_integration_wo_full_name(self, mock_upload, mock_send_mail): - username = 'no_full_name@mail.com' - title = 'no full name only email' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': username, - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert mock_upload.called - users = OSFUser.objects.filter(username=username) - assert users.count() == 1 - nodes = AbstractNode.objects.filter(title=title) - assert nodes.count() == 1 - node = nodes[0] - assert WikiVersion.objects.get_for_node(node, 'home').content == body - assert mock_send_mail.called - call_args, call_kwargs = mock_send_mail.call_args - assert_absolute(call_kwargs['conf_view_url']) - assert_absolute(call_kwargs['set_password_url']) - assert_absolute(call_kwargs['profile_url']) - assert_absolute(call_kwargs['file_url']) - assert_absolute(call_kwargs['node_url']) - - 
@mock.patch('website.conferences.views.send_mail') - @mock.patch('website.conferences.utils.upload_attachments') - def test_create_conference_node_with_same_name_as_existing_node(self, mock_upload, mock_send_mail): - conference = ConferenceFactory() - user = UserFactory() - title = 'Long Live Greg' - ProjectFactory(creator=user, title=title) - - body = 'Greg is a good plant' - content = 'Long may they reign.' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{user.fullname} <{user.username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1':(BytesIO(content.encode()), 'attachment-1') - }, - ) - - assert AbstractNode.objects.filter(title=title, creator=user).count() == 2 - assert mock_upload.called - assert mock_send_mail.called - - - @mock.patch('website.conferences.views.send_mail') - def test_conferences_discontinued(self, mock_send_mail): - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - from waffle.testutils import override_flag - from osf import features - with override_flag(features.DISABLE_MEETINGS, active=True): - res = self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - 
).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert res.status_code == 501 - assert res.json['message_short'] == 'Service has been discontinued' - - assert mock_send_mail.called diff --git a/tests/test_events.py b/tests/test_events.py index 55b51fb3e8e..1d2a08e1856 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -1,13 +1,11 @@ from collections import OrderedDict from unittest import mock -from pytest import raises -from website.notifications.events.base import Event, register, event_registry +from website.notifications.events.base import event_registry from website.notifications.events.files import ( FileAdded, FileRemoved, FolderCreated, FileUpdated, AddonFileCopied, AddonFileMoved, AddonFileRenamed, ) -from website.notifications.events import utils from addons.base import signals from framework.auth import Auth from osf_tests import factories @@ -18,38 +16,6 @@ email_digest = 'email_digest' -class TestEventNotImplemented(OsfTestCase): - """ - Test non-implemented errors - """ - @register('not_implemented') - class NotImplementedEvent(Event): - pass - - def setUp(self): - super().setUp() - self.user = factories.UserFactory() - self.auth = Auth(user=self.user) - self.node = factories.ProjectFactory(creator=self.user) - self.event = self.NotImplementedEvent(self.user, self.node, 'not_implemented') - - def test_text(self): - with raises(NotImplementedError): - text = self.event.text_message - - def test_html(self): - with raises(NotImplementedError): - html = self.event.html_message - - def test_url(self): - with raises(NotImplementedError): - url = self.event.url - - def test_event(self): - with raises(NotImplementedError): - event = self.event.event_type - - class TestListOfFiles(OsfTestCase): """ List files given a list @@ -85,9 +51,6 @@ def 
setUp(self): ] } - def test_list_of_files(self): - assert ['e', 'f', 'c', 'd'] == utils.list_of_files(self.tree) - class TestEventExists(OsfTestCase): # Add all possible called events here to ensure that the Event class can @@ -162,24 +125,16 @@ def setUp(self): self.user_2 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) # subscription - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + 'file_updated', - owner=self.project, - event_name='file_updated', - ) - self.sub.save() self.event = event_registry['file_updated'](self.user_2, self.project, 'file_updated', payload=file_payload) def test_info_formed_correct(self): - assert f'{wb_path}_file_updated' == self.event.event_type + assert f'file_updated' == self.event.event_type assert f'updated file "{materialized.lstrip("/")}".' == self.event.html_message assert f'updated file "{materialized.lstrip("/")}".' == self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_file_updated(self, mock_notify): + def test_file_updated(self): self.event.perform() - # notify('exd', 'file_updated', 'user', self.project, timezone.now()) - assert mock_notify.called + assert False class TestFileAdded(NotificationTestCase): @@ -188,25 +143,17 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.project_subscription.save() self.user2 = factories.UserFactory() self.event = event_registry['file_added'](self.user2, self.project, 'file_added', payload=file_payload) def test_info_formed_correct(self): - assert f'{wb_path}_file_updated' == self.event.event_type + assert f'file_updated' == self.event.event_type assert f'added file "{materialized.lstrip("/")}".' 
== self.event.html_message assert f'added file "{materialized.lstrip("/")}".' == self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_file_added(self, mock_notify): + def test_file_added(self): self.event.perform() - # notify('exd', 'file_updated', 'user', self.project, timezone.now()) - assert mock_notify.called + assert False class TestFileRemoved(NotificationTestCase): @@ -215,12 +162,6 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.project_subscription.save() self.user2 = factories.UserFactory() self.event = event_registry['file_removed']( self.user2, self.project, 'file_removed', payload=file_deleted_payload @@ -237,11 +178,9 @@ def test_info_formed_correct_folder(self): assert f'removed folder "{materialized.lstrip("/")}/".' == self.event.html_message assert f'removed folder "{materialized.lstrip("/")}/".' 
== self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_file_removed(self, mock_notify): + def test_file_removed(self): self.event.perform() - # notify('exd', 'file_updated', 'user', self.project, timezone.now()) - assert mock_notify.called + assert False class TestFolderCreated(NotificationTestCase): @@ -250,12 +189,6 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.project_subscription.save() self.user2 = factories.UserFactory() self.event = event_registry['folder_created']( self.user2, self.project, 'folder_created', payload=folder_created_payload @@ -266,10 +199,9 @@ def test_info_formed_correct(self): assert 'created folder "Three/".' == self.event.html_message assert 'created folder "Three/".' 
== self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_folder_added(self, mock_notify): + def test_folder_added(self): self.event.perform() - assert mock_notify.called + assert False class TestFolderFileRenamed(OsfTestCase): @@ -279,13 +211,6 @@ def setUp(self): self.auth = Auth(user=self.user_1) self.user_2 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) - # subscription - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + 'file_updated', - owner=self.project, - event_name='file_updated', - ) - self.sub.save() # Payload file_renamed_payload = file_move_payload(self.project, self.project) @@ -293,8 +218,6 @@ def setUp(self): self.user_1, self.project, 'addon_file_renamed', payload=file_renamed_payload ) - self.sub.email_digest.add(self.user_2) - self.sub.save() def test_rename_file_html(self): self.event.payload['destination']['materialized'] = '/One/Paper14.txt' @@ -332,31 +255,6 @@ def setUp(self): self.event = event_registry['addon_file_moved']( self.user_2, self.private_node, 'addon_file_moved', payload=file_moved_payload ) - # Subscriptions - # for parent node - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.sub.save() - # for private node - self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - owner=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( - _id='{pid}_{wbid}_file_updated'.format( - pid=self.project._id, - wbid=self.event.waterbutler_id - ), - owner=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() def test_info_formed_correct(self): # Move Event: Ensures data is correctly formatted @@ -364,27 +262,28 @@ def test_info_formed_correct(self): # 
assert 'moved file "{}".' == self.event.html_message # assert 'created folder "Three/".' == self.event.text_message - @mock.patch('website.notifications.emails.store_emails') - def test_user_performing_action_no_email(self, mock_store): + def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications self.sub.email_digest.add(self.user_2) self.sub.save() self.event.perform() + assert False, 'Redo test' assert 0 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_perform_store_called_once(self, mock_store): + def test_perform_store_called_once(self): # Move Event: Tests that store_emails is called once from perform self.sub.email_transactional.add(self.user_1) self.sub.save() self.event.perform() + assert False, 'Redo test' assert 1 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_perform_store_one_of_each(self, mock_store): + def test_perform_store_one_of_each(self): # Move Event: Tests that store_emails is called 3 times, one in # each category + assert False, 'Redo test' + self.sub.email_transactional.add(self.user_1) self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() @@ -399,8 +298,8 @@ def test_perform_store_one_of_each(self, mock_store): self.event.perform() assert 3 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_remove_user_sent_once(self, mock_store): + def test_remove_user_sent_once(self): + assert False, 'Redo test' # Move Event: Tests removed user is removed once. 
Regression self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() @@ -427,31 +326,6 @@ def setUp(self): self.user_2, self.private_node, 'addon_file_copied', payload=file_copied_payload ) - # Subscriptions - # for parent node - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.sub.save() - # for private node - self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - owner=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( - _id='{pid}_{wbid}_file_updated'.format( - pid=self.project._id, - wbid=self.event.waterbutler_id - ), - owner=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() def test_info_correct(self): # Move Event: Ensures data is correctly formatted @@ -463,9 +337,9 @@ def test_info_correct(self): ' in Consolidate to "Two/Paper13.txt" in OSF' ' Storage in Consolidate.') == self.event.text_message - @mock.patch('website.notifications.emails.store_emails') - def test_copied_one_of_each(self, mock_store): + def test_copied_one_of_each(self): # Copy Event: Tests that store_emails is called 2 times, two with + assert False, 'Redo test' # permissions, one without self.sub.email_transactional.add(self.user_1) self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) @@ -481,10 +355,10 @@ def test_copied_one_of_each(self, mock_store): self.event.perform() assert 2 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_user_performing_action_no_email(self, mock_store): + def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications + assert False, 'Redo test' 
self.sub.email_digest.add(self.user_2) self.sub.save() self.event.perform() @@ -509,92 +383,8 @@ def setUp(self): self.user_2, self.private_node, 'addon_file_moved', payload=file_moved_payload ) - # Subscriptions - # for parent node - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.sub.save() - # for private node - self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - owner=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( - _id='{pid}_{wbid}_file_updated'.format( - pid=self.project._id, - wbid=self.event.waterbutler_id - ), - owner=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() - def test_warn_user(self): - # Tests that a user with a sub in the origin node gets a warning that - # they are no longer tracking the file. 
- self.sub.email_transactional.add(self.user_1) - self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.project.save() - self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.private_node.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.private_sub.none.add(self.user_3) - self.private_sub.save() - moved, warn, removed = utils.categorize_users( - self.event.user, self.event.event_type, self.event.source_node, - self.event.event_type, self.event.node - ) - assert {email_transactional: [], email_digest: [self.user_3._id], 'none': []} == warn - assert {email_transactional: [self.user_1._id], email_digest: [], 'none': []} == moved - def test_moved_user(self): - # Doesn't warn a user with two different subs, but does send a - # moved email - self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.project.save() - self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.private_node.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.private_sub.email_transactional.add(self.user_3) - self.private_sub.save() - moved, warn, removed = utils.categorize_users( - self.event.user, self.event.event_type, self.event.source_node, - self.event.event_type, self.event.node - ) - assert {email_transactional: [], email_digest: [], 'none': []} == warn - assert {email_transactional: [self.user_3._id], email_digest: [], 'none': []} == moved - - def test_remove_user(self): - self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.project.save() - self.file_sub.email_transactional.add(self.user_3) - self.file_sub.save() - moved, warn, removed = utils.categorize_users( - self.event.user, self.event.event_type, self.event.source_node, - self.event.event_type, self.event.node - ) - assert {email_transactional: [self.user_3._id], email_digest: [], 'none': []} == removed - - def 
test_node_permissions(self): - self.private_node.add_contributor(self.user_3, permissions=WRITE) - self.private_sub.email_digest.add(self.user_3, self.user_4) - remove = {email_transactional: [], email_digest: [], 'none': []} - warn = {email_transactional: [], email_digest: [self.user_3._id, self.user_4._id], 'none': []} - subbed, remove = utils.subscriptions_node_permissions( - self.private_node, - warn, - remove - ) - assert {email_transactional: [], email_digest: [self.user_3._id], 'none': []} == subbed - assert {email_transactional: [], email_digest: [self.user_4._id], 'none': []} == remove class TestSubscriptionManipulations(OsfTestCase): @@ -627,32 +417,6 @@ def setUp(self): self.dup_1_3 = {email_transactional: ['e1234', 'f1234'], 'none': ['h1234', 'g1234'], 'email_digest': ['a1234', 'c1234']} - def test_subscription_user_difference(self): - result = utils.subscriptions_users_difference(self.emails_1, self.emails_3) - assert self.diff_1_3 == result - - def test_subscription_user_union(self): - result = utils.subscriptions_users_union(self.emails_1, self.emails_2) - assert set(self.union_1_2['email_transactional']) == set(result['email_transactional']) - assert set(self.union_1_2['none']) == set(result['none']) - assert set(self.union_1_2['email_digest']) == set(result['email_digest']) - - def test_remove_duplicates(self): - result = utils.subscriptions_users_remove_duplicates( - self.emails_1, self.emails_4, remove_same=False - ) - assert set(self.dup_1_3['email_transactional']) == set(result['email_transactional']) - assert set(self.dup_1_3['none']) == set(result['none']) - assert set(self.dup_1_3['email_digest']) == set(result['email_digest']) - - def test_remove_duplicates_true(self): - result = utils.subscriptions_users_remove_duplicates( - self.emails_1, self.emails_1, remove_same=True - ) - - assert set(result['none']) == {'h1234', 'g1234', 'i1234'} - assert result['email_digest'] == [] - assert result['email_transactional'] == [] wb_path = 
'5581cb50a24f710b0f4623f9' diff --git a/tests/test_metadata.py b/tests/test_metadata.py index c29365f4151..5f81c35fc5c 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -3,7 +3,6 @@ import pytest from django.core.exceptions import ValidationError -from framework.forms.utils import process_payload from osf.models import RegistrationSchema from osf.utils.migrations import ensure_schemas from website.project.metadata.schemas import OSF_META_SCHEMA_FILES @@ -31,18 +30,6 @@ def test_registrationschema_is_fine_with_same_name_but_different_version(self): RegistrationSchema(name='foo', schema={'foo': 42}, schema_version=2).save() assert RegistrationSchema.objects.filter(name='foo').count() == 2 - def test_process(self): - processed = process_payload({'foo': 'bar&baz'}) - assert processed['foo'] == 'bar%26baz' - - def test_process_list(self): - processed = process_payload({'foo': ['bar', 'baz&bob']}) - assert processed['foo'][1] == 'baz%26bob' - - def test_process_whitespace(self): - processed = process_payload({'foo': 'bar baz'}) - assert processed['foo'] == 'bar baz' - if __name__ == '__main__': unittest.main() diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index 543fb7d6068..50906fff424 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -45,6 +45,7 @@ assert_datetime_equal, test_app ) +from tests.utils import capture_notifications from website.project.decorators import check_can_access from website.project.model import has_anonymous_link from website.project.views.node import _should_show_wiki_widget @@ -425,19 +426,21 @@ def test_external_login_confirm_email_get_create(self, mock_welcome): assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.send_mail') - def test_external_login_confirm_email_get_link(self, mock_link_confirm): + def test_external_login_confirm_email_get_link(self): self.user.external_identity['orcid'][self.provider_id] = 'LINK' self.user.save() assert 
not self.user.is_registered url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') - res = self.app.get(url) + with capture_notifications() as notifications: + res = self.app.get(url) + + assert len(notifications) == 1 + assert res.status_code == 302, 'redirects to cas login' assert 'You should be redirected automatically' in str(res.html) assert '/login?service=' in res.location assert 'new=true' not in parse.unquote(res.location) - assert mock_link_confirm.call_count == 1 self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' @@ -709,90 +712,6 @@ def test_confirm_user_signal_called_when_user_confirms_email(self): assert mock_signals.signals_sent() == {auth.signals.user_confirmed} -# copied from tests/test_comments.py -class TestCommentViews(OsfTestCase): - - def setUp(self): - super().setUp() - self.project = ProjectFactory(is_public=True) - self.user = AuthUserFactory() - self.project.add_contributor(self.user) - self.project.save() - self.user.save() - - def test_view_project_comments_updates_user_comments_view_timestamp(self): - url = self.project.api_url_for('update_comments_timestamp') - res = self.app.put(url, json={ - 'page': 'node', - 'rootId': self.project._id - }, auth=self.user.auth) - self.user.reload() - - user_timestamp = self.user.comments_viewed_timestamp[self.project._id] - view_timestamp = timezone.now() - assert_datetime_equal(user_timestamp, view_timestamp) - - def test_confirm_non_contrib_viewers_dont_have_pid_in_comments_view_timestamp(self): - non_contributor = AuthUserFactory() - url = self.project.api_url_for('update_comments_timestamp') - res = self.app.put(url, json={ - 'page': 'node', - 'rootId': self.project._id - }, auth=self.user.auth) - - non_contributor.reload() - assert self.project._id not in non_contributor.comments_viewed_timestamp - - def test_view_comments_updates_user_comments_view_timestamp_files(self): - osfstorage = 
self.project.get_addon('osfstorage') - root_node = osfstorage.get_root() - test_file = root_node.append_file('test_file') - test_file.create_version(self.user, { - 'object': '06d80e', - 'service': 'cloud', - osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', - }, { - 'size': 1337, - 'contentType': 'img/png' - }).save() - - url = self.project.api_url_for('update_comments_timestamp') - res = self.app.put(url, json={ - 'page': 'files', - 'rootId': test_file._id - }, auth=self.user.auth) - self.user.reload() - - user_timestamp = self.user.comments_viewed_timestamp[test_file._id] - view_timestamp = timezone.now() - assert_datetime_equal(user_timestamp, view_timestamp) - - # Regression test for https://openscience.atlassian.net/browse/OSF-5193 - # moved from tests/test_comments.py - def test_find_unread_includes_edited_comments(self): - project = ProjectFactory() - user = AuthUserFactory() - project.add_contributor(user, save=True) - comment = CommentFactory(node=project, user=project.creator) - n_unread = Comment.find_n_unread(user=user, node=project, page='node') - assert n_unread == 1 - - url = project.api_url_for('update_comments_timestamp') - payload = {'page': 'node', 'rootId': project._id} - self.app.put(url, json=payload, auth=user.auth) - user.reload() - n_unread = Comment.find_n_unread(user=user, node=project, page='node') - assert n_unread == 0 - - # Edit previously read comment - comment.edit( - auth=Auth(project.creator), - content='edited', - save=True - ) - n_unread = Comment.find_n_unread(user=user, node=project, page='node') - assert n_unread == 1 - @mock.patch('website.views.PROXY_EMBER_APPS', False) class TestResolveGuid(OsfTestCase): def setUp(self): @@ -834,4 +753,3 @@ def test_preprint_provider_with_osf_domain(self, mock_use_ember_app): url = web_url_for('resolve_guid', _guid=True, guid=preprint._id) res = self.app.get(url) mock_use_ember_app.assert_called_with() - diff --git a/tests/test_node_licenses.py b/tests/test_node_licenses.py deleted file 
mode 100644 index d16cdb500d9..00000000000 --- a/tests/test_node_licenses.py +++ /dev/null @@ -1,138 +0,0 @@ -import builtins -import json -from unittest import mock - -import pytest -from django.core.exceptions import ValidationError - -from framework.auth import Auth -from osf_tests.factories import (AuthUserFactory, NodeLicenseRecordFactory, - ProjectFactory) -from tests.base import OsfTestCase -from osf.utils.migrations import ensure_licenses -from tests.utils import assert_logs, assert_not_logs -from website import settings -from osf.models.licenses import NodeLicense, serialize_node_license_record, serialize_node_license -from osf.models import NodeLog -from osf.exceptions import NodeStateError - - - -CHANGED_NAME = 'FOO BAR' -CHANGED_TEXT = 'Some good new text' -CHANGED_PROPERTIES = ['foo', 'bar'] -LICENSE_TEXT = json.dumps({ - 'MIT': { - 'name': CHANGED_NAME, - 'text': CHANGED_TEXT, - 'properties': CHANGED_PROPERTIES - } -}) - -class TestNodeLicenses(OsfTestCase): - - def setUp(self): - super().setUp() - - self.user = AuthUserFactory() - self.node = ProjectFactory(creator=self.user) - self.LICENSE_NAME = 'MIT License' - self.node_license = NodeLicense.objects.get(name=self.LICENSE_NAME) - self.YEAR = '2105' - self.COPYRIGHT_HOLDERS = ['Foo', 'Bar'] - self.node.node_license = NodeLicenseRecordFactory( - node_license=self.node_license, - year=self.YEAR, - copyright_holders=self.COPYRIGHT_HOLDERS - ) - self.node.save() - - def test_serialize_node_license(self): - serialized = serialize_node_license(self.node_license) - assert serialized['name'] == self.LICENSE_NAME - assert serialized['id'] == self.node_license.license_id - assert serialized['text'] == self.node_license.text - - def test_serialize_node_license_record(self): - serialized = serialize_node_license_record(self.node.node_license) - assert serialized['name'] == self.LICENSE_NAME - assert serialized['id'] == self.node_license.license_id - assert serialized['text'] == self.node_license.text - assert 
serialized['year'] == self.YEAR - assert serialized['copyright_holders'] == self.COPYRIGHT_HOLDERS - - def test_serialize_node_license_record_None(self): - self.node.node_license = None - serialized = serialize_node_license_record(self.node.node_license) - assert serialized == {} - - def test_copy_node_license_record(self): - record = self.node.node_license - copied = record.copy() - assert copied._id is not None - assert record._id != copied._id - for prop in ('license_id', 'name', 'node_license'): - assert getattr(record, prop) == getattr(copied, prop) - - @pytest.mark.enable_implicit_clean - def test_license_uniqueness_on_id_is_enforced_in_the_database(self): - NodeLicense(license_id='foo', name='bar', text='baz').save() - pytest.raises(ValidationError, NodeLicense(license_id='foo', name='buz', text='boo').save) - - def test_ensure_licenses_updates_existing_licenses(self): - assert ensure_licenses() == (0, 18) - - def test_ensure_licenses_no_licenses(self): - before_count = NodeLicense.objects.all().count() - NodeLicense.objects.all().delete() - assert not NodeLicense.objects.all().count() - - ensure_licenses() - assert before_count == NodeLicense.objects.all().count() - - def test_ensure_licenses_some_missing(self): - NodeLicense.objects.get(license_id='LGPL3').delete() - with pytest.raises(NodeLicense.DoesNotExist): - NodeLicense.objects.get(license_id='LGPL3') - ensure_licenses() - found = NodeLicense.objects.get(license_id='LGPL3') - assert found is not None - - def test_ensure_licenses_updates_existing(self): - with mock.patch.object(builtins, 'open', mock.mock_open(read_data=LICENSE_TEXT)): - ensure_licenses() - MIT = NodeLicense.objects.get(license_id='MIT') - assert MIT.name == CHANGED_NAME - assert MIT.text == CHANGED_TEXT - assert MIT.properties == CHANGED_PROPERTIES - - @assert_logs(NodeLog.CHANGED_LICENSE, 'node') - def test_Node_set_node_license(self): - GPL3 = NodeLicense.objects.get(license_id='GPL3') - NEW_YEAR = '2014' - COPYLEFT_HOLDERS = 
['Richard Stallman'] - self.node.set_node_license( - { - 'id': GPL3.license_id, - 'year': NEW_YEAR, - 'copyrightHolders': COPYLEFT_HOLDERS - }, - auth=Auth(self.user), - save=True - ) - - assert self.node.node_license.license_id == GPL3.license_id - assert self.node.node_license.name == GPL3.name - assert self.node.node_license.copyright_holders == COPYLEFT_HOLDERS - - @assert_not_logs(NodeLog.CHANGED_LICENSE, 'node') - def test_Node_set_node_license_invalid(self): - with pytest.raises(NodeStateError): - self.node.set_node_license( - { - 'id': 'SOME ID', - 'year': 'foo', - 'copyrightHolders': [] - }, - auth=Auth(self.user) - ) diff --git a/tests/test_notifications.py b/tests/test_notifications.py deleted file mode 100644 index b52190ca999..00000000000 --- a/tests/test_notifications.py +++ /dev/null @@ -1,2031 +0,0 @@ -import collections -from unittest import mock - -import pytest -from babel import dates, Locale -from schema import Schema, And, Use, Or -from django.utils import timezone - -from framework.auth import Auth -from osf.models import Comment, NotificationDigest, NotificationSubscription, Guid, OSFUser - -from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications import constants -from website.notifications import emails -from website.notifications import utils -from website import mails -from website.profile.utils import get_profile_image_url -from website.project.signals import contributor_removed, node_deleted -from website.reviews import listeners -from website.util import api_url_for -from website.util import web_url_for -from website import settings - -from osf_tests import factories -from osf.utils import permissions -from tests.base import capture_signals -from tests.base import OsfTestCase, NotificationTestCase - - - -class TestNotificationsModels(OsfTestCase): - - def setUp(self): - 
super().setUp() - # Create project with component - self.user = factories.UserFactory() - self.consolidate_auth = Auth(user=self.user) - self.parent = factories.ProjectFactory(creator=self.user) - self.node = factories.NodeFactory(creator=self.user, parent=self.parent) - - def test_has_permission_on_children(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - sub_component.add_contributor(contributor=non_admin_user) - sub_component.save() - sub_component2 = factories.NodeFactory(parent=node) - - assert node.has_permission_on_children(non_admin_user, permissions.READ) - - def test_check_user_has_permission_excludes_deleted_components(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - sub_component.add_contributor(contributor=non_admin_user) - sub_component.is_deleted = True - sub_component.save() - sub_component2 = factories.NodeFactory(parent=node) - - assert not node.has_permission_on_children(non_admin_user, permissions.READ) - - def test_check_user_does_not_have_permission_on_private_node_child(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - - assert not node.has_permission_on_children(non_admin_user,permissions.READ) - - def test_check_user_child_node_permissions_false_if_no_children(self): - 
non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - node = factories.NodeFactory(parent=parent, category='project') - - assert not node.has_permission_on_children(non_admin_user,permissions.READ) - - def test_check_admin_has_permissions_on_private_component(self): - parent = factories.ProjectFactory() - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - - assert node.has_permission_on_children(parent.creator,permissions.READ) - - def test_check_user_private_node_child_permissions_excludes_pointers(self): - user = factories.UserFactory() - parent = factories.ProjectFactory() - pointed = factories.ProjectFactory(creator=user) - parent.add_pointer(pointed, Auth(parent.creator)) - parent.save() - - assert not parent.has_permission_on_children(user,permissions.READ) - - def test_new_project_creator_is_subscribed(self): - user = factories.UserFactory() - factories.ProjectFactory(creator=user) - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 2 # subscribed to both file_updated and comments - assert 'file_updated' in event_types - assert 'comments' in event_types - - def test_new_node_creator_is_not_subscribed(self): - user = factories.UserFactory() - factories.NodeFactory(creator=user) - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - - assert len(user_subscriptions) == 0 - - def test_new_project_creator_is_subscribed_with_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 
'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'none') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_digest') - - node = factories.ProjectFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - - assert len(user_subscriptions) == 5 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_mentions' in event_types - assert file_updated_subscription.none.count() == 1 - assert file_updated_subscription.email_transactional.count() == 0 - assert comments_subscription.email_digest.count() == 1 - assert comments_subscription.email_transactional.count() == 0 - - def test_new_node_creator_is_not_subscribed_with_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'none') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - 
event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.NodeFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 4 # subscribed to only user settings - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types - - def test_subscribe_user_to_global_notfiications(self): - user = factories.UserFactory() - utils.subscribe_user_to_global_notifications(user) - subscription_event_names = list(user.notification_subscriptions.values_list('event_name', flat=True)) - for event_name in constants.USER_SUBSCRIPTIONS_AVAILABLE: - assert event_name in subscription_event_names - - def test_subscribe_user_to_registration_notifications(self): - registration = factories.RegistrationFactory() - with pytest.raises(InvalidSubscriptionError): - utils.subscribe_user_to_notifications(registration, self.user) - - def test_new_project_creator_is_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 
'email_transactional') - - node = factories.ProjectFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - - assert len(user_subscriptions) == 6 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types - assert file_updated_subscription.email_transactional.count() == 1 - assert comments_subscription.email_transactional.count() == 1 - - def test_new_fork_creator_is_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - project = factories.ProjectFactory(creator=user) - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.ForkFactory(project=project) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - node_file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - node_comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - 
project_file_updated_subscription = NotificationSubscription.objects.get(_id=project._id + '_file_updated') - project_comments_subscription = NotificationSubscription.objects.get(_id=project._id + '_comments') - - assert len(user_subscriptions) == 7 # subscribed to project, fork, and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_mentions' in event_types - assert node_file_updated_subscription.email_transactional.count() == 1 - assert node_comments_subscription.email_transactional.count() == 1 - assert project_file_updated_subscription.email_transactional.count() == 1 - assert project_comments_subscription.email_transactional.count() == 1 - - def test_new_node_creator_is_not_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.NodeFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 4 # subscribed to only user settings - assert 'global_file_updated' in event_types - 
assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types - - - def test_contributor_subscribed_when_added_to_project(self): - user = factories.UserFactory() - contributor = factories.UserFactory() - project = factories.ProjectFactory(creator=user) - project.add_contributor(contributor=contributor) - contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) - event_types = [sub.event_name for sub in contributor_subscriptions] - - assert len(contributor_subscriptions) == 2 - assert 'file_updated' in event_types - assert 'comments' in event_types - - def test_contributor_subscribed_when_added_to_component(self): - user = factories.UserFactory() - contributor = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=contributor._id + '_' + 'global_comments', - user=contributor, - event_name='global_comments' - ).add_user_to_subscription(contributor, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=contributor._id + '_' + 'global_file_updated', - user=contributor, - event_name='global_file_updated' - ).add_user_to_subscription(contributor, 'email_transactional') - - node = factories.NodeFactory(creator=user) - node.add_contributor(contributor=contributor) - - contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) - event_types = [sub.event_name for sub in contributor_subscriptions] - - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - - assert len(contributor_subscriptions) == 4 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert file_updated_subscription.email_transactional.count() == 1 - assert 
comments_subscription.email_transactional.count() == 1 - - def test_unregistered_contributor_not_subscribed_when_added_to_project(self): - user = factories.AuthUserFactory() - unregistered_contributor = factories.UnregUserFactory() - project = factories.ProjectFactory(creator=user) - project.add_unregistered_contributor( - unregistered_contributor.fullname, - unregistered_contributor.email, - Auth(user), - existing_user=unregistered_contributor - ) - - contributor_subscriptions = list(utils.get_all_user_subscriptions(unregistered_contributor)) - assert len(contributor_subscriptions) == 0 - - -class TestSubscriptionView(OsfTestCase): - - def setUp(self): - super().setUp() - self.node = factories.NodeFactory() - self.user = self.node.creator - self.registration = factories.RegistrationFactory(creator=self.user) - - def test_create_new_subscription(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) - - # check that user was added to notification_type field - assert payload['id'] == s.owner._id - assert payload['event'] == s.event_name - assert self.node.creator in getattr(s, payload['notification_type']).all() - - # change subscription - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - assert not self.node.creator in getattr(s, payload['notification_type']).all() - assert self.node.creator in getattr(s, new_payload['notification_type']).all() - - def test_cannot_create_registration_subscription(self): - payload = { - 'id': self.registration._id, - 'event': 'comments', - 
'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - res = self.app.post(url, json=payload, auth=self.registration.creator.auth) - assert res.status_code == 400 - - def test_adopt_parent_subscription_default(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - event_id = self.node._id + '_' + 'comments' - # confirm subscription was created because parent had default subscription - s = NotificationSubscription.objects.filter(_id=event_id).count() - assert 0 == s - - def test_change_subscription_to_adopt_parent_subscription_removes_user(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) - - # change subscription to adopt_parent - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - - # assert that user is removed from the subscription entirely - for n in constants.NOTIFICATION_TYPES: - assert not self.node.creator in getattr(s, n).all() - - def test_configure_subscription_adds_node_id_to_notifications_configured(self): - project = factories.ProjectFactory(creator=self.user) - assert not project._id in self.user.notifications_configured - payload = { - 'id': project._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=project.creator.auth) - - self.user.reload() - - 
assert project._id in self.user.notifications_configured - - -class TestRemoveContributor(OsfTestCase): - - def setUp(self): - super(OsfTestCase, self).setUp() - self.project = factories.ProjectFactory() - self.contributor = factories.UserFactory() - self.project.add_contributor(contributor=self.contributor, permissions=permissions.READ) - self.project.save() - - self.subscription = NotificationSubscription.objects.get( - node=self.project, - _id=self.project._id + '_comments' - ) - - self.node = factories.NodeFactory(parent=self.project) - self.node.add_contributor(contributor=self.project.creator, permissions=permissions.ADMIN) - self.node.save() - - self.node_subscription = NotificationSubscription.objects.get( - _id=self.node._id + '_comments', - node=self.node - ) - self.node_subscription.add_user_to_subscription(self.node.creator, 'email_transactional') - - def test_removed_non_admin_contributor_is_removed_from_subscriptions(self): - assert self.contributor in self.subscription.email_transactional.all() - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert self.contributor not in self.project.contributors.all() - self.subscription.reload() - assert self.contributor not in self.subscription.email_transactional.all() - - def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self): - assert self.node.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator)) - assert self.node.creator not in self.node.contributors.all() - self.node_subscription.reload() - assert self.node.creator not in self.node_subscription.email_transactional.all() - - def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self): - # Admin on parent project is removed as a contributor on a component. Check - # that admin is not removed from component subscriptions, as the admin - # now has read-only access. 
- assert self.project.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator)) - assert self.project.creator not in self.node.contributors.all() - assert self.project.creator in self.node_subscription.email_transactional.all() - - def test_remove_contributor_signal_called_when_contributor_is_removed(self): - with capture_signals() as mock_signals: - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert mock_signals.signals_sent() == {contributor_removed} - - -class TestRemoveNodeSignal(OsfTestCase): - - def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): - project = factories.ProjectFactory() - component = factories.NodeFactory(parent=project, creator=project.creator) - - s = NotificationSubscription.objects.filter(email_transactional=project.creator) - assert s.count() == 2 - - s = NotificationSubscription.objects.filter(email_transactional=component.creator) - assert s.count() == 2 - - with capture_signals() as mock_signals: - project.remove_node(auth=Auth(project.creator)) - project.reload() - component.reload() - - assert project.is_deleted - assert component.is_deleted - assert mock_signals.signals_sent() == {node_deleted} - - s = NotificationSubscription.objects.filter(email_transactional=project.creator) - assert s.count() == 0 - - s = NotificationSubscription.objects.filter(email_transactional=component.creator) - assert s.count() == 0 - - with pytest.raises(NotificationSubscription.DoesNotExist): - NotificationSubscription.objects.get(node=project) - - with pytest.raises(NotificationSubscription.DoesNotExist): - NotificationSubscription.objects.get(node=component) - - -def list_or_dict(data): - # Generator only returns lists or dicts from list or dict - if isinstance(data, dict): - for key in data: - if isinstance(data[key], dict) or isinstance(data[key], list): - yield data[key] - elif isinstance(data, 
list): - for item in data: - if isinstance(item, dict) or isinstance(item, list): - yield item - - -def has(data, sub_data): - # Recursive approach to look for a subset of data in data. - # WARNING: Don't use on huge structures - # :param data: Data structure - # :param sub_data: subset being checked for - # :return: True or False - try: - next(item for item in data if item == sub_data) - return True - except StopIteration: - lists_and_dicts = list_or_dict(data) - for item in lists_and_dicts: - if has(item, sub_data): - return True - return False - - -def subscription_schema(project, structure, level=0): - # builds a schema from a list of nodes and events - # :param project: validation type - # :param structure: list of nodes (another list) and events - # :return: schema - sub_list = [] - for item in list_or_dict(structure): - sub_list.append(subscription_schema(project, item, level=level+1)) - sub_list.append(event_schema(level)) - - node_schema = { - 'node': { - 'id': Use(type(project._id), error=f'node_id{level}'), - 'title': Use(type(project.title), error=f'node_title{level}'), - 'url': Use(type(project.url), error=f'node_{level}') - }, - 'kind': And(str, Use(lambda s: s in ('node', 'folder'), - error=f"kind didn't match node or folder {level}")), - 'nodeType': Use(lambda s: s in ('project', 'component'), error='nodeType not project or component'), - 'category': Use(lambda s: s in settings.NODE_CATEGORY_MAP, error='category not in settings.NODE_CATEGORY_MAP'), - 'permissions': { - 'view': Use(lambda s: s in (True, False), error='view permissions is not True/False') - }, - 'children': sub_list - } - if level == 0: - return Schema([node_schema]) - return node_schema - - -def event_schema(level=None): - return { - 'event': { - 'title': And(Use(str, error=f'event_title{level} not a string'), - Use(lambda s: s in constants.NOTIFICATION_TYPES, - error=f'event_title{level} not in list')), - 'description': And(Use(str, error=f'event_desc{level} not a string'), - 
Use(lambda s: s in constants.NODE_SUBSCRIPTIONS_AVAILABLE, - error=f'event_desc{level} not in list')), - 'notificationType': And(str, Or('adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)), - 'parent_notification_type': Or(None, 'adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES) - }, - 'kind': 'event', - 'children': And(list, lambda l: len(l) == 0) - } - - -class TestNotificationUtils(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = factories.UserFactory() - self.project = factories.ProjectFactory(creator=self.user) - - self.project_subscription = NotificationSubscription.objects.get( - node=self.project, - _id=self.project._id + '_comments', - event_name='comments' - ) - - self.user.notifications_configured[self.project._id] = True - self.user.save() - - self.node = factories.NodeFactory(parent=self.project, creator=self.user) - - self.node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=self.node._id + '_' + 'comments', - node=self.node, - event_name='comments' - ) - self.node_comments_subscription.save() - self.node_comments_subscription.email_transactional.add(self.user) - self.node_comments_subscription.save() - - self.node_subscription = list(NotificationSubscription.objects.filter(node=self.node)) - - self.user_subscription = [factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'comment_replies', - user=self.user, - event_name='comment_replies' - ), - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_comment', - user=self.user, - event_name='global_comment' - ), - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_file_updated', - user=self.user, - event_name='global_file_updated' - )] - - for x in self.user_subscription: - x.save() - for x in self.user_subscription: - x.email_transactional.add(self.user) - for x in self.user_subscription: - x.save() - - def test_to_subscription_key(self): - key = 
utils.to_subscription_key('xyz', 'comments') - assert key == 'xyz_comments' - - def test_from_subscription_key(self): - parsed_key = utils.from_subscription_key('xyz_comment_replies') - assert parsed_key == { - 'uid': 'xyz', - 'event': 'comment_replies' - } - - def test_get_all_user_subscriptions(self): - user_subscriptions = list(utils.get_all_user_subscriptions(self.user)) - assert self.project_subscription in user_subscriptions - assert self.node_comments_subscription in user_subscriptions - for x in self.user_subscription: - assert x in user_subscriptions - assert len(user_subscriptions) == 6 - - def test_get_all_node_subscriptions_given_user_subscriptions(self): - user_subscriptions = utils.get_all_user_subscriptions(self.user) - node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node, - user_subscriptions=user_subscriptions)] - expected_node_subscription_ids = [x._id for x in self.node_subscription] - assert node_subscription_ids == expected_node_subscription_ids - - def test_get_all_node_subscriptions_given_user_and_node(self): - node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node)] - expected_node_subscription_ids = [x._id for x in self.node_subscription] - assert node_subscription_ids == expected_node_subscription_ids - - def test_get_configured_project_ids_does_not_return_user_or_node_ids(self): - configured_nodes = utils.get_configured_projects(self.user) - configured_ids = [n._id for n in configured_nodes] - # No duplicates! 
- assert len(configured_nodes) == 1 - - assert self.project._id in configured_ids - assert self.node._id not in configured_ids - assert self.user._id not in configured_ids - - def test_get_configured_project_ids_excludes_deleted_projects(self): - project = factories.ProjectFactory() - project.is_deleted = True - project.save() - assert project not in utils.get_configured_projects(self.user) - - def test_get_configured_project_ids_excludes_node_with_project_category(self): - node = factories.NodeFactory(parent=self.project, category='project') - assert node not in utils.get_configured_projects(self.user) - - def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - assert private_project in configured_project_nodes - - def test_get_configured_project_ids_excludes_private_projects_if_no_subscriptions_on_node(self): - user = factories.UserFactory() - - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - node.add_contributor(user) - - utils.remove_contributor_from_subscriptions(node, user) - - configured_project_nodes = utils.get_configured_projects(user) - assert private_project not in configured_project_nodes - - def test_get_parent_notification_type(self): - nt = utils.get_parent_notification_type(self.node, 'comments', self.user) - assert nt == 'email_transactional' - - def 
test_get_parent_notification_type_no_parent_subscriptions(self): - node = factories.NodeFactory() - nt = utils.get_parent_notification_type(node._id, 'comments', self.user) - assert nt is None - - def test_get_parent_notification_type_no_parent(self): - project = factories.ProjectFactory() - nt = utils.get_parent_notification_type(project._id, 'comments', self.user) - assert nt is None - - def test_get_parent_notification_type_handles_user_id(self): - nt = utils.get_parent_notification_type(self.user._id, 'comments', self.user) - assert nt is None - - def test_format_data_project_settings(self): - data = utils.format_data(self.user, [self.project]) - parent_event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - child_event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - expected_new = [['event'], 'event'] - schema = subscription_schema(self.project, expected_new) - assert schema.validate(data) - assert has(data, parent_event) - assert has(data, child_event) - - def test_format_data_node_settings(self): - data = utils.format_data(self.user, [self.node]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_includes_admin_view_only_component_subscriptions(self): - # Test private components in which parent project admins are not contributors still 
appear in their - # notifications settings. - node = factories.NodeFactory(parent=self.project) - data = utils.format_data(self.user, [self.project]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event'], ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_excludes_pointers(self): - project = factories.ProjectFactory() - pointed = factories.ProjectFactory() - project.add_pointer(pointed, Auth(project.creator)) - project.creator.notifications_configured[project._id] = True - project.creator.save() - configured_project_nodes = utils.get_configured_projects(project.creator) - data = utils.format_data(project.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, 
configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_if_children_points_to_parent(self): - private_project = factories.ProjectFactory(creator=self.user) - node = factories.NodeFactory(parent=private_project, creator=self.user) - node.save() - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_user_subscriptions(self): - data = utils.format_user_subscriptions(self.user) - expected = [ - { - 'event': { - 'title': 'global_file_updated', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_file_updated'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None, - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_comment_replies', - 'description': 
constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_comments', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_mentions', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_mentions'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_reviews', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_reviews'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - ] - - assert data == expected - - def test_get_global_notification_type(self): - notification_type = utils.get_global_notification_type(self.user_subscription[1] ,self.user) - assert 'email_transactional' == notification_type - - def test_check_if_all_global_subscriptions_are_none_false(self): - all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user) - assert not all_global_subscriptions_none - - # # Business logic prevents this from being an applicable unit test; - # # global_mentions cannot be unsubscribed from - # def test_check_if_all_global_subscriptions_are_none_true(self): - # for x in self.user_subscription: - # x.none.add(self.user) - # x.email_transactional.remove(self.user) - # for x in self.user_subscription: - # x.save() - # all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user) - # assert all_global_subscriptions_none - - def test_format_data_user_settings(self): - data = utils.format_user_and_project_subscriptions(self.user) - expected = [ - { - 'node': { - 'id': 
self.user._id, - 'title': 'Default Notification Settings', - 'help': 'These are default settings for new projects you create or are added to. Modifying these settings will not modify settings on existing projects.' - }, - 'kind': 'heading', - 'children': utils.format_user_subscriptions(self.user) - }, - { - 'node': { - 'help': 'These are settings for each of your projects. Modifying these settings will only modify the settings for the selected project.', - 'id': '', - 'title': 'Project Notifications' - }, - 'kind': 'heading', - 'children': utils.format_data(self.user, utils.get_configured_projects(self.user)) - }] - assert data == expected - - def test_serialize_user_level_event(self): - user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)] - user_subscription = None - for subscription in user_subscriptions: - if 'global_comment_replies' in getattr(subscription, 'event_name'): - user_subscription = subscription - data = utils.serialize_event(self.user, event_description='global_comment_replies', - subscription=user_subscription) - expected = { - 'event': { - 'title': 'global_comment_replies', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - assert data == expected - - def test_serialize_node_level_event(self): - node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)] - data = utils.serialize_event(user=self.user, event_description='comments', - subscription=node_subscriptions[0], node=self.node) - expected = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - assert data == expected - - def 
test_serialize_node_level_event_that_adopts_parent_settings(self): - user = factories.UserFactory() - self.project.add_contributor(contributor=user, permissions=permissions.READ) - self.project.save() - self.node.add_contributor(contributor=user, permissions=permissions.READ) - self.node.save() - - # set up how it was in original test - remove existing subscriptions - node_subscriptions = utils.get_all_node_subscriptions(user, self.node) - for subscription in node_subscriptions: - subscription.remove_user_from_subscription(user) - - node_subscriptions = utils.get_all_node_subscriptions(user, self.node) - data = utils.serialize_event(user=user, event_description='comments', - subscription=node_subscriptions, node=self.node) - expected = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - assert data == expected - - -class TestNotificationsDict(OsfTestCase): - def test_notifications_dict_add_message_returns_proper_format(self): - d = utils.NotificationsDict() - message = { - 'message': 'Freddie commented on your project', - 'timestamp': timezone.now() - } - message2 = { - 'message': 'Mercury commented on your component', - 'timestamp': timezone.now() - } - - d.add_message(['project'], message) - d.add_message(['project', 'node'], message2) - - expected = { - 'messages': [], - 'children': collections.defaultdict( - utils.NotificationsDict, { - 'project': { - 'messages': [message], - 'children': collections.defaultdict(utils.NotificationsDict, { - 'node': { - 'messages': [message2], - 'children': collections.defaultdict(utils.NotificationsDict, {}) - } - }) - } - } - )} - assert d == expected - - -class TestCompileSubscriptions(NotificationTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.UserFactory() - self.user_2 = factories.UserFactory() - self.user_3 
= factories.UserFactory() - self.user_4 = factories.UserFactory() - # Base project + 1 project shared with 3 + 1 project shared with 2 - self.base_project = factories.ProjectFactory(is_public=False, creator=self.user_1) - self.shared_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) - self.private_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) - # Adding contributors - for node in [self.base_project, self.shared_node, self.private_node]: - node.add_contributor(self.user_2, permissions=permissions.ADMIN) - self.base_project.add_contributor(self.user_3, permissions=permissions.WRITE) - self.shared_node.add_contributor(self.user_3, permissions=permissions.WRITE) - # Setting basic subscriptions - self.base_sub = factories.NotificationSubscriptionFactory( - _id=self.base_project._id + '_file_updated', - node=self.base_project, - event_name='file_updated' - ) - self.base_sub.save() - self.shared_sub = factories.NotificationSubscriptionFactory( - _id=self.shared_node._id + '_file_updated', - node=self.shared_node, - event_name='file_updated' - ) - self.shared_sub.save() - self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - node=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - - def test_no_subscription(self): - node = factories.NodeFactory() - result = emails.compile_subscriptions(node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_no_subscribers(self): - node = factories.NodeFactory() - node_sub = factories.NotificationSubscriptionFactory( - _id=node._id + '_file_updated', - node=node, - event_name='file_updated' - ) - node_sub.save() - result = emails.compile_subscriptions(node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_parent(self): - # Basic sub check - 
self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - result = emails.compile_subscriptions(self.base_project, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_to_parent_from_child(self): - # checks the parent sub is the one to appear without a child sub - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_to_both_from_child(self): - # checks that only one sub is in the list. - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - self.shared_sub.email_transactional.add(self.user_1) - self.shared_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_diff_subs_to_both_from_child(self): - # Check that the child node sub overrides the parent node sub - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - self.shared_sub.none.add(self.user_1) - self.shared_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [], 'none': [self.user_1._id], 'email_digest': []} == result - - def test_user_wo_permission_on_child_node_not_listed(self): - # Tests to see if a user without permission gets an Email about a node they cannot see. 
- self.base_sub.email_transactional.add(self.user_3) - self.base_sub.save() - result = emails.compile_subscriptions(self.private_node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_several_nodes_deep(self): - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - node2 = factories.NodeFactory(parent=self.shared_node) - node3 = factories.NodeFactory(parent=node2) - node4 = factories.NodeFactory(parent=node3) - node5 = factories.NodeFactory(parent=node4) - subs = emails.compile_subscriptions(node5, 'file_updated') - assert subs == {'email_transactional': [self.user_1._id], 'email_digest': [], 'none': []} - - def test_several_nodes_deep_precedence(self): - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - node2 = factories.NodeFactory(parent=self.shared_node) - node3 = factories.NodeFactory(parent=node2) - node4 = factories.NodeFactory(parent=node3) - node4_subscription = factories.NotificationSubscriptionFactory( - _id=node4._id + '_file_updated', - node=node4, - event_name='file_updated' - ) - node4_subscription.save() - node4_subscription.email_digest.add(self.user_1) - node4_subscription.save() - node5 = factories.NodeFactory(parent=node4) - subs = emails.compile_subscriptions(node5, 'file_updated') - assert subs == {'email_transactional': [], 'email_digest': [self.user_1._id], 'none': []} - - -class TestMoveSubscription(NotificationTestCase): - def setUp(self): - super().setUp() - self.blank = {key: [] for key in constants.NOTIFICATION_TYPES} # For use where it is blank. 
- self.user_1 = factories.AuthUserFactory() - self.auth = Auth(user=self.user_1) - self.user_2 = factories.AuthUserFactory() - self.user_3 = factories.AuthUserFactory() - self.user_4 = factories.AuthUserFactory() - self.project = factories.ProjectFactory(creator=self.user_1) - self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1) - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - node=self.project, - event_name='file_updated' - ) - self.sub.email_transactional.add(self.user_1) - self.sub.save() - self.file_sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_xyz42_file_updated', - node=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() - - def test_separate_users(self): - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - subbed, removed = utils.separate_users( - self.private_node, [self.user_2._id, self.user_3._id, self.user_4._id] - ) - assert [self.user_2._id, self.user_3._id] == subbed - assert [self.user_4._id] == removed - - def test_event_subs_same(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []} == results - - def test_event_nodes_same(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, 
permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.project) - assert {'email_transactional': [], 'email_digest': [], 'none': []} == results - - def test_move_sub(self): - # Tests old sub is replaced with new sub. - utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - self.file_sub.reload() - assert 'abc42_file_updated' == self.file_sub.event_name - assert self.private_node == self.file_sub.owner - assert self.private_node._id + '_abc42_file_updated' == self.file_sub._id - - def test_move_sub_with_none(self): - # Attempt to reproduce an error that is seen when moving files - self.project.add_contributor(self.user_2, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.file_sub.none.add(self.user_2) - self.file_sub.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [], 'email_digest': [], 'none': [self.user_2._id]} == results - - def test_remove_one_user(self): - # One user doesn't have permissions on the node the sub is moved to. Should be listed. - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []} == results - - def test_remove_one_user_warn_another(self): - # Two users do not have permissions on new node, but one has a project sub. Both should be listed. 
- self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.save() - self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.file_sub.email_transactional.add(self.user_2, self.user_4) - - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [self.user_3._id], 'none': []} == results - assert self.sub.email_digest.filter(id=self.user_3.id).exists() # Is not removed from the project subscription. - - def test_warn_user(self): - # One user with a project sub does not have permission on new node. User should be listed. - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.save() - self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.file_sub.email_transactional.add(self.user_2) - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - assert {'email_transactional': [], 'email_digest': [self.user_3._id], 'none': []} == results - assert self.user_3 in self.sub.email_digest.all() # Is not removed from the project subscription. 
- - def test_user_node_subbed_and_not_removed(self): - self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - assert not self.file_sub.email_digest.filter().exists() - - # Regression test for commit ea15186 - def test_garrulous_event_name(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('complicated/path_to/some/file/ASDFASDF.txt_file_updated', self.project, self.private_node) - assert {'email_transactional': [], 'email_digest': [], 'none': []} == results - -class TestSendEmails(NotificationTestCase): - def setUp(self): - super().setUp() - self.user = factories.AuthUserFactory() - self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_' + 'comments', - node=self.project, - event_name='comments' - ) - self.project_subscription.save() - self.project_subscription.email_transactional.add(self.project.creator) - self.project_subscription.save() - - self.node = factories.NodeFactory(parent=self.project) - self.node_subscription = factories.NotificationSubscriptionFactory( - _id=self.node._id + '_comments', - node=self.node, - event_name='comments' - ) - self.node_subscription.save() - self.user_subscription = factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_comment_replies', - node=self.node, - 
event_name='global_comment_replies' - ) - self.user_subscription.email_transactional.add(self.user) - self.user_subscription.save() - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_no_subscription(self, mock_store): - node = factories.ProjectFactory() - user = factories.AuthUserFactory() - emails.notify('comments', user=user, node=node, timestamp=timezone.now()) - assert not mock_store.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_no_subscribers(self, mock_store): - node = factories.NodeFactory() - node_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_comments', - node=node, - event_name='comments' - ) - node_subscription.save() - emails.notify('comments', user=self.user, node=node, timestamp=timezone.now()) - assert not mock_store.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_with_correct_args(self, mock_store): - time_now = timezone.now() - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now) - assert mock_store.called - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', self.user, - self.node, time_now) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_to_exclude(self, mock_store): - time_now = timezone.now() - context = {'exclude':[self.project.creator._id]} - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, **context) - assert mock_store.call_count == 0 - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - node_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_comments', - node=node, - event_name='comments' - ) - node_subscription.save() - node_subscription.none.add(user) - node_subscription.save() - sent 
= emails.notify('comments', user=user, node=node, timestamp=timezone.now()) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'none') - time_now = timezone.now() - sent = emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_send_to_mentioned_users(self, mock_store): - user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.ProjectFactory(creator=user) - time_now = timezone.now() - emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert mock_store.called - mock_store.assert_called_with( - [node.creator._id], - 'email_transactional', - 'global_mentions', - user, - node, - time_now, - template=None, - new_mentions=[node.creator._id], - is_creator=(user == node.creator), - ) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_reply_event_if_comment_is_direct_reply(self, mock_store): - time_now = timezone.now() - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, target_user=self.project.creator) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comment_replies', - self.user, self.node, time_now, target_user=self.project.creator) - - 
@mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_reply_when_target_user_is_subscribed_via_user_settings(self, mock_store): - time_now = timezone.now() - emails.notify('global_comment_replies', user=self.project.creator, node=self.node, timestamp=time_now, target_user=self.user) - mock_store.assert_called_with([self.user._id], 'email_transactional', 'comment_replies', - self.project.creator, self.node, time_now, target_user=self.user) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply(self, mock_store): - user = factories.UserFactory() - time_now = timezone.now() - emails.notify('comments', user=user, node=self.node, timestamp=time_now, target_user=user) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, - self.node, time_now, target_user=user) - - @mock.patch('website.mails.send_mail') - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_comment_if_they_reply_to_their_own_comment(self, mock_store, mock_send_mail): - time_now = timezone.now() - emails.notify('comments', user=self.project.creator, node=self.project, timestamp=time_now, - target_user=self.project.creator) - assert not mock_store.called - assert not mock_send_mail.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply_on_component(self, mock_store): - # Test that comment replies on components that are not direct replies to the subscriber use the - # "comments" email template. 
- user = factories.UserFactory() - time_now = timezone.now() - emails.notify('comments', user, self.node, time_now, target_user=user) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, - self.node, time_now, target_user=user) - - def test_check_node_node_none(self): - subs = emails.check_node(None, 'comments') - assert subs == {'email_transactional': [], 'email_digest': [], 'none': []} - - def test_check_node_one(self): - subs = emails.check_node(self.project, 'comments') - assert subs == {'email_transactional': [self.project.creator._id], 'email_digest': [], 'none': []} - - @mock.patch('website.project.views.comment.notify') - def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify): - # user subscribed to comment replies - user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( - _id=user._id + '_comments', - user=user, - event_name='comment_replies' - ) - user_subscription.email_transactional.add(user) - user_subscription.save() - - # user is not subscribed to project comment notifications - project = factories.ProjectFactory() - - # user comments on project - target = factories.CommentFactory(node=project, user=user) - content = 'hammer to fall' - - # reply to user (note: notify is called from Comment.create) - reply = Comment.create( - auth=Auth(project.creator), - user=project.creator, - node=project, - content=content, - target=Guid.load(target._id), - root_target=Guid.load(project._id), - ) - assert mock_notify.called - assert mock_notify.call_count == 2 - - @mock.patch('website.project.views.comment.notify') - def test_check_user_comment_reply_only_calls_once(self, mock_notify): - # user subscribed to comment replies - user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( - _id=user._id + '_comments', - user=user, - event_name='comment_replies' - ) - 
user_subscription.email_transactional.add(user) - user_subscription.save() - - project = factories.ProjectFactory() - - # user comments on project - target = factories.CommentFactory(node=project, user=user) - content = 'P-Hacking: A user guide' - - mock_notify.return_value = [user._id] - # reply to user (note: notify is called from Comment.create) - reply = Comment.create( - auth=Auth(project.creator), - user=project.creator, - node=project, - content=content, - target=Guid.load(target._id), - root_target=Guid.load(project._id), - ) - assert mock_notify.called - assert mock_notify.call_count == 1 - - def test_get_settings_url_for_node(self): - url = emails.get_settings_url(self.project._id, self.user) - assert url == self.project.absolute_url + 'settings/' - - def test_get_settings_url_for_user(self): - url = emails.get_settings_url(self.user._id, self.user) - assert url == web_url_for('user_notifications', _absolute=True) - - def test_get_node_lineage(self): - node_lineage = emails.get_node_lineage(self.node) - assert node_lineage == [self.project._id, self.node._id] - - def test_fix_locale(self): - assert emails.fix_locale('en') == 'en' - assert emails.fix_locale('de_DE') == 'de_DE' - assert emails.fix_locale('de_de') == 'de_DE' - - def test_localize_timestamp(self): - timestamp = timezone.now() - self.user.timezone = 'America/New_York' - self.user.locale = 'en_US' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_empty_timezone(self): - timestamp = timezone.now() - self.user.timezone = '' - self.user.locale = 'en_US' - self.user.save() - tz = dates.get_timezone('Etc/UTC') - 
locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_empty_locale(self): - timestamp = timezone.now() - self.user.timezone = 'America/New_York' - self.user.locale = '' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale('en') - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_handles_unicode(self): - timestamp = timezone.now() - self.user.timezone = 'Europe/Moscow' - self.user.locale = 'ru_RU' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - -class TestSendDigest(OsfTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.UserFactory() - self.user_2 = factories.UserFactory() - self.project = factories.ProjectFactory() - self.timestamp = timezone.now() - - def test_group_notifications_by_user_transactional(self): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - user=self.user_1, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d.save() - d2 = 
factories.NotificationDigestFactory( - user=self.user_2, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d2.save() - d3 = factories.NotificationDigestFactory( - user=self.user_2, - send_type='email_digest', - timestamp=self.timestamp, - message='Hello, but this should not appear (this is a digest)', - node_lineage=[self.project._id] - ) - d3.save() - user_groups = list(get_users_emails(send_type)) - expected = [ - { - 'user_id': self.user_1._id, - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': d._id - }] - }, - { - 'user_id': self.user_2._id, - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': d2._id - }] - } - ] - - assert len(user_groups) == 2 - assert user_groups == expected - digest_ids = [d._id, d2._id, d3._id] - remove_notifications(email_notification_ids=digest_ids) - - def test_group_notifications_by_user_digest(self): - send_type = 'email_digest' - d = factories.NotificationDigestFactory( - user=self.user_1, - send_type=send_type, - event='comment_replies', - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d.save() - d2 = factories.NotificationDigestFactory( - user=self.user_2, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d2.save() - d3 = factories.NotificationDigestFactory( - user=self.user_2, - send_type='email_transactional', - timestamp=self.timestamp, - message='Hello, but this should not appear (this is transactional)', - node_lineage=[self.project._id] - ) - d3.save() - user_groups = list(get_users_emails(send_type)) - expected = [ - { - 'user_id': str(self.user_1._id), - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': str(d._id) - }] - }, - { - 'user_id': str(self.user_2._id), - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': str(d2._id) - }] 
- } - ] - - assert len(user_groups) == 2 - assert user_groups == expected - digest_ids = [d._id, d2._id, d3._id] - remove_notifications(email_notification_ids=digest_ids) - - @mock.patch('website.mails.send_mail') - def test_send_users_email_called_with_correct_args(self, mock_send_mail): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - send_type=send_type, - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - d.save() - user_groups = list(get_users_emails(send_type)) - send_users_email(send_type) - assert mock_send_mail.called - assert mock_send_mail.call_count == len(user_groups) - - last_user_index = len(user_groups) - 1 - user = OSFUser.load(user_groups[last_user_index]['user_id']) - - args, kwargs = mock_send_mail.call_args - - assert kwargs['to_addr'] == user.username - assert kwargs['mail'] == mails.DIGEST - assert kwargs['name'] == user.fullname - assert kwargs['can_change_node_preferences'] == True - message = group_by_node(user_groups[last_user_index]['info']) - assert kwargs['message'] == message - - @mock.patch('website.mails.send_mail') - def test_send_users_email_ignores_disabled_users(self, mock_send_mail): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - send_type=send_type, - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - d.save() - - user_groups = list(get_users_emails(send_type)) - last_user_index = len(user_groups) - 1 - - user = OSFUser.load(user_groups[last_user_index]['user_id']) - user.is_disabled = True - user.save() - - send_users_email(send_type) - assert not mock_send_mail.called - - def test_remove_sent_digest_notifications(self): - d = factories.NotificationDigestFactory( - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - digest_id = d._id - 
remove_notifications(email_notification_ids=[digest_id]) - with pytest.raises(NotificationDigest.DoesNotExist): - NotificationDigest.objects.get(_id=digest_id) - -class TestNotificationsReviews(OsfTestCase): - def setUp(self): - super().setUp() - self.provider = factories.PreprintProviderFactory(_id='engrxiv') - self.preprint = factories.PreprintFactory(provider=self.provider) - self.user = factories.UserFactory() - self.sender = factories.UserFactory() - self.context_info = { - 'email_sender': self.sender, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - self.action = factories.ReviewActionFactory() - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_comments', - user=self.user, - event_name='global_comments' - ).add_user_to_subscription(self.user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_file_updated', - user=self.user, - event_name='global_file_updated' - ).add_user_to_subscription(self.user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_reviews', - user=self.user, - event_name='global_reviews' - ).add_user_to_subscription(self.user, 'email_transactional') - - def test_reviews_base_notification(self): - contributor_subscriptions = list(utils.get_all_user_subscriptions(self.user)) - event_types = [sub.event_name for sub in contributor_subscriptions] - assert 'global_reviews' in event_types - - @mock.patch('website.mails.mails.send_mail') - def test_reviews_submit_notification(self, mock_send_email): - listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user]) - assert mock_send_email.called - - @mock.patch('website.notifications.emails.notify_global_event') - def test_reviews_notification(self, mock_notify): - 
listeners.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako') - assert mock_notify.called - - -class QuerySetMatcher: - def __init__(self, some_obj): - self.some_obj = some_obj - - def __eq__(self, other): - return list(self.some_obj) == list(other) - - -class TestNotificationsReviewsModerator(OsfTestCase): - - def setUp(self): - super().setUp() - self.provider = factories.PreprintProviderFactory(_id='engrxiv') - self.preprint = factories.PreprintFactory(provider=self.provider) - self.submitter = factories.UserFactory() - self.moderator_transacitonal = factories.UserFactory() - self.moderator_digest= factories.UserFactory() - - self.context_info_submission = { - 'referrer': self.submitter, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - - self.context_info_request = { - 'requester': self.submitter, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - - self.action = factories.ReviewActionFactory() - self.subscription = NotificationSubscription.load(self.provider._id+'_new_pending_submissions') - self.subscription.add_user_to_subscription(self.moderator_transacitonal, 'email_transactional') - self.subscription.add_user_to_subscription(self.moderator_digest, 'email_digest') - - @mock.patch('website.notifications.emails.store_emails') - def test_reviews_submit_notification(self, mock_store): - time_now = timezone.now() - - preprint = self.context_info_submission['reviewable'] - provider = preprint.provider - - self.context_info_submission['message'] = f'submitted {preprint.title}.' 
- self.context_info_submission['profile_image_url'] = get_profile_image_url(self.context_info_submission['referrer']) - self.context_info_submission['reviews_submission_url'] = f'{settings.DOMAIN}reviews/preprints/{provider._id}/{preprint._id}' - listeners.reviews_submit_notification_moderators(self, time_now, self.context_info_submission) - subscription = NotificationSubscription.load(self.provider._id + '_new_pending_submissions') - digest_subscriber_ids = list(subscription.email_digest.all().values_list('guids___id', flat=True)) - instant_subscriber_ids = list(subscription.email_transactional.all().values_list('guids___id', flat=True)) - - mock_store.assert_any_call( - digest_subscriber_ids, - 'email_digest', - 'new_pending_submissions', - self.context_info_submission['referrer'], - self.context_info_submission['reviewable'], - time_now, - abstract_provider=self.context_info_submission['reviewable'].provider, - **self.context_info_submission - ) - - mock_store.assert_any_call( - instant_subscriber_ids, - 'email_transactional', - 'new_pending_submissions', - self.context_info_submission['referrer'], - self.context_info_submission['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_submission - ) - - @mock.patch('website.notifications.emails.store_emails') - def test_reviews_request_notification(self, mock_store): - time_now = timezone.now() - self.context_info_request['message'] = 'has requested withdrawal of {} "{}".'.format(self.context_info_request['reviewable'].provider.preprint_word, - self.context_info_request['reviewable'].title) - self.context_info_request['profile_image_url'] = get_profile_image_url(self.context_info_request['requester']) - self.context_info_request['reviews_submission_url'] = '{}reviews/preprints/{}/{}'.format(settings.DOMAIN, - self.context_info_request[ - 'reviewable'].provider._id, - self.context_info_request[ - 'reviewable']._id) - 
listeners.reviews_withdrawal_requests_notification(self, time_now, self.context_info_request) - subscription = NotificationSubscription.load(self.provider._id + '_new_pending_submissions') - digest_subscriber_ids = subscription.email_digest.all().values_list('guids___id', flat=True) - instant_subscriber_ids = subscription.email_transactional.all().values_list('guids___id', flat=True) - mock_store.assert_any_call(QuerySetMatcher(digest_subscriber_ids), - 'email_digest', - 'new_pending_submissions', - self.context_info_request['requester'], - self.context_info_request['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_request) - - mock_store.assert_any_call(QuerySetMatcher(instant_subscriber_ids), - 'email_transactional', - 'new_pending_submissions', - self.context_info_request['requester'], - self.context_info_request['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_request) diff --git a/tests/test_preprints.py b/tests/test_preprints.py index a213c961659..5528ef28219 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -114,16 +114,6 @@ def test_verified_publishable(self, preprint): preprint.deleted = None assert preprint.verified_publishable is True - def test_is_deleted(self, preprint): - assert preprint.deleted is None - assert preprint.is_deleted is False - - preprint.deleted = timezone.now() - preprint.save() - - assert preprint.deleted is not None - assert preprint.is_deleted is True - def test_has_submitted_preprint(self, preprint): preprint.machine_state = 'initial' preprint.save() @@ -168,9 +158,6 @@ def test_all_tags(self, preprint, auth): assert len(preprint.all_tags) == 1 assert preprint.all_tags[0].name == 'test_tag_1' - def test_system_tags(self, preprint): - assert preprint.system_tags.exists() is False - class TestPreprintSubjects: diff --git a/tests/test_registrations/test_retractions.py 
b/tests/test_registrations/test_retractions.py index 67f0b0fb497..a964aacf02f 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -22,8 +22,9 @@ InvalidSanctionApprovalToken, InvalidSanctionRejectionToken, NodeStateError, ) -from osf.models import Contributor, Retraction +from osf.models import Contributor, Retraction, NotificationType from osf.utils import permissions +from tests.utils import capture_notifications @pytest.mark.enable_bookmark_creation @@ -796,8 +797,7 @@ def test_POST_retraction_to_private_registration_returns_HTTPError_FORBIDDEN(sel self.registration.reload() assert self.registration.retraction is None - @mock.patch('website.mails.send_mail') - def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_send_mail): + def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): unreg = UnregUserFactory() self.registration.add_unregistered_contributor( unreg.fullname, @@ -807,13 +807,16 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_s existing_user=unreg ) self.registration.save() - self.app.post( - self.retraction_post_url, - json={'justification': ''}, - auth=self.user.auth, - ) + with capture_notifications() as notifications: + self.app.post( + self.retraction_post_url, + json={'justification': ''}, + auth=self.user.auth, + ) # Only the creator gets an email; the unreg user does not get emailed - assert mock_send_mail.call_count == 1 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_PENDING_RETRACTION_ADMIN + def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -904,16 +907,16 @@ def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_sen ) assert res.status_code == 400 - @mock.patch('website.mails.send_mail') - def test_valid_POST_calls_send_mail_with_username(self, mock_send): - self.app.post( - 
self.retraction_post_url, - json={'justification': ''}, - auth=self.user.auth, - ) - assert mock_send.called - args, kwargs = mock_send.call_args - assert self.user.username in args + def test_valid_POST_calls_send_mail_with_username(self): + with capture_notifications() as notifications: + self.app.post( + self.retraction_post_url, + json={'justification': ''}, + auth=self.user.auth, + ) + assert len(notifications) == 1 + username = notifications[0]['kargs']['user'].username + assert self.user.username == username def test_non_contributor_GET_approval_returns_HTTPError_UNAUTHORIZED(self): non_contributor = AuthUserFactory() diff --git a/tests/test_reset_password_views.py b/tests/test_reset_password_views.py new file mode 100644 index 00000000000..0648f6ed003 --- /dev/null +++ b/tests/test_reset_password_views.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +"""Views tests for the OSF.""" +from unittest import mock +from urllib.parse import quote_plus +from framework.auth import core + +import pytest +from django.utils import timezone +from tests.utils import run_celery_tasks + +from framework.auth import cas +from osf_tests.factories import ( + AuthUserFactory, +) +from tests.base import ( + fake, + OsfTestCase, +) +from website.util import web_url_for + +pytestmark = pytest.mark.django_db + +class TestResetPassword(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.another_user = AuthUserFactory() + self.osf_key_v2 = core.generate_verification_key(verification_type='password') + self.user.verification_key_v2 = self.osf_key_v2 + self.user.verification_key = None + self.user.save() + self.get_url = web_url_for( + 'reset_password_get', + uid=self.user._id, + token=self.osf_key_v2['token'] + ) + self.get_url_invalid_key = web_url_for( + 'reset_password_get', + uid=self.user._id, + token=core.generate_verification_key() + ) + self.get_url_invalid_user = web_url_for( + 'reset_password_get', + uid=self.another_user._id, + 
token=self.osf_key_v2['token'] + ) + + # successfully load reset password page + def test_reset_password_view_returns_200(self): + res = self.app.get(self.get_url) + assert res.status_code == 200 + + # raise http 400 error + def test_reset_password_view_raises_400(self): + res = self.app.get(self.get_url_invalid_key) + assert res.status_code == 400 + + res = self.app.get(self.get_url_invalid_user) + assert res.status_code == 400 + + self.user.verification_key_v2['expires'] = timezone.now() + self.user.save() + res = self.app.get(self.get_url) + assert res.status_code == 400 + + # successfully reset password + @pytest.mark.enable_enqueue_task + @mock.patch('framework.auth.cas.CasClient.service_validate') + def test_can_reset_password_if_form_success(self, mock_service_validate): + # TODO: check in qa url encoding + # load reset password page and submit email + res = self.app.get(self.get_url) + form = res.get_form('resetPasswordForm') + form['password'] = 'newpassword' + form['password2'] = 'newpassword' + res = form.submit(self.app) + + # check request URL is /resetpassword with username and new verification_key_v2 token + request_url_path = res.request.path + assert 'resetpassword' in request_url_path + assert self.user._id in request_url_path + assert self.user.verification_key_v2['token'] in request_url_path + + # check verification_key_v2 for OSF is destroyed and verification_key for CAS is in place + self.user.reload() + assert self.user.verification_key_v2 == {} + assert not self.user.verification_key is None + + # check redirection to CAS login with username and the new verification_key(CAS) + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'login?service=' in location + assert f'username={quote_plus(self.user.username)}' in location + assert f'verification_key={self.user.verification_key}' in location + + # check if password was updated + self.user.reload() + assert self.user.check_password('newpassword') + + # check if 
verification_key is destroyed after service validation + mock_service_validate.return_value = cas.CasResponse( + authenticated=True, + user=self.user._id, + attributes={'accessToken': fake.md5()} + ) + ticket = fake.md5() + service_url = 'http://accounts.osf.io/?ticket=' + ticket + with run_celery_tasks(): + cas.make_response_from_ticket(ticket, service_url) + self.user.reload() + assert self.user.verification_key is None + + # log users out before they land on reset password page + def test_reset_password_logs_out_user(self): + # visit reset password link while another user is logged in + res = self.app.get(self.get_url, auth=self.another_user.auth) + # check redirection to CAS logout + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'reauth' not in location + assert 'logout?service=' in location + assert 'resetpassword' in location + diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index a97bc288e44..25c66b0a933 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -1,152 +1,10 @@ -import abc - import pytest -from django.core.exceptions import ValidationError -from django.utils import timezone - from unittest import mock -from framework.auth import Auth -from tests.base import DbTestCase from osf_tests.factories import UserFactory, CommentFactory, ProjectFactory, PreprintFactory, RegistrationFactory, AuthUserFactory from osf.models import NotableDomain, SpamStatus -from website import settings, mails - - -@pytest.mark.django_db -@mock.patch('framework.auth.views.mails.send_mail') -def test_throttled_autoban(mock_mail): - settings.SPAM_THROTTLE_AUTOBAN = True - user = AuthUserFactory() - projects = [] - for _ in range(7): - proj = ProjectFactory(creator=user) - proj.flag_spam() - proj.save() - projects.append(proj) - mock_mail.assert_called_with(osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - to_addr=user.username, - user=user, - mail=mails.SPAM_USER_BANNED) - 
user.reload() - assert user.is_disabled - for project in projects: - assert not project.is_public - - -@pytest.mark.enable_implicit_clean -class TestReportAbuse(DbTestCase): - - def setUp(self): - super().setUp() - self.comment = CommentFactory() - self.auth = Auth(user=self.comment.user) - - def test_report_abuse(self): - user = UserFactory() - time = timezone.now() - self.comment.report_abuse( - user, date=time, category='spam', text='ads', save=True) - assert self.comment.spam_status == SpamStatus.FLAGGED - equivalent = dict( - date=time, - category='spam', - text='ads', - retracted=False - ) - assert user._id in self.comment.reports - assert self.comment.reports[user._id] == equivalent - - def test_report_abuse_own_comment(self): - with pytest.raises(ValueError): - self.comment.report_abuse( - self.auth.user, - category='spam', text='ads', - save=True - ) - assert self.comment.spam_status == SpamStatus.UNKNOWN - - def test_retract_report(self): - user = UserFactory() - time = timezone.now() - self.comment.report_abuse( - user, date=time, category='spam', text='ads', save=True - ) - assert self.comment.spam_status == SpamStatus.FLAGGED - self.comment.retract_report(user, save=True) - assert self.comment.spam_status == SpamStatus.UNKNOWN - equivalent = { - 'date': time, - 'category': 'spam', - 'text': 'ads', - 'retracted': True - } - assert user._id in self.comment.reports - assert self.comment.reports[user._id] == equivalent - - def test_retract_report_not_reporter(self): - reporter = UserFactory() - non_reporter = UserFactory() - self.comment.report_abuse( - reporter, category='spam', text='ads', save=True - ) - with pytest.raises(ValueError): - self.comment.retract_report(non_reporter, save=True) - assert self.comment.spam_status == SpamStatus.FLAGGED - - def test_retract_one_report_of_many(self): - user_1 = UserFactory() - user_2 = UserFactory() - time = timezone.now() - self.comment.report_abuse( - user_1, date=time, category='spam', text='ads', save=True - 
) - assert self.comment.spam_status == SpamStatus.FLAGGED - self.comment.report_abuse( - user_2, date=time, category='spam', text='all', save=True - ) - self.comment.retract_report(user_1, save=True) - equivalent = { - 'date': time, - 'category': 'spam', - 'text': 'ads', - 'retracted': True - } - assert user_1._id in self.comment.reports - assert self.comment.reports[user_1._id] == equivalent - assert self.comment.spam_status == SpamStatus.FLAGGED - - def test_cannot_remove_flag_not_retracted(self): - user = UserFactory() - self.comment.report_abuse( - user, category='spam', text='ads', save=True - ) - self.comment.remove_flag(save=True) - assert self.comment.spam_status == SpamStatus.FLAGGED - - def test_remove_flag(self): - self.comment.flag_spam() - self.comment.save() - assert self.comment.spam_status == SpamStatus.FLAGGED - self.comment.remove_flag(save=True) - assert self.comment.spam_status == SpamStatus.UNKNOWN - - def test_validate_reports_bad_key(self): - self.comment.reports[None] = {'category': 'spam', 'text': 'ads'} - with pytest.raises(ValidationError): - self.comment.save() - - def test_validate_reports_bad_type(self): - self.comment.reports[self.auth.user._id] = 'not a dict' - with pytest.raises(ValidationError): - self.comment.save() - - def test_validate_reports_bad_value(self): - self.comment.reports[self.auth.user._id] = {'foo': 'bar'} - with pytest.raises(ValidationError): - self.comment.save() +from website import settings @pytest.mark.django_db diff --git a/tests/test_user_claim.py b/tests/test_user_claim.py new file mode 100644 index 00000000000..3771a7de94a --- /dev/null +++ b/tests/test_user_claim.py @@ -0,0 +1,503 @@ +from unittest.mock import ANY + +from http.cookies import SimpleCookie +from unittest import mock + +import pytest +from flask import g + +from framework.auth import Auth, authenticate, cas +from framework.auth.utils import impute_names_model +from framework.exceptions import HTTPError +from framework.flask import redirect 
+from osf.models import ( + OSFUser, + Tag, NotificationType, +) +from osf_tests.factories import ( + fake_email, + AuthUserFactory, + PreprintFactory, + ProjectFactory, + UserFactory, + UnregUserFactory, +) +from tests.base import fake, OsfTestCase +from tests.test_cas_authentication import generate_external_user_with_resp +from tests.utils import capture_notifications +from website import mails, settings +from website.project.views.contributor import send_claim_registered_email +from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag + + +@pytest.mark.enable_implicit_clean +class TestClaimViews(OsfTestCase): + + def setUp(self): + super().setUp() + self.referrer = AuthUserFactory() + self.project = ProjectFactory(creator=self.referrer, is_public=True) + self.project_with_source_tag = ProjectFactory(creator=self.referrer, is_public=True) + self.preprint_with_source_tag = PreprintFactory(creator=self.referrer, is_public=True) + osf_source_tag, created = Tag.all_tags.get_or_create(name=OsfSourceTags.Osf.value, system=True) + preprint_source_tag, created = Tag.all_tags.get_or_create(name=provider_source_tag(self.preprint_with_source_tag.provider._id, 'preprint'), system=True) + self.project_with_source_tag.add_system_tag(osf_source_tag.name) + self.preprint_with_source_tag.add_system_tag(preprint_source_tag.name) + self.given_name = fake.name() + self.given_email = fake_email() + self.project_with_source_tag.add_unregistered_contributor( + fullname=self.given_name, + email=self.given_email, + auth=Auth(user=self.referrer) + ) + self.preprint_with_source_tag.add_unregistered_contributor( + fullname=self.given_name, + email=self.given_email, + auth=Auth(user=self.referrer) + ) + self.user = self.project.add_unregistered_contributor( + fullname=self.given_name, + email=self.given_email, + auth=Auth(user=self.referrer) + ) + self.project.save() + + @mock.patch('website.project.views.contributor.send_claim_email') + def 
test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email): + name = fake.name() + email = fake_email() + + # project contributor adds an unregistered contributor (without an email) on public project + unregistered_user = self.project.add_unregistered_contributor( + fullname=name, + email=None, + auth=Auth(user=self.referrer) + ) + assert unregistered_user in self.project.contributors + + # unregistered user comes along and claims themselves on the public project, entering an email + invite_url = self.project.api_url_for('claim_user_post', uid='undefined') + self.app.post(invite_url, json={ + 'pk': unregistered_user._primary_key, + 'value': email + }) + assert claim_email.call_count == 1 + + # set unregistered record email since we are mocking send_claim_email() + unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) + unclaimed_record.update({'email': email}) + unregistered_user.save() + + # unregistered user then goes and makes an account with same email, before claiming themselves as contributor + UserFactory(username=email, fullname=name) + + # claim link for the now registered email is accessed while not logged in + token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] + claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' + res = self.app.get(claim_url) + + # should redirect to 'claim_user_registered' view + claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' + assert res.status_code == 302 + assert claim_registered_url in res.headers.get('Location') + + @mock.patch('website.project.views.contributor.send_claim_email') + def test_claim_user_already_registered_secondary_email_redirects_to_claim_user_registered(self, claim_email): + name = fake.name() + email = fake_email() + secondary_email = fake_email() + + # project contributor adds an unregistered contributor (without an email) on public 
project + unregistered_user = self.project.add_unregistered_contributor( + fullname=name, + email=None, + auth=Auth(user=self.referrer) + ) + assert unregistered_user in self.project.contributors + + # unregistered user comes along and claims themselves on the public project, entering an email + invite_url = self.project.api_url_for('claim_user_post', uid='undefined') + self.app.post(invite_url, json={ + 'pk': unregistered_user._primary_key, + 'value': secondary_email + }) + assert claim_email.call_count == 1 + + # set unregistered record email since we are mocking send_claim_email() + unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) + unclaimed_record.update({'email': secondary_email}) + unregistered_user.save() + + # unregistered user then goes and makes an account with same email, before claiming themselves as contributor + registered_user = UserFactory(username=email, fullname=name) + registered_user.emails.create(address=secondary_email) + registered_user.save() + + # claim link for the now registered email is accessed while not logged in + token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] + claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' + res = self.app.get(claim_url) + + # should redirect to 'claim_user_registered' view + claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' + assert res.status_code == 302 + assert claim_registered_url in res.headers.get('Location') + + def test_claim_user_invited_with_no_email_posts_to_claim_form(self): + given_name = fake.name() + invited_user = self.project.add_unregistered_contributor( + fullname=given_name, + email=None, + auth=Auth(user=self.referrer) + ) + self.project.save() + + url = invited_user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'password': 'bohemianrhap', + 'password2': 'bohemianrhap' + }) + assert res.status_code == 400 + + 
def test_claim_user_post_with_registered_user_id(self): + # registered user who is attempting to claim the unclaimed contributor + reg_user = UserFactory() + + with capture_notifications() as notifications: + res = self.app.post( + f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/', + json={ + # pk of unreg user record + 'pk': self.user._primary_key, + 'claimerId': reg_user._primary_key + } + ) + + # mail was sent + assert len(notifications) == 2 + # ... to the correct address + referrer_call = notifications[0] + claimer_call = notifications[1] + assert referrer_call['kwargs']['user'] == self.referrer + assert claimer_call['kwargs']['user'] == reg_user + + # view returns the correct JSON + assert res.json == { + 'status': 'success', + 'email': reg_user.username, + 'fullname': self.given_name, + } + + def test_send_claim_registered_email(self): + reg_user = UserFactory() + with capture_notifications() as notifications: + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project + ) + assert len(notifications) == 2 + first_call_args = notifications[0] + assert first_call_args['kwargs']['user'] == self.referrer + second_call_args = notifications[1] + assert second_call_args['kwargs']['user'] == reg_user + + def test_send_claim_registered_email_before_throttle_expires(self): + reg_user = UserFactory() + with capture_notifications() as notifications: + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project, + ) + assert not notifications + # second call raises error because it was called before throttle period + with capture_notifications() as notifications: + with pytest.raises(HTTPError): + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project, + ) + assert not notifications + + @mock.patch('website.project.views.contributor.send_claim_registered_email') + def 
test_claim_user_post_with_email_already_registered_sends_correct_email( + self, send_claim_registered_email): + reg_user = UserFactory() + payload = { + 'value': reg_user.username, + 'pk': self.user._primary_key + } + url = self.project.api_url_for('claim_user_post', uid=self.user._id) + self.app.post(url, json=payload) + assert send_claim_registered_email.called + + def test_user_with_removed_unclaimed_url_claiming(self): + """ Tests that when an unclaimed user is removed from a project, the + unregistered user object does not retain the token. + """ + self.project.remove_contributor(self.user, Auth(user=self.referrer)) + + assert self.project._primary_key not in self.user.unclaimed_records.keys() + + def test_user_with_claim_url_cannot_claim_twice(self): + """ Tests that when an unclaimed user is replaced on a project with a + claimed user, the unregistered user object does not retain the token. + """ + reg_user = AuthUserFactory() + + self.project.replace_contributor(self.user, reg_user) + + assert self.project._primary_key not in self.user.unclaimed_records.keys() + + def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self): + reg_user = AuthUserFactory() + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get(url, auth=reg_user.auth) + assert res.status_code == 302 + res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) + token = self.user.get_unclaimed_record(self.project._primary_key)['token'] + expected = self.project.web_url_for( + 'claim_user_registered', + uid=self.user._id, + token=token, + ) + assert res.request.path == expected + + @mock.patch('framework.auth.cas.make_response_from_ticket') + def test_claim_user_when_user_is_registered_with_orcid(self, mock_response_from_ticket): + # TODO: check in qa url encoding + token = self.user.get_unclaimed_record(self.project._primary_key)['token'] + url = f'/user/{self.user._id}/{self.project._id}/claim/verify/{token}/' + # logged out user 
gets redirected to cas login + res1 = self.app.get(url) + assert res1.status_code == 302 + res = self.app.resolve_redirect(self.app.get(url)) + service_url = f'http://localhost{url}' + expected = cas.get_logout_url(service_url=cas.get_login_url(service_url=service_url)) + assert res1.location == expected + + # user logged in with orcid automatically becomes a contributor + orcid_user, validated_credentials, cas_resp = generate_external_user_with_resp(url) + mock_response_from_ticket.return_value = authenticate( + orcid_user, + redirect(url) + ) + orcid_user.set_unusable_password() + orcid_user.save() + + # The request to OSF with CAS service ticket must not have cookie and/or auth. + service_ticket = fake.md5() + url_with_service_ticket = f'{url}?ticket={service_ticket}' + res = self.app.get(url_with_service_ticket) + # The response of this request is expected to be a 302 with `Location`. + # And the redirect URL must equal the original service URL + assert res.status_code == 302 + redirect_url = res.headers['Location'] + assert redirect_url == url + # The response of this request is expected to have the `Set-Cookie` header with OSF cookie. + # And the cookie must belong to the ORCiD user. 
+ raw_set_cookie = res.headers['Set-Cookie'] + assert raw_set_cookie + simple_cookie = SimpleCookie() + simple_cookie.load(raw_set_cookie) + cookie_dict = {key: value.value for key, value in simple_cookie.items()} + osf_cookie = cookie_dict.get(settings.COOKIE_NAME, None) + assert osf_cookie is not None + user = OSFUser.from_cookie(osf_cookie) + assert user._id == orcid_user._id + # The ORCiD user must be different from the unregistered user created when the contributor was added + assert user._id != self.user._id + + # Must clear the Flask g context manual and set the OSF cookie to context + g.current_session = None + self.app.set_cookie(settings.COOKIE_NAME, osf_cookie) + res = self.app.resolve_redirect(res) + assert res.status_code == 302 + assert self.project.is_contributor(orcid_user) + assert self.project.url in res.headers.get('Location') + + def test_get_valid_form(self): + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get(url, follow_redirects=True) + assert res.status_code == 200 + + def test_invalid_claim_form_raise_400(self): + uid = self.user._primary_key + pid = self.project._primary_key + url = f'/user/{uid}/{pid}/claim/?token=badtoken' + res = self.app.get(url, follow_redirects=True) + assert res.status_code == 400 + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'login?service=' in location + assert 'username' in location + assert 'verification_key' in location + assert self.project._primary_key in location + + self.user.reload() + assert self.user.is_registered + assert self.user.is_active + assert self.project._primary_key not in self.user.unclaimed_records + + 
@mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes): + # user has multiple unclaimed records + p2 = ProjectFactory(creator=self.referrer) + self.user.add_unclaimed_record(p2, referrer=self.referrer, + given_name=fake.name()) + self.user.save() + assert len(self.user.unclaimed_records.keys()) > 1 # sanity check + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'username': self.given_email, + 'password': 'bohemianrhap', + 'password2': 'bohemianrhap' + }) + self.user.reload() + assert self.user.unclaimed_records == {} + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes): + # User is created with a full name + original_name = fake.name() + unreg = UnregUserFactory(fullname=original_name) + # User invited with a different name + different_name = fake.name() + new_user = self.project.add_unregistered_contributor( + email=unreg.username, + fullname=different_name, + auth=Auth(self.project.creator), + ) + self.project.save() + # Goes to claim url + claim_url = new_user.get_claim_url(self.project._id) + self.app.post(claim_url, data={ + 'username': unreg.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + unreg.reload() + # Full name was set correctly + assert unreg.fullname == different_name + # CSL names were set correctly + parsed_name = impute_names_model(different_name) + assert unreg.given_name == parsed_name['given_name'] + assert unreg.family_name == parsed_name['family_name'] + + def test_claim_user_post_returns_fullname(self): + url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' + res = self.app.post( + url, + auth=self.referrer.auth, + json={ + 'value': self.given_email, + 'pk': self.user._primary_key + }, + ) + assert res.json['fullname'] == self.given_name + assert 
send_mail.called + + send_mail.assert_called_with( + self.given_email, + mails.INVITE_DEFAULT, + user=self.user, + referrer=self.referrer, + node=ANY, + claim_url=ANY, + email=self.user.email, + fullname=self.user.fullname, + branded_service=None, + osf_contact_email=settings.OSF_CONTACT_EMAIL, + can_change_preferences=False, + logo='osf_logo' + ) + + + def test_claim_user_post_if_email_is_different_from_given_email(self): + email = fake_email() # email that is different from the one the referrer gave + + with capture_notifications() as notifications: + self.app.post( + f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/', + json={ + 'value': email, + 'pk': self.user._primary_key + } + ) + assert notifications + assert len(notifications) == 2 + call_to_invited = notifications[0] + assert call_to_invited['kwargs']['user'].username == self.user.username + assert call_to_invited['type'] == NotificationType.Type.USER_PENDING_VERIFICATION.value + + call_to_referrer = notifications[1] + assert call_to_referrer['type'] == NotificationType.Type.USER_FORWARD_INVITE.value + user = call_to_referrer['kwargs']['user'] + user.reload() + assert user.username == email + + def test_claim_url_with_bad_token_returns_400(self): + url = self.project.web_url_for( + 'claim_user_registered', + uid=self.user._id, + token='badtoken', + ) + res = self.app.get(url, auth=self.referrer.auth) + assert res.status_code == 400 + + def test_cannot_claim_user_with_user_who_is_already_contributor(self): + # user who is already a contirbutor to the project + contrib = AuthUserFactory() + self.project.add_contributor(contrib, auth=Auth(self.project.creator)) + self.project.save() + # Claiming user goes to claim url, but contrib is already logged in + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get( + url, + auth=contrib.auth, follow_redirects=True) + # Response is a 400 + assert res.status_code == 400 + + def 
test_claim_user_with_project_id_adds_corresponding_claimed_tag_to_user(self): + assert OsfClaimedTags.Osf.value not in self.user.system_tags + url = self.user.get_claim_url(self.project_with_source_tag._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + self.user.reload() + assert OsfClaimedTags.Osf.value in self.user.system_tags + + def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self): + assert provider_claimed_tag( + self.preprint_with_source_tag.provider._id, + 'preprint' + ) not in self.user.system_tags + url = self.user.get_claim_url(self.preprint_with_source_tag._primary_key) + res = self.app.post( + url, + data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + } + ) + + assert res.status_code == 302 + self.user.reload() + assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags diff --git a/tests/test_user_invite_views.py b/tests/test_user_invite_views.py new file mode 100644 index 00000000000..2726782bff0 --- /dev/null +++ b/tests/test_user_invite_views.py @@ -0,0 +1,174 @@ + +from unittest.mock import ANY + +from unittest import mock + +import pytest +from rest_framework import status as http_status + +from framework.auth import Auth +from framework.exceptions import HTTPError +from osf_tests.factories import ( + fake_email, + AuthUserFactory, + ProjectFactory, + UserFactory, +) +from tests.base import ( + fake, + OsfTestCase, +) +from website import mails, settings +from website.profile.utils import add_contributor_json +from website.project.views.contributor import ( + send_claim_email, +) + + +class TestUserInviteViews(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.project = ProjectFactory(creator=self.user) + self.invite_url = 
f'/api/v1/project/{self.project._primary_key}/invite_contributor/' + + def test_invite_contributor_post_if_not_in_db(self): + name, email = fake.name(), fake_email() + res = self.app.post( + self.invite_url, + json={'fullname': name, 'email': email}, + auth=self.user.auth, + ) + contrib = res.json['contributor'] + assert contrib['id'] is None + assert contrib['fullname'] == name + assert contrib['email'] == email + + def test_invite_contributor_post_if_unreg_already_in_db(self): + # An unreg user is added to a different project + name, email = fake.name(), fake_email() + project2 = ProjectFactory() + unreg_user = project2.add_unregistered_contributor(fullname=name, email=email, + auth=Auth(project2.creator)) + project2.save() + res = self.app.post(self.invite_url, + json={'fullname': name, 'email': email}, auth=self.user.auth) + expected = add_contributor_json(unreg_user) + expected['fullname'] = name + expected['email'] = email + assert res.json['contributor'] == expected + + def test_invite_contributor_post_if_email_already_registered(self): + reg_user = UserFactory() + name, email = fake.name(), reg_user.username + # Tries to invite user that is already registered - this is now permitted. 
+ res = self.app.post(self.invite_url, + json={'fullname': name, 'email': email}, + auth=self.user.auth) + contrib = res.json['contributor'] + assert contrib['id'] == reg_user._id + assert contrib['fullname'] == name + assert contrib['email'] == email + + def test_invite_contributor_post_if_user_is_already_contributor(self): + unreg_user = self.project.add_unregistered_contributor( + fullname=fake.name(), email=fake_email(), + auth=Auth(self.project.creator) + ) + self.project.save() + # Tries to invite unreg user that is already a contributor + res = self.app.post(self.invite_url, + json={'fullname': fake.name(), 'email': unreg_user.username}, + auth=self.user.auth) + assert res.status_code == http_status.HTTP_400_BAD_REQUEST + + def test_invite_contributor_with_no_email(self): + name = fake.name() + res = self.app.post(self.invite_url, + json={'fullname': name, 'email': None}, auth=self.user.auth) + assert res.status_code == http_status.HTTP_200_OK + data = res.json + assert data['status'] == 'success' + assert data['contributor']['fullname'] == name + assert data['contributor']['email'] is None + assert not data['contributor']['registered'] + + def test_invite_contributor_requires_fullname(self): + res = self.app.post(self.invite_url, + json={'email': 'brian@queen.com', 'fullname': ''}, auth=self.user.auth, + ) + assert res.status_code == http_status.HTTP_400_BAD_REQUEST + + @mock.patch('website.project.views.contributor.mails.send_mail') + def test_send_claim_email_to_given_email(self, send_mail): + project = ProjectFactory() + given_email = fake_email() + unreg_user = project.add_unregistered_contributor( + fullname=fake.name(), + email=given_email, + auth=Auth(project.creator), + ) + project.save() + send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) + + send_mail.assert_called_with( + given_email, + mails.INVITE_DEFAULT, + user=unreg_user, + referrer=ANY, + node=project, + claim_url=ANY, + email=unreg_user.email, + 
fullname=unreg_user.fullname, + branded_service=None, + can_change_preferences=False, + logo='osf_logo', + osf_contact_email=settings.OSF_CONTACT_EMAIL + ) + + @mock.patch('website.project.views.contributor.mails.send_mail') + def test_send_claim_email_to_referrer(self, send_mail): + project = ProjectFactory() + referrer = project.creator + given_email, real_email = fake_email(), fake_email() + unreg_user = project.add_unregistered_contributor(fullname=fake.name(), + email=given_email, auth=Auth( + referrer) + ) + project.save() + send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) + + assert send_mail.called + # email was sent to referrer + send_mail.assert_called_with( + referrer.username, + mails.FORWARD_INVITE, + user=unreg_user, + referrer=referrer, + claim_url=unreg_user.get_claim_url(project._id, external=True), + email=real_email.lower().strip(), + fullname=unreg_user.get_unclaimed_record(project._id)['name'], + node=project, + branded_service=None, + can_change_preferences=False, + logo=settings.OSF_LOGO, + osf_contact_email=settings.OSF_CONTACT_EMAIL + ) + + @mock.patch('website.project.views.contributor.mails.send_mail') + def test_send_claim_email_before_throttle_expires(self, send_mail): + project = ProjectFactory() + given_email = fake_email() + unreg_user = project.add_unregistered_contributor( + fullname=fake.name(), + email=given_email, + auth=Auth(project.creator), + ) + project.save() + send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) + send_mail.reset_mock() + # 2nd call raises error because throttle hasn't expired + with pytest.raises(HTTPError): + send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) + assert not send_mail.called diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 3e1c455c078..9a95a706270 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -1,103 +1,31 @@ #!/usr/bin/env python3 
"""Views tests for the OSF.""" -from unittest.mock import MagicMock, ANY -from urllib import parse - -import datetime as dt -import time -import unittest from hashlib import md5 -from http.cookies import SimpleCookie from unittest import mock -from urllib.parse import quote_plus import pytest -from django.core.exceptions import ValidationError -from django.utils import timezone -from flask import request, g -from lxml import html -from pytest import approx from rest_framework import status as http_status from addons.github.tests.factories import GitHubAccountFactory -from addons.osfstorage import settings as osfstorage_settings -from addons.wiki.models import WikiPage -from framework import auth -from framework.auth import Auth, authenticate, cas, core -from framework.auth.campaigns import ( - get_campaigns, - is_institution_login, - is_native_login, - is_proxy_login, - campaign_url_for -) -from framework.auth.exceptions import InvalidTokenError -from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness -from framework.auth.views import login_and_register_handler from framework.celery_tasks import handlers -from framework.exceptions import HTTPError, TemplateHTTPError -from framework.flask import redirect -from framework.transactions.handlers import no_auto_transaction from osf.external.spam import tasks as spam_tasks from osf.models import ( - Comment, - AbstractNode, - OSFUser, - Tag, - SpamStatus, - NodeRelation, NotableDomain ) -from osf.utils import permissions from osf_tests.factories import ( fake_email, ApiOAuth2ApplicationFactory, ApiOAuth2PersonalTokenFactory, AuthUserFactory, - CollectionFactory, - CommentFactory, - NodeFactory, - OSFGroupFactory, - PreprintFactory, - PreprintProviderFactory, - PrivateLinkFactory, - ProjectFactory, - ProjectWithAddonFactory, - RegistrationProviderFactory, - UserFactory, - UnconfirmedUserFactory, - UnregUserFactory, RegionFactory, - DraftRegistrationFactory, ) from tests.base import ( - 
assert_is_redirect, - capture_signals, fake, - get_default_metaschema, OsfTestCase, - assert_datetime_equal, - test_app -) -from tests.test_cas_authentication import generate_external_user_with_resp -from tests.utils import run_celery_tasks -from website import mailchimp_utils, mails, settings, language -from website.profile.utils import add_contributor_json, serialize_unregistered -from website.profile.views import update_osf_help_mails_subscription -from website.project.decorators import check_can_access -from website.project.model import has_anonymous_link -from website.project.signals import contributor_added -from website.project.views.contributor import ( - deserialize_contributors, - notify_added_contributor, - send_claim_email, - send_claim_registered_email, ) -from website.project.views.node import _should_show_wiki_widget, abbrev_authors +from website import mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for, web_url_for -from website.util import rubeus -from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag @pytest.mark.enable_enqueue_task diff --git a/tests/test_webtests.py b/tests/test_webtests.py index e06be14a093..96c45529cd6 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -34,6 +34,7 @@ from osf.utils import permissions from addons.wiki.models import WikiPage, WikiVersion from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory +from tests.utils import capture_notifications from website import language from website.util import web_url_for, api_url_for @@ -731,53 +732,52 @@ def test_resend_confirmation_get(self): assert res.get_form('resendForm') # test that unconfirmed user can receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') - def test_can_receive_resend_confirmation_email(self, mock_send_mail): + def test_can_receive_resend_confirmation_email(self): # load resend 
confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') form['email'] = self.unconfirmed_user.unconfirmed_emails[0] - res = form.submit(self.app) + with capture_notifications() as notifications: + res = form.submit(self.app) # check email, request and response - assert mock_send_mail.called + assert notifications assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', res.text) # test that confirmed user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): + def test_cannot_receive_resend_confirmation_email_1(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') form['email'] = self.confirmed_user.emails.first().address - res = form.submit(self.app) + with capture_notifications() as notifications: + res = form.submit(self.app) # check email, request and response - assert not mock_send_mail.called + assert not notifications assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('has already been confirmed', res.text) # test that non-existing user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): + def test_cannot_receive_resend_confirmation_email_2(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') form['email'] = 'random@random.com' - res = form.submit(self.app) + with capture_notifications() as notifications: + res = form.submit(self.app) # check email, request and response - assert not mock_send_mail.called + assert not notifications assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', 
res.text) # test that user cannot submit resend confirmation request too quickly - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail): + def test_cannot_resend_confirmation_twice_quickly(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -820,8 +820,7 @@ def test_get_forgot_password(self): assert res.get_form('forgotPasswordForm') # test that existing user can receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_can_receive_reset_password_email(self, mock_send_mail): + def test_can_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -843,8 +842,7 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_reset_password_email(self, mock_send_mail): + def test_cannot_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -866,8 +864,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_not_active_user_no_reset_password_email(self, mock_send_mail): + def test_not_active_user_no_reset_password_email(self): self.user.deactivate_account() self.user.save() @@ -892,8 +889,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - 
@mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_reset_password_twice_quickly(self, mock_send_mail): + def test_cannot_reset_password_twice_quickly(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -939,13 +935,13 @@ def test_get_forgot_password(self): assert 'campaign=unsupportedinstitution' in location # test that user from disabled institution can receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_can_receive_reset_password_email(self, mock_send_mail): + def test_can_receive_reset_password_email(self): # submit email to institutional forgot-password page - res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) + with capture_notifications() as notifications: + res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was sent - assert mock_send_mail.called + assert not notifications # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -959,13 +955,13 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_reset_password_email(self, mock_send_mail): + def test_cannot_receive_reset_password_email(self): # load forgot password page and submit email - res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) + with capture_notifications() as notifications: + res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) # check mail was not sent - assert not mock_send_mail.called + assert not notifications # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword-institution @@ -979,15 +975,15 @@ def 
test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive institutional reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_not_active_user_no_reset_password_email(self, mock_send_mail): + def test_not_active_user_no_reset_password_email(self): self.user.deactivate_account() self.user.save() - res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) + with capture_notifications() as notifications: + res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was not sent - assert not mock_send_mail.called + assert not notifications # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword-institution @@ -1001,8 +997,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_reset_password_twice_quickly(self, mock_send_mail): + def test_cannot_reset_password_twice_quickly(self): # submit institutional forgot-password request in rapid succession res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) @@ -1014,83 +1009,6 @@ def test_cannot_reset_password_twice_quickly(self, mock_send_mail): assert_not_in_html('If there is an OSF account', res.text) -@unittest.skip('Public projects/components are dynamically loaded now.') -class TestAUserProfile(OsfTestCase): - - def setUp(self): - OsfTestCase.setUp(self) - - self.user = AuthUserFactory() - self.me = AuthUserFactory() - self.project = ProjectFactory(creator=self.me, is_public=True, title=fake.bs()) - self.component = NodeFactory(creator=self.me, parent=self.project, 
is_public=True, title=fake.bs()) - - # regression test for https://github.com/CenterForOpenScience/osf.io/issues/2623 - def test_has_public_projects_and_components(self): - # I go to my own profile - url = web_url_for('profile_view_id', uid=self.me._primary_key) - # I see the title of both my project and component - res = self.app.get(url, auth=self.me.auth) - assert_in_html(self.component.title, res) - assert_in_html(self.project.title, res) - - # Another user can also see my public project and component - url = web_url_for('profile_view_id', uid=self.me._primary_key) - # I see the title of both my project and component - res = self.app.get(url, auth=self.user.auth) - assert_in_html(self.component.title, res) - assert_in_html(self.project.title, res) - - def test_shows_projects_with_many_contributors(self): - # My project has many contributors - for _ in range(5): - user = UserFactory() - self.project.add_contributor(user, auth=Auth(self.project.creator), save=True) - - # I go to my own profile - url = web_url_for('profile_view_id', uid=self.me._primary_key) - res = self.app.get(url, auth=self.me.auth) - # I see '3 more' as a link - assert '3 more' in res.text - - res = res.click('3 more') - assert res.request.path == self.project.url - - def test_has_no_public_projects_or_components_on_own_profile(self): - # User goes to their profile - url = web_url_for('profile_view_id', uid=self.user._id) - res = self.app.get(url, auth=self.user.auth) - - # user has no public components/projects - assert 'You have no public projects' in res - assert 'You have no public components' in res - - def test_user_no_public_projects_or_components(self): - # I go to other user's profile - url = web_url_for('profile_view_id', uid=self.user._id) - # User has no public components/projects - res = self.app.get(url, auth=self.me.auth) - assert 'This user has no public projects' in res - assert 'This user has no public components'in res - - # regression test - def 
test_does_not_show_registrations(self): - project = ProjectFactory(creator=self.user) - component = NodeFactory(parent=project, creator=self.user, is_public=False) - # User has a registration with public components - reg = RegistrationFactory(project=component.parent_node, creator=self.user, is_public=True) - for each in reg.nodes: - each.is_public = True - each.save() - # I go to other user's profile - url = web_url_for('profile_view_id', uid=self.user._id) - # Registration does not appear on profile - res = self.app.get(url, auth=self.me.auth) - assert 'This user has no public components' in res - assert reg.title not in res - assert reg.nodes[0].title not in res - - @pytest.mark.enable_bookmark_creation class TestPreprintBannerView(OsfTestCase): def setUp(self): diff --git a/tests/utils.py b/tests/utils.py index 6d5f934d8ba..778a5b9ac6d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,16 +3,50 @@ import functools from unittest import mock +from django.apps import apps from django.http import HttpRequest from django.utils import timezone from framework.auth import Auth from framework.celery_tasks.handlers import celery_teardown_request from osf_tests.factories import DraftRegistrationFactory -from osf.models import Sanction +from osf.models import Sanction, NotificationType from tests.base import get_default_metaschema from website.archiver import ARCHIVER_SUCCESS from website.archiver import listeners as archiver_listeners +from contextlib import contextmanager + +@contextmanager +def capture_notifications(): + """ + Context manager to capture NotificationType emits without interfering with ORM calls. + Yields a list of captured emits: + [{'type': , 'args': ..., 'kwargs': ...}, ...] 
+ """ + NotificationType = apps.get_model('osf', 'NotificationType') + real_get = NotificationType.objects.get # Save the real .get() + + captured = [] + + def side_effect(*args, **kwargs): + notifier = real_get(*args, **kwargs) # Call the real .get() + original_emit = notifier.emit + + def wrapped_emit(*emit_args, **emit_kwargs): + captured.append({ + 'type': notifier.name, + 'args': emit_args, + 'kwargs': emit_kwargs + }) + return original_emit(*emit_args, **emit_kwargs) + + notifier.emit = wrapped_emit + return notifier + + with mock.patch('osf.models.notification.NotificationType.objects.get', side_effect=side_effect): + yield captured + + def requires_module(module): def decorator(fn): diff --git a/website/app.py b/website/app.py index 5db655a2164..6b583911020 100644 --- a/website/app.py +++ b/website/app.py @@ -13,16 +13,11 @@ from framework.django import handlers as django_handlers from framework.csrf import handlers as csrf_handlers from framework.flask import add_handlers, app -# Import necessary to initialize the root logger from framework.logging import logger as root_logger # noqa from framework.postcommit_tasks import handlers as postcommit_handlers from framework.transactions import handlers as transaction_handlers -# Imports necessary to connect signals from website.archiver import listeners # noqa -from website.mails import listeners # noqa -from website.notifications import listeners # noqa from website.identifiers import listeners # noqa -from website.reviews import listeners # noqa from werkzeug.middleware.proxy_fix import ProxyFix logger = logging.getLogger(__name__) diff --git a/website/archiver/decorators.py b/website/archiver/decorators.py deleted file mode 100644 index 0d6f46bfb37..00000000000 --- a/website/archiver/decorators.py +++ /dev/null @@ -1,25 +0,0 @@ -import functools - -from framework.exceptions import HTTPError - -from website.project.decorators import _inject_nodes -from website.archiver import ARCHIVER_NETWORK_ERROR -from 
website.archiver import signals - - -def fail_archive_on_error(func): - - @functools.wraps(func) - def wrapped(*args, **kwargs): - try: - return func(*args, **kwargs) - except HTTPError as e: - _inject_nodes(kwargs) - registration = kwargs['node'] - registration.archive_status = ARCHIVER_NETWORK_ERROR - registration.save() - signals.archive_fail.send( - registration, - errors=[str(e)] - ) - return wrapped diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 44cd7517413..631c1bc49ae 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -5,11 +5,9 @@ from django.db.models import CharField, OuterRef, Subquery from framework.auth import Auth from framework.utils import sanitize_html +from osf.models import NotificationType -from website import ( - mails, - settings -) +from website import settings from website.archiver import ( StatResult, AggregateStatResult, ARCHIVER_NETWORK_ERROR, @@ -29,79 +27,103 @@ def normalize_unicode_filenames(filename): def send_archiver_size_exceeded_mails(src, user, stat_result, url): - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_SIZE_EXCEEDED_DESK, - user=user, - src=src, - stat_result=stat_result, - can_change_preferences=False, - url=url, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_EXCEEDED + ).emit( + user=object('mockuser', (), {'username': settings.OSF_SUPPORT_EMAIL}), + event_context={ + 'user': user.id, + 'src': src._id, + 'stat_result': stat_result, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_SIZE_EXCEEDED_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_EXCEEDED, + ).emit( user=user, - src=src, - can_change_preferences=False, + event_context={ + 'user': user, + 'src': src, + 'can_change_preferences': False, + } ) def send_archiver_copy_error_mails(src, user, results, url): - mails.send_mail( - 
to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_COPY_ERROR_DESK, - user=user, - src=src, - results=results, - url=url, - can_change_preferences=False, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_COPY_ERROR + ).emit( + user=object('mockuser', (), {'username': settings.OSF_SUPPORT_EMAIL}), + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_COPY_ERROR_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_COPY_ERROR + ).emit( user=user, - src=src, - results=results, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'can_change_preferences': False, + } ) def send_archiver_file_not_found_mails(src, user, results, url): - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_FILE_NOT_FOUND_DESK, - can_change_preferences=False, - user=user, - src=src, - results=results, - url=url, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_FILE_NOT_FOUND + ).emit( + user=object('mockuser', (), {'username': settings.OSF_SUPPORT_EMAIL}), + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_FILE_NOT_FOUND_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_FILE_NOT_FOUND + ).emit( user=user, - src=src, - results=results, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'can_change_preferences': False, + } ) def send_archiver_uncaught_error_mails(src, user, results, url): - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_UNCAUGHT_ERROR_DESK, - user=user, - src=src, - results=results, - 
can_change_preferences=False, - url=url, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_UNCAUGHT_ERROR + ).emit( + user=object('mockuser', (), {'username': settings.OSF_SUPPORT_EMAIL}), + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_UNCAUGHT_ERROR_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_UNCAUGHT_ERROR + ).emit( user=user, - src=src, - results=results, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'can_change_preferences': False, + } ) diff --git a/website/conferences/views.py b/website/conferences/views.py index 4f3e7cd79ee..eca29cdca36 100644 --- a/website/conferences/views.py +++ b/website/conferences/views.py @@ -16,7 +16,7 @@ from website.conferences import utils, signals from website.conferences.message import ConferenceMessage, ConferenceError from website.ember_osf_web.decorators import ember_flag_is_active -from website.mails import CONFERENCE_SUBMITTED, CONFERENCE_INACTIVE, CONFERENCE_FAILED, CONFERENCE_DEPRECATION +from website.mails import CONFERENCE_DEPRECATION from website.mails import send_mail from website.util import web_url_for from website.util.metrics import CampaignSourceTags @@ -53,16 +53,16 @@ def meeting_hook(): logger.error(error) raise HTTPError(http_status.HTTP_406_NOT_ACCEPTABLE) - if not conference.active: - send_mail( - message.sender_email, - CONFERENCE_INACTIVE, - fullname=message.sender_display, - presentations_url=web_url_for('conference_view', _absolute=True), - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO, - ) - raise HTTPError(http_status.HTTP_406_NOT_ACCEPTABLE) + # if not conference.active: + # send_mail( + # message.sender_email, + # CONFERENCE_INACTIVE, + # fullname=message.sender_display, + # 
presentations_url=web_url_for('conference_view', _absolute=True), + # can_change_preferences=False, + # logo=settings.OSF_MEETINGS_LOGO, + # ) + # raise HTTPError(http_status.HTTP_406_NOT_ACCEPTABLE) add_poster_by_email(conference=conference, message=message) @@ -73,14 +73,14 @@ def add_poster_by_email(conference, message): :param ConferenceMessage message: """ # Fail if no attachments - if not message.attachments: - return send_mail( - message.sender_email, - CONFERENCE_FAILED, - fullname=message.sender_display, - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO - ) + # if not message.attachments: + # return send_mail( + # message.sender_email, + # CONFERENCE_FAILED, + # fullname=message.sender_display, + # can_change_preferences=False, + # logo=settings.OSF_MEETINGS_LOGO + # ) with transaction.atomic(): user, user_created = get_or_create_user( @@ -98,14 +98,6 @@ def add_poster_by_email(conference, message): user.save() # must save the user first before accessing user._id - set_password_url = web_url_for( - 'reset_password_get', - uid=user._id, - token=user.verification_key_v2['token'], - _absolute=True, - ) - else: - set_password_url = None # Always create a new meeting node node = Node.objects.create( @@ -125,35 +117,6 @@ def add_poster_by_email(conference, message): utils.upload_attachments(user, node, message.attachments) - download_url = node.web_url_for( - 'addon_view_or_download_file', - path=message.attachments[0].filename, - provider='osfstorage', - action='download', - _absolute=True, - ) - - # Send confirmation email - send_mail( - message.sender_email, - CONFERENCE_SUBMITTED, - conf_full_name=conference.name, - conf_view_url=web_url_for( - 'conference_results', - meeting=message.conference_name, - _absolute=True, - ), - fullname=message.sender_display, - user_created=user_created, - set_password_url=set_password_url, - profile_url=user.absolute_url, - node_url=node.absolute_url, - file_url=download_url, - 
presentation_type=message.conference_category.lower(), - is_spam=message.is_spam, - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO - ) if user_created: signals.osf4m_user_created.send(user, conference=conference, node=node) diff --git a/website/files/utils.py b/website/files/utils.py index 6121c4fb757..50c25cefd13 100644 --- a/website/files/utils.py +++ b/website/files/utils.py @@ -1,7 +1,7 @@ from osf.models.metadata import GuidMetadataRecord -def copy_files(src, target_node, parent=None, name=None): +def copy_files(src, target_node, parent=None, name=None, **version_filters): """Copy the files from src to the target node :param Folder src: The source to copy children from :param Node target_node: The node to copy files to @@ -18,7 +18,7 @@ def copy_files(src, target_node, parent=None, name=None): cloned.save() if src.is_file and src.versions.exists(): - fileversions = src.versions.select_related('region').order_by('-created') + fileversions = src.versions.filter(**version_filters).select_related('region').order_by('-created') most_recent_fileversion = fileversions.first() if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region: # add all original version except the most recent @@ -29,7 +29,7 @@ def copy_files(src, target_node, parent=None, name=None): new_fileversion.save() attach_versions(cloned, [new_fileversion], src) else: - attach_versions(cloned, src.versions.all(), src) + attach_versions(cloned, fileversions, src) if renaming: latest_version = cloned.versions.first() diff --git a/website/identifiers/clients/crossref.py b/website/identifiers/clients/crossref.py index d7e7da376c7..2cf038c481d 100644 --- a/website/identifiers/clients/crossref.py +++ b/website/identifiers/clients/crossref.py @@ -140,13 +140,19 @@ def build_posted_content(self, preprint, element, include_relation): for preprint_version, previous_version in zip(preprint_versions, preprint_versions[1:]): if preprint_version.version > 
preprint.version: continue + + minted_doi = previous_version.get_identifier_value('doi') + if not minted_doi: + previous_doi = self.build_doi(previous_version) + related_item = element.related_item( element.intra_work_relation( - self.build_doi(previous_version), + minted_doi or previous_doi, **{'relationship-type': 'isVersionOf', 'identifier-type': 'doi'} ) ) relations_program.append(related_item) + if len(relations_program) > 0: posted_content.append(relations_program) diff --git a/website/mails/listeners.py b/website/mails/listeners.py deleted file mode 100644 index 8304559d9ba..00000000000 --- a/website/mails/listeners.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Functions that listen for event signals and queue up emails. -All triggered emails live here. -""" - -from django.utils import timezone - -from website import settings -from framework.auth import signals as auth_signals -from website.project import signals as project_signals -from website.conferences import signals as conference_signals - - -@auth_signals.unconfirmed_user_created.connect -def queue_no_addon_email(user): - """Queue an email for user who has not connected an addon after - `settings.NO_ADDON_WAIT_TIME` months of signing up for the OSF. - """ - from osf.models.queued_mail import queue_mail, NO_ADDON - queue_mail( - to_addr=user.username, - mail=NO_ADDON, - send_at=timezone.now() + settings.NO_ADDON_WAIT_TIME, - user=user, - fullname=user.fullname - ) - -@project_signals.privacy_set_public.connect -def queue_first_public_project_email(user, node, meeting_creation): - """Queue and email after user has made their first - non-OSF4M project public. 
- """ - from osf.models.queued_mail import queue_mail, QueuedMail, NEW_PUBLIC_PROJECT_TYPE, NEW_PUBLIC_PROJECT - if not meeting_creation: - sent_mail = QueuedMail.objects.filter(user=user, email_type=NEW_PUBLIC_PROJECT_TYPE) - if not sent_mail.exists(): - queue_mail( - to_addr=user.username, - mail=NEW_PUBLIC_PROJECT, - send_at=timezone.now() + settings.NEW_PUBLIC_PROJECT_WAIT_TIME, - user=user, - nid=node._id, - fullname=user.fullname, - project_title=node.title, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) - -@conference_signals.osf4m_user_created.connect -def queue_osf4m_welcome_email(user, conference, node): - """Queue an email once a new user is created for OSF Meetings""" - from osf.models.queued_mail import queue_mail, WELCOME_OSF4M - root = (node.get_addon('osfstorage')).get_root() - root_children = [child for child in root.children if child.is_file] - queue_mail( - to_addr=user.username, - mail=WELCOME_OSF4M, - send_at=timezone.now() + settings.WELCOME_OSF4M_WAIT_TIME, - user=user, - conference=conference.name, - fullname=user.fullname, - fid=root_children[0]._id if len(root_children) else None, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - domain=settings.DOMAIN, - ) diff --git a/website/mails/mails.py b/website/mails/mails.py index 61c466fdfb8..77147f1374e 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -136,17 +136,16 @@ def send_mail( ) logger.debug('Preparing to send...') - if settings.USE_EMAIL: - if settings.USE_CELERY and celery: - logger.debug('Sending via celery...') - return mailer.apply_async(kwargs=kwargs, link=callback) - else: - logger.debug('Sending without celery') - ret = mailer(**kwargs) - if callback: - callback() + if settings.USE_CELERY and celery: + logger.debug('Sending via celery...') + return mailer.apply_async(kwargs=kwargs, link=callback) + else: + logger.debug('Sending without celery') + ret = mailer(**kwargs) + if callback: + callback() - return ret + return ret def get_english_article(word): @@ 
-321,18 +320,6 @@ def get_english_article(word): subject='[auto] Spam files audit' ) -CONFERENCE_SUBMITTED = Mail( - 'conference_submitted', - subject='Project created on OSF', -) -CONFERENCE_INACTIVE = Mail( - 'conference_inactive', - subject='OSF Error: Conference inactive', -) -CONFERENCE_FAILED = Mail( - 'conference_failed', - subject='OSF Error: No files attached', -) CONFERENCE_DEPRECATION = Mail( 'conference_deprecation', subject='Meeting Service Discontinued', diff --git a/website/mails/presends.py b/website/mails/presends.py deleted file mode 100644 index 3a3175c99ee..00000000000 --- a/website/mails/presends.py +++ /dev/null @@ -1,55 +0,0 @@ -from django.utils import timezone - -from website import settings - -def no_addon(email): - return len([addon for addon in email.user.get_addons() if addon.config.short_name != 'osfstorage']) == 0 - -def no_login(email): - from osf.models.queued_mail import QueuedMail, NO_LOGIN_TYPE - sent = QueuedMail.objects.filter(user=email.user, email_type=NO_LOGIN_TYPE).exclude(_id=email._id) - if sent.exists(): - return False - return email.user.date_last_login < timezone.now() - settings.NO_LOGIN_WAIT_TIME - -def new_public_project(email): - """ Will check to make sure the project that triggered this presend is still public - before sending the email. It also checks to make sure this is the first (and only) - new public project email to be sent - - :param email: QueuedMail object, with 'nid' in its data field - :return: boolean based on whether the email should be sent - """ - - # In line import to prevent circular importing - from osf.models import AbstractNode - - node = AbstractNode.load(email.data['nid']) - - if not node: - return False - public = email.find_sent_of_same_type_and_user() - return node.is_public and not len(public) - - -def welcome_osf4m(email): - """ presend has two functions. First is to make sure that the user has not - converted to a regular OSF user by logging in. 
Second is to populate the - data field with downloads by finding the file/project (node_settings) and - counting downloads of all files within that project - - :param email: QueuedMail object with data field including fid - :return: boolean based on whether the email should be sent - """ - # In line import to prevent circular importing - from addons.osfstorage.models import OsfStorageFileNode - if email.user.date_last_login: - if email.user.date_last_login > timezone.now() - settings.WELCOME_OSF4M_WAIT_TIME_GRACE: - return False - upload = OsfStorageFileNode.load(email.data['fid']) - if upload: - email.data['downloads'] = upload.get_download_count() - else: - email.data['downloads'] = 0 - email.save() - return True diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 4068367c505..6ec764960ff 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,30 +1,3 @@ -NODE_SUBSCRIPTIONS_AVAILABLE = { - 'comments': 'Comments added', - 'file_updated': 'Files updated' -} - -# Note: if the subscription starts with 'global_', it will be treated like a default -# subscription. If no notification type has been assigned, the user subscription -# will default to 'email_transactional'. -USER_SUBSCRIPTIONS_AVAILABLE = { - 'global_comment_replies': 'Replies to your comments', - 'global_comments': 'Comments added', - 'global_file_updated': 'Files updated', - 'global_mentions': 'Mentions added', - 'global_reviews': 'Preprint submissions updated' -} - -PROVIDER_SUBSCRIPTIONS_AVAILABLE = { - 'new_pending_submissions': 'New preprint submissions for moderators to review.' 
-} - -# Note: the python value None mean inherit from parent -NOTIFICATION_TYPES = { - 'email_transactional': 'Email when a change occurs', - 'email_digest': 'Daily email digest of all changes to this project', - 'none': 'None' -} - # Formatted file provider names for notification emails PROVIDERS = { 'osfstorage': 'OSF Storage', diff --git a/website/notifications/emails.py b/website/notifications/emails.py deleted file mode 100644 index d26d43351d5..00000000000 --- a/website/notifications/emails.py +++ /dev/null @@ -1,243 +0,0 @@ -from django.apps import apps - -from babel import dates, core, Locale - -from osf.models import AbstractNode, NotificationDigest, NotificationSubscription -from osf.utils.permissions import ADMIN, READ -from website import mails -from website.notifications import constants -from website.notifications import utils -from website.util import web_url_for - - -def notify(event, user, node, timestamp, **context): - """Retrieve appropriate ***subscription*** and passe user list - - :param event: event that triggered the notification - :param user: user who triggered notification - :param node: instance of Node - :param timestamp: time event happened - :param context: optional variables specific to templates - target_user: used with comment_replies - :return: List of user ids notifications were sent to - """ - sent_users = [] - # The user who the current comment is a reply to - target_user = context.get('target_user', None) - exclude = context.get('exclude', []) - # do not notify user who initiated the emails - exclude.append(user._id) - - event_type = utils.find_subscription_type(event) - if target_user and event_type in constants.USER_SUBSCRIPTIONS_AVAILABLE: - # global user - subscriptions = get_user_subscriptions(target_user, event_type) - else: - # local project user - subscriptions = compile_subscriptions(node, event_type, event) - - for notification_type in subscriptions: - if notification_type == 'none' or not 
subscriptions[notification_type]: - continue - # Remove excluded ids from each notification type - subscriptions[notification_type] = [guid for guid in subscriptions[notification_type] if guid not in exclude] - - # If target, they get a reply email and are removed from the general email - if target_user and target_user._id in subscriptions[notification_type]: - subscriptions[notification_type].remove(target_user._id) - store_emails([target_user._id], notification_type, 'comment_replies', user, node, timestamp, **context) - sent_users.append(target_user._id) - - if subscriptions[notification_type]: - store_emails(subscriptions[notification_type], notification_type, event_type, user, node, timestamp, **context) - sent_users.extend(subscriptions[notification_type]) - return sent_users - -def notify_mentions(event, user, node, timestamp, **context): - OSFUser = apps.get_model('osf', 'OSFUser') - recipient_ids = context.get('new_mentions', []) - recipients = OSFUser.objects.filter(guids___id__in=recipient_ids) - sent_users = notify_global_event(event, user, node, timestamp, recipients, context=context) - return sent_users - -def notify_global_event(event, sender_user, node, timestamp, recipients, template=None, context=None): - event_type = utils.find_subscription_type(event) - sent_users = [] - if not context: - context = {} - - for recipient in recipients: - subscriptions = get_user_subscriptions(recipient, event_type) - context['is_creator'] = recipient == node.creator - if node.provider: - context['has_psyarxiv_chronos_text'] = node.has_permission(recipient, ADMIN) and 'psyarxiv' in node.provider.name.lower() - for notification_type in subscriptions: - if (notification_type != 'none' and subscriptions[notification_type] and recipient._id in subscriptions[notification_type]): - store_emails([recipient._id], notification_type, event, sender_user, node, timestamp, template=template, **context) - sent_users.append(recipient._id) - - return sent_users - - -def 
store_emails(recipient_ids, notification_type, event, user, node, timestamp, abstract_provider=None, template=None, **context): - """Store notification emails - - Emails are sent via celery beat as digests - :param recipient_ids: List of user ids to send mail to. - :param notification_type: from constants.Notification_types - :param event: event that triggered notification - :param user: user who triggered the notification - :param node: instance of Node - :param timestamp: time event happened - :param context: - :return: -- - """ - OSFUser = apps.get_model('osf', 'OSFUser') - - if notification_type == 'none': - return - - # If `template` is not specified, default to using a template with name `event` - template = f'{template or event}.html.mako' - - # user whose action triggered email sending - context['user'] = user - node_lineage_ids = get_node_lineage(node) if node else [] - - for recipient_id in recipient_ids: - if recipient_id == user._id: - continue - recipient = OSFUser.load(recipient_id) - if recipient.is_disabled: - continue - context['localized_timestamp'] = localize_timestamp(timestamp, recipient) - context['recipient'] = recipient - message = mails.render_message(template, **context) - digest = NotificationDigest( - timestamp=timestamp, - send_type=notification_type, - event=event, - user=recipient, - message=message, - node_lineage=node_lineage_ids, - provider=abstract_provider - ) - digest.save() - - -def compile_subscriptions(node, event_type, event=None, level=0): - """Recurse through node and parents for subscriptions. - - :param node: current node - :param event_type: Generally node_subscriptions_available - :param event: Particular event such a file_updated that has specific file subs - :param level: How deep the recursion is - :return: a dict of notification types with lists of users. 
- """ - subscriptions = check_node(node, event_type) - if event: - subscriptions = check_node(node, event) # Gets particular event subscriptions - parent_subscriptions = compile_subscriptions(node, event_type, level=level + 1) # get node and parent subs - elif getattr(node, 'parent_id', False): - parent_subscriptions = \ - compile_subscriptions(AbstractNode.load(node.parent_id), event_type, level=level + 1) - else: - parent_subscriptions = check_node(None, event_type) - for notification_type in parent_subscriptions: - p_sub_n = parent_subscriptions[notification_type] - p_sub_n.extend(subscriptions[notification_type]) - for nt in subscriptions: - if notification_type != nt: - p_sub_n = list(set(p_sub_n).difference(set(subscriptions[nt]))) - if level == 0: - p_sub_n, removed = utils.separate_users(node, p_sub_n) - parent_subscriptions[notification_type] = p_sub_n - return parent_subscriptions - - -def check_node(node, event): - """Return subscription for a particular node and event.""" - node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} - if node: - subscription = NotificationSubscription.load(utils.to_subscription_key(node._id, event)) - for notification_type in node_subscriptions: - users = getattr(subscription, notification_type, []) - if users: - for user in users.exclude(date_disabled__isnull=False): - if node.has_permission(user, READ): - node_subscriptions[notification_type].append(user._id) - return node_subscriptions - - -def get_user_subscriptions(user, event): - if user.is_disabled: - return {} - user_subscription = NotificationSubscription.load(utils.to_subscription_key(user._id, event)) - if user_subscription: - return {key: list(getattr(user_subscription, key).all().values_list('guids___id', flat=True)) for key in constants.NOTIFICATION_TYPES} - else: - return {key: [user._id] if (event in constants.USER_SUBSCRIPTIONS_AVAILABLE and key == 'email_transactional') else [] for key in constants.NOTIFICATION_TYPES} - - -def 
get_node_lineage(node): - """ Get a list of node ids in order from the node to top most project - e.g. [parent._id, node._id] - """ - from osf.models import Preprint - lineage = [node._id] - if isinstance(node, Preprint): - return lineage - - while node.parent_id: - node = node.parent_node - lineage = [node._id] + lineage - - return lineage - - -def get_settings_url(uid, user): - if uid == user._id: - return web_url_for('user_notifications', _absolute=True) - - node = AbstractNode.load(uid) - assert node, 'get_settings_url recieved an invalid Node id' - return node.web_url_for('node_setting', _guid=True, _absolute=True) - -def fix_locale(locale): - """Atempt to fix a locale to have the correct casing, e.g. de_de -> de_DE - - This is NOT guaranteed to return a valid locale identifier. - """ - try: - language, territory = locale.split('_', 1) - except ValueError: - return locale - else: - return '_'.join([language, territory.upper()]) - -def localize_timestamp(timestamp, user): - try: - user_timezone = dates.get_timezone(user.timezone) - except LookupError: - user_timezone = dates.get_timezone('Etc/UTC') - - try: - user_locale = Locale(user.locale) - except core.UnknownLocaleError: - user_locale = Locale('en') - - # Do our best to find a valid locale - try: - user_locale.date_formats - except OSError: # An IOError will be raised if locale's casing is incorrect, e.g. de_de vs. de_DE - # Attempt to fix the locale, e.g. 
de_de -> de_DE - try: - user_locale = Locale(fix_locale(user.locale)) - user_locale.date_formats - except (core.UnknownLocaleError, OSError): - user_locale = Locale('en') - - formatted_date = dates.format_date(timestamp, format='full', locale=user_locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=user_timezone, locale=user_locale) - - return f'{formatted_time} on {formatted_date}' diff --git a/website/notifications/events/base.py b/website/notifications/events/base.py index 7378c8ced43..acf259153be 100644 --- a/website/notifications/events/base.py +++ b/website/notifications/events/base.py @@ -1,10 +1,4 @@ """Basic Event handling for events that need subscriptions""" - -from django.utils import timezone - -from website.notifications import emails - - event_registry = {} @@ -16,55 +10,5 @@ def decorator(cls): return decorator -class Event: - """Base event class for notification. - - - abstract methods set methods that should be defined by subclasses. - To use this interface you must use the class as a Super (inherited). 
- - Implement property methods in subclasses - """ - def __init__(self, user, node, action): - self.user = user - self.profile_image_url = user.profile_image_url() - self.node = node - self.action = action - self.timestamp = timezone.now() - - def perform(self): - """Call emails.notify to notify users of an action""" - emails.notify( - event=self.event_type, - user=self.user, - node=self.node, - timestamp=self.timestamp, - message=self.html_message, - profile_image_url=self.profile_image_url, - url=self.url - ) - - @property - def text_message(self): - """String: build a plain text message.""" - raise NotImplementedError - - @property - def html_message(self): - """String: build an html message.""" - raise NotImplementedError - - @property - def url(self): - """String: build a url for the message.""" - raise NotImplementedError - - @property - def event_type(self): - """String - - Examples: - _file_updated""" - raise NotImplementedError - - class RegistryError(TypeError): pass diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index fdaabad0426..e2cd823993e 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -6,19 +6,15 @@ FileEvent and ComplexFileEvent are parent classes with shared functionality. 
""" +from django.utils import timezone from furl import furl import markupsafe -from website.notifications import emails -from website.notifications.constants import NOTIFICATION_TYPES -from website.notifications import utils from website.notifications.events.base import ( register, - Event, event_registry, RegistryError, ) -from website.notifications.events import utils as event_utils from osf.models import AbstractNode, NodeLog, Preprint from addons.base.signals import file_updated as signal @@ -33,14 +29,43 @@ def file_updated(self, target=None, user=None, event_type=None, payload=None): event.perform() -class FileEvent(Event): +class FileEvent: """File event base class, should not be called directly""" - def __init__(self, user, node, event, payload=None): - super().__init__(user, node, event) + """Base event class for notification. + + - abstract methods set methods that should be defined by subclasses. + To use this interface you must use the class as a Super (inherited). + - Implement property methods in subclasses + """ + + def __init__(self, user, node, action, payload=None): + self.user = user + self.profile_image_url = user.profile_image_url() + self.node = node + self.action = action + self.timestamp = timezone.now() self.payload = payload self._url = None + def perform(self): + """Call emails.notify to notify users of an action""" + from osf.models import NotificationType, NotificationSubscription + from django.contrib.contenttypes.models import ContentType + + subscription, _ = NotificationSubscription.objects.get_or_create( + user=self.user, + notification_type=NotificationType.objects.get(name=self.action), + content_type=ContentType.objects.get_for_model(self.node.__class__), + object_id=self.node.id, + ) + subscription.emit( + user=self.user, + subscribed_object=self.node, + event_context=self.payload, + + ) + @property def html_message(self): """Most basic html message""" @@ -92,20 +117,10 @@ def url(self): class FileAdded(FileEvent): """Actual 
class called when a file is added""" - @property - def event_type(self): - return f'{self.waterbutler_id}_file_updated' - - @register(NodeLog.FILE_UPDATED) class FileUpdated(FileEvent): """Actual class called when a file is updated""" - @property - def event_type(self): - return f'{self.waterbutler_id}_file_updated' - - @register(NodeLog.FILE_REMOVED) class FileRemoved(FileEvent): """Actual class called when a file is removed""" @@ -232,55 +247,56 @@ def perform(self): This will be **much** more useful when individual files have their own subscription. """ + pass # Do this is the two nodes are the same, no one needs to know specifics of permissions if self.node == self.source_node: super().perform() return - # File - if self.payload['destination']['kind'] != 'folder': - moved, warn, rm_users = event_utils.categorize_users(self.user, self.event_type, self.source_node, - self.event_type, self.node) - warn_message = f'{self.html_message} You are no longer tracking that file based on the settings you selected for the component.' - remove_message = ( - f'{self.html_message} Your subscription has been removed due to ' - 'insufficient permissions in the new component.' - ) - # Folder - else: - # Gets all the files in a folder to look for permissions conflicts - files = event_utils.get_file_subs_from_folder(self.addon, self.user, self.payload['destination']['kind'], - self.payload['destination']['path'], - self.payload['destination']['name']) - # Bins users into different permissions - moved, warn, rm_users = event_utils.compile_user_lists(files, self.user, self.source_node, self.node) - - # For users that don't have individual file subscription but has permission on the new node - warn_message = f'{self.html_message} You are no longer tracking that folder or files within based on the settings you selected for the component.' 
- # For users without permission on the new node - remove_message = ( - f'{self.html_message} Your subscription has been removed for the ' - 'folder, or a file within, due to insufficient permissions in the new ' - 'component.' - ) - - # Move the document from one subscription to another because the old one isn't needed - utils.move_subscription(rm_users, self.event_type, self.source_node, self.event_type, self.node) - # Notify each user - for notification in NOTIFICATION_TYPES: - if notification == 'none': - continue - if moved[notification]: - emails.store_emails(moved[notification], notification, 'file_updated', self.user, self.node, - self.timestamp, message=self.html_message, - profile_image_url=self.profile_image_url, url=self.url) - if warn[notification]: - emails.store_emails(warn[notification], notification, 'file_updated', self.user, self.node, - self.timestamp, message=warn_message, profile_image_url=self.profile_image_url, - url=self.url) - if rm_users[notification]: - emails.store_emails(rm_users[notification], notification, 'file_updated', self.user, self.source_node, - self.timestamp, message=remove_message, - profile_image_url=self.profile_image_url, url=self.source_url) + # # File + # if self.payload['destination']['kind'] != 'folder': + # moved, warn, rm_users = None, None, None + # warn_message = f'{self.html_message} You are no longer tracking that file based on the settings you selected for the component.' + # remove_message = ( + # f'{self.html_message} Your subscription has been removed due to ' + # 'insufficient permissions in the new component.' 
+ # ) + # # Folder + # else: + # # Gets all the files in a folder to look for permissions conflicts + # files = None + # # Bins users into different permissions + # moved, warn, rm_users = None, None, None + # + # # For users that don't have individual file subscription but has permission on the new node + # warn_message = f'{self.html_message} You are no longer tracking that folder or files within based on the settings you selected for the component.' + # # For users without permission on the new node + # remove_message = ( + # f'{self.html_message} Your subscription has been removed for the ' + # 'folder, or a file within, due to insufficient permissions in the new ' + # 'component.' + # ) + # + # # Notify each user + # NOTIFICATION_TYPES = { + # 'none': 'none', + # 'instant': 'email_transactional', + # 'daily': 'email_digest', + # } + # for notification in NOTIFICATION_TYPES: + # if notification == 'none': + # continue + # if moved[notification]: + # emails.store_emails(moved[notification], notification, 'file_updated', self.user, self.node, + # self.timestamp, message=self.html_message, + # profile_image_url=self.profile_image_url, url=self.url) + # if warn[notification]: + # emails.store_emails(warn[notification], notification, 'file_updated', self.user, self.node, + # self.timestamp, message=warn_message, profile_image_url=self.profile_image_url, + # url=self.url) + # if rm_users[notification]: + # emails.store_emails(rm_users[notification], notification, 'file_updated', self.user, self.source_node, + # self.timestamp, message=remove_message, + # profile_image_url=self.profile_image_url, url=self.source_url) @register(NodeLog.FILE_COPIED) @@ -294,26 +310,4 @@ def perform(self): together because they both don't have a subscription to a newly copied file. """ - remove_message = self.html_message + ' You do not have permission in the new component.' 
- if self.node == self.source_node: - super().perform() - return - if self.payload['destination']['kind'] != 'folder': - moved, warn, rm_users = event_utils.categorize_users(self.user, self.event_type, self.source_node, - self.event_type, self.node) - else: - files = event_utils.get_file_subs_from_folder(self.addon, self.user, self.payload['destination']['kind'], - self.payload['destination']['path'], - self.payload['destination']['name']) - moved, warn, rm_users = event_utils.compile_user_lists(files, self.user, self.source_node, self.node) - for notification in NOTIFICATION_TYPES: - if notification == 'none': - continue - if moved[notification] or warn[notification]: - users = list(set(moved[notification]).union(set(warn[notification]))) - emails.store_emails(users, notification, 'file_updated', self.user, self.node, self.timestamp, - message=self.html_message, profile_image_url=self.profile_image_url, url=self.url) - if rm_users[notification]: - emails.store_emails(rm_users[notification], notification, 'file_updated', self.user, self.source_node, - self.timestamp, message=remove_message, - profile_image_url=self.profile_image_url, url=self.source_url) + pass diff --git a/website/notifications/events/utils.py b/website/notifications/events/utils.py deleted file mode 100644 index 83e4c79bce4..00000000000 --- a/website/notifications/events/utils.py +++ /dev/null @@ -1,141 +0,0 @@ -from itertools import product - -from website.notifications.emails import compile_subscriptions -from website.notifications import utils, constants - - -def get_file_subs_from_folder(addon, user, kind, path, name): - """Find the file tree under a specified folder.""" - folder = dict(kind=kind, path=path, name=name) - file_tree = addon._get_file_tree(filenode=folder, user=user, version='latest-published') - return list_of_files(file_tree) - - -def list_of_files(file_object): - files = [] - if file_object['kind'] == 'file': - return [file_object['path']] - else: - for child in 
file_object['children']: - files.extend(list_of_files(child)) - return files - - -def compile_user_lists(files, user, source_node, node): - """Take multiple file ids and compiles them. - - :param files: List of WaterButler paths - :param user: User who initiated action/event - :param source_node: Node instance from - :param node: Node instance to - :return: move, warn, and remove dicts - """ - # initialise subscription dictionaries - move = {key: [] for key in constants.NOTIFICATION_TYPES} - warn = {key: [] for key in constants.NOTIFICATION_TYPES} - remove = {key: [] for key in constants.NOTIFICATION_TYPES} - # get the node subscription - if len(files) == 0: - move, warn, remove = categorize_users( - user, 'file_updated', source_node, 'file_updated', node - ) - # iterate through file subscriptions - for file_path in files: - path = file_path.strip('/') - t_move, t_warn, t_remove = categorize_users( - user, path + '_file_updated', source_node, - path + '_file_updated', node - ) - # Add file subs to overall list of subscriptions - for notification in constants.NOTIFICATION_TYPES: - move[notification] = list(set(move[notification]).union(set(t_move[notification]))) - warn[notification] = list(set(warn[notification]).union(set(t_warn[notification]))) - remove[notification] = list(set(remove[notification]).union(set(t_remove[notification]))) - return move, warn, remove - - -def categorize_users(user, source_event, source_node, event, node): - """Categorize users from a file subscription into three categories. 
- - Puts users in one of three bins: - - Moved: User has permissions on both nodes, subscribed to both - - Warned: User has permissions on both, not subscribed to destination - - Removed: Does not have permission on destination node - :param user: User instance who started the event - :param source_event: _event_name - :param source_node: node from where the event happened - :param event: new guid event name - :param node: node where event ends up - :return: Moved, to be warned, and removed users. - """ - remove = utils.users_to_remove(source_event, source_node, node) - source_node_subs = compile_subscriptions(source_node, utils.find_subscription_type(source_event)) - new_subs = compile_subscriptions(node, utils.find_subscription_type(source_event), event) - - # Moves users into the warn bucket or the move bucket - move = subscriptions_users_union(source_node_subs, new_subs) - warn = subscriptions_users_difference(source_node_subs, new_subs) - - # Removes users without permissions - warn, remove = subscriptions_node_permissions(node, warn, remove) - - # Remove duplicates - warn = subscriptions_users_remove_duplicates(warn, new_subs, remove_same=False) - move = subscriptions_users_remove_duplicates(move, new_subs, remove_same=False) - - # Remove duplicates between move and warn; and move and remove - move = subscriptions_users_remove_duplicates(move, warn, remove_same=True) - move = subscriptions_users_remove_duplicates(move, remove, remove_same=True) - - for notifications in constants.NOTIFICATION_TYPES: - # Remove the user who started this whole thing. 
- user_id = user._id - if user_id in warn[notifications]: - warn[notifications].remove(user_id) - if user_id in move[notifications]: - move[notifications].remove(user_id) - if user_id in remove[notifications]: - remove[notifications].remove(user_id) - - return move, warn, remove - - -def subscriptions_node_permissions(node, warn_subscription, remove_subscription): - for notification in constants.NOTIFICATION_TYPES: - subbed, removed = utils.separate_users(node, warn_subscription[notification]) - warn_subscription[notification] = subbed - remove_subscription[notification].extend(removed) - remove_subscription[notification] = list(set(remove_subscription[notification])) - return warn_subscription, remove_subscription - - -def subscriptions_users_union(emails_1, emails_2): - return { - notification: - list( - set(emails_1[notification]).union(set(emails_2[notification])) - ) - for notification in constants.NOTIFICATION_TYPES.keys() - } - - -def subscriptions_users_difference(emails_1, emails_2): - return { - notification: - list( - set(emails_1[notification]).difference(set(emails_2[notification])) - ) - for notification in constants.NOTIFICATION_TYPES.keys() - } - - -def subscriptions_users_remove_duplicates(emails_1, emails_2, remove_same=False): - emails_list = dict(emails_1) - product_list = product(constants.NOTIFICATION_TYPES, repeat=2) - for notification_1, notification_2 in product_list: - if notification_2 == notification_1 and not remove_same or notification_2 == 'none': - continue - emails_list[notification_1] = list( - set(emails_list[notification_1]).difference(set(emails_2[notification_2])) - ) - return emails_list diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py deleted file mode 100644 index 21aed1df9e3..00000000000 --- a/website/notifications/listeners.py +++ /dev/null @@ -1,34 +0,0 @@ -import logging -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications.utils import 
subscribe_user_to_notifications, subscribe_user_to_global_notifications -from website.project.signals import contributor_added, project_created -from framework.auth.signals import user_confirmed - -logger = logging.getLogger(__name__) - -@project_created.connect -def subscribe_creator(node): - if node.is_collection or node.is_deleted: - return None - try: - subscribe_user_to_notifications(node, node.creator) - except InvalidSubscriptionError as err: - user = node.creator._id if node.creator else 'None' - logger.warning(f'Skipping subscription of user {user} to node {node._id}') - logger.warning(f'Reason: {str(err)}') - -@contributor_added.connect -def subscribe_contributor(node, contributor, auth=None, *args, **kwargs): - try: - subscribe_user_to_notifications(node, contributor) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {contributor} to node {node._id}') - logger.warning(f'Reason: {str(err)}') - -@user_confirmed.connect -def subscribe_confirmed_user(user): - try: - subscribe_user_to_global_notifications(user) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {user} to global subscriptions') - logger.warning(f'Reason: {str(err)}') diff --git a/website/notifications/tasks.py b/website/notifications/tasks.py deleted file mode 100644 index 6b7353ccdc0..00000000000 --- a/website/notifications/tasks.py +++ /dev/null @@ -1,227 +0,0 @@ -""" -Tasks for making even transactional emails consolidated. 
-""" -import itertools - -from django.db import connection - -from framework.celery_tasks import app as celery_app -from framework.sentry import log_message -from osf.models import ( - OSFUser, - AbstractNode, - AbstractProvider, - RegistrationProvider, - CollectionProvider, - NotificationDigest, -) -from osf.registrations.utils import get_registration_provider_submissions_url -from osf.utils.permissions import ADMIN -from website import mails, settings -from website.notifications.utils import NotificationsDict - - -@celery_app.task(name='website.notifications.tasks.send_users_email', max_retries=0) -def send_users_email(send_type): - """Send pending emails. - - :param send_type - :return: - """ - _send_global_and_node_emails(send_type) - _send_reviews_moderator_emails(send_type) - - -def _send_global_and_node_emails(send_type): - """ - Called by `send_users_email`. Send all global and node-related notification emails. - """ - grouped_emails = get_users_emails(send_type) - for group in grouped_emails: - user = OSFUser.load(group['user_id']) - if not user: - log_message(f"User with id={group['user_id']} not found") - continue - info = group['info'] - notification_ids = [message['_id'] for message in info] - sorted_messages = group_by_node(info) - if sorted_messages: - if not user.is_disabled: - # If there's only one node in digest we can show it's preferences link in the template. - notification_nodes = list(sorted_messages['children'].keys()) - node = AbstractNode.load(notification_nodes[0]) if len( - notification_nodes) == 1 else None - mails.send_mail( - to_addr=user.username, - can_change_node_preferences=bool(node), - node=node, - mail=mails.DIGEST, - name=user.fullname, - message=sorted_messages, - ) - remove_notifications(email_notification_ids=notification_ids) - - -def _send_reviews_moderator_emails(send_type): - """ - Called by `send_users_email`. Send all reviews triggered emails. 
- """ - grouped_emails = get_moderators_emails(send_type) - for group in grouped_emails: - user = OSFUser.load(group['user_id']) - info = group['info'] - notification_ids = [message['_id'] for message in info] - provider = AbstractProvider.objects.get(id=group['provider_id']) - additional_context = dict() - if isinstance(provider, RegistrationProvider): - provider_type = 'registration' - submissions_url = get_registration_provider_submissions_url(provider) - withdrawals_url = f'{submissions_url}?state=pending_withdraw' - notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' - if provider.brand: - additional_context = { - 'logo_url': provider.brand.hero_logo_image, - 'top_bar_color': provider.brand.primary_color - } - elif isinstance(provider, CollectionProvider): - provider_type = 'collection' - submissions_url = f'{settings.DOMAIN}collections/{provider._id}/moderation/' - notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' - if provider.brand: - additional_context = { - 'logo_url': provider.brand.hero_logo_image, - 'top_bar_color': provider.brand.primary_color - } - withdrawals_url = '' - else: - provider_type = 'preprint' - submissions_url = f'{settings.DOMAIN}reviews/preprints/{provider._id}', - withdrawals_url = '' - notification_settings_url = f'{settings.DOMAIN}reviews/{provider_type}s/{provider._id}/notifications' - - if not user.is_disabled: - mails.send_mail( - to_addr=user.username, - mail=mails.DIGEST_REVIEWS_MODERATORS, - name=user.fullname, - message=info, - provider_name=provider.name, - reviews_submissions_url=submissions_url, - notification_settings_url=notification_settings_url, - reviews_withdrawal_url=withdrawals_url, - is_reviews_moderator_notification=True, - is_admin=provider.get_group(ADMIN).user_set.filter(id=user.id).exists(), - provider_type=provider_type, - **additional_context - ) - remove_notifications(email_notification_ids=notification_ids) - - 
-def get_moderators_emails(send_type): - """Get all emails for reviews moderators that need to be sent, grouped by users AND providers. - :param send_type: from NOTIFICATION_TYPES, could be "email_digest" or "email_transactional" - :return Iterable of dicts of the form: - [ - 'user_id': 'se8ea', - 'provider_id': '1', - 'info': [ - { - 'message': 'Hana Xie submitted Gravity', - '_id': NotificationDigest._id, - } - ], - ] - """ - sql = """ - SELECT json_build_object( - 'user_id', osf_guid._id, - 'provider_id', nd.provider_id, - 'info', json_agg( - json_build_object( - 'message', nd.message, - '_id', nd._id - ) - ) - ) - FROM osf_notificationdigest AS nd - LEFT JOIN osf_guid ON nd.user_id = osf_guid.object_id - WHERE send_type = %s AND (event = 'new_pending_submissions' OR event = 'new_pending_withdraw_requests') - AND osf_guid.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser') - GROUP BY osf_guid.id, nd.provider_id - ORDER BY osf_guid.id ASC - """ - - with connection.cursor() as cursor: - cursor.execute(sql, [send_type, ]) - return itertools.chain.from_iterable(cursor.fetchall()) - - -def get_users_emails(send_type): - """Get all emails that need to be sent. - NOTE: These do not include reviews triggered emails for moderators. - - :param send_type: from NOTIFICATION_TYPES - :return: Iterable of dicts of the form: - { - 'user_id': 'se8ea', - 'info': [{ - 'message': { - 'message': 'Freddie commented on your project Open Science', - 'timestamp': datetime object - }, - 'node_lineage': ['parent._id', 'node._id'], - '_id': NotificationDigest._id - }, ... - }] - { - 'user_id': ... 
- } - } - """ - - sql = """ - SELECT json_build_object( - 'user_id', osf_guid._id, - 'info', json_agg( - json_build_object( - 'message', nd.message, - 'node_lineage', nd.node_lineage, - '_id', nd._id - ) - ) - ) - FROM osf_notificationdigest AS nd - LEFT JOIN osf_guid ON nd.user_id = osf_guid.object_id - WHERE send_type = %s - AND event != 'new_pending_submissions' - AND event != 'new_pending_withdraw_requests' - AND osf_guid.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser') - GROUP BY osf_guid.id - ORDER BY osf_guid.id ASC - """ - - with connection.cursor() as cursor: - cursor.execute(sql, [send_type, ]) - return itertools.chain.from_iterable(cursor.fetchall()) - - -def group_by_node(notifications, limit=15): - """Take list of notifications and group by node. - - :param notifications: List of stored email notifications - :return: - """ - emails = NotificationsDict() - for notification in notifications[:15]: - emails.add_message(notification['node_lineage'], notification['message']) - return emails - - -def remove_notifications(email_notification_ids=None): - """Remove sent emails. 
- - :param email_notification_ids: - :return: - """ - if email_notification_ids: - NotificationDigest.objects.filter(_id__in=email_notification_ids).delete() diff --git a/website/notifications/utils.py b/website/notifications/utils.py deleted file mode 100644 index af8275ab5fb..00000000000 --- a/website/notifications/utils.py +++ /dev/null @@ -1,522 +0,0 @@ -import collections - -from django.apps import apps -from django.db.models import Q - -from framework.postcommit_tasks.handlers import run_postcommit -from osf.utils.permissions import READ -from website.notifications import constants -from website.notifications.exceptions import InvalidSubscriptionError -from website.project import signals - -from framework.celery_tasks import app - - -class NotificationsDict(dict): - def __init__(self): - super().__init__() - self.update(messages=[], children=collections.defaultdict(NotificationsDict)) - - def add_message(self, keys, messages): - """ - :param keys: ordered list of project ids from parent to node (e.g. ['parent._id', 'node._id']) - :param messages: built email message for an event that occurred on the node - :return: nested dict with project/component ids as the keys with the message at the appropriate level - """ - d_to_use = self - - for key in keys: - d_to_use = d_to_use['children'][key] - - if not isinstance(messages, list): - messages = [messages] - - d_to_use['messages'].extend(messages) - - -def find_subscription_type(subscription): - """Find subscription type string within specific subscription. - Essentially removes extraneous parts of the string to get the type. 
- """ - subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys()) - subs_available.extend(list(constants.NODE_SUBSCRIPTIONS_AVAILABLE.keys())) - for available in subs_available: - if available in subscription: - return available - - -def to_subscription_key(uid, event): - """Build the Subscription primary key for the given guid and event""" - return f'{uid}_{event}' - - -def from_subscription_key(key): - parsed_key = key.split('_', 1) - return { - 'uid': parsed_key[0], - 'event': parsed_key[1] - } - - -@signals.contributor_removed.connect -def remove_contributor_from_subscriptions(node, user): - """ Remove contributor from node subscriptions unless the user is an - admin on any of node's parent projects. - """ - Preprint = apps.get_model('osf.Preprint') - DraftRegistration = apps.get_model('osf.DraftRegistration') - # Preprints don't have subscriptions at this time - if isinstance(node, Preprint): - return - if isinstance(node, DraftRegistration): - return - - # If user still has permissions through being a contributor or group member, or has - # admin perms on a parent, don't remove their subscription - if not (node.is_contributor_or_group_member(user)) and user._id not in node.admin_contributor_or_group_member_ids: - node_subscriptions = get_all_node_subscriptions(user, node) - for subscription in node_subscriptions: - subscription.remove_user_from_subscription(user) - - -@signals.node_deleted.connect -def remove_subscription(node): - remove_subscription_task(node._id) - -@signals.node_deleted.connect -def remove_supplemental_node(node): - remove_supplemental_node_from_preprints(node._id) - -@run_postcommit(once_per_request=False, celery=True) -@app.task(max_retries=5, default_retry_delay=60) -def remove_subscription_task(node_id): - AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - - node = AbstractNode.load(node_id) - 
NotificationSubscription.objects.filter(node=node).delete() - parent = node.parent_node - - if parent and parent.child_node_subscriptions: - for user_id in parent.child_node_subscriptions: - if node._id in parent.child_node_subscriptions[user_id]: - parent.child_node_subscriptions[user_id].remove(node._id) - parent.save() - - -@run_postcommit(once_per_request=False, celery=True) -@app.task(max_retries=5, default_retry_delay=60) -def remove_supplemental_node_from_preprints(node_id): - AbstractNode = apps.get_model('osf.AbstractNode') - - node = AbstractNode.load(node_id) - for preprint in node.preprints.all(): - if preprint.node is not None: - preprint.node = None - preprint.save() - - -def separate_users(node, user_ids): - """Separates users into ones with permissions and ones without given a list. - :param node: Node to separate based on permissions - :param user_ids: List of ids, will also take and return User instances - :return: list of subbed, list of removed user ids - """ - OSFUser = apps.get_model('osf.OSFUser') - removed = [] - subbed = [] - for user_id in user_ids: - try: - user = OSFUser.load(user_id) - except TypeError: - user = user_id - if node.has_permission(user, READ): - subbed.append(user_id) - else: - removed.append(user_id) - return subbed, removed - - -def users_to_remove(source_event, source_node, new_node): - """Find users that do not have permissions on new_node. - :param source_event: such as _file_updated - :param source_node: Node instance where a subscription currently resides - :param new_node: Node instance where a sub or new sub will be. 
- :return: Dict of notification type lists with user_ids - """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - removed_users = {key: [] for key in constants.NOTIFICATION_TYPES} - if source_node == new_node: - return removed_users - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) - old_node_sub = NotificationSubscription.load(to_subscription_key(source_node._id, - '_'.join(source_event.split('_')[-2:]))) - if not old_sub and not old_node_sub: - return removed_users - for notification_type in constants.NOTIFICATION_TYPES: - users = [] - if hasattr(old_sub, notification_type): - users += list(getattr(old_sub, notification_type).values_list('guids___id', flat=True)) - if hasattr(old_node_sub, notification_type): - users += list(getattr(old_node_sub, notification_type).values_list('guids___id', flat=True)) - subbed, removed_users[notification_type] = separate_users(new_node, users) - return removed_users - - -def move_subscription(remove_users, source_event, source_node, new_event, new_node): - """Moves subscription from old_node to new_node - :param remove_users: dictionary of lists of users to remove from the subscription - :param source_event: A specific guid event _file_updated - :param source_node: Instance of Node - :param new_event: A specific guid event - :param new_node: Instance of Node - :return: Returns a NOTIFICATION_TYPES list of removed users without permissions - """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - OSFUser = apps.get_model('osf.OSFUser') - if source_node == new_node: - return - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) - if not old_sub: - return - elif old_sub: - old_sub._id = to_subscription_key(new_node._id, new_event) - old_sub.event_name = new_event - old_sub.owner = new_node - new_sub = old_sub - new_sub.save() - # Remove users that don't have permission on the new node. 
- for notification_type in constants.NOTIFICATION_TYPES: - if new_sub: - for user_id in remove_users[notification_type]: - related_manager = getattr(new_sub, notification_type, None) - subscriptions = related_manager.all() if related_manager else [] - if user_id in subscriptions: - user = OSFUser.load(user_id) - new_sub.remove_user_from_subscription(user) - - -def get_configured_projects(user): - """Filter all user subscriptions for ones that are on parent projects - and return the node objects. - :param user: OSFUser object - :return: list of node objects for projects with no parent - """ - configured_projects = set() - user_subscriptions = get_all_user_subscriptions(user, extra=( - ~Q(node__type='osf.collection') & - ~Q(node__type='osf.quickfilesnode') & - Q(node__is_deleted=False) - )) - - for subscription in user_subscriptions: - # If the user has opted out of emails skip - node = subscription.owner - - if ( - (subscription.none.filter(id=user.id).exists() and not node.parent_id) or - node._id not in user.notifications_configured - ): - continue - - root = node.root - - if not root.is_deleted: - configured_projects.add(root) - - return sorted(configured_projects, key=lambda n: n.title.lower()) - - -def check_project_subscriptions_are_all_none(user, node): - node_subscriptions = get_all_node_subscriptions(user, node) - for s in node_subscriptions: - if not s.none.filter(id=user.id).exists(): - return False - return True - - -def get_all_user_subscriptions(user, extra=None): - """ Get all Subscription objects that the user is subscribed to""" - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - queryset = NotificationSubscription.objects.filter( - Q(none=user.pk) | - Q(email_digest=user.pk) | - Q(email_transactional=user.pk) - ).distinct() - return queryset.filter(extra) if extra else queryset - - -def get_all_node_subscriptions(user, node, user_subscriptions=None): - """ Get all Subscription objects for a node that the user is subscribed 
to - :param user: OSFUser object - :param node: Node object - :param user_subscriptions: all Subscription objects that the user is subscribed to - :return: list of Subscription objects for a node that the user is subscribed to - """ - if not user_subscriptions: - user_subscriptions = get_all_user_subscriptions(user) - return user_subscriptions.filter(user__isnull=True, node=node) - - -def format_data(user, nodes): - """ Format subscriptions data for project settings page - :param user: OSFUser object - :param nodes: list of parent project node objects - :return: treebeard-formatted data - """ - items = [] - - user_subscriptions = get_all_user_subscriptions(user) - for node in nodes: - assert node, f'{node._id} is not a valid Node.' - - can_read = node.has_permission(user, READ) - can_read_children = node.has_permission_on_children(user, READ) - - if not can_read and not can_read_children: - continue - - children = node.get_nodes(**{'is_deleted': False, 'is_node_link': False}) - children_tree = [] - # List project/node if user has at least READ permissions (contributor or admin viewer) or if - # user is contributor on a component of the project/node - - if can_read: - node_sub_available = list(constants.NODE_SUBSCRIPTIONS_AVAILABLE.keys()) - subscriptions = get_all_node_subscriptions(user, node, user_subscriptions=user_subscriptions).filter(event_name__in=node_sub_available) - - for subscription in subscriptions: - index = node_sub_available.index(getattr(subscription, 'event_name')) - children_tree.append(serialize_event(user, subscription=subscription, - node=node, event_description=node_sub_available.pop(index))) - for node_sub in node_sub_available: - children_tree.append(serialize_event(user, node=node, event_description=node_sub)) - children_tree.sort(key=lambda s: s['event']['title']) - - children_tree.extend(format_data(user, children)) - - item = { - 'node': { - 'id': node._id, - 'url': node.url if can_read else '', - 'title': node.title if can_read else 
'Private Project', - }, - 'children': children_tree, - 'kind': 'folder' if not node.parent_node or not node.parent_node.has_permission(user, READ) else 'node', - 'nodeType': node.project_or_component, - 'category': node.category, - 'permissions': { - 'view': can_read, - }, - } - - items.append(item) - - return items - - -def format_user_subscriptions(user): - """ Format user-level subscriptions (e.g. comment replies across the OSF) for user settings page""" - user_subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys()) - subscriptions = [ - serialize_event( - user, subscription, - event_description=user_subs_available.pop(user_subs_available.index(getattr(subscription, 'event_name'))) - ) - for subscription in get_all_user_subscriptions(user) - if subscription is not None and getattr(subscription, 'event_name') in user_subs_available - ] - subscriptions.extend([serialize_event(user, event_description=sub) for sub in user_subs_available]) - return subscriptions - - -def format_file_subscription(user, node_id, path, provider): - """Format a single file event""" - AbstractNode = apps.get_model('osf.AbstractNode') - node = AbstractNode.load(node_id) - wb_path = path.lstrip('/') - for subscription in get_all_node_subscriptions(user, node): - if wb_path in getattr(subscription, 'event_name'): - return serialize_event(user, subscription, node) - return serialize_event(user, node=node, event_description='file_updated') - - -all_subs = constants.NODE_SUBSCRIPTIONS_AVAILABLE.copy() -all_subs.update(constants.USER_SUBSCRIPTIONS_AVAILABLE) - -def serialize_event(user, subscription=None, node=None, event_description=None): - """ - :param user: OSFUser object - :param subscription: Subscription object, use if parsing particular subscription - :param node: Node object, use if node is known - :param event_description: use if specific subscription is known - :return: treebeard-formatted subscription event - """ - if not event_description: - event_description = 
getattr(subscription, 'event_name') - # Looks at only the types available. Deals with pre-pending file names. - for sub_type in all_subs: - if sub_type in event_description: - event_type = sub_type - else: - event_type = event_description - if node and node.parent_node: - notification_type = 'adopt_parent' - elif event_type.startswith('global_'): - notification_type = 'email_transactional' - else: - notification_type = 'none' - if subscription: - for n_type in constants.NOTIFICATION_TYPES: - if getattr(subscription, n_type).filter(id=user.id).exists(): - notification_type = n_type - return { - 'event': { - 'title': event_description, - 'description': all_subs[event_type], - 'notificationType': notification_type, - 'parent_notification_type': get_parent_notification_type(node, event_type, user) - }, - 'kind': 'event', - 'children': [] - } - - -def get_parent_notification_type(node, event, user): - """ - Given an event on a node (e.g. comment on node 'xyz'), find the user's notification - type on the parent project for the same event. - :param obj node: event owner (Node or User object) - :param str event: notification event (e.g. 'comment_replies') - :param obj user: OSFUser object - :return: str notification type (e.g. 
'email_transactional') - """ - AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - - if node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, READ): - parent = node.parent_node - key = to_subscription_key(parent._id, event) - try: - subscription = NotificationSubscription.objects.get(_id=key) - except NotificationSubscription.DoesNotExist: - return get_parent_notification_type(parent, event, user) - - for notification_type in constants.NOTIFICATION_TYPES: - if getattr(subscription, notification_type).filter(id=user.id).exists(): - return notification_type - else: - return get_parent_notification_type(parent, event, user) - else: - return None - - -def get_global_notification_type(global_subscription, user): - """ - Given a global subscription (e.g. NotificationSubscription object with event_type - 'global_file_updated'), find the user's notification type. - :param obj global_subscription: NotificationSubscription object - :param obj user: OSFUser object - :return: str notification type (e.g. 'email_transactional') - """ - for notification_type in constants.NOTIFICATION_TYPES: - # TODO Optimize me - if getattr(global_subscription, notification_type).filter(id=user.id).exists(): - return notification_type - - -def check_if_all_global_subscriptions_are_none(user): - # This function predates comment mentions, which is a global_ notification that cannot be disabled - # Therefore, an actual check would never return True. 
- # If this changes, an optimized query would look something like: - # not NotificationSubscription.objects.filter(Q(event_name__startswith='global_') & (Q(email_digest=user.pk)|Q(email_transactional=user.pk))).exists() - return False - - -def subscribe_user_to_global_notifications(user): - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - notification_type = 'email_transactional' - user_events = constants.USER_SUBSCRIPTIONS_AVAILABLE - for user_event in user_events: - user_event_id = to_subscription_key(user._id, user_event) - - # get_or_create saves on creation - subscription, created = NotificationSubscription.objects.get_or_create(_id=user_event_id, user=user, event_name=user_event) - subscription.add_user_to_subscription(user, notification_type) - subscription.save() - - -def subscribe_user_to_notifications(node, user): - """ Update the notification settings for the creator or contributors - :param user: User to subscribe to notifications - """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - Preprint = apps.get_model('osf.Preprint') - DraftRegistration = apps.get_model('osf.DraftRegistration') - if isinstance(node, Preprint): - raise InvalidSubscriptionError('Preprints are invalid targets for subscriptions at this time.') - - if isinstance(node, DraftRegistration): - raise InvalidSubscriptionError('DraftRegistrations are invalid targets for subscriptions at this time.') - - if node.is_collection: - raise InvalidSubscriptionError('Collections are invalid targets for subscriptions') - - if node.is_deleted: - raise InvalidSubscriptionError('Deleted Nodes are invalid targets for subscriptions') - - if getattr(node, 'is_registration', False): - raise InvalidSubscriptionError('Registrations are invalid targets for subscriptions') - - events = constants.NODE_SUBSCRIPTIONS_AVAILABLE - notification_type = 'email_transactional' - target_id = node._id - - if user.is_registered: - for event in events: - event_id = 
to_subscription_key(target_id, event) - global_event_id = to_subscription_key(user._id, 'global_' + event) - global_subscription = NotificationSubscription.load(global_event_id) - - subscription = NotificationSubscription.load(event_id) - - # If no subscription for component and creator is the user, do not create subscription - # If no subscription exists for the component, this means that it should adopt its - # parent's settings - if not (node and node.parent_node and not subscription and node.creator == user): - if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=node, event_name=event) - # Need to save here in order to access m2m fields - subscription.save() - if global_subscription: - global_notification_type = get_global_notification_type(global_subscription, user) - subscription.add_user_to_subscription(user, global_notification_type) - else: - subscription.add_user_to_subscription(user, notification_type) - subscription.save() - - -def format_user_and_project_subscriptions(user): - """ Format subscriptions data for user settings page. """ - return [ - { - 'node': { - 'id': user._id, - 'title': 'Default Notification Settings', - 'help': 'These are default settings for new projects you create ' + - 'or are added to. Modifying these settings will not ' + - 'modify settings on existing projects.' - }, - 'kind': 'heading', - 'children': format_user_subscriptions(user) - }, - { - 'node': { - 'id': '', - 'title': 'Project Notifications', - 'help': 'These are settings for each of your projects. Modifying ' + - 'these settings will only modify the settings for the selected project.' 
- }, - 'kind': 'heading', - 'children': format_data(user, get_configured_projects(user)) - }] diff --git a/website/notifications/views.py b/website/notifications/views.py index 8ca4775367d..cd82c68d0bd 100644 --- a/website/notifications/views.py +++ b/website/notifications/views.py @@ -8,29 +8,6 @@ from osf.models import AbstractNode, NotificationSubscription, Registration from osf.utils.permissions import READ -from website.notifications import utils -from website.notifications.constants import NOTIFICATION_TYPES -from website.project.decorators import must_be_valid_project - - -@must_be_logged_in -def get_subscriptions(auth): - return utils.format_user_and_project_subscriptions(auth.user) - - -@must_be_logged_in -@must_be_valid_project -def get_node_subscriptions(auth, **kwargs): - node = kwargs.get('node') or kwargs['project'] - return utils.format_data(auth.user, [node]) - - -@must_be_logged_in -def get_file_subscriptions(auth, **kwargs): - node_id = request.args.get('node_id') - path = request.args.get('path') - provider = request.args.get('provider') - return utils.format_file_subscription(auth.user, node_id, path, provider) @must_be_logged_in @@ -43,6 +20,12 @@ def configure_subscription(auth): path = json_data.get('path') provider = json_data.get('provider') + NOTIFICATION_TYPES = { + 'none': 'none', + 'instant': 'email_transactional', + 'daily': 'email_digest', + } + if not event or (notification_type not in NOTIFICATION_TYPES and notification_type != 'adopt_parent'): raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data=dict( message_long='Must provide an event and notification type for subscription.') @@ -52,7 +35,7 @@ def configure_subscription(auth): if 'file_updated' in event and path is not None and provider is not None: wb_path = path.lstrip('/') event = wb_path + '_file_updated' - event_id = utils.to_subscription_key(target_id, event) + event_id = event if not node: # if target_id is not a node it currently must be the current user @@ -95,25 
+78,22 @@ def configure_subscription(auth): raise HTTPError(http_status.HTTP_400_BAD_REQUEST) # If adopt_parent make sure that this subscription is None for the current User - subscription = NotificationSubscription.load(event_id) - if not subscription: - return {} # We're done here - + subscription = NotificationSubscription.objects.get( + notification_type__name=event_id, + user=user + ) subscription.remove_user_from_subscription(user) return {} - subscription = NotificationSubscription.load(event_id) - - if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=owner, event_name=event) - subscription.save() + subscription = NotificationSubscription.objects.get_or_create( + notification_type__name=event_id, + user=owner + ) if node and node._id not in user.notifications_configured: user.notifications_configured[node._id] = True user.save() - subscription.add_user_to_subscription(user, notification_type) - subscription.save() return {'message': f'Successfully subscribed to {notification_type} list on {event_id}'} diff --git a/website/osf_groups/views.py b/website/osf_groups/views.py index b8b9d6aa638..bcab7a022e3 100644 --- a/website/osf_groups/views.py +++ b/website/osf_groups/views.py @@ -1,55 +1,11 @@ import logging -from framework.utils import get_timestamp, throttle_period_expired +from framework.utils import get_timestamp +from osf.models import NotificationType -from website import mails, settings -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications.utils import ( - check_if_all_global_subscriptions_are_none, - subscribe_user_to_notifications, -) -from website.osf_groups.signals import ( - unreg_member_added, - member_added, - group_added_to_node, -) +from website import settings logger = logging.getLogger(__name__) - -@member_added.connect -def notify_added_group_member(group, user, permission, auth=None, throttle=None, email_template='default', *args, **kwargs): - if email_template == 
'false': - return - - throttle = throttle or settings.GROUP_MEMBER_ADDED_EMAIL_THROTTLE - - member_record = user.member_added_email_records.get(group._id, {}) - if member_record: - timestamp = member_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return - else: - user.member_added_email_records[group._id] = {} - - if user.is_registered: - email_template = mails.GROUP_MEMBER_ADDED - mails.send_mail( - to_addr=user.username, - mail=email_template, - user=user, - group_name=group.name, - permission=permission, - referrer_name=auth.user.fullname if auth else '', - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - user.member_added_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - - else: - unreg_member_added.send(group, user=user, permission=permission, auth=auth, throttle=throttle, email_template=email_template) - - def send_claim_member_email(email, user, group, permission, auth=None, throttle=None, email_template='default'): """ Unregistered user claiming a user account as a group member of an OSFGroup. Send an email for claiming the account. 
@@ -67,69 +23,19 @@ def send_claim_member_email(email, user, group, permission, auth=None, throttle= throttle = throttle or settings.GROUP_MEMBER_ADDED_EMAIL_THROTTLE - mails.send_mail( - to_addr=claimer_email, - mail=email_template, + NotificationType.objects.get( + name=email_template.tpl_prefix + ).emit( user=user, - group_name=group.name, - referrer_name=auth.user.fullname if auth else '', - permission=permission, - claim_url=claim_url, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + 'group_name': group.name, + 'referrer_name': auth.user.fullname if auth else '', + 'permission': permission, + 'claim_url': claim_url, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) user.member_added_email_records[group._id]['last_sent'] = get_timestamp() user.save() return claimer_email - - -@unreg_member_added.connect -def finalize_invitation(group, user, permission, auth, throttle, email_template='default'): - email_template = mails.GROUP_MEMBER_UNREGISTERED_ADDED - - try: - record = user.get_unclaimed_record(group._id) - except ValueError: - pass - else: - if record['email']: - send_claim_member_email(record['email'], user, group, permission, auth=auth, throttle=throttle, email_template=email_template) - - -@group_added_to_node.connect -def notify_added_node_group_member(group, node, user, permission, auth, throttle=None): - throttle = throttle or settings.GROUP_CONNECTED_EMAIL_THROTTLE - - node_group_record = user.group_connected_email_records.get(group._id, {}) - if node_group_record: - timestamp = node_group_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return - else: - user.group_connected_email_records[group._id] = {} - - if (not auth or auth.user != user) and user.is_registered: - email_template = mails.GROUP_ADDED_TO_NODE - mails.send_mail( - to_addr=user.username, - mail=email_template, - user=user, - node=node, - 
all_global_subscriptions_none=check_if_all_global_subscriptions_are_none(user), - group_name=group.name, - permission=permission, - referrer_name=auth.user.fullname if auth else '', - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - - user.group_connected_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - -@group_added_to_node.connect -def subscribe_group_member(group, node, user, permission, auth, throttle=None): - try: - subscribe_user_to_notifications(node, user) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {user} to node {node._id}') - logger.warning(f'Reason: {str(err)}') diff --git a/website/preprints/tasks.py b/website/preprints/tasks.py index 3175f0764ef..71498c11024 100644 --- a/website/preprints/tasks.py +++ b/website/preprints/tasks.py @@ -6,6 +6,7 @@ from framework.postcommit_tasks.handlers import enqueue_postcommit_task, get_task_from_postcommit_queue +CROSSREF_FAIL_RETRY_DELAY = 12 * 60 * 60 logger = logging.getLogger(__name__) @@ -62,3 +63,24 @@ def update_or_enqueue_on_preprint_updated(preprint_id, saved_fields=None): }, celery=True ) + + +@celery_app.task(bind=True, acks_late=True, max_retries=5, default_retry_delay=CROSSREF_FAIL_RETRY_DELAY) +def mint_doi_on_crossref_fail(self, preprint_id): + from osf.models import Preprint + preprint = Preprint.load(preprint_id) + existing_versions_without_minted_doi = Preprint.objects.filter( + versioned_guids__guid=preprint.versioned_guids.first().guid, + versioned_guids__version__lt=preprint.versioned_guids.first().version, + preprint_doi_created__isnull=True + ).exclude(id=preprint.id) + if existing_versions_without_minted_doi: + logger.error( + f'There are existing preprint versions for preprint with guid {preprint._id} that are missing DOIs. 
Versions: ' + f'{list(existing_versions_without_minted_doi.values_list('versioned_guids__version', flat=True))}' + ) + self.retry() + else: + crossref_client = preprint.get_doi_client() + if crossref_client: + crossref_client.create_identifier(preprint, category='doi', include_relation=False) diff --git a/website/profile/views.py b/website/profile/views.py index c4306b92125..bc8e91765d8 100644 --- a/website/profile/views.py +++ b/website/profile/views.py @@ -26,10 +26,9 @@ from framework.utils import throttle_period_expired from osf import features -from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken, OSFUser +from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken, OSFUser, NotificationType from osf.exceptions import BlockedEmailError, OSFError from osf.utils.requests import string_type_request_headers -from website import mails from website import mailchimp_utils from website import settings from website import language @@ -188,16 +187,16 @@ def update_user(auth): # make sure the new username has already been confirmed if username and username != user.username and user.emails.filter(address=username).exists(): - - mails.send_mail( - user.username, - mails.PRIMARY_EMAIL_CHANGED, + NotificationType.objects.get( + name=NotificationType.Type.USER_PRIMARY_EMAIL_CHANGED + ).emit( user=user, - new_address=username, - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL + event_context={ + 'new_address': username, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) - # Remove old primary email from subscribed mailing lists for list_name, subscription in user.mailchimp_mailing_lists.items(): if subscription: @@ -806,11 +805,13 @@ def request_export(auth): data={'message_long': 'Too many requests. 
Please wait a while before sending another account export request.', 'error_type': 'throttle_error'}) - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.REQUEST_EXPORT, - user=auth.user, - can_change_preferences=False, + NotificationType.objects.get( + name=NotificationType.Type.DESK_REQUEST_EXPORT + ).emit( + user=user, + event_context={ + 'can_change_preferences': False + } ) user.email_last_sent = timezone.now() user.save() diff --git a/website/project/decorators.py b/website/project/decorators.py index 0e165146250..2d60be5359b 100644 --- a/website/project/decorators.py +++ b/website/project/decorators.py @@ -173,25 +173,6 @@ def wrapped(*args, **kwargs): return wrapped -def must_be_registration(func): - - @functools.wraps(func) - def wrapped(*args, **kwargs): - _inject_nodes(kwargs) - node = kwargs['node'] - - if not node.is_registration: - raise HTTPError( - http_status.HTTP_400_BAD_REQUEST, - data={ - 'message_short': 'Registered Nodes only', - 'message_long': 'This view is restricted to registered Nodes only', - } - ) - return func(*args, **kwargs) - - return wrapped - def check_can_download_preprint_file(user, node): """View helper that returns whether a given user can download unpublished preprint files. 
diff --git a/website/project/licenses/__init__.py b/website/project/licenses/__init__.py index 69e34744a96..07095936cfe 100644 --- a/website/project/licenses/__init__.py +++ b/website/project/licenses/__init__.py @@ -6,7 +6,7 @@ from osf.utils import permissions -def set_license(node, license_detail, auth, node_type='node'): +def set_license(node, license_detail, auth, node_type='node', ignore_permission=False): NodeLicense = apps.get_model('osf.NodeLicense') NodeLicenseRecord = apps.get_model('osf.NodeLicenseRecord') @@ -26,7 +26,7 @@ def set_license(node, license_detail, auth, node_type='node'): ): return {}, False - if not node.has_permission(auth.user, permissions.WRITE): + if not ignore_permission and not node.has_permission(auth.user, permissions.WRITE): raise framework_exceptions.PermissionsError(f'You need admin or write permissions to change a {node_type}\'s license') try: diff --git a/website/project/views/comment.py b/website/project/views/comment.py index eb8d6b16271..a736a7d1007 100644 --- a/website/project/views/comment.py +++ b/website/project/views/comment.py @@ -1,19 +1,11 @@ import markdown -from django.utils import timezone -from flask import request -from api.caching.tasks import ban_url from osf.models import Guid -from framework.postcommit_tasks.handlers import enqueue_postcommit_task from website import settings from addons.base.signals import file_updated from osf.models import BaseFileNode, TrashedFileNode from osf.models import Comment -from website.notifications.constants import PROVIDERS -from website.notifications.emails import notify, notify_mentions -from website.project.decorators import must_be_contributor_or_public from osf.models import Node -from website.project.signals import comment_added, mention_added @file_updated.connect @@ -105,98 +97,3 @@ def update_comment_node(root_target_id, source_node, destination_node): def render_email_markdown(content): return markdown.markdown(content, extensions=['markdown_del_ins', 
'markdown.extensions.tables', 'markdown.extensions.fenced_code']) - - -@comment_added.connect -def send_comment_added_notification(comment, auth, new_mentions=None): - if not new_mentions: - new_mentions = [] - node = comment.node - target = comment.target - - context = dict( - profile_image_url=auth.user.profile_image_url(), - content=render_email_markdown(comment.content), - page_type=comment.get_comment_page_type(), - page_title=comment.get_comment_page_title(), - provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '', - target_user=target.referent.user if is_reply(target) else None, - parent_comment=target.referent.content if is_reply(target) else '', - url=comment.get_comment_page_url(), - exclude=new_mentions, - ) - time_now = timezone.now() - sent_subscribers = notify( - event='comments', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - if is_reply(target): - if target.referent.user and target.referent.user._id not in sent_subscribers: - notify( - event='global_comment_replies', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - -@mention_added.connect -def send_mention_added_notification(comment, new_mentions, auth): - node = comment.node - target = comment.target - - context = dict( - profile_image_url=auth.user.profile_image_url(), - content=render_email_markdown(comment.content), - page_type='file' if comment.page == Comment.FILES else node.project_or_component, - page_title=comment.root_target.referent.name if comment.page == Comment.FILES else '', - provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '', - target_user=target.referent.user if is_reply(target) else None, - parent_comment=target.referent.content if is_reply(target) else '', - new_mentions=new_mentions, - url=comment.get_comment_page_url() - ) - time_now = timezone.now() - notify_mentions( - event='global_mentions', - user=auth.user, - node=node, - 
timestamp=time_now, - **context - ) - - -def is_reply(target): - return isinstance(target.referent, Comment) - - -def _update_comments_timestamp(auth, node, page=Comment.OVERVIEW, root_id=None): - if node.is_contributor_or_group_member(auth.user): - enqueue_postcommit_task(ban_url, (node, ), {}, celery=False, once_per_request=True) - if root_id is not None: - guid_obj = Guid.load(root_id) - if guid_obj is not None: - # FIXME: Doesn't work because we're not using Vanish anymore - # enqueue_postcommit_task(ban_url, (self.get_node(),), {}, celery=False, once_per_request=True) - pass - - # update node timestamp - if page == Comment.OVERVIEW: - root_id = node._id - auth.user.comments_viewed_timestamp[root_id] = timezone.now() - auth.user.save() - return {root_id: auth.user.comments_viewed_timestamp[root_id].isoformat()} - else: - return {} - -@must_be_contributor_or_public -def update_comments_timestamp(auth, node, **kwargs): - timestamp_info = request.get_json() - page = timestamp_info.get('page') - root_id = timestamp_info.get('rootId') - return _update_comments_timestamp(auth, node, page, root_id) diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 485298eb8cb..edce2a618a7 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -19,11 +19,19 @@ from framework.utils import get_timestamp, throttle_period_expired from osf.models import Tag from osf.exceptions import NodeStateError -from osf.models import AbstractNode, DraftRegistration, OSFGroup, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor +from osf.models import ( + AbstractNode, + DraftRegistration, + OSFGroup, + OSFUser, + Preprint, + PreprintProvider, + RecentlyAddedContributor, + NotificationType +) from osf.utils import sanitize from osf.utils.permissions import ADMIN -from website import mails, language, settings -from website.notifications.utils import check_if_all_global_subscriptions_are_none +from website 
import language, settings from website.profile import utils as profile_utils from website.project.decorators import (must_have_permission, must_be_valid_project, must_not_be_registration, must_be_contributor_or_public, must_be_contributor) @@ -421,29 +429,33 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 ) # Send mail to referrer, telling them to forward verification link to claimer - mails.send_mail( - referrer.username, - mails.FORWARD_INVITE_REGISTERED, - user=unclaimed_user, - referrer=referrer, - node=node, - claim_url=claim_url, - fullname=unclaimed_record['name'], - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.USER_FORWARD_INVITE_REGISTERED + ).emit( + user=referrer, + event_context={ + 'claim_url': claim_url, + 'fullname': unclaimed_record['name'], + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) unclaimed_record['last_sent'] = get_timestamp() unclaimed_user.save() # Send mail to claimer, telling them to wait for referrer - mails.send_mail( - claimer.username, - mails.PENDING_VERIFICATION_REGISTERED, - fullname=claimer.fullname, - referrer=referrer, - node=node, - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED + ).emit( + user=claimer, + event_context={ + 'claim_url': claim_url, + 'fullname': unclaimed_record['name'], + 'referrer': referrer.username, + 'node': node.title, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) @@ -469,8 +481,6 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 claimer_email = email.lower().strip() unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key) referrer = OSFUser.load(unclaimed_record['referrer_id']) - claim_url = 
unclaimed_user.get_claim_url(node._primary_key, external=True) - # Option 1: # When adding the contributor, the referrer provides both name and email. # The given email is the same provided by user, just send to that email. @@ -479,15 +489,15 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 # check email template for branded preprints if email_template == 'preprint': if node.provider.is_default: - mail_tpl = mails.INVITE_OSF_PREPRINT + notification_type = NotificationType.Type.USER_INVITE_OSF_PREPRINT logo = settings.OSF_PREPRINTS_LOGO else: - mail_tpl = mails.INVITE_PREPRINT(node.provider) + notification_type = NotificationType.Type.PROVIDER_USER_INVITE_PREPRINT logo = node.provider._id elif email_template == 'draft_registration': - mail_tpl = mails.INVITE_DRAFT_REGISTRATION + notification_type = NotificationType.Type.USER_INVITE_DRAFT_REGISTRATION else: - mail_tpl = mails.INVITE_DEFAULT + notification_type = NotificationType.Type.USER_INVITE_DEFAULT to_addr = claimer_email unclaimed_record['claimer_email'] = claimer_email @@ -512,112 +522,129 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 unclaimed_record['claimer_email'] = claimer_email unclaimed_user.save() - claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True) - # send an email to the invited user without `claim_url` if notify: - pending_mail = mails.PENDING_VERIFICATION - mails.send_mail( - claimer_email, - pending_mail, + NotificationType.objects.get( + name=NotificationType.Type.USER_PENDING_VERIFICATION + ).emit( user=unclaimed_user, - referrer=referrer, - fullname=unclaimed_record['name'], - node=node, - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + 'user': unclaimed_user.id, + 'referrer': referrer.id, + 'fullname': unclaimed_record['name'], + 'node': node.id, + 'logo': logo, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + 
} ) - mail_tpl = mails.FORWARD_INVITE + notification_type = NotificationType.Type.USER_FORWARD_INVITE to_addr = referrer.username - # Send an email to the claimer (Option 1) or to the referrer (Option 2) with `claim_url` - mails.send_mail( - to_addr, - mail_tpl, - user=unclaimed_user, - referrer=referrer, - node=node, - claim_url=claim_url, - email=claimer_email, - fullname=unclaimed_record['name'], - branded_service=node.provider, - can_change_preferences=False, - logo=logo if logo else settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + NotificationType.objects.get(name=notification_type.value).emit( + user=referrer, + event_context={ + 'user': unclaimed_user.id, + 'referrer': referrer.id, + 'fullname': unclaimed_record['name'], + 'node': node.id, + 'logo': logo, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) return to_addr - def check_email_throttle(node, contributor, throttle=None): - throttle = throttle or settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE - contributor_record = contributor.contributor_added_email_records.get(node._id, {}) - if contributor_record: - timestamp = contributor_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return True - else: - contributor.contributor_added_email_records[node._id] = {} + """ + Check whether a 'contributor added' notification was sent recently + (within the throttle period) for the given node and contributor. + Args: + node (AbstractNode): The node to check. + contributor (OSFUser): The contributor being notified. + throttle (int, optional): Throttle period in seconds (defaults to CONTRIBUTOR_ADDED_EMAIL_THROTTLE setting). 
-@contributor_added.connect -def notify_added_contributor(node, contributor, auth=None, email_template='default', throttle=None, *args, **kwargs): - logo = settings.OSF_LOGO - if check_email_throttle(node, contributor, throttle=throttle): - return - if email_template == 'false': - return - if not getattr(node, 'is_published', True): - return - if not contributor.is_registered: - unreg_contributor_added.send( - node, - contributor=contributor, - auth=auth, - email_template=email_template - ) - return + Returns: + bool: True if throttled (email was sent recently), False otherwise. + """ + from osf.models import Notification, NotificationType, NotificationSubscription + from website import settings - # Email users for projects, or for components where they are not contributors on the parent node. - contrib_on_parent_node = isinstance(node, (Preprint, DraftRegistration)) or \ - (not node.parent_node or (node.parent_node and not node.parent_node.is_contributor(contributor))) - if contrib_on_parent_node: - if email_template == 'preprint': - if node.provider.is_default: - email_template = mails.CONTRIBUTOR_ADDED_OSF_PREPRINT - logo = settings.OSF_PREPRINTS_LOGO - else: - email_template = mails.CONTRIBUTOR_ADDED_PREPRINT(node.provider) - logo = node.provider._id - elif email_template == 'draft_registration': - email_template = mails.CONTRIBUTOR_ADDED_DRAFT_REGISTRATION - elif email_template == 'access_request': - email_template = mails.CONTRIBUTOR_ADDED_ACCESS_REQUEST - elif node.has_linked_published_preprints: - # Project holds supplemental materials for a published preprint - email_template = mails.CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF - logo = settings.OSF_PREPRINTS_LOGO - else: - email_template = mails.CONTRIBUTOR_ADDED_DEFAULT + throttle = throttle or settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE - mails.send_mail( - to_addr=contributor.username, - mail=email_template, - user=contributor, - node=node, - referrer_name=auth.user.fullname if auth else '', - 
is_initiator=getattr(auth, 'user', False) == contributor, - all_global_subscriptions_none=check_if_all_global_subscriptions_are_none(contributor), - branded_service=node.provider, - can_change_preferences=False, - logo=logo, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - published_preprints=[] if isinstance(node, (Preprint, DraftRegistration)) else serialize_preprints(node, user=None) + try: + notification_type = NotificationType.objects.get( + name=NotificationType.Type.NODE_COMMENT.value # TODO(review): NODE_COMMENT is a placeholder — use the dedicated 'contributor added' notification type ) - - contributor.contributor_added_email_records[node._id]['last_sent'] = get_timestamp() - contributor.save() - + except NotificationType.DoesNotExist: + return False # Fail-safe: if the notification type isn't set up, don't throttle + from django.contrib.contenttypes.models import ContentType + from datetime import timedelta + + # Check for an active subscription for this contributor and this node + subscription = NotificationSubscription.objects.filter( + user=contributor, + notification_type=notification_type, + content_type=ContentType.objects.get_for_model(node), + object_id=str(node.id) + ).first() + + if not subscription: + return False # No subscription means no previous notifications, so no throttling + + # Check the most recent Notification for this subscription + last_notification = Notification.objects.filter( + subscription=subscription, + sent__isnull=False + ).order_by('-sent').first() + + if last_notification and last_notification.sent: + cutoff_time = timezone.now() - timedelta(seconds=throttle) + return last_notification.sent > cutoff_time + + return False # No previous sent notification, not throttled + +def notify_added_contributor(node, contributor, auth=None, notification_type=None): + """Send a notification to a contributor who was just added to a node. + + Handles: + - Unregistered contributor invitations. + - Registered contributor notifications.
+ - Throttle checks are no longer performed here; callers should use check_email_throttle. + + Args: + node (AbstractNode): The node to which the contributor was added. + contributor (OSFUser): The user being added. + auth (Auth, optional): Authorization context. + notification_type (str, optional): Template identifier (default: USER_CONTRIBUTOR_ADDED_DEFAULT). + throttle (int, optional): Deprecated; no longer accepted by this function (throttling moved to check_email_throttle). + """ + logo = settings.OSF_LOGO + notification_type = notification_type or NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT + + if node and getattr(node, 'has_linked_published_preprints', None): + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF + logo = settings.OSF_PREPRINTS_LOGO + + provider = node.provider + NotificationType.objects.get( + name=notification_type + ).emit( + user=contributor, + event_context={ + 'user': contributor.id, + 'node': node.id, + 'referrer_name': auth.user.fullname if auth else '', + 'is_initiator': getattr(auth, 'user', False) == contributor.id, + 'all_global_subscriptions_none': False, + 'branded_service': getattr(provider, 'id', None), + 'can_change_preferences': False, + 'logo': logo, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + 'published_preprints': [] if isinstance(node, (Preprint, DraftRegistration)) else serialize_preprints(node, user=None), + } + ) @contributor_added.connect def add_recently_added_contributor(node, contributor, auth=None, *args, **kwargs): diff --git a/website/project/views/register.py index 11a5da7f53c..265fda1edea 100644 --- a/website/project/views/register.py +++ b/website/project/views/register.py @@ -7,17 +7,12 @@ from framework.exceptions import HTTPError from framework.flask import redirect # VOL-aware redirect -from framework.auth.decorators import must_be_signed - -from website.archiver import ARCHIVER_SUCCESS, ARCHIVER_FAILURE - -from addons.base.views import DOWNLOAD_ACTIONS from website import settings from osf.exceptions import NodeStateError from
website.project.decorators import ( must_be_valid_project, must_be_contributor_or_public, must_have_permission, must_be_contributor_and_not_group_member, - must_not_be_registration, must_be_registration, + must_not_be_registration, must_not_be_retracted_registration ) from osf import features @@ -26,12 +21,10 @@ from osf.utils.permissions import ADMIN from website import language from website.ember_osf_web.decorators import ember_flag_is_active -from website.project import signals as project_signals from website.project.metadata.schemas import _id_to_name from website import util from website.project.metadata.utils import serialize_meta_schema from website.project.model import has_anonymous_link -from website.archiver.decorators import fail_archive_on_error from .node import _view_project from api.waffle.utils import flag_is_active @@ -228,28 +221,3 @@ def get_referent_by_identifier(category, value): if identifier.referent.url: return redirect(identifier.referent.url) raise HTTPError(http_status.HTTP_404_NOT_FOUND) - -@fail_archive_on_error -@must_be_signed -@must_be_registration -def registration_callbacks(node, payload, *args, **kwargs): - if payload.get('action', None) in DOWNLOAD_ACTIONS: - return {'status': 'success'} - errors = payload.get('errors') - src_provider = payload['source']['provider'] - if errors: - node.archive_job.update_target( - src_provider, - ARCHIVER_FAILURE, - errors=errors, - ) - else: - # Dataverse requires two seperate targets, one - # for draft files and one for published files - if src_provider == 'dataverse': - src_provider += '-' + (payload['destination']['name'].split(' ')[-1].lstrip('(').rstrip(')').strip()) - node.archive_job.update_target( - src_provider, - ARCHIVER_SUCCESS, - ) - project_signals.archive_callback.send(node) diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 27a15c2c337..ab08fe005fc 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,47 +1,14 @@ -from 
django.utils import timezone - -from website.notifications import utils -from website.mails import mails from website.reviews import signals as reviews_signals from website.settings import OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO, DOMAIN - - -@reviews_signals.reviews_email.connect -def reviews_notification(self, creator, template, context, action): - """ - Handle email notifications including: update comment, accept, and reject of submission, but not initial submission - or resubmission. - """ - # Avoid AppRegistryNotReady error - from website.notifications.emails import notify_global_event - recipients = list(action.target.contributors) - time_now = action.created if action is not None else timezone.now() - node = action.target - notify_global_event( - event='global_reviews', - sender_user=creator, - node=node, - timestamp=time_now, - recipients=recipients, - template=template, - context=context - ) - +from osf.models import NotificationType @reviews_signals.reviews_email_submit.connect def reviews_submit_notification(self, recipients, context, template=None): """ - Handle email notifications for a new submission or a resubmission + Handle notifications for a new submission or resubmission (creator confirmation). 
""" - if not template: - template = mails.REVIEWS_SUBMISSION_CONFIRMATION - - # Avoid AppRegistryNotReady error - from website.notifications.emails import get_user_subscriptions - - event_type = utils.find_subscription_type('global_reviews') - provider = context['reviewable'].provider + if provider._id == 'osf': if provider.type == 'osf.preprintprovider': context['logo'] = OSF_PREPRINTS_LOGO @@ -50,190 +17,118 @@ def reviews_submit_notification(self, recipients, context, template=None): else: raise NotImplementedError() else: - context['logo'] = context['reviewable'].provider._id + context['logo'] = provider._id + + notification_type = NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION + ) for recipient in recipients: - user_subscriptions = get_user_subscriptions(recipient, event_type) - context['no_future_emails'] = user_subscriptions['none'] - context['is_creator'] = recipient == context['reviewable'].creator - context['provider_name'] = context['reviewable'].provider.name - mails.send_mail( - recipient.username, - template, + notification_type.emit( user=recipient, - **context + subscribed_object=provider, + event_context=context ) @reviews_signals.reviews_email_submit_moderators_notifications.connect def reviews_submit_notification_moderators(self, timestamp, context): """ - Handle email notifications to notify moderators of new submissions or resubmission. + Notify moderators of new submissions or resubmissions. 
""" - # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription from website.profile.utils import get_profile_image_url - from website.notifications.emails import store_emails resource = context['reviewable'] provider = resource.provider - # Set submission url if provider.type == 'osf.preprintprovider': - context['reviews_submission_url'] = ( - f'{DOMAIN}reviews/preprints/{provider._id}/{resource._id}' - ) + context['reviews_submission_url'] = f'{DOMAIN}reviews/preprints/{provider._id}/{resource._id}' elif provider.type == 'osf.registrationprovider': context['reviews_submission_url'] = f'{DOMAIN}{resource._id}?mode=moderator' else: raise NotImplementedError(f'unsupported provider type {provider.type}') - # Set url for profile image of the submitter context['profile_image_url'] = get_profile_image_url(context['referrer']) - - # Set message revision_id = context.get('revision_id') if revision_id: context['message'] = f'submitted updates to "{resource.title}".' context['reviews_submission_url'] += f'&revisionId={revision_id}' else: - if context.get('resubmission'): - context['message'] = f'resubmitted "{resource.title}".' - else: - context['message'] = f'submitted "{resource.title}".' + context['message'] = f'resubmitted "{resource.title}".' if context.get('resubmission') else f'submitted "{resource.title}".' - # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscription.objects.get_or_create( - _id=f'{provider._id}_new_pending_submissions', - provider=provider + notification_type = NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION ) - # "transactional" subscribers receive notifications "Immediately" (i.e. 
at 5 minute intervals) - # "digest" subscribers receive emails daily - recipients_per_subscription_type = { - 'email_transactional': list( - provider_subscription.email_transactional.all().values_list('guids___id', flat=True) - ), - 'email_digest': list( - provider_subscription.email_digest.all().values_list('guids___id', flat=True) - ) - } - - for subscription_type, recipient_ids in recipients_per_subscription_type.items(): - if not recipient_ids: - continue - - store_emails( - recipient_ids, - subscription_type, - 'new_pending_submissions', - context['referrer'], - resource, - timestamp, - abstract_provider=provider, - **context + subscriptions = notification_type.notificationsubscription_set.filter( + subscribed_object=provider + ).select_related('user') + + for subscription in subscriptions: + subscription.emit( + user=subscription.user, + subscribed_object=provider, + event_context=context ) -# Handle email notifications to notify moderators of new submissions. + @reviews_signals.reviews_withdraw_requests_notification_moderators.connect def reviews_withdraw_requests_notification_moderators(self, timestamp, context): - # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + """ + Notify moderators of new withdrawal requests. + """ from website.profile.utils import get_profile_image_url - from website.notifications.emails import store_emails resource = context['reviewable'] provider = resource.provider - # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscription.objects.get_or_create( - _id=f'{provider._id}_new_pending_withdraw_requests', - provider=provider - ) - - # Set message context['message'] = f'has requested withdrawal of "{resource.title}".' 
- # Set url for profile image of the submitter context['profile_image_url'] = get_profile_image_url(context['referrer']) - # Set submission url context['reviews_submission_url'] = f'{DOMAIN}reviews/registries/{provider._id}/{resource._id}' - email_transactional_ids = list(provider_subscription.email_transactional.all().values_list('guids___id', flat=True)) - email_digest_ids = list(provider_subscription.email_digest.all().values_list('guids___id', flat=True)) - - # Store emails to be sent to subscribers instantly (at a 5 min interval) - store_emails( - email_transactional_ids, - 'email_transactional', - 'new_pending_withdraw_requests', - context['referrer'], - resource, - timestamp, - abstract_provider=provider, - template='new_pending_submissions', - **context + notification_type = NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION ) - # Store emails to be sent to subscribers daily - store_emails( - email_digest_ids, - 'email_digest', - 'new_pending_withdraw_requests', - context['referrer'], - resource, - timestamp, - abstract_provider=provider, - template='new_pending_submissions', - **context - ) + subscriptions = notification_type.notificationsubscription_set.filter( + subscribed_object=provider + ).select_related('user') + + for subscription in subscriptions: + subscription.emit( + user=subscription.user, + subscribed_object=provider, + event_context=context + ) + -# Handle email notifications to notify moderators of new withdrawal requests @reviews_signals.reviews_email_withdrawal_requests.connect def reviews_withdrawal_requests_notification(self, timestamp, context): - # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription - from website.notifications.emails import store_emails + """ + Notify moderators of withdrawal requests (preprint context). 
+ """ from website.profile.utils import get_profile_image_url from website import settings - # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription = NotificationSubscription.load( - '{}_new_pending_submissions'.format(context['reviewable'].provider._id)) preprint = context['reviewable'] - preprint_word = preprint.provider.preprint_word + provider = preprint.provider + preprint_word = provider.preprint_word - # Set message context['message'] = f'has requested withdrawal of the {preprint_word} "{preprint.title}".' - # Set url for profile image of the submitter context['profile_image_url'] = get_profile_image_url(context['requester']) - # Set submission url - context['reviews_submission_url'] = '{}reviews/preprints/{}/{}'.format(settings.DOMAIN, - preprint.provider._id, - preprint._id) - - email_transactional_ids = list(provider_subscription.email_transactional.all().values_list('guids___id', flat=True)) - email_digest_ids = list(provider_subscription.email_digest.all().values_list('guids___id', flat=True)) - - # Store emails to be sent to subscribers instantly (at a 5 min interval) - store_emails( - email_transactional_ids, - 'email_transactional', - 'new_pending_submissions', - context['requester'], - preprint, - timestamp, - abstract_provider=preprint.provider, - **context - ) + context['reviews_submission_url'] = f'{settings.DOMAIN}reviews/preprints/{provider._id}/{preprint._id}' - # Store emails to be sent to subscribers daily - store_emails( - email_digest_ids, - 'email_digest', - 'new_pending_submissions', - context['requester'], - preprint, - timestamp, - abstract_provider=preprint.provider, - **context + notification_type = NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION ) + + subscriptions = notification_type.notificationsubscription_set.filter( + subscribed_object=provider + ).select_related('user') + + for subscription in subscriptions: + 
subscription.emit( + user=subscription.user, + subscribed_object=provider, + event_context=context + ) diff --git a/website/reviews/signals.py b/website/reviews/signals.py index dba4f5a4780..7a37414cd37 100644 --- a/website/reviews/signals.py +++ b/website/reviews/signals.py @@ -2,7 +2,6 @@ signals = blinker.Namespace() -reviews_email = signals.signal('reviews_email') reviews_email_submit = signals.signal('reviews_email_submit') reviews_email_submit_moderators_notifications = signals.signal('reviews_email_submit_moderators_notifications') reviews_withdraw_requests_notification_moderators = signals.signal('reviews_withdraw_requests_notification_moderators') diff --git a/website/routes.py b/website/routes.py index f6144b09f50..dd68bff5ce6 100644 --- a/website/routes.py +++ b/website/routes.py @@ -620,17 +620,6 @@ def make_url_map(app): ### Metadata ### process_rules(app, [ - - Rule( - [ - '/project//comments/timestamps/', - '/project//node//comments/timestamps/', - ], - 'put', - project_views.comment.update_comments_timestamp, - json_renderer, - ), - Rule( [ '/project//citation/', @@ -1694,14 +1683,6 @@ def make_url_map(app): addon_views.create_waterbutler_log, json_renderer, ), - Rule( - [ - '/registration//callbacks/', - ], - 'put', - project_views.register.registration_callbacks, - json_renderer, - ), Rule( '/settings/addons/', 'post', @@ -1723,23 +1704,6 @@ def make_url_map(app): json_renderer, ), - Rule( - '/subscriptions/', - 'get', - notification_views.get_subscriptions, - json_renderer, - ), - - Rule( - [ - '/project//subscriptions/', - '/project//node//subscriptions/' - ], - 'get', - notification_views.get_node_subscriptions, - json_renderer, - ), - Rule( [ '/project//tree/', diff --git a/website/settings/defaults.py b/website/settings/defaults.py index d891e886873..5a22be7f31c 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -32,7 +32,6 @@ def parent_dir(path): ROOT = os.path.join(BASE_PATH, '..') BCRYPT_LOG_ROUNDS = 12 
LOG_LEVEL = logging.INFO -TEST_ENV = False with open(os.path.join(APP_PATH, 'package.json')) as fobj: VERSION = json.load(fobj)['version'] @@ -140,42 +139,11 @@ def parent_dir(path): # External services USE_CDN_FOR_CLIENT_LIBS = True -USE_EMAIL = True -FROM_EMAIL = 'openscienceframework-noreply@osf.io' - # support email OSF_SUPPORT_EMAIL = 'support@osf.io' # contact email OSF_CONTACT_EMAIL = 'contact@osf.io' -# prereg email -PREREG_EMAIL = 'prereg@cos.io' - -# Default settings for fake email address generation -FAKE_EMAIL_NAME = 'freddiemercury' -FAKE_EMAIL_DOMAIN = 'cos.io' - -# SMTP Settings -MAIL_SERVER = 'smtp.sendgrid.net' -MAIL_USERNAME = 'osf-smtp' -MAIL_PASSWORD = '' # Set this in local.py - -# OR, if using Sendgrid's API -# WARNING: If `SENDGRID_WHITELIST_MODE` is True, -# `tasks.send_email` would only email recipients included in `SENDGRID_EMAIL_WHITELIST` -SENDGRID_API_KEY = None -SENDGRID_WHITELIST_MODE = False -SENDGRID_EMAIL_WHITELIST = [] - -# Mailchimp -MAILCHIMP_API_KEY = None -MAILCHIMP_WEBHOOK_SECRET_KEY = 'CHANGEME' # OSF secret key to ensure webhook is secure -ENABLE_EMAIL_SUBSCRIPTIONS = True -MAILCHIMP_GENERAL_LIST = 'Open Science Framework General' -MAILCHIMP_LIST_MAP = { - MAILCHIMP_GENERAL_LIST: '123', -} - #Triggered emails OSF_HELP_LIST = 'Open Science Framework Help' PREREG_AGE_LIMIT = timedelta(weeks=12) @@ -561,15 +529,20 @@ class CeleryConfig: # Setting up a scheduler, essentially replaces an independent cron job # Note: these times must be in UTC beat_schedule = { - '5-minute-emails': { - 'task': 'website.notifications.tasks.send_users_email', - 'schedule': crontab(minute='*/5'), - 'args': ('email_transactional',), - }, - 'daily-emails': { - 'task': 'website.notifications.tasks.send_users_email', + 'daily-digests': { + 'task': 'website.notifications.tasks.send_notifications', 'schedule': crontab(minute=0, hour=5), # Daily at 12 a.m. 
EST - 'args': ('email_digest',), + 'args': ('daily',), + }, + 'weekly-digests': { + 'task': 'website.notifications.tasks.send_notifications', + 'schedule': crontab(minute=0, day_of_week=1), + 'args': ('weekly',), + }, + 'monthly-emails': { + 'task': 'website.notifications.tasks.send_notifications', + 'schedule': crontab(day_of_month=1), + 'args': ('monthly',), }, # 'refresh_addons': { # Handled by GravyValet now # 'task': 'scripts.refresh_addon_tokens', @@ -2127,3 +2100,32 @@ def from_node_usage(cls, usage_bytes, private_limit=None, public_limit=None): # path to newrelic.ini config file # newrelic is only enabled when DEBUG_MODE is False NEWRELIC_INI_PATH = None + + +### NOTIFICATION SETTINGS +USE_EMAIL = True +FROM_EMAIL = 'openscienceframework-noreply@osf.io' + +NOTIFICATION_TYPES_YAML = 'osf/notifications.yaml' +# Switches to SMTP server +ENABLE_TEST_EMAIL = False + +# SMTP Settings +MAIL_SERVER = '' # set to development osf server +MAIL_USERNAME = 'osf-smtp' +MAIL_PASSWORD = '' # Set this in local.py + +# OR, if using Sendgrid's API +# WARNING: If `SENDGRID_WHITELIST_MODE` is True, +# `tasks.send_email` would only email recipients included in `SENDGRID_EMAIL_WHITELIST` +SENDGRID_API_KEY = None +SENDGRID_WHITELIST_MODE = False +SENDGRID_EMAIL_WHITELIST = [] + +MAILCHIMP_API_KEY = None +MAILCHIMP_WEBHOOK_SECRET_KEY = 'CHANGEME' # OSF secret key to ensure webhook is secure +ENABLE_EMAIL_SUBSCRIPTIONS = True +MAILCHIMP_GENERAL_LIST = 'Open Science Framework General' +MAILCHIMP_LIST_MAP = { + MAILCHIMP_GENERAL_LIST: '123', +} diff --git a/website/signals.py b/website/signals.py index c1b8660dcd4..f141ed9fab7 100644 --- a/website/signals.py +++ b/website/signals.py @@ -4,8 +4,6 @@ from website.project import signals as project from addons.base import signals as event from website.conferences import signals as conference -from website.reviews import signals as reviews - ALL_SIGNALS = [ # TODO: Fix project.comment_added, @@ -24,5 +22,4 @@ auth.unconfirmed_user_created, 
event.file_updated, conference.osf4m_user_created, - reviews.reviews_email ] diff --git a/website/templates/emails/conference_failed.html.mako b/website/templates/emails/conference_failed.html.mako deleted file mode 100644 index c64e44f210e..00000000000 --- a/website/templates/emails/conference_failed.html.mako +++ /dev/null @@ -1,16 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-
- You recently tried to create a project on the Open Science Framework via email, but your message did not contain any file attachments. Please try again, making sure to attach the files you'd like to upload to your message.
-
- - Sincerely yours,
-
- The OSF Robot
- - - diff --git a/website/templates/emails/conference_inactive.html.mako b/website/templates/emails/conference_inactive.html.mako deleted file mode 100644 index f5547a50b06..00000000000 --- a/website/templates/emails/conference_inactive.html.mako +++ /dev/null @@ -1,15 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-
- You recently tried to create a project on the Open Science Framework via email, but the conference you attempted to submit to is not currently accepting new submissions. For a list of conferences, see [ ${presentations_url} ].
-
- Sincerely yours,
-
- The OSF Robot
- - - diff --git a/website/templates/emails/conference_submitted.html.mako b/website/templates/emails/conference_submitted.html.mako deleted file mode 100644 index 60f190cf353..00000000000 --- a/website/templates/emails/conference_submitted.html.mako +++ /dev/null @@ -1,34 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-
- Congratulations! You have successfully added your ${conf_full_name} ${presentation_type} to OSF.
-
- % if user_created: - Your account on OSF has been created. To claim your account, please create a password by clicking here: ${set_password_url}. Please verify your profile information at: ${profile_url}.
-
- % endif - You now have a permanent, citable URL, that you can share: ${node_url}. All submissions for ${conf_full_name} may be viewed at the following link: ${conf_view_url}.
-
- % if is_spam: - Your email was flagged as spam by our mail processing service. To prevent potential spam, we have made your project private. If this is a real project, please log in to your account, browse to your project, and click the "Make Public" button so that other users can view it.
-
- % endif - Get more from OSF by enhancing your project with the following:
-
- * Collaborators/contributors to the submission
- * Charts, graphs, and data that didn't make it onto the submission
- * Links to related publications or reference lists
- * Connecting other accounts, like Dropbox, Google Drive, GitHub, figshare and Mendeley via add-on integration. Learn more and read the full list of available add-ons
here.
-
- To learn more about OSF, read the Guides.
-
- Sincerely,
-
- The OSF Team
- - -